In this case the idea is a "weather" agent: the user can ask for the weather in multiple locations, and the agent uses the get_lat_lng tool to get the latitude and longitude of each location, then uses the get_weather tool to get the weather for those locations.
Running the Example
To run this example properly, you might want to add two extra API keys (note: if either key is missing, the code will fall back to dummy data, so they're not required):
A weather API key from tomorrow.io, set via the WEATHER_API_KEY environment variable
A geocoding API key from geocode.maps.co, set via the GEO_API_KEY environment variable (see the sketch after this list)
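Both keys are picked up from environment variables by the example's main() function (shown below), so any way of exporting WEATHER_API_KEY and GEO_API_KEY works. As a minimal sketch, you could set them from Python before running the example; the placeholder values here are obviously not real keys:

import os

# Sketch only: paste real keys from tomorrow.io and geocode.maps.co.
# Leaving them unset is fine too: the tools then return dummy data.
os.environ.setdefault('WEATHER_API_KEY', '<your tomorrow.io key>')
os.environ.setdefault('GEO_API_KEY', '<your geocode.maps.co key>')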
from __future__ import annotations as _annotations

import asyncio
import os
from dataclasses import dataclass
from typing import Any

import logfire
from devtools import debug
from httpx import AsyncClient

from pydantic_ai import Agent, ModelRetry, RunContext

# 'if-token-present' means nothing will be sent (and the example will work) if you don't have logfire configured
logfire.configure(send_to_logfire='if-token-present')


@dataclass
class Deps:
    client: AsyncClient
    weather_api_key: str | None
    geo_api_key: str | None


weather_agent = Agent(
    'openai:gpt-4o',
    # 'Be concise, reply with one sentence.' is enough for some models (like openai) to use
    # the below tools appropriately, but others like anthropic and gemini require a bit more direction.
    system_prompt=(
        'Be concise, reply with one sentence.'
        'Use the `get_lat_lng` tool to get the latitude and longitude of the locations, '
        'then use the `get_weather` tool to get the weather.'
    ),
    deps_type=Deps,
    retries=2,
    instrument=True,
)


@weather_agent.tool
async def get_lat_lng(
    ctx: RunContext[Deps], location_description: str
) -> dict[str, float]:
    """Get the latitude and longitude of a location.

    Args:
        ctx: The context.
        location_description: A description of a location.
    """
    if ctx.deps.geo_api_key is None:
        # if no API key is provided, return a dummy response (London)
        return {'lat': 51.1, 'lng': -0.1}

    params = {
        'q': location_description,
        'api_key': ctx.deps.geo_api_key,
    }
    with logfire.span('calling geocode API', params=params) as span:
        r = await ctx.deps.client.get('https://geocode.maps.co/search', params=params)
        r.raise_for_status()
        data = r.json()
        span.set_attribute('response', data)

    if data:
        return {'lat': data[0]['lat'], 'lng': data[0]['lon']}
    else:
        raise ModelRetry('Could not find the location')


@weather_agent.tool
async def get_weather(ctx: RunContext[Deps], lat: float, lng: float) -> dict[str, Any]:
    """Get the weather at a location.

    Args:
        ctx: The context.
        lat: Latitude of the location.
        lng: Longitude of the location.
    """
    if ctx.deps.weather_api_key is None:
        # if no API key is provided, return a dummy response
        return {'temperature': '21 °C', 'description': 'Sunny'}

    params = {
        'apikey': ctx.deps.weather_api_key,
        'location': f'{lat},{lng}',
        'units': 'metric',
    }
    with logfire.span('calling weather API', params=params) as span:
        r = await ctx.deps.client.get(
            'https://api.tomorrow.io/v4/weather/realtime', params=params
        )
        r.raise_for_status()
        data = r.json()
        span.set_attribute('response', data)

    values = data['data']['values']
    # https://docs.tomorrow.io/reference/data-layers-weather-codes
    code_lookup = {
        1000: 'Clear, Sunny',
        1100: 'Mostly Clear',
        1101: 'Partly Cloudy',
        1102: 'Mostly Cloudy',
        1001: 'Cloudy',
        2000: 'Fog',
        2100: 'Light Fog',
        4000: 'Drizzle',
        4001: 'Rain',
        4200: 'Light Rain',
        4201: 'Heavy Rain',
        5000: 'Snow',
        5001: 'Flurries',
        5100: 'Light Snow',
        5101: 'Heavy Snow',
        6000: 'Freezing Drizzle',
        6001: 'Freezing Rain',
        6200: 'Light Freezing Rain',
        6201: 'Heavy Freezing Rain',
        7000: 'Ice Pellets',
        7101: 'Heavy Ice Pellets',
        7102: 'Light Ice Pellets',
        8000: 'Thunderstorm',
    }
    return {
        'temperature': f'{values["temperatureApparent"]:0.0f}°C',
        'description': code_lookup.get(values['weatherCode'], 'Unknown'),
    }


async def main():
    async with AsyncClient() as client:
        # create a free API key at https://www.tomorrow.io/weather-api/
        weather_api_key = os.getenv('WEATHER_API_KEY')
        # create a free API key at https://geocode.maps.co/
        geo_api_key = os.getenv('GEO_API_KEY')
        deps = Deps(
            client=client, weather_api_key=weather_api_key, geo_api_key=geo_api_key
        )
        result = await weather_agent.run(
            'What is the weather like in London and in Wiltshire?', deps=deps
        )
        debug(result)
        print('Response:', result.data)


if __name__ == '__main__':
    asyncio.run(main())
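Since both tools return dummy data when their key is None, you can exercise the agent end to end without the weather or geocoding keys. The only assumptions in this sketch are that credentials for the openai:gpt-4o model used above are configured, and that the example is importable as pydantic_ai_examples.weather_agent (the same import path the Gradio UI below uses):

import asyncio

from httpx import AsyncClient

from pydantic_ai_examples.weather_agent import Deps, weather_agent


async def demo_run():
    async with AsyncClient() as client:
        # No weather/geo keys: get_lat_lng returns London's coordinates and
        # get_weather returns the canned '21 °C' / 'Sunny' response.
        deps = Deps(client=client, weather_api_key=None, geo_api_key=None)
        result = await weather_agent.run('What is the weather like in Paris?', deps=deps)
        print(result.data)


asyncio.run(demo_run())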
Running the UI
You can build multi-turn chat applications for your agent with Gradio, a framework for building AI web applications entirely in Python. Gradio comes with built-in chat components and agent support, so the entire UI can be implemented in a single Python file!
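To see how little glue that takes, here is a deliberately minimal sketch that wraps the agent in Gradio's built-in gr.ChatInterface; it leaves out the streaming output, tool-call display, retry and undo handling that the full example below adds:

import os

import gradio as gr
from httpx import AsyncClient

from pydantic_ai_examples.weather_agent import Deps, weather_agent

client = AsyncClient()
deps = Deps(
    client=client,
    weather_api_key=os.getenv('WEATHER_API_KEY'),
    geo_api_key=os.getenv('GEO_API_KEY'),
)


async def answer(message: str, history):
    # One agent run per user turn; conversation history is handled in the full example below.
    result = await weather_agent.run(message, deps=deps)
    return result.data


gr.ChatInterface(answer, type='messages').launch()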
Here's the full code for the weather agent UI:
from __future__ import annotations as _annotations

import json
import os

from httpx import AsyncClient

from pydantic_ai.messages import ToolCallPart, ToolReturnPart
from pydantic_ai_examples.weather_agent import Deps, weather_agent

try:
    import gradio as gr
except ImportError as e:
    raise ImportError(
        'Please install gradio with `pip install gradio`. You must use python>=3.10.'
    ) from e

TOOL_TO_DISPLAY_NAME = {'get_lat_lng': 'Geocoding API', 'get_weather': 'Weather API'}

client = AsyncClient()
weather_api_key = os.getenv('WEATHER_API_KEY')
# create a free API key at https://geocode.maps.co/
geo_api_key = os.getenv('GEO_API_KEY')
deps = Deps(client=client, weather_api_key=weather_api_key, geo_api_key=geo_api_key)


async def stream_from_agent(prompt: str, chatbot: list[dict], past_messages: list):
    chatbot.append({'role': 'user', 'content': prompt})
    yield gr.Textbox(interactive=False, value=''), chatbot, gr.skip()
    async with weather_agent.run_stream(
        prompt, deps=deps, message_history=past_messages
    ) as result:
        for message in result.new_messages():
            for call in message.parts:
                if isinstance(call, ToolCallPart):
                    call_args = (
                        call.args.args_json
                        if hasattr(call.args, 'args_json')
                        else json.dumps(call.args.args_dict)
                    )
                    metadata = {
                        'title': f'🛠️ Using {TOOL_TO_DISPLAY_NAME[call.tool_name]}',
                    }
                    if call.tool_call_id is not None:
                        metadata['id'] = call.tool_call_id

                    gr_message = {
                        'role': 'assistant',
                        'content': 'Parameters: ' + call_args,
                        'metadata': metadata,
                    }
                    chatbot.append(gr_message)
                if isinstance(call, ToolReturnPart):
                    for gr_message in chatbot:
                        if (
                            gr_message.get('metadata', {}).get('id', '')
                            == call.tool_call_id
                        ):
                            gr_message['content'] += (
                                f'\nOutput: {json.dumps(call.content)}'
                            )
                yield gr.skip(), chatbot, gr.skip()
        chatbot.append({'role': 'assistant', 'content': ''})
        async for message in result.stream_text():
            chatbot[-1]['content'] = message
            yield gr.skip(), chatbot, gr.skip()
        past_messages = result.all_messages()

        yield gr.Textbox(interactive=True), gr.skip(), past_messages


async def handle_retry(chatbot, past_messages: list, retry_data: gr.RetryData):
    new_history = chatbot[: retry_data.index]
    previous_prompt = chatbot[retry_data.index]['content']
    past_messages = past_messages[: retry_data.index]
    async for update in stream_from_agent(previous_prompt, new_history, past_messages):
        yield update


def undo(chatbot, past_messages: list, undo_data: gr.UndoData):
    new_history = chatbot[: undo_data.index]
    past_messages = past_messages[: undo_data.index]
    return chatbot[undo_data.index]['content'], new_history, past_messages


def select_data(message: gr.SelectData) -> str:
    return message.value['text']


with gr.Blocks() as demo:
    gr.HTML(
        """
<div style="display: flex; justify-content: center; align-items: center; gap: 2rem; padding: 1rem; width: 100%">
    <img src="https://ai.pydantic.dev/img/logo-white.svg" style="max-width: 200px; height: auto">
    <div>
        <h1 style="margin: 0 0 1rem 0">Weather Assistant</h1>
        <h3 style="margin: 0 0 0.5rem 0">
            This assistant answers your weather questions.
        </h3>
    </div>
</div>
"""
    )
    past_messages = gr.State([])
    chatbot = gr.Chatbot(
        label='Weather Assistant',
        type='messages',
        avatar_images=(None, 'https://ai.pydantic.dev/img/logo-white.svg'),
        examples=[
            {'text': 'What is the weather like in Miami?'},
            {'text': 'What is the weather like in London?'},
        ],
    )
    with gr.Row():
        prompt = gr.Textbox(
            lines=1,
            show_label=False,
            placeholder='What is the weather like in New York City?',
        )
    generation = prompt.submit(
        stream_from_agent,
        inputs=[prompt, chatbot, past_messages],
        outputs=[prompt, chatbot, past_messages],
    )
    chatbot.example_select(select_data, None, [prompt])
    chatbot.retry(
        handle_retry, [chatbot, past_messages], [prompt, chatbot, past_messages]
    )
    chatbot.undo(undo, [chatbot, past_messages], [prompt, chatbot, past_messages])


if __name__ == '__main__':
    demo.launch()
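The multi-turn behaviour comes from the past_messages state: each turn passes the accumulated history back into run_stream via message_history and stores result.all_messages() afterwards. The same pattern works outside Gradio; here's a small sketch, reusing the dummy-data setup from the earlier example:

import asyncio

from httpx import AsyncClient

from pydantic_ai_examples.weather_agent import Deps, weather_agent


async def two_turns():
    async with AsyncClient() as client:
        deps = Deps(client=client, weather_api_key=None, geo_api_key=None)
        first = await weather_agent.run('What is the weather like in London?', deps=deps)
        # Feed the first run's messages back in, just as the UI does with past_messages.
        second = await weather_agent.run(
            'And how about Wiltshire?', deps=deps, message_history=first.all_messages()
        )
        print(first.data, '|', second.data)


asyncio.run(two_turns())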