diff --git a/docs/src/content/docs/cookbook/examples/api-agent.md b/docs/src/content/docs/cookbook/examples/api-agent.mdx
similarity index 100%
rename from docs/src/content/docs/cookbook/examples/api-agent.md
rename to docs/src/content/docs/cookbook/examples/api-agent.mdx
diff --git a/docs/src/content/docs/cookbook/tools/weather-api.md b/docs/src/content/docs/cookbook/tools/weather-api.mdx
similarity index 57%
rename from docs/src/content/docs/cookbook/tools/weather-api.md
rename to docs/src/content/docs/cookbook/tools/weather-api.mdx
index e20db032..3021e850 100644
--- a/docs/src/content/docs/cookbook/tools/weather-api.md
+++ b/docs/src/content/docs/cookbook/tools/weather-api.mdx
@@ -5,15 +5,17 @@ description: Understanding Tool use in Bedrock LLM Agent

This guide demonstrates how to create a specialized weather agent using BedrockLLMAgent and a custom weather tool. We'll walk through the process of defining the tool, setting up the agent, and integrating it into your Multi-Agent Orchestrator system.

-
- 1. **Define the Weather Tool** Let's break down the weather tool definition into its key components: -A. Tool Description +**A. Tool Description** + +import { Tabs, TabItem } from '@astrojs/starlight/components'; + + ```typescript export const weatherToolDescription = [ { @@ -40,6 +42,35 @@ export const weatherToolDescription = [ } ]; ``` + + + ```python +weather_tool_description = [{ + "toolSpec": { + "name": "Weather_Tool", + "description": "Get the current weather for a given location, based on its WGS84 coordinates.", + "inputSchema": { + "json": { + "type": "object", + "properties": { + "latitude": { + "type": "string", + "description": "Geographical WGS84 latitude of the location.", + }, + "longitude": { + "type": "string", + "description": "Geographical WGS84 longitude of the location.", + }, + }, + "required": ["latitude", "longitude"], + } + }, + } +}] + +``` + + **Explanation:** - This describes the tool's interface to the LLM. @@ -49,11 +80,9 @@ export const weatherToolDescription = [ - Requires `latitude` and `longitude` as strings. - This schema helps the LLM understand how to use the tool correctly. -
-
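To make the round trip concrete: when the model decides to call the tool, it emits a `toolUse` content block that follows this schema, and the tool handler (section C below) replies with a matching `toolResult` block inside a new user message. A rough sketch of both shapes, with illustrative IDs and values:

```python
# Illustrative toolUse block emitted by the model (the id and coordinates are made up)
tool_use_request = {
    "toolUse": {
        "toolUseId": "tooluse_abc123",
        "name": "Weather_Tool",
        "input": {"latitude": "40.7128", "longitude": "-74.0060"},
    }
}

# Illustrative toolResult block the handler sends back in a new user message
tool_result_reply = {
    "toolResult": {
        "toolUseId": "tooluse_abc123",  # echoes the id from the request
        "content": [{"json": {"result": {"weather_data": "..."}}}],
    }
}
```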
- -B. Custom Prompt - +**B. Custom Prompt** + + ```typescript export const WEATHER_PROMPT = ` You are a weather assistant that provides current weather data for user-specified locations using only @@ -72,6 +101,29 @@ To use the tool, you strictly apply the provided tool specification. - Complete the entire process until you have all required data before sending the complete response. `; ``` + + +```python +weather_tool_prompt = """ +You are a weather assistant that provides current weather data for user-specified locations using only +the Weather_Tool, which expects latitude and longitude. Infer the coordinates from the location yourself. +If the user provides coordinates, infer the approximate location and refer to it in your response. +To use the tool, you strictly apply the provided tool specification. + +- Explain your step-by-step process, and give brief updates before each step. +- Only use the Weather_Tool for data. Never guess or make up information. +- Repeat the tool use for subsequent requests if necessary. +- If the tool errors, apologize, explain weather is unavailable, and suggest other options. +- Report temperatures in °C (°F) and wind in km/h (mph). Keep weather reports concise. Sparingly use + emojis where appropriate. +- Only respond to weather queries. Remind off-topic users of your purpose. +- Never claim to search online, access external data, or use tools besides Weather_Tool. +- Complete the entire process until you have all required data before sending the complete response. +""" +``` + + + **Explanation:** - This prompt sets the behavior and limitations for the LLM. @@ -83,13 +135,11 @@ To use the tool, you strictly apply the provided tool specification. - Format responses consistently (units, conciseness). - Stay on topic and use only the provided tool. -
-
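If you want to see how the model behaves with this prompt and the tool specification before building an agent around them, you can send both straight to the Bedrock Converse API. A minimal sketch, assuming the Python snippets above are in scope, valid AWS credentials, and access to the example model ID shown:

```python
import boto3

# Quick standalone check of the prompt + tool spec against Bedrock
# (the region and model ID below are examples)
client = boto3.client("bedrock-runtime", region_name="us-east-1")

response = client.converse(
    modelId="anthropic.claude-3-sonnet-20240229-v1:0",
    system=[{"text": weather_tool_prompt}],
    messages=[{"role": "user", "content": [{"text": "What's the weather in Paris?"}]}],
    toolConfig={"tools": weather_tool_description},
)

# For a weather question, the reply should contain a toolUse block for Weather_Tool
print(response["output"]["message"]["content"])
```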
- -C. Tool Handler +**C. Tool Handler** + + ```typescript - import { ConversationMessage, ParticipantRole } from "multi-agent-orchestrator"; @@ -123,6 +173,50 @@ export async function weatherToolHandler(response, conversation: ConversationMes return message; } ``` + + +```python +import requests +from requests.exceptions import RequestException +from typing import List, Dict, Any +from multi_agent_orchestrator.types import ConversationMessage, ParticipantRole + +async def weather_tool_handler(response: ConversationMessage, conversation: List[Dict[str, Any]]) -> ConversationMessage: + response_content_blocks = response.content + + # Initialize an empty list of tool results + tool_results = [] + + if not response_content_blocks: + raise ValueError("No content blocks in response") + + for content_block in response_content_blocks: + if "text" in content_block: + # Handle text content if needed + pass + + if "toolUse" in content_block: + tool_use_block = content_block["toolUse"] + tool_use_name = tool_use_block.get("name") + + if tool_use_name == "Weather_Tool": + tool_response = await fetch_weather_data(tool_use_block["input"]) + tool_results.append({ + "toolResult": { + "toolUseId": tool_use_block["toolUseId"], + "content": [{"json": {"result": tool_response}}], + } + }) + + # Embed the tool results in a new user message + message = ConversationMessage( + role=ParticipantRole.USER.value, + content=tool_results) + + return message +``` + + **Explanation:** - This handler processes the LLM's request to use the Weather_Tool. @@ -130,13 +224,12 @@ export async function weatherToolHandler(response, conversation: ConversationMes - When it finds a Weather_Tool use: - It calls `fetchWeatherData` with the provided coordinates. - It formats the result into a tool result object. -- Finally, it adds the tool results to the conversation as a new user message. - -
-
+- Finally, it returns the tool results to the caller as a new user message. -D. Data Fetching Function +**D. Data Fetching Function** + + ```typescript async function fetchWeatherData(inputData: { latitude: number; longitude: number }) { const endpoint = "https://api.open-meteo.com/v1/forecast"; @@ -158,6 +251,34 @@ async function fetchWeatherData(inputData: { latitude: number; longitude: number } } ``` + + +```python +async def fetch_weather_data(input_data): + """ + Fetches weather data for the given latitude and longitude using the Open-Meteo API. + Returns the weather data or an error message if the request fails. + + :param input_data: The input data containing the latitude and longitude. + :return: The weather data or an error message. + """ + endpoint = "https://api.open-meteo.com/v1/forecast" + latitude = input_data.get("latitude") + longitude = input_data.get("longitude", "") + params = {"latitude": latitude, "longitude": longitude, "current_weather": True} + + try: + response = requests.get(endpoint, params=params) + weather_data = {"weather_data": response.json()} + response.raise_for_status() + return weather_data + except RequestException as e: + return e.response.json() + except Exception as e: + return {"error": type(e), "message": str(e)} +``` + + **Explanation:** - This function makes the actual API call to get weather data. @@ -174,13 +295,13 @@ These components work together to create a functional weather tool: 4. The fetch function retrieves real weather data based on the LLM's input. This setup allows the BedrockLLMAgent to provide weather information by seamlessly integrating external data into its responses. -
-
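Before creating the agent, you can sanity-check the data-fetching function on its own. A minimal sketch, assuming the Python snippets above are saved as `tools/weather_tool.py` (the module the next step imports) and that outbound network access to the Open-Meteo API is available:

```python
import asyncio
from tools import weather_tool  # module built from the snippets above

async def main():
    # Coordinates for Seattle, WA (illustrative values)
    data = await weather_tool.fetch_weather_data({"latitude": "47.61", "longitude": "-122.33"})
    print(data)

asyncio.run(main())
```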
+ 2. **Create the Weather Agent** Now that we have our weather tool defined and the code above in a file called `weatherTool.ts`, let's create a BedrockLLMAgent that uses this tool. - + + ```typescript // weatherAgent.ts @@ -206,13 +327,34 @@ const weatherAgent = new BedrockLLMAgent({ }); weatherAgent.setSystemPrompt(WEATHER_PROMPT); - ``` + + +```python +from tools import weather_tool +from multi_agent_orchestrator.agents import (BedrockLLMAgent, BedrockLLMAgentOptions) + +weather_agent = BedrockLLMAgent(BedrockLLMAgentOptions( + name="Weather Agent", + streaming=False, + description="Specialized agent for giving weather condition from a city.", + tool_config={ + 'tool':weather_tool.weather_tool_description, + 'toolMaxRecursions': 5, + 'useToolHandler': weather_tool.weather_tool_handler + } + )) +weather_agent.set_system_prompt(weather_tool.weather_tool_prompt) +``` + + 3. **Add the Weather Agent to the Orchestrator** Now we can add our weather agent to the Multi-Agent Orchestrator: + + ```typescript import { MultiAgentOrchestrator } from "multi-agent-orchestrator"; @@ -222,11 +364,23 @@ const orchestrator = new MultiAgentOrchestrator(); orchestrator.addAgent(weatherAgent); ``` + + +```python +from multi_agent_orchestrator.orchestrator import MultiAgentOrchestrator + +orchestrator = MultiAgentOrchestrator() +orchestrator.add_agent(weather_agent) +``` + + ## 4. Using the Weather Agent Now that our weather agent is set up and added to the orchestrator, we can use it to get weather information: + + ```typescript const response = await orchestrator.routeRequest( @@ -235,6 +389,13 @@ const response = await orchestrator.routeRequest( "session456" ); ``` + + +```python +response = await orchestrator.route_request("What's the weather like in New York City?", "user123", "session456") +``` + + ### How It Works
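To see the full flow end to end, remember that `route_request` is a coroutine, so a complete script runs it inside an event loop. A minimal sketch, assuming the agent from step 2 is importable from a hypothetical `weather_agent` module (the exact shape of the returned response object can vary between library versions):

```python
import asyncio
from multi_agent_orchestrator.orchestrator import MultiAgentOrchestrator
from weather_agent import weather_agent  # hypothetical module holding the agent from step 2

async def main():
    orchestrator = MultiAgentOrchestrator()
    orchestrator.add_agent(weather_agent)

    response = await orchestrator.route_request(
        "What's the weather like in New York City?",
        "user123",
        "session456",
    )
    # The returned object carries the selected agent's reply and routing metadata;
    # print it (or its output field, depending on the version) to see the weather report.
    print(response)

asyncio.run(main())
```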