diff --git a/README-zh_CN.md b/README-zh_CN.md index 4b58b7f..080ff2e 100644 --- a/README-zh_CN.md +++ b/README-zh_CN.md @@ -87,6 +87,14 @@ "protocol": "目前支持openapi、openmodbus、jsonrpcHttp" } ], + "sessionList": [ + { + "id": "" + }, + { + "id": "" + } + ], "timeoutSeconds": 3600 } ``` diff --git a/README.md b/README.md index 6f8ed01..9c1035d 100644 --- a/README.md +++ b/README.md @@ -87,6 +87,14 @@ LLM `AI Agent` multi session HTTP/WebSocket service "protocol": "Support openapi, openmodbus, jsonrpcHttp" } ], + "sessionList": [ + { + "id": "" + }, + { + "id": "" + } + ], "timeoutSeconds": 3600 } ``` diff --git a/example/multi_agent_example.dart b/example/multi_agent_example.dart index 8dc40e4..6d85601 100644 --- a/example/multi_agent_example.dart +++ b/example/multi_agent_example.dart @@ -14,8 +14,10 @@ Dio dio = Dio(BaseOptions( // headers: {"Authorization": "Bearer "} )); - - +/// [IMPORTANT] Prerequisites: +/// 1. An HTTP server, such as the one under `/example/mock/server/mock_http_server`, must be running. +/// 2. A OneAPI JSON file describing the HTTP server's API. +/// 3. Add the LLM baseUrl and apiKey to the `.env` file. Future main() async { DotEnv env = DotEnv();env.load(['example/.env']);LLMConfigDto llmConfig = LLMConfigDto(baseUrl: env["baseUrl"]!, apiKey: env["apiKey"]!, model: "gpt-4o-mini");