diff --git a/fastagency/api/openapi/client.py b/fastagency/api/openapi/client.py
index 2d694b5b..be851ea6 100644
--- a/fastagency/api/openapi/client.py
+++ b/fastagency/api/openapi/client.py
@@ -98,15 +98,16 @@ def _process_params(
 
         url = self.servers[0]["url"] + expanded_path
 
-        body_dict = (
-            {
-                "json": kwargs[body].model_dump()
-                if hasattr(kwargs[body], "model_dump")
-                else kwargs[body].dict()
-            }
-            if body and body in kwargs
-            else {}
-        )
+        body_dict = {}
+        if body and body in kwargs:
+            body_value = kwargs[body]
+            if isinstance(body_value, dict):
+                body_dict = {"json": body_value}
+            elif hasattr(body_value, "model_dump"):
+                body_dict = {"json": body_value.model_dump()}
+            else:
+                body_dict = {"json": body_value.dict()}
+            body_dict["headers"] = {"Content-Type": "application/json"}
 
         if security:
             q_params, body_dict = kwargs["security"].add_security(q_params, body_dict)
diff --git a/tests/api/openapi/test_endpoint_with_body.py b/tests/api/openapi/test_endpoint_with_body.py
new file mode 100644
index 00000000..930ba6e0
--- /dev/null
+++ b/tests/api/openapi/test_endpoint_with_body.py
@@ -0,0 +1,70 @@
+from typing import Any
+
+import pytest
+from autogen import ConversableAgent, UserProxyAgent
+from fastapi import FastAPI
+from pydantic import BaseModel
+
+from fastagency.api.openapi.client import OpenAPI
+
+
+def create_fastapi_app_with_body(host: str, port: int) -> FastAPI:
+    class Item(BaseModel):
+        name: str
+        price: float
+
+    app = FastAPI(
+        servers=[
+            {"url": f"http://{host}:{port}", "description": "Local development server"}
+        ]
+    )
+
+    @app.post("/items")
+    async def create_item(item: Item) -> str:
+        return "Item created"
+
+    return app
+
+
+@pytest.mark.azure_oai
+@pytest.mark.parametrize(
+    "fastapi_openapi_url",
+    [(create_fastapi_app_with_body)],
+    indirect=["fastapi_openapi_url"],
+)
+def test_end2end(
+    fastapi_openapi_url: str,
+    azure_gpt35_turbo_16k_llm_config: dict[str, Any],
+) -> None:
+    api = OpenAPI.create(openapi_url=fastapi_openapi_url)
+
+    agent = ConversableAgent(name="agent", llm_config=azure_gpt35_turbo_16k_llm_config)
+    user_proxy = UserProxyAgent(
+        name="user_proxy",
+        llm_config=azure_gpt35_turbo_16k_llm_config,
+        human_input_mode="NEVER",
+    )
+
+    api._register_for_llm(agent)
+    api._register_for_execution(user_proxy)
+
+    message = "Add item with name 'apple', price 1.0"
+    user_proxy.initiate_chat(
+        agent,
+        message=message,
+        summary_method="reflection_with_llm",
+        max_turns=3,
+    )
+
+    message_existed = False
+    expected_message = "Item created"
+    for message in agent.chat_messages[user_proxy]:
+        if (
+            isinstance(message, dict)
+            and "content" in message
+            and isinstance(message["content"], str)
+            and message["content"] == expected_message
+        ):
+            message_existed = True
+            break
+    assert message_existed, f"Expected message '{expected_message}' not found"