From 79b237977b6c499742f09054865769cd6c8db92e Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Thu, 12 Dec 2024 10:34:18 +0530
Subject: [PATCH 001/274] wip: Minor refactors
Signed-off-by: Diwank Singh Tomer
---
agents-api/.gitignore | 3 ++-
agents-api/agents_api/activities/demo.py | 4 +---
agents-api/agents_api/activities/truncation.py | 9 +++++----
agents-api/pyproject.toml | 6 ++++++
agents-api/uv.lock | 11 +++++++++++
5 files changed, 25 insertions(+), 8 deletions(-)
diff --git a/agents-api/.gitignore b/agents-api/.gitignore
index 33217a796..651078450 100644
--- a/agents-api/.gitignore
+++ b/agents-api/.gitignore
@@ -1,5 +1,6 @@
# Local database files
-cozo.db
+cozo*
+.cozo*
temporal.db
*.bak
*.dat
diff --git a/agents-api/agents_api/activities/demo.py b/agents-api/agents_api/activities/demo.py
index f6d63f206..797ef6c90 100644
--- a/agents-api/agents_api/activities/demo.py
+++ b/agents-api/agents_api/activities/demo.py
@@ -1,5 +1,3 @@
-from typing import Callable
-
from temporalio import activity
from ..env import testing
@@ -14,6 +12,6 @@ async def mock_demo_activity(a: int, b: int) -> int:
return a + b
-demo_activity: Callable[[int, int], int] = activity.defn(name="demo_activity")(
+demo_activity = activity.defn(name="demo_activity")(
demo_activity if not testing else mock_demo_activity
)
diff --git a/agents-api/agents_api/activities/truncation.py b/agents-api/agents_api/activities/truncation.py
index afdb43da4..719cf12e3 100644
--- a/agents-api/agents_api/activities/truncation.py
+++ b/agents-api/agents_api/activities/truncation.py
@@ -14,10 +14,10 @@
def get_extra_entries(messages: list[Entry], token_count_threshold: int) -> list[UUID]:
raise NotImplementedError()
- if not len(messages):
- return messages
+ # if not len(messages):
+ # return messages
- _token_cnt, _offset = 0, 0
+ # _token_cnt, _offset = 0, 0
# if messages[0].role == Role.system:
# token_cnt, offset = messages[0].token_count, 1
@@ -36,7 +36,8 @@ def get_extra_entries(messages: list[Entry], token_count_threshold: int) -> list
@activity.defn
@beartype
async def truncation(session_id: str, token_count_threshold: int) -> None:
- session_id = UUID(session_id)
+ raise NotImplementedError()
+ # session_id = UUID(session_id)
# delete_entries(
# get_extra_entries(
diff --git a/agents-api/pyproject.toml b/agents-api/pyproject.toml
index 677abd678..350949523 100644
--- a/agents-api/pyproject.toml
+++ b/agents-api/pyproject.toml
@@ -60,6 +60,7 @@ dev = [
"ipywidgets>=8.1.5",
"julep>=1.43.1",
"jupyterlab>=4.3.1",
+ "pip>=24.3.1",
"poethepoet>=0.31.1",
"pyjwt>=2.10.1",
"pyright>=1.1.389",
@@ -68,6 +69,11 @@ dev = [
"ward>=0.68.0b0",
]
+[tool.setuptools]
+py-modules = [
+ "agents_api"
+]
+
[tool.uv.sources]
litellm = { url = "https://github.com/julep-ai/litellm/archive/fix_anthropic_tool_image_content.zip" }
diff --git a/agents-api/uv.lock b/agents-api/uv.lock
index 9517c86f3..1f03aadca 100644
--- a/agents-api/uv.lock
+++ b/agents-api/uv.lock
@@ -65,6 +65,7 @@ dev = [
{ name = "ipywidgets" },
{ name = "julep" },
{ name = "jupyterlab" },
+ { name = "pip" },
{ name = "poethepoet" },
{ name = "pyjwt" },
{ name = "pyright" },
@@ -130,6 +131,7 @@ dev = [
{ name = "ipywidgets", specifier = ">=8.1.5" },
{ name = "julep", specifier = ">=1.43.1" },
{ name = "jupyterlab", specifier = ">=4.3.1" },
+ { name = "pip", specifier = ">=24.3.1" },
{ name = "poethepoet", specifier = ">=0.31.1" },
{ name = "pyjwt", specifier = ">=2.10.1" },
{ name = "pyright", specifier = ">=1.1.389" },
@@ -2014,6 +2016,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 },
]
+[[package]]
+name = "pip"
+version = "24.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f4/b1/b422acd212ad7eedddaf7981eee6e5de085154ff726459cf2da7c5a184c1/pip-24.3.1.tar.gz", hash = "sha256:ebcb60557f2aefabc2e0f918751cd24ea0d56d8ec5445fe1807f1d2109660b99", size = 1931073 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ef/7d/500c9ad20238fcfcb4cb9243eede163594d7020ce87bd9610c9e02771876/pip-24.3.1-py3-none-any.whl", hash = "sha256:3790624780082365f47549d032f3770eeb2b1e8bd1f7b2e02dace1afa361b4ed", size = 1822182 },
+]
+
[[package]]
name = "platformdirs"
version = "4.3.6"
From 36f8511da83c339bdd8cb969ca7c24172986d5db Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Thu, 12 Dec 2024 17:54:00 +0530
Subject: [PATCH 002/274] feat(agents-api): Use uuid7 instead of uuid4 (has database benefits)
Signed-off-by: Diwank Singh Tomer
---
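[Not part of the patch — editor's note on the "database benefits" in the subject.]
UUIDv7 encodes a millisecond timestamp in its high bits, so IDs generated later
compare greater than IDs generated earlier. Time-ordered primary keys keep
B-tree index inserts at the right edge of the index (append-mostly) instead of
splitting random pages, which is what uniformly random UUIDv4 keys cause. A
minimal sketch, assuming the uuid_extensions package this patch adds
("uuid7>=0.1.0"), whose uuid7() returns a standard, comparable uuid.UUID:

from uuid import uuid4
from uuid_extensions import uuid7

v4 = [uuid4() for _ in range(1000)]
v7 = [uuid7() for _ in range(1000)]

# Random v4 keys bear no relation to creation order:
print(sorted(v4) == v4)  # almost always False
# v7 keys sort in creation order, so index growth is append-mostly:
print(sorted(v7) == v7)  # True (time-ordered)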
.../agents_api/activities/execute_system.py | 1 -
agents-api/agents_api/common/utils/cozo.py | 2 +-
.../agents_api/models/agent/create_agent.py | 5 +++--
agents-api/agents_api/models/docs/create_doc.py | 5 +++--
.../agents_api/models/entry/create_entries.py | 5 +++--
.../agents_api/models/entry/get_history.py | 2 +-
.../models/execution/create_execution.py | 5 +++--
.../execution/create_execution_transition.py | 5 +++--
.../agents_api/models/files/create_file.py | 5 +++--
.../agents_api/models/session/create_session.py | 5 +++--
.../agents_api/models/task/create_task.py | 5 +++--
.../agents_api/models/tools/create_tools.py | 5 +++--
.../agents_api/models/user/create_user.py | 5 +++--
agents-api/agents_api/models/utils.py | 4 ++--
.../agents_api/routers/docs/create_doc.py | 7 ++++---
agents-api/agents_api/routers/sessions/chat.py | 5 +++--
.../routers/tasks/create_task_execution.py | 7 ++++---
.../workflows/task_execution/transition.py | 1 -
agents-api/pyproject.toml | 1 +
agents-api/tests/fixtures.py | 5 +++--
.../tests/sample_tasks/test_find_selector.py | 9 ++++-----
agents-api/tests/test_activities.py | 4 ++--
agents-api/tests/test_agent_queries.py | 6 +++---
agents-api/tests/test_agent_routes.py | 6 +++---
agents-api/tests/test_developer_queries.py | 4 ++--
agents-api/tests/test_execution_workflow.py | 1 -
agents-api/tests/test_messages_truncation.py | 17 +++++++++--------
agents-api/tests/test_session_queries.py | 6 +++---
agents-api/tests/test_task_queries.py | 8 ++++----
agents-api/tests/test_task_routes.py | 7 +++----
agents-api/tests/test_user_queries.py | 7 +++----
agents-api/tests/test_user_routes.py | 4 ++--
agents-api/tests/test_workflow_routes.py | 7 +++----
agents-api/uv.lock | 12 ++++++++++++
34 files changed, 102 insertions(+), 81 deletions(-)
diff --git a/agents-api/agents_api/activities/execute_system.py b/agents-api/agents_api/activities/execute_system.py
index 8d85a2639..ca269417d 100644
--- a/agents-api/agents_api/activities/execute_system.py
+++ b/agents-api/agents_api/activities/execute_system.py
@@ -6,7 +6,6 @@
from beartype import beartype
from box import Box, BoxList
-from fastapi import HTTPException
from fastapi.background import BackgroundTasks
from temporalio import activity
diff --git a/agents-api/agents_api/common/utils/cozo.py b/agents-api/agents_api/common/utils/cozo.py
index f5567dc4a..f342ba617 100644
--- a/agents-api/agents_api/common/utils/cozo.py
+++ b/agents-api/agents_api/common/utils/cozo.py
@@ -22,5 +22,5 @@
@beartype
-def uuid_int_list_to_uuid4(data: list[int]) -> UUID:
+def uuid_int_list_to_uuid(data: list[int]) -> UUID:
return UUID(bytes=b"".join([i.to_bytes(1, "big") for i in data]))
diff --git a/agents-api/agents_api/models/agent/create_agent.py b/agents-api/agents_api/models/agent/create_agent.py
index a9f0bfb8f..1872a6f36 100644
--- a/agents-api/agents_api/models/agent/create_agent.py
+++ b/agents-api/agents_api/models/agent/create_agent.py
@@ -4,12 +4,13 @@
"""
from typing import Any, TypeVar
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from uuid_extensions import uuid7
from ...autogen.openapi_model import Agent, CreateAgentRequest
from ...common.utils.cozo import cozo_process_mutate_data
@@ -78,7 +79,7 @@ def create_agent(
Agent: The newly created agent record.
"""
- agent_id = agent_id or uuid4()
+ agent_id = agent_id or uuid7()
# Extract the agent data from the payload
data.metadata = data.metadata or {}
diff --git a/agents-api/agents_api/models/docs/create_doc.py b/agents-api/agents_api/models/docs/create_doc.py
index 3b9c8c9f7..ceb8b5fe0 100644
--- a/agents-api/agents_api/models/docs/create_doc.py
+++ b/agents-api/agents_api/models/docs/create_doc.py
@@ -1,10 +1,11 @@
from typing import Any, Literal, TypeVar
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateDocRequest, Doc
from ...common.utils.cozo import cozo_process_mutate_data
@@ -58,7 +59,7 @@ def create_doc(
data (CreateDocRequest): The content of the document.
"""
- doc_id = str(doc_id or uuid4())
+ doc_id = str(doc_id or uuid7())
owner_id = str(owner_id)
if isinstance(data.content, str):
diff --git a/agents-api/agents_api/models/entry/create_entries.py b/agents-api/agents_api/models/entry/create_entries.py
index a8671a6dd..140a5696b 100644
--- a/agents-api/agents_api/models/entry/create_entries.py
+++ b/agents-api/agents_api/models/entry/create_entries.py
@@ -1,10 +1,11 @@
from typing import Any, TypeVar
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateEntryRequest, Entry, Relation
from ...common.utils.cozo import cozo_process_mutate_data
@@ -58,7 +59,7 @@ def create_entries(
for item in data_dicts:
item["content"] = content_to_json(item["content"] or [])
item["session_id"] = session_id
- item["entry_id"] = item.pop("id", None) or str(uuid4())
+ item["entry_id"] = item.pop("id", None) or str(uuid7())
item["created_at"] = (item.get("created_at") or utcnow()).timestamp()
cols, rows = cozo_process_mutate_data(data_dicts)
diff --git a/agents-api/agents_api/models/entry/get_history.py b/agents-api/agents_api/models/entry/get_history.py
index 4be23804e..bb12b1c5b 100644
--- a/agents-api/agents_api/models/entry/get_history.py
+++ b/agents-api/agents_api/models/entry/get_history.py
@@ -7,7 +7,7 @@
from pydantic import ValidationError
from ...autogen.openapi_model import History
-from ...common.utils.cozo import uuid_int_list_to_uuid4 as fix_uuid
+from ...common.utils.cozo import uuid_int_list_to_uuid as fix_uuid
from ..utils import (
cozo_query,
partialclass,
diff --git a/agents-api/agents_api/models/execution/create_execution.py b/agents-api/agents_api/models/execution/create_execution.py
index 832532d6d..59efd7ac3 100644
--- a/agents-api/agents_api/models/execution/create_execution.py
+++ b/agents-api/agents_api/models/execution/create_execution.py
@@ -1,10 +1,11 @@
from typing import Annotated, Any, TypeVar
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateExecutionRequest, Execution
from ...common.utils.cozo import cozo_process_mutate_data
@@ -47,7 +48,7 @@ def create_execution(
execution_id: UUID | None = None,
data: Annotated[CreateExecutionRequest | dict, dict_like(CreateExecutionRequest)],
) -> tuple[list[str], dict]:
- execution_id = execution_id or uuid4()
+ execution_id = execution_id or uuid7()
developer_id = str(developer_id)
task_id = str(task_id)
diff --git a/agents-api/agents_api/models/execution/create_execution_transition.py b/agents-api/agents_api/models/execution/create_execution_transition.py
index 59a63ed09..5cbcb97bc 100644
--- a/agents-api/agents_api/models/execution/create_execution_transition.py
+++ b/agents-api/agents_api/models/execution/create_execution_transition.py
@@ -1,9 +1,10 @@
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from uuid_extensions import uuid7
from ...autogen.openapi_model import (
CreateTransitionRequest,
@@ -38,7 +39,7 @@ def _create_execution_transition(
update_execution_status: bool = False,
task_id: UUID | None = None,
) -> tuple[list[str | None], dict]:
- transition_id = transition_id or uuid4()
+ transition_id = transition_id or uuid7()
data.metadata = data.metadata or {}
data.execution_id = execution_id
diff --git a/agents-api/agents_api/models/files/create_file.py b/agents-api/agents_api/models/files/create_file.py
index 224597180..58948038b 100644
--- a/agents-api/agents_api/models/files/create_file.py
+++ b/agents-api/agents_api/models/files/create_file.py
@@ -6,12 +6,13 @@
import base64
import hashlib
from typing import Any, TypeVar
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateFileRequest, File
from ...metrics.counters import increase_counter
@@ -79,7 +80,7 @@ def create_file(
developer_id (UUID): The unique identifier for the developer creating the file.
"""
- file_id = file_id or uuid4()
+ file_id = file_id or uuid7()
file_data = data.model_dump(exclude={"content"})
content_bytes = base64.b64decode(data.content)
diff --git a/agents-api/agents_api/models/session/create_session.py b/agents-api/agents_api/models/session/create_session.py
index ce804399d..a08059961 100644
--- a/agents-api/agents_api/models/session/create_session.py
+++ b/agents-api/agents_api/models/session/create_session.py
@@ -4,12 +4,13 @@
"""
from typing import Any, TypeVar
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateSessionRequest, Session
from ...metrics.counters import increase_counter
@@ -57,7 +58,7 @@ def create_session(
Constructs and executes a datalog query to create a new session in the database.
"""
- session_id = session_id or uuid4()
+ session_id = session_id or uuid7()
data.metadata = data.metadata or {}
session_data = data.model_dump(exclude={"auto_run_tools", "disable_cache"})
diff --git a/agents-api/agents_api/models/task/create_task.py b/agents-api/agents_api/models/task/create_task.py
index ab68a5b0c..7cd1e8f4a 100644
--- a/agents-api/agents_api/models/task/create_task.py
+++ b/agents-api/agents_api/models/task/create_task.py
@@ -4,12 +4,13 @@
"""
from typing import Any, TypeVar
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from uuid_extensions import uuid7
from ...autogen.openapi_model import (
CreateTaskRequest,
@@ -74,7 +75,7 @@ def create_task(
data.metadata = data.metadata or {}
data.input_schema = data.input_schema or {}
- task_id = task_id or uuid4()
+ task_id = task_id or uuid7()
task_spec = task_to_spec(data)
# Prepares the update data by filtering out None values and adding user_id and developer_id.
diff --git a/agents-api/agents_api/models/tools/create_tools.py b/agents-api/agents_api/models/tools/create_tools.py
index 9b2be387a..578a1268d 100644
--- a/agents-api/agents_api/models/tools/create_tools.py
+++ b/agents-api/agents_api/models/tools/create_tools.py
@@ -1,12 +1,13 @@
"""This module contains functions for creating tools in the CozoDB database."""
from typing import Any, TypeVar
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateToolRequest, Tool
from ...metrics.counters import increase_counter
@@ -70,7 +71,7 @@ def create_tools(
tools_data = [
[
str(agent_id),
- str(uuid4()),
+ str(uuid7()),
tool.type,
tool.name,
getattr(tool, tool.type) and getattr(tool, tool.type).model_dump(),
diff --git a/agents-api/agents_api/models/user/create_user.py b/agents-api/agents_api/models/user/create_user.py
index ba96bd2b5..62975a6d4 100644
--- a/agents-api/agents_api/models/user/create_user.py
+++ b/agents-api/agents_api/models/user/create_user.py
@@ -4,12 +4,13 @@
"""
from typing import Any, TypeVar
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateUserRequest, User
from ...metrics.counters import increase_counter
@@ -80,7 +81,7 @@ def create_user(
pd.DataFrame: A DataFrame containing the result of the query execution.
"""
- user_id = user_id or uuid4()
+ user_id = user_id or uuid7()
data.metadata = data.metadata or {}
user_data = data.model_dump()
diff --git a/agents-api/agents_api/models/utils.py b/agents-api/agents_api/models/utils.py
index fc3f4e9b9..880f7e30f 100644
--- a/agents-api/agents_api/models/utils.py
+++ b/agents-api/agents_api/models/utils.py
@@ -14,7 +14,7 @@
from pydantic import BaseModel
from requests.exceptions import ConnectionError, Timeout
-from ..common.utils.cozo import uuid_int_list_to_uuid4
+from ..common.utils.cozo import uuid_int_list_to_uuid
from ..env import do_verify_developer, do_verify_developer_owns_resource
P = ParamSpec("P")
@@ -36,7 +36,7 @@ def fix_uuid(
fixed = {
**item,
**{
- attr: uuid_int_list_to_uuid4(item[attr])
+ attr: uuid_int_list_to_uuid(item[attr])
for attr in id_attrs
if isinstance(item[attr], list)
},
diff --git a/agents-api/agents_api/routers/docs/create_doc.py b/agents-api/agents_api/routers/docs/create_doc.py
index b3cac1a87..ce48b9b86 100644
--- a/agents-api/agents_api/routers/docs/create_doc.py
+++ b/agents-api/agents_api/routers/docs/create_doc.py
@@ -1,9 +1,10 @@
from typing import Annotated
-from uuid import UUID, uuid4
+from uuid import UUID
from fastapi import BackgroundTasks, Depends
from starlette.status import HTTP_201_CREATED
from temporalio.client import Client as TemporalClient
+from uuid_extensions import uuid7
from ...activities.types import EmbedDocsPayload
from ...autogen.openapi_model import CreateDocRequest, Doc, ResourceCreatedResponse
@@ -82,7 +83,7 @@ async def create_user_doc(
data=data,
)
- embed_job_id = uuid4()
+ embed_job_id = uuid7()
await run_embed_docs_task(
developer_id=x_developer_id,
@@ -113,7 +114,7 @@ async def create_agent_doc(
data=data,
)
- embed_job_id = uuid4()
+ embed_job_id = uuid7()
await run_embed_docs_task(
developer_id=x_developer_id,
diff --git a/agents-api/agents_api/routers/sessions/chat.py b/agents-api/agents_api/routers/sessions/chat.py
index 85a1574ef..7cf1110fb 100644
--- a/agents-api/agents_api/routers/sessions/chat.py
+++ b/agents-api/agents_api/routers/sessions/chat.py
@@ -1,8 +1,9 @@
from typing import Annotated, Optional
-from uuid import UUID, uuid4
+from uuid import UUID
from fastapi import BackgroundTasks, Depends, Header, HTTPException, status
from starlette.status import HTTP_201_CREATED
+from uuid_extensions import uuid7
from ...autogen.openapi_model import (
ChatInput,
@@ -236,7 +237,7 @@ async def chat(
ChunkChatResponse if chat_input.stream else MessageChatResponse
)
chat_response: ChatResponse = chat_response_class(
- id=uuid4(),
+ id=uuid7(),
created_at=utcnow(),
jobs=jobs,
docs=doc_references,
diff --git a/agents-api/agents_api/routers/tasks/create_task_execution.py b/agents-api/agents_api/routers/tasks/create_task_execution.py
index 09342bf84..bb1497b4c 100644
--- a/agents-api/agents_api/routers/tasks/create_task_execution.py
+++ b/agents-api/agents_api/routers/tasks/create_task_execution.py
@@ -1,6 +1,6 @@
import logging
from typing import Annotated
-from uuid import UUID, uuid4
+from uuid import UUID
from beartype import beartype
from fastapi import BackgroundTasks, Depends, HTTPException, status
@@ -9,6 +9,7 @@
from pycozo.client import QueryException
from starlette.status import HTTP_201_CREATED
from temporalio.client import WorkflowHandle
+from uuid_extensions import uuid7
from ...autogen.openapi_model import (
CreateExecutionRequest,
@@ -47,7 +48,7 @@ async def start_execution(
data: CreateExecutionRequest,
client=None,
) -> tuple[Execution, WorkflowHandle]:
- execution_id = uuid4()
+ execution_id = uuid7()
execution = create_execution_query(
developer_id=developer_id,
@@ -64,7 +65,7 @@ async def start_execution(
client=client,
)
- job_id = uuid4()
+ job_id = uuid7()
try:
handle = await run_task_execution_workflow(
diff --git a/agents-api/agents_api/workflows/task_execution/transition.py b/agents-api/agents_api/workflows/task_execution/transition.py
index a26ac1778..c6197fed1 100644
--- a/agents-api/agents_api/workflows/task_execution/transition.py
+++ b/agents-api/agents_api/workflows/task_execution/transition.py
@@ -14,7 +14,6 @@
from ...common.retry_policies import DEFAULT_RETRY_POLICY
from ...env import (
debug,
- temporal_activity_after_retry_timeout,
temporal_heartbeat_timeout,
temporal_schedule_to_close_timeout,
testing,
diff --git a/agents-api/pyproject.toml b/agents-api/pyproject.toml
index 350949523..f8ec61367 100644
--- a/agents-api/pyproject.toml
+++ b/agents-api/pyproject.toml
@@ -50,6 +50,7 @@ dependencies = [
"uvloop~=0.21.0",
"xxhash~=3.5.0",
"spacy-chunks>=0.0.2",
+ "uuid7>=0.1.0",
]
[dependency-groups]
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 2ed346892..231a40b75 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,11 +1,12 @@
import time
-from uuid import UUID, uuid4
+from uuid import UUID
from cozo_migrate.api import apply, init
from fastapi.testclient import TestClient
from pycozo import Client as CozoClient
from pycozo_async import Client as AsyncCozoClient
from temporalio.client import WorkflowHandle
+from uuid_extensions import uuid7
from ward import fixture
from agents_api.autogen.openapi_model import (
@@ -96,7 +97,7 @@ def test_developer_id(cozo_client=cozo_client):
yield UUID(int=0)
return
- developer_id = uuid4()
+ developer_id = uuid7()
cozo_client.run(
f"""
diff --git a/agents-api/tests/sample_tasks/test_find_selector.py b/agents-api/tests/sample_tasks/test_find_selector.py
index 5af7aac54..616d4cd38 100644
--- a/agents-api/tests/sample_tasks/test_find_selector.py
+++ b/agents-api/tests/sample_tasks/test_find_selector.py
@@ -1,8 +1,7 @@
# Tests for task queries
-
import os
-from uuid import uuid4
+from uuid_extensions import uuid7
from ward import raises, test
from ..fixtures import cozo_client, test_agent, test_developer_id
@@ -18,7 +17,7 @@ async def _(
agent=test_agent,
):
agent_id = str(agent.id)
- task_id = str(uuid4())
+ task_id = str(uuid7())
with (
patch_embed_acompletion(),
@@ -47,7 +46,7 @@ async def _(
agent=test_agent,
):
agent_id = str(agent.id)
- task_id = str(uuid4())
+ task_id = str(uuid7())
with (
patch_embed_acompletion(),
@@ -85,7 +84,7 @@ async def _(
agent=test_agent,
):
agent_id = str(agent.id)
- task_id = str(uuid4())
+ task_id = str(uuid7())
with (
patch_embed_acompletion(
diff --git a/agents-api/tests/test_activities.py b/agents-api/tests/test_activities.py
index 6f65cd034..879cf2377 100644
--- a/agents-api/tests/test_activities.py
+++ b/agents-api/tests/test_activities.py
@@ -1,5 +1,5 @@
-from uuid import uuid4
+from uuid_extensions import uuid7
from ward import test
from agents_api.activities.embed_docs import embed_docs
@@ -48,7 +48,7 @@ async def _():
result = await client.execute_workflow(
DemoWorkflow.run,
args=[1, 2],
- id=str(uuid4()),
+ id=str(uuid7()),
task_queue=temporal_task_queue,
retry_policy=DEFAULT_RETRY_POLICY,
)
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index 8c0099419..f4a2a0c12 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,6 +1,6 @@
# Tests for agent queries
-from uuid import uuid4
+from uuid_extensions import uuid7
from ward import raises, test
from agents_api.autogen.openapi_model import (
@@ -52,7 +52,7 @@ def _(client=cozo_client, developer_id=test_developer_id):
def _(client=cozo_client, developer_id=test_developer_id):
create_or_update_agent(
developer_id=developer_id,
- agent_id=uuid4(),
+ agent_id=uuid7(),
data=CreateOrUpdateAgentRequest(
name="test agent",
about="test agent about",
@@ -65,7 +65,7 @@ def _(client=cozo_client, developer_id=test_developer_id):
@test("model: get agent not exists")
def _(client=cozo_client, developer_id=test_developer_id):
- agent_id = uuid4()
+ agent_id = uuid7()
with raises(Exception):
get_agent(agent_id=agent_id, developer_id=developer_id, client=client)
diff --git a/agents-api/tests/test_agent_routes.py b/agents-api/tests/test_agent_routes.py
index 91ddf9f1a..ecab7c1e4 100644
--- a/agents-api/tests/test_agent_routes.py
+++ b/agents-api/tests/test_agent_routes.py
@@ -1,6 +1,6 @@
# Tests for agent queries
-from uuid import uuid4
+from uuid_extensions import uuid7
from ward import test
from tests.fixtures import client, make_request, test_agent
@@ -60,7 +60,7 @@ def _(make_request=make_request):
@test("route: create or update agent")
def _(make_request=make_request):
- agent_id = str(uuid4())
+ agent_id = str(uuid7())
data = dict(
name="test agent",
@@ -80,7 +80,7 @@ def _(make_request=make_request):
@test("route: get agent not exists")
def _(make_request=make_request):
- agent_id = str(uuid4())
+ agent_id = str(uuid7())
response = make_request(
method="GET",
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index 569733fa5..734afdd65 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -1,6 +1,6 @@
# Tests for agent queries
-from uuid import uuid4
+from uuid_extensions import uuid7
from ward import raises, test
from agents_api.common.protocol.developers import Developer
@@ -31,6 +31,6 @@ def _(client=cozo_client, developer_id=test_developer_id):
def _(client=cozo_client):
with raises(Exception):
verify_developer(
- developer_id=uuid4(),
+ developer_id=uuid7(),
client=client,
)
diff --git a/agents-api/tests/test_execution_workflow.py b/agents-api/tests/test_execution_workflow.py
index e733f81c0..ae440ff02 100644
--- a/agents-api/tests/test_execution_workflow.py
+++ b/agents-api/tests/test_execution_workflow.py
@@ -16,7 +16,6 @@
from agents_api.models.task.create_task import create_task
from agents_api.routers.tasks.create_task_execution import start_execution
from tests.fixtures import (
- async_cozo_client,
cozo_client,
cozo_clients_with_migrations,
test_agent,
diff --git a/agents-api/tests/test_messages_truncation.py b/agents-api/tests/test_messages_truncation.py
index 97516617a..39cc02c2c 100644
--- a/agents-api/tests/test_messages_truncation.py
+++ b/agents-api/tests/test_messages_truncation.py
@@ -1,4 +1,5 @@
# from uuid import uuid4
+# from uuid_extensions import uuid7
# from ward import raises, test
@@ -26,9 +27,9 @@
# threshold = sum([len(c) // 3.5 for c in contents])
# messages: list[Entry] = [
-# Entry(session_id=uuid4(), role=Role.user, content=contents[0][0]),
-# Entry(session_id=uuid4(), role=Role.assistant, content=contents[1][0]),
-# Entry(session_id=uuid4(), role=Role.user, content=contents[2][0]),
+# Entry(session_id=uuid7(), role=Role.user, content=contents[0][0]),
+# Entry(session_id=uuid7(), role=Role.assistant, content=contents[1][0]),
+# Entry(session_id=uuid7(), role=Role.user, content=contents[2][0]),
# ]
# result = session.truncate(messages, threshold)
@@ -45,7 +46,7 @@
# ("content5", True),
# ("content6", True),
# ]
-# session_ids = [uuid4()] * len(contents)
+# session_ids = [uuid7()] * len(contents)
# threshold = sum([len(c) // 3.5 for c, i in contents if i])
# messages: list[Entry] = [
@@ -99,7 +100,7 @@
# ("content5", True),
# ("content6", True),
# ]
-# session_ids = [uuid4()] * len(contents)
+# session_ids = [uuid7()] * len(contents)
# threshold = sum([len(c) // 3.5 for c, i in contents if i])
# messages: list[Entry] = [
@@ -146,7 +147,7 @@
# ("content6", True),
# ("content7", False),
# ]
-# session_ids = [uuid4()] * len(contents)
+# session_ids = [uuid7()] * len(contents)
# threshold = sum([len(c) // 3.5 for c, i in contents if i])
# messages: list[Entry] = [
@@ -204,7 +205,7 @@
# ("content12", True),
# ("content13", False),
# ]
-# session_ids = [uuid4()] * len(contents)
+# session_ids = [uuid7()] * len(contents)
# threshold = sum([len(c) // 3.5 for c, i in contents if i])
# messages: list[Entry] = [
@@ -271,7 +272,7 @@
# ("content9", True),
# ("content10", False),
# ]
-# session_ids = [uuid4()] * len(contents)
+# session_ids = [uuid7()] * len(contents)
# threshold = sum([len(c) // 3.5 for c, i in contents if i])
# all_tokens = sum([len(c) // 3.5 for c, _ in contents])
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 01fea1375..d59ac9250 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -1,6 +1,6 @@
# Tests for session queries
-from uuid import uuid4
+from uuid_extensions import uuid7
from ward import test
from agents_api.autogen.openapi_model import (
@@ -54,7 +54,7 @@ def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
@test("model: get session not exists")
def _(client=cozo_client, developer_id=test_developer_id):
- session_id = uuid4()
+ session_id = uuid7()
try:
get_session(
@@ -136,7 +136,7 @@ def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
def _(
client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user
):
- session_id = uuid4()
+ session_id = uuid7()
create_or_update_session(
session_id=session_id,
diff --git a/agents-api/tests/test_task_queries.py b/agents-api/tests/test_task_queries.py
index e61489df8..85c38ba81 100644
--- a/agents-api/tests/test_task_queries.py
+++ b/agents-api/tests/test_task_queries.py
@@ -1,6 +1,6 @@
# Tests for task queries
-from uuid import uuid4
+from uuid_extensions import uuid7
from ward import test
from agents_api.autogen.openapi_model import (
@@ -20,7 +20,7 @@
@test("model: create task")
def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- task_id = uuid4()
+ task_id = uuid7()
create_task(
developer_id=developer_id,
@@ -40,7 +40,7 @@ def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
@test("model: create or update task")
def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- task_id = uuid4()
+ task_id = uuid7()
create_or_update_task(
developer_id=developer_id,
@@ -60,7 +60,7 @@ def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
@test("model: get task not exists")
def _(client=cozo_client, developer_id=test_developer_id):
- task_id = uuid4()
+ task_id = uuid7()
try:
get_task(
diff --git a/agents-api/tests/test_task_routes.py b/agents-api/tests/test_task_routes.py
index 5d3c2f998..6f758c852 100644
--- a/agents-api/tests/test_task_routes.py
+++ b/agents-api/tests/test_task_routes.py
@@ -1,7 +1,6 @@
# Tests for task routes
-from uuid import uuid4
-
+from uuid_extensions import uuid7
from ward import test
from tests.fixtures import (
@@ -79,7 +78,7 @@ async def _(make_request=make_request, task=test_task):
@test("route: get execution not exists")
def _(make_request=make_request):
- execution_id = str(uuid4())
+ execution_id = str(uuid7())
response = make_request(
method="GET",
@@ -101,7 +100,7 @@ def _(make_request=make_request, execution=test_execution):
@test("route: get task not exists")
def _(make_request=make_request):
- task_id = str(uuid4())
+ task_id = str(uuid7())
response = make_request(
method="GET",
diff --git a/agents-api/tests/test_user_queries.py b/agents-api/tests/test_user_queries.py
index ab5c62ed0..abdc597ea 100644
--- a/agents-api/tests/test_user_queries.py
+++ b/agents-api/tests/test_user_queries.py
@@ -1,8 +1,7 @@
# This module contains tests for user-related queries against the 'cozodb' database. It includes tests for creating, updating, and retrieving user information.
-
# Tests for user queries
-from uuid import uuid4
+from uuid_extensions import uuid7
from ward import test
from agents_api.autogen.openapi_model import (
@@ -40,7 +39,7 @@ def _(client=cozo_client, developer_id=test_developer_id):
create_or_update_user(
developer_id=developer_id,
- user_id=uuid4(),
+ user_id=uuid7(),
data=CreateOrUpdateUserRequest(
name="test user",
about="test user about",
@@ -73,7 +72,7 @@ def _(client=cozo_client, developer_id=test_developer_id, user=test_user):
def _(client=cozo_client, developer_id=test_developer_id):
"""Test that retrieving a non-existent user returns an empty result."""
- user_id = uuid4()
+ user_id = uuid7()
# Ensure that the query for an existing user returns exactly one result.
try:
diff --git a/agents-api/tests/test_user_routes.py b/agents-api/tests/test_user_routes.py
index 229d85619..a0696ed51 100644
--- a/agents-api/tests/test_user_routes.py
+++ b/agents-api/tests/test_user_routes.py
@@ -1,6 +1,6 @@
# Tests for user routes
-from uuid import uuid4
+from uuid_extensions import uuid7
from ward import test
from tests.fixtures import client, make_request, test_user
@@ -40,7 +40,7 @@ def _(make_request=make_request):
@test("route: get user not exists")
def _(make_request=make_request):
- user_id = str(uuid4())
+ user_id = str(uuid7())
response = make_request(
method="GET",
diff --git a/agents-api/tests/test_workflow_routes.py b/agents-api/tests/test_workflow_routes.py
index 2ffc73173..d7bdad027 100644
--- a/agents-api/tests/test_workflow_routes.py
+++ b/agents-api/tests/test_workflow_routes.py
@@ -1,7 +1,6 @@
# Tests for task queries
-from uuid import uuid4
-
+from uuid_extensions import uuid7
from ward import test
from tests.fixtures import cozo_client, test_agent, test_developer_id
@@ -15,7 +14,7 @@ async def _(
agent=test_agent,
):
agent_id = str(agent.id)
- task_id = str(uuid4())
+ task_id = str(uuid7())
async with patch_http_client_with_temporal(
cozo_client=cozo_client, developer_id=developer_id
@@ -100,7 +99,7 @@ async def _(
agent=test_agent,
):
agent_id = str(agent.id)
- task_id = str(uuid4())
+ task_id = str(uuid7())
async with patch_http_client_with_temporal(
cozo_client=cozo_client, developer_id=developer_id
diff --git a/agents-api/uv.lock b/agents-api/uv.lock
index 1f03aadca..381d91e79 100644
--- a/agents-api/uv.lock
+++ b/agents-api/uv.lock
@@ -52,6 +52,7 @@ dependencies = [
{ name = "tenacity" },
{ name = "thefuzz" },
{ name = "tiktoken" },
+ { name = "uuid7" },
{ name = "uvicorn" },
{ name = "uvloop" },
{ name = "xxhash" },
@@ -118,6 +119,7 @@ requires-dist = [
{ name = "tenacity", specifier = "~=9.0.0" },
{ name = "thefuzz", specifier = "~=0.22.1" },
{ name = "tiktoken", specifier = "~=0.7.0" },
+ { name = "uuid7", specifier = ">=0.1.0" },
{ name = "uvicorn", specifier = "~=0.30.6" },
{ name = "uvloop", specifier = "~=0.21.0" },
{ name = "xxhash", specifier = "~=3.5.0" },
@@ -2644,6 +2646,7 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/52/a9/d39f3c5ada0a3bb2870d7db41901125dbe2434fa4f12ca8c5b83a42d7c53/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd", size = 706497 },
{ url = "https://files.pythonhosted.org/packages/b0/fa/097e38135dadd9ac25aecf2a54be17ddf6e4c23e43d538492a90ab3d71c6/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31", size = 698042 },
{ url = "https://files.pythonhosted.org/packages/ec/d5/a659ca6f503b9379b930f13bc6b130c9f176469b73b9834296822a83a132/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680", size = 745831 },
+ { url = "https://files.pythonhosted.org/packages/db/5d/36619b61ffa2429eeaefaab4f3374666adf36ad8ac6330d855848d7d36fd/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d", size = 715692 },
{ url = "https://files.pythonhosted.org/packages/b1/82/85cb92f15a4231c89b95dfe08b09eb6adca929ef7df7e17ab59902b6f589/ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5", size = 98777 },
{ url = "https://files.pythonhosted.org/packages/d7/8f/c3654f6f1ddb75daf3922c3d8fc6005b1ab56671ad56ffb874d908bfa668/ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4", size = 115523 },
]
@@ -3216,6 +3219,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 },
]
+[[package]]
+name = "uuid7"
+version = "0.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5c/19/7472bd526591e2192926247109dbf78692e709d3e56775792fec877a7720/uuid7-0.1.0.tar.gz", hash = "sha256:8c57aa32ee7456d3cc68c95c4530bc571646defac01895cfc73545449894a63c", size = 14052 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b5/77/8852f89a91453956582a85024d80ad96f30a41fed4c2b3dce0c9f12ecc7e/uuid7-0.1.0-py2.py3-none-any.whl", hash = "sha256:5e259bb63c8cb4aded5927ff41b444a80d0c7124e8a0ced7cf44efa1f5cccf61", size = 7477 },
+]
+
[[package]]
name = "uvicorn"
version = "0.30.6"
From 78726aa34cfcbc6d570ae5bbd2061e762bb50731 Mon Sep 17 00:00:00 2001
From: creatorrr
Date: Thu, 12 Dec 2024 16:03:36 +0000
Subject: [PATCH 003/274] refactor: Lint agents-api (CI)
---
agents-api/tests/test_activities.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/agents-api/tests/test_activities.py b/agents-api/tests/test_activities.py
index 879cf2377..d81e30038 100644
--- a/agents-api/tests/test_activities.py
+++ b/agents-api/tests/test_activities.py
@@ -1,4 +1,3 @@
-
from uuid_extensions import uuid7
from ward import test
From 83ea8c388f712d088e99a9f1c07b7f6c991c0f1f Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Fri, 13 Dec 2024 19:45:22 +0530
Subject: [PATCH 004/274] wip(agents-api): Initial migrations for postgres
Signed-off-by: Diwank Singh Tomer
---
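[Not part of the patch — editor's note.] The new memory-store/migrations/*.sql
files are numbered (00001_..., 00002_, ...) so they can be applied in lexical
order against the timescale/timescaledb-ha container that the rewritten
docker-compose.yml exposes on port 5432. A minimal sketch of applying them,
assuming plain psycopg and the compose file's default MEMORY_STORE_PASSWORD of
"postgres"; the project's actual migration tooling is not shown in this patch:

from pathlib import Path
import psycopg

MIGRATIONS = sorted(Path("memory-store/migrations").glob("*.sql"))

# Connection string matches the docker-compose defaults above.
with psycopg.connect("postgresql://postgres:postgres@localhost:5432/postgres") as conn:
    for sql_file in MIGRATIONS:
        # Each file is a self-contained step (extensions, tables, indexes,
        # triggers); run it as one parameter-less batch, then commit.
        conn.execute(sql_file.read_text())
        conn.commit()
        print(f"applied {sql_file.name}")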
agents-api/docker-compose.yml | 18 --
blob-store/docker-compose.yml | 2 +-
memory-store/Dockerfile | 64 ------
memory-store/README.md | 28 ---
memory-store/backup.sh | 38 ----
memory-store/docker-compose.yml | 59 ++---
memory-store/migrations/00001_initial.sql | 25 +++
memory-store/migrations/00002_developers.sql | 33 +++
memory-store/migrations/00003_users.sql | 34 +++
memory-store/migrations/00004_agents.sql | 40 ++++
memory-store/migrations/00005_files.sql | 63 ++++++
memory-store/migrations/00006_docs.sql | 146 +++++++++++++
memory-store/migrations/00007_ann.sql | 37 ++++
memory-store/migrations/00008_tools.sql | 33 +++
memory-store/migrations/00009_sessions.sql | 99 +++++++++
memory-store/migrations/00010_tasks.sql | 1 +
memory-store/options | 213 -------------------
memory-store/run.sh | 23 --
18 files changed, 529 insertions(+), 427 deletions(-)
delete mode 100644 memory-store/Dockerfile
delete mode 100644 memory-store/README.md
delete mode 100644 memory-store/backup.sh
create mode 100644 memory-store/migrations/00001_initial.sql
create mode 100644 memory-store/migrations/00002_developers.sql
create mode 100644 memory-store/migrations/00003_users.sql
create mode 100644 memory-store/migrations/00004_agents.sql
create mode 100644 memory-store/migrations/00005_files.sql
create mode 100644 memory-store/migrations/00006_docs.sql
create mode 100644 memory-store/migrations/00007_ann.sql
create mode 100644 memory-store/migrations/00008_tools.sql
create mode 100644 memory-store/migrations/00009_sessions.sql
create mode 100644 memory-store/migrations/00010_tasks.sql
delete mode 100644 memory-store/options
delete mode 100755 memory-store/run.sh
diff --git a/agents-api/docker-compose.yml b/agents-api/docker-compose.yml
index 94129896c..67591e945 100644
--- a/agents-api/docker-compose.yml
+++ b/agents-api/docker-compose.yml
@@ -111,21 +111,3 @@ services:
path: uv.lock
- action: rebuild
path: Dockerfile.worker
-
- cozo-migrate:
- image: julepai/cozo-migrate:${TAG:-dev}
- container_name: cozo-migrate
- build:
- context: .
- dockerfile: Dockerfile.migration
- restart: "no" # Make sure to double quote this
- environment:
- <<: *shared-environment
-
- develop:
- watch:
- - action: sync+restart
- path: ./migrations
- target: /app/migrations
- - action: rebuild
- path: Dockerfile.migration
diff --git a/blob-store/docker-compose.yml b/blob-store/docker-compose.yml
index 089b31f39..64d238df4 100644
--- a/blob-store/docker-compose.yml
+++ b/blob-store/docker-compose.yml
@@ -12,7 +12,7 @@ services:
environment:
- S3_ACCESS_KEY=${S3_ACCESS_KEY}
- S3_SECRET_KEY=${S3_SECRET_KEY}
- - DEBUG=${DEBUG:-true}
+ - DEBUG=${DEBUG:-false}
ports:
- 9333:9333 # master port
diff --git a/memory-store/Dockerfile b/memory-store/Dockerfile
deleted file mode 100644
index fa384cb12..000000000
--- a/memory-store/Dockerfile
+++ /dev/null
@@ -1,64 +0,0 @@
-# syntax=docker/dockerfile:1
-# check=error=true
-# We need to build the cozo binary first from the repo
-# https://github.com/cozodb/cozo
-# Then copy the binary to the ./bin directory
-# Then copy the run.sh script to the ./run.sh file
-
-# First stage: Build the Rust project
-FROM rust:1.83-bookworm AS builder
-
-# Install required dependencies
-RUN apt-get update && apt-get install -y \
- liburing-dev \
- libclang-dev \
- clang
-
-# Build cozo-ce-bin from crates.io
-WORKDIR /usr/src
-# RUN cargo install cozo-ce-bin@0.7.13-alpha.3 --features "requests graph-algo storage-new-rocksdb storage-sqlite jemalloc io-uring malloc-usable-size"
-RUN cargo install --git https://github.com/cozo-community/cozo.git --branch f/publish-crate --rev 592f49b --profile release -F graph-algo -F jemalloc -F io-uring -F storage-new-rocksdb -F malloc-usable-size --target x86_64-unknown-linux-gnu cozo-ce-bin
-
-# Copy the built binary to /usr/local/bin
-RUN cp /usr/local/cargo/bin/cozo-ce-bin /usr/local/bin/cozo
-
-# -------------------------------------------------------------------------------------------------
-
-# Second stage: Create the final image
-FROM debian:bookworm-slim
-
-# Install dependencies
-RUN \
- apt-get update -yqq && \
- apt-get install -y \
- ca-certificates tini nfs-common nfs-kernel-server procps netbase \
- liburing-dev curl && \
- apt-get clean && \
- rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
-
-# Set fallback mount directory
-ENV COZO_MNT_DIR=/data COZO_BACKUP_DIR=/backup APP_HOME=/app COZO_PORT=9070
-WORKDIR $APP_HOME
-
-# Copy the cozo binary
-COPY --from=builder /usr/local/bin/cozo $APP_HOME/bin/cozo
-
-# Copy local code to the container image.
-COPY ./run.sh ./run.sh
-COPY ./backup.sh ./backup.sh
-
-# Ensure the script is executable
-RUN \
- mkdir -p $COZO_MNT_DIR $COZO_BACKUP_DIR && \
- chmod +x $APP_HOME/bin/cozo && \
- chmod +x $APP_HOME/run.sh
-
-# Copy the options file into the image
-COPY ./options ./options
-
-# Use tini to manage zombie processes and signal forwarding
-# https://github.com/krallin/tini
-ENTRYPOINT ["/usr/bin/tini", "--"]
-
-# Pass the startup script as arguments to tini
-CMD ["/app/run.sh"]
diff --git a/memory-store/README.md b/memory-store/README.md
deleted file mode 100644
index a58ba79d1..000000000
--- a/memory-store/README.md
+++ /dev/null
@@ -1,28 +0,0 @@
-Cozo Server
-
-The `memory-store` directory within the julep repository serves as a critical component for managing data persistence and availability. It encompasses functionalities for data backup, service deployment, and containerization, ensuring that the julep project's data management is efficient and scalable.
-
-## Backup Script
-
-The `backup.py` script within the `backup` subdirectory is designed to periodically back up data while also cleaning up old backups based on a specified retention period. This ensures that the system maintains only the necessary backups, optimizing storage use. For more details, see the `backup.py` file.
-
-## Dockerfile
-
-The Dockerfile is instrumental in creating a Docker image for the memory-store service. It outlines the steps for installing necessary dependencies and setting up the environment to run the service. This includes the installation of software packages and configuration of environment variables. For specifics, refer to the Dockerfile.
-
-## Docker Compose
-
-The `docker-compose.yml` file is used to define and run multi-container Docker applications, specifically tailored for the memory-store service. It specifies the service configurations, including environment variables, volumes, and ports, facilitating an organized deployment. For more details, see the `docker-compose.yml` file.
-
-## Deployment Script
-
-The `deploy.sh` script is aimed at deploying the memory-store service to a cloud provider, utilizing specific configurations to ensure seamless integration and operation. This script includes commands for setting environment variables and deploying the service. For specifics, refer to the `deploy.sh` script.
-
-## Usage
-
-To utilize the components of the memory-store directory, follow these general instructions:
-
-- To build and run the Docker containers, use the Docker and Docker Compose commands as specified in the `docker-compose.yml` file.
-- To execute the backup script, run `python backup.py` with the appropriate arguments as detailed in the `backup.py` file.
-
-This README provides a comprehensive guide to understanding and using the memory-store components within the julep project.
diff --git a/memory-store/backup.sh b/memory-store/backup.sh
deleted file mode 100644
index 0a4fff0dd..000000000
--- a/memory-store/backup.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env bash
-
-set -eo pipefail # Exit on error
-set -u # Exit on undefined variable
-
-# Ensure environment variables are set
-if [ -z "$COZO_AUTH_TOKEN" ]; then
- echo "COZO_AUTH_TOKEN is not set"
- exit 1
-fi
-
-COZO_PORT=${COZO_PORT:-9070}
-COZO_BACKUP_DIR=${COZO_BACKUP_DIR:-/backup}
-TIMESTAMP=$(date +%Y-%m-%d_%H-%M-%S)
-MAX_BACKUPS=${MAX_BACKUPS:-10}
-
-curl -X POST \
- http://0.0.0.0:$COZO_PORT/backup \
- -H 'Content-Type: application/json' \
- -H "X-Cozo-Auth: ${COZO_AUTH_TOKEN}" \
- -d "{\"path\": \"${COZO_BACKUP_DIR}/cozo-backup-${TIMESTAMP}.bk\"}" \
- -w "\nStatus: %{http_code}\nResponse:\n" \
- -o /dev/stdout
-
-# Print the number of backups
-echo "Number of backups: $(ls -l ${COZO_BACKUP_DIR} | grep -c "cozo-backup-")"
-
-# If the backup is successful, remove the oldest backup if the number of backups exceeds MAX_BACKUPS
-if [ $(ls -l ${COZO_BACKUP_DIR} | grep -c "cozo-backup-") -gt $MAX_BACKUPS ]; then
- oldest_backup=$(ls -t ${COZO_BACKUP_DIR}/cozo-backup-*.bk | tail -n 1)
-
- if [ -n "$oldest_backup" ]; then
- rm "$oldest_backup"
- echo "Removed oldest backup: $oldest_backup"
- else
- echo "No backups found to remove"
- fi
-fi
\ No newline at end of file
diff --git a/memory-store/docker-compose.yml b/memory-store/docker-compose.yml
index f00d003de..775a97b82 100644
--- a/memory-store/docker-compose.yml
+++ b/memory-store/docker-compose.yml
@@ -1,46 +1,21 @@
-name: julep-memory-store
-
+name: pgai
services:
- memory-store:
- image: julepai/memory-store:${TAG:-dev}
- environment:
- - COZO_AUTH_TOKEN=${COZO_AUTH_TOKEN}
- - COZO_PORT=${COZO_PORT:-9070}
- - COZO_MNT_DIR=${MNT_DIR:-/data}
- - COZO_BACKUP_DIR=${COZO_BACKUP_DIR:-/backup}
- volumes:
- - cozo_data:/data
- - cozo_backup:/backup
- build:
- context: .
- ports:
- - "9070:9070"
-
- develop:
- watch:
- - action: sync+restart
- path: ./options
- target: /data/cozo.db/OPTIONS-000007
- - action: rebuild
- path: Dockerfile
-
- labels:
- ofelia.enabled: "true"
- ofelia.job-exec.backupcron.schedule: "@every 3h"
- ofelia.job-exec.backupcron.environment: '["COZO_PORT=${COZO_PORT}", "COZO_AUTH_TOKEN=${COZO_AUTH_TOKEN}", "COZO_BACKUP_DIR=${COZO_BACKUP_DIR}"]'
- ofelia.job-exec.backupcron.command: bash /app/backup.sh
-
- memory-store-backup-cron:
- image: mcuadros/ofelia:latest
- restart: unless-stopped
- depends_on:
- - memory-store
- command: daemon --docker -f label=com.docker.compose.project=${COMPOSE_PROJECT_NAME}
- volumes:
- - /var/run/docker.sock:/var/run/docker.sock:ro
+ db:
+ image: timescale/timescaledb-ha:pg17
+ environment:
+ - POSTGRES_PASSWORD=${MEMORY_STORE_PASSWORD:-postgres}
+ - VOYAGE_API_KEY=${VOYAGE_API_KEY}
+ ports:
+ - "5432:5432"
+ volumes:
+ - memory_store_data:/home/postgres/pgdata/data
+ vectorizer-worker:
+ image: timescale/pgai-vectorizer-worker:v0.3.0
+ environment:
+ - PGAI_VECTORIZER_WORKER_DB_URL=postgres://postgres:${MEMORY_STORE_PASSWORD:-postgres}@db:5432/postgres
+ - VOYAGE_API_KEY=${VOYAGE_API_KEY}
+ command: [ "--poll-interval", "5s" ]
volumes:
- cozo_data:
- external: true
- cozo_backup:
+ memory_store_data:
external: true
diff --git a/memory-store/migrations/00001_initial.sql b/memory-store/migrations/00001_initial.sql
new file mode 100644
index 000000000..3be41ef68
--- /dev/null
+++ b/memory-store/migrations/00001_initial.sql
@@ -0,0 +1,25 @@
+-- init timescaledb
+CREATE EXTENSION IF NOT EXISTS timescaledb CASCADE;
+CREATE EXTENSION IF NOT EXISTS timescaledb_toolkit CASCADE;
+
+-- add timescale's pgai extension
+CREATE EXTENSION IF NOT EXISTS vector CASCADE;
+CREATE EXTENSION IF NOT EXISTS vectorscale CASCADE;
+CREATE EXTENSION IF NOT EXISTS ai CASCADE;
+
+-- add misc extensions (for indexing etc)
+CREATE EXTENSION IF NOT EXISTS btree_gin CASCADE;
+CREATE EXTENSION IF NOT EXISTS btree_gist CASCADE;
+CREATE EXTENSION IF NOT EXISTS citext CASCADE;
+CREATE EXTENSION IF NOT EXISTS "uuid-ossp" CASCADE;
+
+-- Create function to update the updated_at timestamp
+CREATE OR REPLACE FUNCTION update_updated_at_column()
+RETURNS TRIGGER AS $$
+BEGIN
+ NEW.updated_at = CURRENT_TIMESTAMP;
+ RETURN NEW;
+END;
+$$ language 'plpgsql';
+
+COMMENT ON FUNCTION update_updated_at_column() IS 'Trigger function to automatically update updated_at timestamp';
diff --git a/memory-store/migrations/00002_developers.sql b/memory-store/migrations/00002_developers.sql
new file mode 100644
index 000000000..b8d9b7673
--- /dev/null
+++ b/memory-store/migrations/00002_developers.sql
@@ -0,0 +1,33 @@
+-- Create developers table
+CREATE TABLE developers (
+ developer_id UUID NOT NULL,
+ email TEXT NOT NULL CONSTRAINT ct_developers_email_format CHECK (email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$'),
+ active BOOLEAN NOT NULL DEFAULT true,
+ tags TEXT[] DEFAULT ARRAY[]::TEXT[],
+ settings JSONB NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ CONSTRAINT pk_developers PRIMARY KEY (developer_id),
+ CONSTRAINT uq_developers_email UNIQUE (email)
+);
+
+-- Create sorted index on developer_id (optimized for UUID v7)
+CREATE INDEX idx_developers_id_sorted ON developers (developer_id DESC);
+
+-- Create index on email
+CREATE INDEX idx_developers_email ON developers (email);
+
+-- Create GIN index for tags array
+CREATE INDEX idx_developers_tags ON developers USING GIN (tags);
+
+-- Create partial index for active developers
+CREATE INDEX idx_developers_active ON developers (developer_id) WHERE active = true;
+
+-- Create trigger to automatically update updated_at
+CREATE TRIGGER trg_developers_updated_at
+ BEFORE UPDATE ON developers
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+
+-- Add comment to table
+COMMENT ON TABLE developers IS 'Stores developer information including their settings and tags';
\ No newline at end of file
diff --git a/memory-store/migrations/00003_users.sql b/memory-store/migrations/00003_users.sql
new file mode 100644
index 000000000..0d9f76ff7
--- /dev/null
+++ b/memory-store/migrations/00003_users.sql
@@ -0,0 +1,34 @@
+-- Create users table
+CREATE TABLE users (
+ developer_id UUID NOT NULL,
+ user_id UUID NOT NULL,
+ name TEXT NOT NULL,
+ about TEXT,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
+ CONSTRAINT pk_users PRIMARY KEY (developer_id, user_id)
+);
+
+-- Create sorted index on user_id (optimized for UUID v7)
+CREATE INDEX users_id_sorted_idx ON users (user_id DESC);
+
+-- Create foreign key constraint and index on developer_id
+ALTER TABLE users
+ ADD CONSTRAINT users_developer_id_fkey
+ FOREIGN KEY (developer_id)
+ REFERENCES developers(developer_id);
+
+CREATE INDEX users_developer_id_idx ON users (developer_id);
+
+-- Create a GIN index on the entire metadata column
+CREATE INDEX users_metadata_gin_idx ON users USING GIN (metadata);
+
+-- Create trigger to automatically update updated_at
+CREATE TRIGGER update_users_updated_at
+ BEFORE UPDATE ON users
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+
+-- Add comment to table
+COMMENT ON TABLE users IS 'Stores user information linked to developers';
\ No newline at end of file
diff --git a/memory-store/migrations/00004_agents.sql b/memory-store/migrations/00004_agents.sql
new file mode 100644
index 000000000..8eb8b2f35
--- /dev/null
+++ b/memory-store/migrations/00004_agents.sql
@@ -0,0 +1,40 @@
+-- Create agents table
+CREATE TABLE agents (
+ developer_id UUID NOT NULL,
+ agent_id UUID NOT NULL,
+ canonical_name citext NOT NULL CONSTRAINT ct_agents_canonical_name_length CHECK (length(canonical_name) >= 1 AND length(canonical_name) <= 255),
+ name TEXT NOT NULL CONSTRAINT ct_agents_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
+ about TEXT CONSTRAINT ct_agents_about_length CHECK (about IS NULL OR length(about) <= 1000),
+ instructions TEXT[] DEFAULT ARRAY[]::TEXT[],
+ model TEXT NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
+ default_settings JSONB NOT NULL DEFAULT '{}'::JSONB,
+ CONSTRAINT pk_agents PRIMARY KEY (developer_id, agent_id),
+ CONSTRAINT uq_agents_canonical_name_unique UNIQUE (developer_id, canonical_name), -- per developer
+ CONSTRAINT ct_agents_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$')
+);
+
+-- Create sorted index on agent_id (optimized for UUID v7)
+CREATE INDEX idx_agents_id_sorted ON agents (agent_id DESC);
+
+-- Create foreign key constraint and index on developer_id
+ALTER TABLE agents
+ ADD CONSTRAINT fk_agents_developer
+ FOREIGN KEY (developer_id)
+ REFERENCES developers(developer_id);
+
+CREATE INDEX idx_agents_developer ON agents (developer_id);
+
+-- Create a GIN index on the entire metadata column
+CREATE INDEX idx_agents_metadata ON agents USING GIN (metadata);
+
+-- Create trigger to automatically update updated_at
+CREATE TRIGGER trg_agents_updated_at
+ BEFORE UPDATE ON agents
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+
+-- Add comment to table
+COMMENT ON TABLE agents IS 'Stores AI agent configurations and metadata for developers';
\ No newline at end of file
diff --git a/memory-store/migrations/00005_files.sql b/memory-store/migrations/00005_files.sql
new file mode 100644
index 000000000..3d8c2900b
--- /dev/null
+++ b/memory-store/migrations/00005_files.sql
@@ -0,0 +1,63 @@
+-- Create files table
+CREATE TABLE files (
+ developer_id UUID NOT NULL,
+ file_id UUID NOT NULL,
+ name TEXT NOT NULL CONSTRAINT ct_files_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
+ description TEXT DEFAULT NULL CONSTRAINT ct_files_description_length CHECK (description IS NULL OR length(description) <= 1000),
+ mime_type TEXT DEFAULT NULL CONSTRAINT ct_files_mime_type_length CHECK (mime_type IS NULL OR length(mime_type) <= 127),
+ size BIGINT NOT NULL CONSTRAINT ct_files_size_positive CHECK (size > 0),
+ hash BYTEA NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ CONSTRAINT pk_files PRIMARY KEY (developer_id, file_id)
+);
+
+-- Create sorted index on file_id (optimized for UUID v7)
+CREATE INDEX idx_files_id_sorted ON files (file_id DESC);
+
+-- Create foreign key constraint and index on developer_id
+ALTER TABLE files
+ ADD CONSTRAINT fk_files_developer
+ FOREIGN KEY (developer_id)
+ REFERENCES developers(developer_id);
+
+CREATE INDEX idx_files_developer ON files (developer_id);
+
+-- Before creating the user_files and agent_files tables, we need to ensure that the file_id is unique for each developer
+ALTER TABLE files
+ ADD CONSTRAINT uq_files_developer_id_file_id UNIQUE (developer_id, file_id);
+
+-- Create trigger to automatically update updated_at
+CREATE TRIGGER trg_files_updated_at
+ BEFORE UPDATE ON files
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+
+-- Add comment to table
+COMMENT ON TABLE files IS 'Stores file metadata and references for developers';
+
+-- Create the user_files table
+CREATE TABLE user_files (
+ developer_id UUID NOT NULL,
+ user_id UUID NOT NULL,
+ file_id UUID NOT NULL,
+ CONSTRAINT pk_user_files PRIMARY KEY (developer_id, user_id, file_id),
+ CONSTRAINT fk_user_files_user FOREIGN KEY (developer_id, user_id) REFERENCES users(developer_id, user_id),
+ CONSTRAINT fk_user_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files(developer_id, file_id)
+);
+
+-- Indexes for efficient querying
+CREATE INDEX idx_user_files_user ON user_files (developer_id, user_id);
+
+-- Create the agent_files table
+CREATE TABLE agent_files (
+ developer_id UUID NOT NULL,
+ agent_id UUID NOT NULL,
+ file_id UUID NOT NULL,
+ CONSTRAINT pk_agent_files PRIMARY KEY (developer_id, agent_id, file_id),
+ CONSTRAINT fk_agent_files_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents(developer_id, agent_id),
+ CONSTRAINT fk_agent_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files(developer_id, file_id)
+);
+
+-- Indexes for efficient querying
+CREATE INDEX idx_agent_files_agent ON agent_files (developer_id, agent_id);
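
Because both foreign keys on user_files include developer_id, a file can only be attached to a user owned by the same developer; a cross-developer link fails the FK check. A minimal sketch with hypothetical IDs:

    -- Assumes matching rows exist in users and files for this developer
    INSERT INTO user_files (developer_id, user_id, file_id)
    VALUES ('11111111-1111-1111-1111-111111111111',
            '44444444-4444-4444-4444-444444444444',
            '55555555-5555-5555-5555-555555555555');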
diff --git a/memory-store/migrations/00006_docs.sql b/memory-store/migrations/00006_docs.sql
new file mode 100644
index 000000000..88c7ff2a7
--- /dev/null
+++ b/memory-store/migrations/00006_docs.sql
@@ -0,0 +1,146 @@
+-- Create function to validate language
+CREATE OR REPLACE FUNCTION is_valid_language(lang text)
+RETURNS boolean AS $$
+BEGIN
+ RETURN EXISTS (
+ SELECT 1 FROM pg_ts_config WHERE cfgname::text = lang
+ );
+END;
+$$ LANGUAGE plpgsql;
+
+-- Create docs table
+CREATE TABLE docs (
+ developer_id UUID NOT NULL,
+ doc_id UUID NOT NULL,
+ title TEXT NOT NULL,
+ content TEXT NOT NULL,
+ index INTEGER NOT NULL,
+ modality TEXT NOT NULL,
+ embedding_model TEXT NOT NULL,
+ embedding_dimensions INTEGER NOT NULL,
+ language TEXT NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
+ CONSTRAINT pk_docs PRIMARY KEY (developer_id, doc_id),
+ CONSTRAINT uq_docs_doc_id_index UNIQUE (doc_id, index),
+ CONSTRAINT ct_docs_embedding_dimensions_positive CHECK (embedding_dimensions > 0),
+ CONSTRAINT ct_docs_valid_modality CHECK (modality IN ('text', 'image', 'mixed')),
+ CONSTRAINT ct_docs_index_positive CHECK (index >= 0),
+ CONSTRAINT ct_docs_valid_language
+ CHECK (is_valid_language(language))
+);
+
+-- Create sorted index on doc_id (optimized for UUID v7)
+CREATE INDEX idx_docs_id_sorted ON docs (doc_id DESC);
+
+-- Create foreign key constraint and index on developer_id
+ALTER TABLE docs
+ ADD CONSTRAINT fk_docs_developer
+ FOREIGN KEY (developer_id)
+ REFERENCES developers(developer_id);
+
+CREATE INDEX idx_docs_developer ON docs (developer_id);
+
+-- Create trigger to automatically update updated_at
+CREATE TRIGGER trg_docs_updated_at
+ BEFORE UPDATE ON docs
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+
+-- Add comment to table
+COMMENT ON TABLE docs IS 'Stores document metadata for developers';
+
+-- Create the user_docs table
+CREATE TABLE user_docs (
+ developer_id UUID NOT NULL,
+ user_id UUID NOT NULL,
+ doc_id UUID NOT NULL,
+ CONSTRAINT pk_user_docs PRIMARY KEY (developer_id, user_id, doc_id),
+ CONSTRAINT fk_user_docs_user FOREIGN KEY (developer_id, user_id) REFERENCES users(developer_id, user_id),
+ CONSTRAINT fk_user_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs(developer_id, doc_id)
+);
+
+-- Create the agent_docs table
+CREATE TABLE agent_docs (
+ developer_id UUID NOT NULL,
+ agent_id UUID NOT NULL,
+ doc_id UUID NOT NULL,
+ CONSTRAINT pk_agent_docs PRIMARY KEY (developer_id, agent_id, doc_id),
+ CONSTRAINT fk_agent_docs_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents(developer_id, agent_id),
+ CONSTRAINT fk_agent_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs(developer_id, doc_id)
+);
+
+-- Indexes for efficient querying
+CREATE INDEX idx_user_docs_user ON user_docs (developer_id, user_id);
+CREATE INDEX idx_agent_docs_agent ON agent_docs (developer_id, agent_id);
+
+-- Create a GIN index on the metadata column for efficient searching
+CREATE INDEX idx_docs_metadata ON docs USING GIN (metadata);
+
+-- Enable necessary PostgreSQL extensions
+CREATE EXTENSION IF NOT EXISTS unaccent;
+CREATE EXTENSION IF NOT EXISTS pg_trgm;
+CREATE EXTENSION IF NOT EXISTS dict_int CASCADE;
+CREATE EXTENSION IF NOT EXISTS dict_xsyn CASCADE;
+CREATE EXTENSION IF NOT EXISTS fuzzystrmatch CASCADE;
+
+-- Configure text search for all supported languages
+DO $$
+DECLARE
+ lang text;
+BEGIN
+ FOR lang IN (SELECT cfgname FROM pg_ts_config WHERE cfgname IN (
+ 'arabic', 'danish', 'dutch', 'english', 'finnish', 'french',
+ 'german', 'greek', 'hungarian', 'indonesian', 'irish', 'italian',
+ 'lithuanian', 'nepali', 'norwegian', 'portuguese', 'romanian',
+ 'russian', 'spanish', 'swedish', 'tamil', 'turkish'
+ ))
+ LOOP
+ -- Configure integer dictionary
+ EXECUTE format('ALTER TEXT SEARCH CONFIGURATION %I
+ ALTER MAPPING FOR int, uint WITH intdict', lang);
+
+ -- Configure synonym and stemming
+ EXECUTE format('ALTER TEXT SEARCH CONFIGURATION %I
+ ALTER MAPPING FOR asciihword, hword_asciipart, hword, hword_part, word, asciiword
+ WITH xsyn, %I_stem', lang, lang);
+ END LOOP;
+END
+$$;
+
+-- Add the column (not generated)
+ALTER TABLE docs ADD COLUMN search_tsv tsvector;
+
+-- Create function to update tsvector
+CREATE OR REPLACE FUNCTION docs_update_search_tsv()
+RETURNS trigger AS $$
+BEGIN
+ NEW.search_tsv :=
+ setweight(to_tsvector(NEW.language::regconfig, unaccent(coalesce(NEW.title, ''))), 'A') ||
+ setweight(to_tsvector(NEW.language::regconfig, unaccent(coalesce(NEW.content, ''))), 'B');
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Create trigger
+CREATE TRIGGER trg_docs_search_tsv
+ BEFORE INSERT OR UPDATE OF title, content, language
+ ON docs
+ FOR EACH ROW
+ EXECUTE FUNCTION docs_update_search_tsv();
+
+-- Create the index
+CREATE INDEX idx_docs_search_tsv ON docs USING GIN (search_tsv);
+
+-- Update existing rows (if any)
+UPDATE docs SET search_tsv =
+ setweight(to_tsvector(language::regconfig, unaccent(coalesce(title, ''))), 'A') ||
+ setweight(to_tsvector(language::regconfig, unaccent(coalesce(content, ''))), 'B');
+
+-- Create GIN trigram indexes for both title and content
+CREATE INDEX idx_docs_title_trgm
+ON docs USING GIN (title gin_trgm_ops);
+
+CREATE INDEX idx_docs_content_trgm
+ON docs USING GIN (content gin_trgm_ops);
\ No newline at end of file
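
One way to exercise the weighted search_tsv column this migration builds: titles carry weight 'A' and content weight 'B', so title hits rank higher. The query text is illustrative:

    SELECT d.doc_id, d.title, ts_rank(d.search_tsv, q) AS rank
    FROM docs d, websearch_to_tsquery('english', 'vector database') AS q
    WHERE d.search_tsv @@ q
    ORDER BY rank DESC
    LIMIT 10;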
diff --git a/memory-store/migrations/00007_ann.sql b/memory-store/migrations/00007_ann.sql
new file mode 100644
index 000000000..5f2157f02
--- /dev/null
+++ b/memory-store/migrations/00007_ann.sql
@@ -0,0 +1,37 @@
+-- Create vector similarity search index using diskann and timescale vectorizer
+select ai.create_vectorizer(
+ source => 'docs',
+ destination => 'docs_embeddings',
+ embedding => ai.embedding_voyageai('voyage-3', 1024), -- need to parameterize this
+ -- actual chunking is managed by the docs table
+ -- this is to prevent running out of context window
+ chunking => ai.chunking_recursive_character_text_splitter(
+ chunk_column => 'content',
+ chunk_size => 30000, -- 30k characters ~= 7.5k tokens
+ chunk_overlap => 600, -- 600 characters ~= 150 tokens
+ separators => array[ -- tries separators in order
+ -- markdown headers
+ E'\n#',
+ E'\n##',
+ E'\n###',
+ E'\n---',
+ E'\n***',
+ -- html tags
+        E'</article>', -- Split on major document sections
+        E'</div>', -- Split on div boundaries
+        E'</section>',
+        E'</p>', -- Split on paragraphs
+        E'<br>', -- Split on line breaks
+ -- other separators
+ E'\n\n', -- paragraphs
+ '. ', '? ', '! ', '; ', -- sentences (note space after punctuation)
+ E'\n', -- line breaks
+ ' ' -- words (last resort)
+ ]
+ ),
+ scheduling => ai.scheduling_timescaledb(),
+ indexing => ai.indexing_diskann(),
+ formatting => ai.formatting_python_template(E'Title: $title\n\n$chunk'),
+ processing => ai.processing_default(),
+ enqueue_existing => true
+);
\ No newline at end of file
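
A sketch of querying the result, assuming pgai's ai.voyageai_embed helper and that the vectorizer has populated a docs_embeddings view exposing chunk and embedding columns (pgvector's <=> is cosine distance):

    -- Sketch only; view and helper names follow pgai's vectorizer conventions
    SELECT title, chunk
    FROM docs_embeddings
    ORDER BY embedding <=> ai.voyageai_embed('voyage-3', 'how do sessions get truncated?')
    LIMIT 5;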
diff --git a/memory-store/migrations/00008_tools.sql b/memory-store/migrations/00008_tools.sql
new file mode 100644
index 000000000..ec5d8590d
--- /dev/null
+++ b/memory-store/migrations/00008_tools.sql
@@ -0,0 +1,33 @@
+-- Create tools table
+CREATE TABLE tools (
+ developer_id UUID NOT NULL,
+ agent_id UUID NOT NULL,
+ tool_id UUID NOT NULL,
+ type TEXT NOT NULL CONSTRAINT ct_tools_type_length CHECK (length(type) >= 1 AND length(type) <= 255),
+ name TEXT NOT NULL CONSTRAINT ct_tools_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
+ description TEXT CONSTRAINT ct_tools_description_length CHECK (description IS NULL OR length(description) <= 1000),
+ spec JSONB NOT NULL,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ CONSTRAINT pk_tools PRIMARY KEY (developer_id, agent_id, tool_id)
+);
+
+-- Create sorted index on tool_id (optimized for UUID v7)
+CREATE INDEX idx_tools_id_sorted ON tools (tool_id DESC);
+
+-- Create foreign key constraint and index on developer_id and agent_id
+ALTER TABLE tools
+ ADD CONSTRAINT fk_tools_agent
+ FOREIGN KEY (developer_id, agent_id)
+ REFERENCES agents(developer_id, agent_id);
+
+CREATE INDEX idx_tools_developer_agent ON tools (developer_id, agent_id);
+
+-- Create trigger to automatically update updated_at
+CREATE TRIGGER trg_tools_updated_at
+ BEFORE UPDATE ON tools
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+
+-- Add comment to table
+COMMENT ON TABLE tools IS 'Stores tool configurations and specifications for AI agents';
\ No newline at end of file
diff --git a/memory-store/migrations/00009_sessions.sql b/memory-store/migrations/00009_sessions.sql
new file mode 100644
index 000000000..d79517f86
--- /dev/null
+++ b/memory-store/migrations/00009_sessions.sql
@@ -0,0 +1,99 @@
+-- Create sessions table
+CREATE TABLE sessions (
+ developer_id UUID NOT NULL,
+ session_id UUID NOT NULL,
+ situation TEXT,
+ system_template TEXT NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
+ render_templates BOOLEAN NOT NULL DEFAULT true,
+ token_budget INTEGER,
+ context_overflow TEXT,
+ forward_tool_calls BOOLEAN,
+ recall_options JSONB NOT NULL DEFAULT '{}'::JSONB,
+ CONSTRAINT pk_sessions PRIMARY KEY (developer_id, session_id)
+);
+
+-- Create sorted index on session_id (optimized for UUID v7)
+CREATE INDEX idx_sessions_id_sorted ON sessions (session_id DESC);
+
+-- Create index for updated_at since we'll sort by it
+CREATE INDEX idx_sessions_updated_at ON sessions (updated_at DESC);
+
+-- Create foreign key constraint and index on developer_id
+ALTER TABLE sessions
+ ADD CONSTRAINT fk_sessions_developer
+ FOREIGN KEY (developer_id)
+ REFERENCES developers(developer_id);
+
+CREATE INDEX idx_sessions_developer ON sessions (developer_id);
+
+-- Create a GIN index on the metadata column
+CREATE INDEX idx_sessions_metadata ON sessions USING GIN (metadata);
+
+-- Create trigger to automatically update updated_at
+CREATE TRIGGER trg_sessions_updated_at
+ BEFORE UPDATE ON sessions
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+
+-- Add comment to table
+COMMENT ON TABLE sessions IS 'Stores chat sessions and their configurations';
+
+-- Create session_lookup table with participant type enum
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'participant_type') THEN
+ CREATE TYPE participant_type AS ENUM ('user', 'agent');
+ END IF;
+END
+$$;
+
+-- Create session_lookup table without the CHECK constraint
+CREATE TABLE session_lookup (
+ developer_id UUID NOT NULL,
+ session_id UUID NOT NULL,
+ participant_type participant_type NOT NULL,
+ participant_id UUID NOT NULL,
+ PRIMARY KEY (developer_id, session_id, participant_type, participant_id),
+ FOREIGN KEY (developer_id, session_id) REFERENCES sessions(developer_id, session_id)
+);
+
+-- Create indexes for common query patterns
+CREATE INDEX idx_session_lookup_by_session ON session_lookup (developer_id, session_id);
+CREATE INDEX idx_session_lookup_by_participant ON session_lookup (developer_id, participant_id);
+
+-- Add comments to the table
+COMMENT ON TABLE session_lookup IS 'Maps sessions to their participants (users and agents)';
+
+-- Create trigger function to enforce conditional foreign keys
+CREATE OR REPLACE FUNCTION validate_participant() RETURNS trigger AS $$
+BEGIN
+ IF NEW.participant_type = 'user' THEN
+ PERFORM 1 FROM users WHERE developer_id = NEW.developer_id AND user_id = NEW.participant_id;
+ IF NOT FOUND THEN
+ RAISE EXCEPTION 'Invalid participant_id: % for participant_type user', NEW.participant_id;
+ END IF;
+ ELSIF NEW.participant_type = 'agent' THEN
+ PERFORM 1 FROM agents WHERE developer_id = NEW.developer_id AND agent_id = NEW.participant_id;
+ IF NOT FOUND THEN
+ RAISE EXCEPTION 'Invalid participant_id: % for participant_type agent', NEW.participant_id;
+ END IF;
+ ELSE
+ RAISE EXCEPTION 'Unknown participant_type: %', NEW.participant_type;
+ END IF;
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Create triggers for INSERT and UPDATE operations
+CREATE TRIGGER trg_validate_participant_before_insert
+ BEFORE INSERT ON session_lookup
+ FOR EACH ROW
+ EXECUTE FUNCTION validate_participant();
+
+CREATE TRIGGER trg_validate_participant_before_update
+ BEFORE UPDATE ON session_lookup
+ FOR EACH ROW
+ EXECUTE FUNCTION validate_participant();
\ No newline at end of file
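
The trigger pair enforces what a single FOREIGN KEY cannot express: which table participant_id references depends on participant_type. A sketch with hypothetical IDs; the insert succeeds only if a matching users row exists, otherwise validate_participant() raises an exception:

    -- Assumes the session row exists for this developer
    INSERT INTO session_lookup (developer_id, session_id, participant_type, participant_id)
    VALUES ('11111111-1111-1111-1111-111111111111',
            '66666666-6666-6666-6666-666666666666',
            'user',
            '44444444-4444-4444-4444-444444444444');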
diff --git a/memory-store/migrations/00010_tasks.sql b/memory-store/migrations/00010_tasks.sql
new file mode 100644
index 000000000..20018c297
--- /dev/null
+++ b/memory-store/migrations/00010_tasks.sql
@@ -0,0 +1 @@
+-- write your migration here
\ No newline at end of file
diff --git a/memory-store/options b/memory-store/options
deleted file mode 100644
index 8a2a30378..000000000
--- a/memory-store/options
+++ /dev/null
@@ -1,213 +0,0 @@
-# This is a RocksDB option file.
-#
-# For detailed file format spec, please refer to the example file
-# in examples/rocksdb_option_file_example.ini
-#
-
-[Version]
- rocksdb_version=9.8.4
- options_file_version=1.1
-
-[DBOptions]
- compaction_readahead_size=2097152
- strict_bytes_per_sync=false
- bytes_per_sync=0
- max_background_jobs=8
- avoid_flush_during_shutdown=false
- max_background_flushes=1
- delayed_write_rate=16777216
- max_open_files=-1
- max_subcompactions=1
- writable_file_max_buffer_size=1048576
- wal_bytes_per_sync=0
- max_background_compactions=6
- max_total_wal_size=0
- delete_obsolete_files_period_micros=21600000000
- stats_dump_period_sec=600
- stats_history_buffer_size=1048576
- stats_persist_period_sec=600
- follower_refresh_catchup_period_ms=10000
- enforce_single_del_contracts=true
- lowest_used_cache_tier=kNonVolatileBlockTier
- bgerror_resume_retry_interval=1000000
- metadata_write_temperature=kUnknown
- best_efforts_recovery=false
- log_readahead_size=0
- write_identity_file=true
- write_dbid_to_manifest=true
- prefix_seek_opt_in_only=false
- wal_compression=kNoCompression
- manual_wal_flush=false
- db_host_id=__hostname__
- two_write_queues=false
- random_access_max_buffer_size=1048576
- avoid_unnecessary_blocking_io=false
- skip_checking_sst_file_sizes_on_db_open=false
- flush_verify_memtable_count=true
- fail_if_options_file_error=true
- atomic_flush=false
- verify_sst_unique_id_in_manifest=true
- skip_stats_update_on_db_open=false
- track_and_verify_wals_in_manifest=false
- compaction_verify_record_count=true
- paranoid_checks=true
- create_if_missing=true
- max_write_batch_group_size_bytes=1048576
- follower_catchup_retry_count=10
- avoid_flush_during_recovery=false
- file_checksum_gen_factory=nullptr
- enable_thread_tracking=false
- allow_fallocate=true
- allow_data_in_errors=false
- error_if_exists=false
- use_direct_io_for_flush_and_compaction=false
- background_close_inactive_wals=false
- create_missing_column_families=false
- WAL_size_limit_MB=0
- use_direct_reads=false
- persist_stats_to_disk=true
- allow_2pc=false
- is_fd_close_on_exec=true
- max_log_file_size=0
- max_file_opening_threads=16
- wal_filter=nullptr
- wal_write_temperature=kUnknown
- follower_catchup_retry_wait_ms=100
- allow_mmap_reads=false
- allow_mmap_writes=false
- use_adaptive_mutex=false
- use_fsync=false
- table_cache_numshardbits=6
- dump_malloc_stats=true
- db_write_buffer_size=17179869184
- allow_ingest_behind=false
- keep_log_file_num=1000
- max_bgerror_resume_count=2147483647
- allow_concurrent_memtable_write=true
- recycle_log_file_num=0
- log_file_time_to_roll=0
- manifest_preallocation_size=4194304
- enable_write_thread_adaptive_yield=true
- WAL_ttl_seconds=0
- max_manifest_file_size=1073741824
- wal_recovery_mode=kPointInTimeRecovery
- enable_pipelined_write=false
- write_thread_slow_yield_usec=3
- unordered_write=false
- write_thread_max_yield_usec=100
- advise_random_on_open=true
- info_log_level=INFO_LEVEL
-
-
-[CFOptions "default"]
- memtable_max_range_deletions=0
- compression_opts={checksum=false;max_dict_buffer_bytes=0;enabled=false;max_dict_bytes=0;max_compressed_bytes_per_kb=896;parallel_threads=1;zstd_max_train_bytes=0;level=32767;use_zstd_dict_trainer=true;strategy=0;window_bits=-14;}
- paranoid_memory_checks=false
- block_protection_bytes_per_key=0
- uncache_aggressiveness=0
- bottommost_file_compaction_delay=0
- memtable_protection_bytes_per_key=0
- experimental_mempurge_threshold=0.000000
- bottommost_compression=kZSTD
- sample_for_compression=0
- prepopulate_blob_cache=kDisable
- table_factory=BlockBasedTable
- max_successive_merges=0
- max_write_buffer_number=2
- prefix_extractor=nullptr
- memtable_huge_page_size=0
- write_buffer_size=33554427
- strict_max_successive_merges=false
- blob_compaction_readahead_size=0
- arena_block_size=1048576
- level0_file_num_compaction_trigger=4
- report_bg_io_stats=true
- inplace_update_num_locks=10000
- memtable_prefix_bloom_size_ratio=0.000000
- level0_stop_writes_trigger=36
- blob_compression_type=kNoCompression
- level0_slowdown_writes_trigger=20
- hard_pending_compaction_bytes_limit=274877906944
- target_file_size_multiplier=1
- bottommost_compression_opts={checksum=false;max_dict_buffer_bytes=0;enabled=false;max_dict_bytes=0;max_compressed_bytes_per_kb=896;parallel_threads=1;zstd_max_train_bytes=0;level=32767;use_zstd_dict_trainer=true;strategy=0;window_bits=-14;}
- paranoid_file_checks=false
- blob_garbage_collection_force_threshold=1.000000
- enable_blob_files=true
- blob_file_starting_level=0
- soft_pending_compaction_bytes_limit=68719476736
- target_file_size_base=67108864
- max_compaction_bytes=1677721600
- disable_auto_compactions=false
- min_blob_size=0
- memtable_whole_key_filtering=false
- max_bytes_for_level_base=268435456
- last_level_temperature=kUnknown
- compaction_options_fifo={file_temperature_age_thresholds=;allow_compaction=false;age_for_warm=0;max_table_files_size=1073741824;}
- max_bytes_for_level_multiplier=10.000000
- max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1
- max_sequential_skip_in_iterations=8
- compression=kLZ4Compression
- default_write_temperature=kUnknown
- compaction_options_universal={incremental=false;compression_size_percent=-1;allow_trivial_move=false;max_size_amplification_percent=200;max_merge_width=4294967295;stop_style=kCompactionStopStyleTotalSize;min_merge_width=2;max_read_amp=-1;size_ratio=1;}
- blob_garbage_collection_age_cutoff=0.250000
- ttl=2592000
- periodic_compaction_seconds=0
- blob_file_size=268435456
- enable_blob_garbage_collection=true
- persist_user_defined_timestamps=true
- preserve_internal_time_seconds=0
- preclude_last_level_data_seconds=0
- sst_partitioner_factory=nullptr
- num_levels=7
- force_consistency_checks=true
- memtable_insert_with_hint_prefix_extractor=nullptr
- memtable_factory=SkipListFactory
- max_write_buffer_number_to_maintain=0
- optimize_filters_for_hits=false
- level_compaction_dynamic_level_bytes=true
- default_temperature=kUnknown
- inplace_update_support=false
- merge_operator=nullptr
- min_write_buffer_number_to_merge=1
- compaction_filter=nullptr
- compaction_style=kCompactionStyleLevel
- bloom_locality=0
- comparator=leveldb.BytewiseComparator
- compaction_filter_factory=nullptr
- max_write_buffer_size_to_maintain=134217728
- compaction_pri=kMinOverlappingRatio
-
-[TableOptions/BlockBasedTable "default"]
- num_file_reads_for_auto_readahead=2
- initial_auto_readahead_size=8192
- metadata_cache_options={unpartitioned_pinning=kFallback;partition_pinning=kFallback;top_level_index_pinning=kFallback;}
- enable_index_compression=true
- verify_compression=false
- prepopulate_block_cache=kDisable
- format_version=6
- use_delta_encoding=true
- pin_top_level_index_and_filter=true
- read_amp_bytes_per_bit=0
- decouple_partitioned_filters=false
- partition_filters=false
- metadata_block_size=4096
- max_auto_readahead_size=262144
- index_block_restart_interval=1
- block_size_deviation=10
- block_size=4096
- detect_filter_construct_corruption=false
- no_block_cache=false
- checksum=kXXH3
- filter_policy=ribbonfilter:10
- data_block_hash_table_util_ratio=0.750000
- block_restart_interval=16
- index_type=kBinarySearch
- pin_l0_filter_and_index_blocks_in_cache=false
- data_block_index_type=kDataBlockBinarySearch
- cache_index_and_filter_blocks_with_high_priority=true
- whole_key_filtering=true
- index_shortening=kShortenSeparators
- cache_index_and_filter_blocks=true
- block_align=false
- optimize_filters_for_memory=true
- flush_block_policy_factory=FlushBlockBySizePolicyFactory
\ No newline at end of file
diff --git a/memory-store/run.sh b/memory-store/run.sh
deleted file mode 100755
index fa03f664d..000000000
--- a/memory-store/run.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env bash
-
-set -eo pipefail
-
-# Create mount directory for service and RocksDB directory
-mkdir -p ${COZO_MNT_DIR:=/data}/${COZO_ROCKSDB_DIR:-cozo.db}
-
-# Create auth token if not exists.
-export COZO_AUTH_TOKEN=${COZO_AUTH_TOKEN:=`tr -dc A-Za-z0-9 </dev/urandom | head -c 24`}
-
-# Write the auth token to a file so clients can read it
-echo $COZO_AUTH_TOKEN > $COZO_MNT_DIR/${COZO_ROCKSDB_DIR}.newrocksdb.cozo_auth
-
-# Copy options file to the RocksDB directory
-cp /app/options $COZO_MNT_DIR/${COZO_ROCKSDB_DIR}/OPTIONS-000007
-
-# Start server
-${APP_HOME:=.}/bin/cozo server \
- --engine newrocksdb \
- --path $COZO_MNT_DIR/${COZO_ROCKSDB_DIR} \
- --bind 0.0.0.0 \
- --port ${COZO_PORT:=9070} \
- -c '{"enable_write_buffer_manager": true, "allow_stall": true, "lru_cache_mb": 4096, "write_buffer_mb": 4096}'
From 3d5656978823ee596e39f13f7197ff6b60320f8d Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sat, 14 Dec 2024 02:13:49 +0530
Subject: [PATCH 005/274] wip(agents-api): Add transitions migrations
Signed-off-by: Diwank Singh Tomer
---
memory-store/migrations/00010_tasks.sql | 41 +++++++++++-
memory-store/migrations/00011_executions.sql | 31 +++++++++
memory-store/migrations/00012_transitions.sql | 66 +++++++++++++++++++
3 files changed, 137 insertions(+), 1 deletion(-)
create mode 100644 memory-store/migrations/00011_executions.sql
create mode 100644 memory-store/migrations/00012_transitions.sql
diff --git a/memory-store/migrations/00010_tasks.sql b/memory-store/migrations/00010_tasks.sql
index 20018c297..66bd8ffc4 100644
--- a/memory-store/migrations/00010_tasks.sql
+++ b/memory-store/migrations/00010_tasks.sql
@@ -1 +1,40 @@
--- write your migration here
\ No newline at end of file
+-- Create tasks table
+CREATE TABLE tasks (
+ developer_id UUID NOT NULL,
+ canonical_name CITEXT NOT NULL CONSTRAINT ct_tasks_canonical_name_length CHECK (length(canonical_name) >= 1 AND length(canonical_name) <= 255),
+ agent_id UUID NOT NULL,
+ task_id UUID NOT NULL,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ name TEXT NOT NULL CONSTRAINT ct_tasks_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
+ description TEXT DEFAULT NULL CONSTRAINT ct_tasks_description_length CHECK (description IS NULL OR length(description) <= 1000),
+ input_schema JSON NOT NULL,
+ tools JSON[] DEFAULT ARRAY[]::JSON[],
+ inherit_tools BOOLEAN DEFAULT FALSE,
+ workflows JSON[] DEFAULT ARRAY[]::JSON[],
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ metadata JSONB DEFAULT '{}'::JSONB,
+ CONSTRAINT pk_tasks PRIMARY KEY (developer_id, task_id),
+ CONSTRAINT uq_tasks_canonical_name_unique UNIQUE (developer_id, canonical_name),
+ CONSTRAINT fk_tasks_agent
+ FOREIGN KEY (developer_id, agent_id)
+ REFERENCES agents(developer_id, agent_id),
+ CONSTRAINT ct_tasks_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$')
+);
+
+-- Create sorted index on task_id (optimized for UUID v7)
+CREATE INDEX idx_tasks_id_sorted ON tasks (task_id DESC);
+
+-- Create foreign key constraint and index on developer_id
+CREATE INDEX idx_tasks_developer ON tasks (developer_id);
+
+-- Create a GIN index on the entire metadata column
+CREATE INDEX idx_tasks_metadata ON tasks USING GIN (metadata);
+
+-- Create trigger to automatically update updated_at
+CREATE TRIGGER trg_tasks_updated_at
+ BEFORE UPDATE ON tasks
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+
+-- Add comment to table
+COMMENT ON TABLE tasks IS 'Stores tasks associated with AI agents for developers';
\ No newline at end of file
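
Note that input_schema, tools, and workflows use JSON rather than JSONB, so the submitted text is stored verbatim (key order and duplicates preserved) instead of being normalized. A minimal insert sketch with hypothetical IDs, assuming the agent row exists:

    INSERT INTO tasks (developer_id, canonical_name, agent_id, task_id, name, input_schema)
    VALUES ('11111111-1111-1111-1111-111111111111',
            'daily_digest',
            '22222222-2222-2222-2222-222222222222',
            '77777777-7777-7777-7777-777777777777',
            'Daily Digest',
            '{"type": "object", "properties": {"topic": {"type": "string"}}}'::json);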
diff --git a/memory-store/migrations/00011_executions.sql b/memory-store/migrations/00011_executions.sql
new file mode 100644
index 000000000..031deea0e
--- /dev/null
+++ b/memory-store/migrations/00011_executions.sql
@@ -0,0 +1,31 @@
+-- Migration to create executions table
+CREATE TABLE executions (
+ developer_id UUID NOT NULL,
+ task_id UUID NOT NULL,
+ execution_id UUID NOT NULL,
+ input JSONB NOT NULL,
+ -- TODO: These will be generated using continuous aggregates from transitions
+ -- status TEXT DEFAULT 'pending',
+ -- output JSONB DEFAULT NULL,
+ -- error TEXT DEFAULT NULL,
+ -- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ CONSTRAINT pk_executions PRIMARY KEY (execution_id),
+ CONSTRAINT fk_executions_developer
+ FOREIGN KEY (developer_id) REFERENCES developers(developer_id),
+ CONSTRAINT fk_executions_task
+ FOREIGN KEY (developer_id, task_id) REFERENCES tasks(developer_id, task_id)
+);
+
+-- Create sorted index on execution_id (optimized for UUID v7)
+CREATE INDEX idx_executions_execution_id_sorted ON executions (execution_id DESC);
+
+-- Create index on developer_id
+CREATE INDEX idx_executions_developer_id ON executions (developer_id);
+
+-- Create a GIN index on the metadata column
+CREATE INDEX idx_executions_metadata ON executions USING GIN (metadata);
+
+-- Add comment to table
+COMMENT ON TABLE executions IS 'Stores executions associated with AI agents for developers';
\ No newline at end of file
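
The GIN index on metadata is what accelerates jsonb containment lookups like the following (the tag value is hypothetical):

    SELECT execution_id, created_at
    FROM executions
    WHERE metadata @> '{"environment": "production"}'::jsonb;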
diff --git a/memory-store/migrations/00012_transitions.sql b/memory-store/migrations/00012_transitions.sql
new file mode 100644
index 000000000..3bc3ea290
--- /dev/null
+++ b/memory-store/migrations/00012_transitions.sql
@@ -0,0 +1,66 @@
+-- Create transition type enum
+CREATE TYPE transition_type AS ENUM (
+ 'init',
+ 'finish',
+ 'init_branch',
+ 'finish_branch',
+ 'wait',
+ 'resume',
+ 'error',
+ 'step',
+ 'cancelled'
+);
+
+-- Create transition cursor type
+CREATE TYPE transition_cursor AS (
+ workflow_name TEXT,
+ step_index INT
+);
+
+-- Create transitions table
+CREATE TABLE transitions (
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ execution_id UUID NOT NULL,
+ transition_id UUID NOT NULL,
+ type transition_type NOT NULL,
+ step_definition JSONB NOT NULL,
+ step_label TEXT DEFAULT NULL,
+ current_step transition_cursor NOT NULL,
+ next_step transition_cursor DEFAULT NULL,
+ output JSONB,
+ task_token TEXT DEFAULT NULL,
+ metadata JSONB DEFAULT '{}'::JSONB,
+ CONSTRAINT pk_transitions PRIMARY KEY (created_at, execution_id, transition_id)
+);
+
+-- Convert to hypertable
+SELECT create_hypertable('transitions', 'created_at');
+
+-- Create unique constraint for current step
+CREATE UNIQUE INDEX idx_transitions_current ON transitions (execution_id, current_step, created_at DESC);
+
+-- Create unique constraint for next step (excluding nulls)
+CREATE UNIQUE INDEX idx_transitions_next ON transitions (execution_id, next_step, created_at DESC)
+WHERE next_step IS NOT NULL;
+
+-- Create unique constraint for step label (excluding nulls)
+CREATE UNIQUE INDEX idx_transitions_label ON transitions (execution_id, step_label, created_at DESC)
+WHERE step_label IS NOT NULL;
+
+-- Create sorted index on transition_id (optimized for UUID v7)
+CREATE INDEX idx_transitions_transition_id_sorted ON transitions (transition_id DESC, created_at DESC);
+
+-- Create sorted index on execution_id (optimized for UUID v7)
+CREATE INDEX idx_transitions_execution_id_sorted ON transitions (execution_id DESC, created_at DESC);
+
+-- Create a GIN index on the metadata column
+CREATE INDEX idx_transitions_metadata ON transitions USING GIN (metadata);
+
+-- Add foreign key constraint
+ALTER TABLE transitions
+ ADD CONSTRAINT fk_transitions_execution
+ FOREIGN KEY (execution_id)
+ REFERENCES executions(execution_id);
+
+-- Add comment to table
+COMMENT ON TABLE transitions IS 'Stores state transitions for task executions';
\ No newline at end of file
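
A common read pattern this layout supports is fetching each execution's most recent transition; a sketch using DISTINCT ON:

    SELECT DISTINCT ON (execution_id)
           execution_id, type, current_step, created_at
    FROM transitions
    ORDER BY execution_id, created_at DESC;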
From 516b8033422fe86c549e22631b565b033d589ea7 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sat, 14 Dec 2024 15:30:55 +0530
Subject: [PATCH 006/274] fix(memory-store): Misc fixes; Switch to
golang-migrate
Signed-off-by: Diwank Singh Tomer
---
memory-store/README.md | 7 +
.../migrations/000001_initial.down.sql | 17 ++
...0001_initial.sql => 000001_initial.up.sql} | 4 +
.../migrations/000002_developers.down.sql | 4 +
...evelopers.sql => 000002_developers.up.sql} | 29 ++--
memory-store/migrations/000003_users.down.sql | 18 ++
memory-store/migrations/000003_users.up.sql | 49 ++++++
.../migrations/000004_agents.down.sql | 14 ++
...{00004_agents.sql => 000004_agents.up.sql} | 23 ++-
memory-store/migrations/000005_files.down.sql | 13 ++
.../{00005_files.sql => 000005_files.up.sql} | 65 +++++---
memory-store/migrations/000006_docs.down.sql | 29 ++++
.../{00006_docs.sql => 000006_docs.up.sql} | 118 ++++++++------
memory-store/migrations/000007_ann.down.sql | 17 ++
.../{00007_ann.sql => 000007_ann.up.sql} | 2 +-
memory-store/migrations/000008_tools.down.sql | 6 +
memory-store/migrations/000008_tools.up.sql | 49 ++++++
.../migrations/000009_sessions.down.sql | 20 +++
.../migrations/000009_sessions.up.sql | 115 +++++++++++++
memory-store/migrations/000010_tasks.down.sql | 18 ++
memory-store/migrations/000010_tasks.up.sql | 83 ++++++++++
.../migrations/000011_executions.down.sql | 5 +
...xecutions.sql => 000011_executions.up.sql} | 26 ++-
.../migrations/000012_transitions.down.sql | 26 +++
.../migrations/000012_transitions.up.sql | 154 ++++++++++++++++++
memory-store/migrations/00003_users.sql | 34 ----
memory-store/migrations/00008_tools.sql | 33 ----
memory-store/migrations/00009_sessions.sql | 99 -----------
memory-store/migrations/00010_tasks.sql | 40 -----
memory-store/migrations/00012_transitions.sql | 66 --------
30 files changed, 813 insertions(+), 370 deletions(-)
create mode 100644 memory-store/README.md
create mode 100644 memory-store/migrations/000001_initial.down.sql
rename memory-store/migrations/{00001_initial.sql => 000001_initial.up.sql} (98%)
create mode 100644 memory-store/migrations/000002_developers.down.sql
rename memory-store/migrations/{00002_developers.sql => 000002_developers.up.sql} (54%)
create mode 100644 memory-store/migrations/000003_users.down.sql
create mode 100644 memory-store/migrations/000003_users.up.sql
create mode 100644 memory-store/migrations/000004_agents.down.sql
rename memory-store/migrations/{00004_agents.sql => 000004_agents.up.sql} (70%)
create mode 100644 memory-store/migrations/000005_files.down.sql
rename memory-store/migrations/{00005_files.sql => 000005_files.up.sql} (51%)
create mode 100644 memory-store/migrations/000006_docs.down.sql
rename memory-store/migrations/{00006_docs.sql => 000006_docs.up.sql} (61%)
create mode 100644 memory-store/migrations/000007_ann.down.sql
rename memory-store/migrations/{00007_ann.sql => 000007_ann.up.sql} (98%)
create mode 100644 memory-store/migrations/000008_tools.down.sql
create mode 100644 memory-store/migrations/000008_tools.up.sql
create mode 100644 memory-store/migrations/000009_sessions.down.sql
create mode 100644 memory-store/migrations/000009_sessions.up.sql
create mode 100644 memory-store/migrations/000010_tasks.down.sql
create mode 100644 memory-store/migrations/000010_tasks.up.sql
create mode 100644 memory-store/migrations/000011_executions.down.sql
rename memory-store/migrations/{00011_executions.sql => 000011_executions.up.sql} (57%)
create mode 100644 memory-store/migrations/000012_transitions.down.sql
create mode 100644 memory-store/migrations/000012_transitions.up.sql
delete mode 100644 memory-store/migrations/00003_users.sql
delete mode 100644 memory-store/migrations/00008_tools.sql
delete mode 100644 memory-store/migrations/00009_sessions.sql
delete mode 100644 memory-store/migrations/00010_tasks.sql
delete mode 100644 memory-store/migrations/00012_transitions.sql
diff --git a/memory-store/README.md b/memory-store/README.md
new file mode 100644
index 000000000..3441d47a4
--- /dev/null
+++ b/memory-store/README.md
@@ -0,0 +1,7 @@
+### Prototyping flow
+
+1. Install `pgmigrate` (until I move to golang-migrate)
+2. In a separate window, run `docker compose up db vectorizer-worker` to start the db instances
+3. `cd memory-store`, then run `pgmigrate migrate --database "postgres://postgres:postgres@0.0.0.0:5432/postgres" --migrations ./migrations` to apply the migrations
+4. Install a Postgres client: `pip install --user -U pgcli`
+5. Connect with `pgcli "postgres://postgres:postgres@localhost:5432/postgres"`
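
After step 5, a quick sanity check from inside pgcli confirms the migrations landed (any simple catalog query works):

    -- List the tables the migrations should have created
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
    ORDER BY table_name;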
diff --git a/memory-store/migrations/000001_initial.down.sql b/memory-store/migrations/000001_initial.down.sql
new file mode 100644
index 000000000..ddc44dbc8
--- /dev/null
+++ b/memory-store/migrations/000001_initial.down.sql
@@ -0,0 +1,17 @@
+-- Drop the update_updated_at_column function
+DROP FUNCTION IF EXISTS update_updated_at_column();
+
+-- Drop misc extensions
+DROP EXTENSION IF EXISTS "uuid-ossp" CASCADE;
+DROP EXTENSION IF EXISTS citext CASCADE;
+DROP EXTENSION IF EXISTS btree_gist CASCADE;
+DROP EXTENSION IF EXISTS btree_gin CASCADE;
+
+-- Drop timescale's pgai extensions
+DROP EXTENSION IF EXISTS ai CASCADE;
+DROP EXTENSION IF EXISTS vectorscale CASCADE;
+DROP EXTENSION IF EXISTS vector CASCADE;
+
+-- Drop timescaledb extensions
+DROP EXTENSION IF EXISTS timescaledb_toolkit CASCADE;
+DROP EXTENSION IF EXISTS timescaledb CASCADE;
diff --git a/memory-store/migrations/00001_initial.sql b/memory-store/migrations/000001_initial.up.sql
similarity index 98%
rename from memory-store/migrations/00001_initial.sql
rename to memory-store/migrations/000001_initial.up.sql
index 3be41ef68..da04e3c4b 100644
--- a/memory-store/migrations/00001_initial.sql
+++ b/memory-store/migrations/000001_initial.up.sql
@@ -1,3 +1,5 @@
+BEGIN;
+
-- init timescaledb
CREATE EXTENSION IF NOT EXISTS timescaledb CASCADE;
CREATE EXTENSION IF NOT EXISTS timescaledb_toolkit CASCADE;
@@ -23,3 +25,5 @@ END;
$$ language 'plpgsql';
COMMENT ON FUNCTION update_updated_at_column() IS 'Trigger function to automatically update updated_at timestamp';
+
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000002_developers.down.sql b/memory-store/migrations/000002_developers.down.sql
new file mode 100644
index 000000000..ea6c58509
--- /dev/null
+++ b/memory-store/migrations/000002_developers.down.sql
@@ -0,0 +1,4 @@
+-- Drop the table (this will automatically drop associated indexes and triggers)
+DROP TABLE IF EXISTS developers CASCADE;
+
+-- Note: The update_updated_at_column() function is not dropped as it might be used by other tables
diff --git a/memory-store/migrations/00002_developers.sql b/memory-store/migrations/000002_developers.up.sql
similarity index 54%
rename from memory-store/migrations/00002_developers.sql
rename to memory-store/migrations/000002_developers.up.sql
index b8d9b7673..0802dcf6f 100644
--- a/memory-store/migrations/00002_developers.sql
+++ b/memory-store/migrations/000002_developers.up.sql
@@ -1,5 +1,7 @@
+BEGIN;
+
-- Create developers table
-CREATE TABLE developers (
+CREATE TABLE IF NOT EXISTS developers (
developer_id UUID NOT NULL,
email TEXT NOT NULL CONSTRAINT ct_developers_email_format CHECK (email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$'),
active BOOLEAN NOT NULL DEFAULT true,
@@ -12,22 +14,29 @@ CREATE TABLE developers (
);
-- Create sorted index on developer_id (optimized for UUID v7)
-CREATE INDEX idx_developers_id_sorted ON developers (developer_id DESC);
+CREATE INDEX IF NOT EXISTS idx_developers_id_sorted ON developers (developer_id DESC);
-- Create index on email
-CREATE INDEX idx_developers_email ON developers (email);
+CREATE INDEX IF NOT EXISTS idx_developers_email ON developers (email);
-- Create GIN index for tags array
-CREATE INDEX idx_developers_tags ON developers USING GIN (tags);
+CREATE INDEX IF NOT EXISTS idx_developers_tags ON developers USING GIN (tags);
-- Create partial index for active developers
-CREATE INDEX idx_developers_active ON developers (developer_id) WHERE active = true;
+CREATE INDEX IF NOT EXISTS idx_developers_active ON developers (developer_id) WHERE active = true;
-- Create trigger to automatically update updated_at
-CREATE TRIGGER trg_developers_updated_at
- BEFORE UPDATE ON developers
- FOR EACH ROW
- EXECUTE FUNCTION update_updated_at_column();
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_trigger WHERE tgname = 'trg_developers_updated_at') THEN
+ CREATE TRIGGER trg_developers_updated_at
+ BEFORE UPDATE ON developers
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+ END IF;
+END
+$$;
-- Add comment to table
-COMMENT ON TABLE developers IS 'Stores developer information including their settings and tags';
\ No newline at end of file
+COMMENT ON TABLE developers IS 'Stores developer information including their settings and tags';
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000003_users.down.sql b/memory-store/migrations/000003_users.down.sql
new file mode 100644
index 000000000..3b1b98648
--- /dev/null
+++ b/memory-store/migrations/000003_users.down.sql
@@ -0,0 +1,18 @@
+BEGIN;
+
+-- Drop trigger first
+DROP TRIGGER IF EXISTS update_users_updated_at ON users;
+
+-- Drop indexes
+DROP INDEX IF EXISTS users_metadata_gin_idx;
+DROP INDEX IF EXISTS users_developer_id_idx;
+DROP INDEX IF EXISTS users_id_sorted_idx;
+
+-- Drop foreign key constraint
+ALTER TABLE IF EXISTS users
+ DROP CONSTRAINT IF EXISTS users_developer_id_fkey;
+
+-- Finally drop the table
+DROP TABLE IF EXISTS users;
+
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000003_users.up.sql b/memory-store/migrations/000003_users.up.sql
new file mode 100644
index 000000000..c32ff48fe
--- /dev/null
+++ b/memory-store/migrations/000003_users.up.sql
@@ -0,0 +1,49 @@
+BEGIN;
+
+-- Create users table if it doesn't exist
+CREATE TABLE IF NOT EXISTS users (
+ developer_id UUID NOT NULL,
+ user_id UUID NOT NULL,
+ name TEXT NOT NULL,
+ about TEXT,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
+ CONSTRAINT pk_users PRIMARY KEY (developer_id, user_id)
+);
+
+-- Create sorted index on user_id if it doesn't exist
+CREATE INDEX IF NOT EXISTS users_id_sorted_idx ON users (user_id DESC);
+
+-- Create foreign key constraint and index if they don't exist
+DO $$ BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_constraint WHERE conname = 'users_developer_id_fkey'
+ ) THEN
+ ALTER TABLE users
+ ADD CONSTRAINT users_developer_id_fkey
+ FOREIGN KEY (developer_id)
+ REFERENCES developers(developer_id);
+ END IF;
+END $$;
+
+CREATE INDEX IF NOT EXISTS users_developer_id_idx ON users (developer_id);
+
+-- Create a GIN index on the entire metadata column if it doesn't exist
+CREATE INDEX IF NOT EXISTS users_metadata_gin_idx ON users USING GIN (metadata);
+
+-- Create trigger if it doesn't exist
+DO $$ BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_trigger WHERE tgname = 'update_users_updated_at'
+ ) THEN
+ CREATE TRIGGER update_users_updated_at
+ BEFORE UPDATE ON users
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+ END IF;
+END $$;
+
+-- Add comment to table (comments are idempotent by default)
+COMMENT ON TABLE users IS 'Stores user information linked to developers';
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000004_agents.down.sql b/memory-store/migrations/000004_agents.down.sql
new file mode 100644
index 000000000..0504684fb
--- /dev/null
+++ b/memory-store/migrations/000004_agents.down.sql
@@ -0,0 +1,14 @@
+BEGIN;
+
+-- Drop trigger first
+DROP TRIGGER IF EXISTS trg_agents_updated_at ON agents;
+
+-- Drop indexes
+DROP INDEX IF EXISTS idx_agents_metadata;
+DROP INDEX IF EXISTS idx_agents_developer;
+DROP INDEX IF EXISTS idx_agents_id_sorted;
+
+-- Drop table (this will automatically drop associated constraints)
+DROP TABLE IF EXISTS agents;
+
+COMMIT;
diff --git a/memory-store/migrations/00004_agents.sql b/memory-store/migrations/000004_agents.up.sql
similarity index 70%
rename from memory-store/migrations/00004_agents.sql
rename to memory-store/migrations/000004_agents.up.sql
index 8eb8b2f35..82eb9c84f 100644
--- a/memory-store/migrations/00004_agents.sql
+++ b/memory-store/migrations/000004_agents.up.sql
@@ -1,5 +1,14 @@
+BEGIN;
+
+-- Drop existing objects if they exist
+DROP TRIGGER IF EXISTS trg_agents_updated_at ON agents;
+DROP INDEX IF EXISTS idx_agents_metadata;
+DROP INDEX IF EXISTS idx_agents_developer;
+DROP INDEX IF EXISTS idx_agents_id_sorted;
+DROP TABLE IF EXISTS agents;
+
-- Create agents table
-CREATE TABLE agents (
+CREATE TABLE IF NOT EXISTS agents (
developer_id UUID NOT NULL,
agent_id UUID NOT NULL,
canonical_name citext NOT NULL CONSTRAINT ct_agents_canonical_name_length CHECK (length(canonical_name) >= 1 AND length(canonical_name) <= 255),
@@ -17,24 +26,26 @@ CREATE TABLE agents (
);
-- Create sorted index on agent_id (optimized for UUID v7)
-CREATE INDEX idx_agents_id_sorted ON agents (agent_id DESC);
+CREATE INDEX IF NOT EXISTS idx_agents_id_sorted ON agents (agent_id DESC);
-- Create foreign key constraint and index on developer_id
ALTER TABLE agents
+ DROP CONSTRAINT IF EXISTS fk_agents_developer,
ADD CONSTRAINT fk_agents_developer
FOREIGN KEY (developer_id)
REFERENCES developers(developer_id);
-CREATE INDEX idx_agents_developer ON agents (developer_id);
+CREATE INDEX IF NOT EXISTS idx_agents_developer ON agents (developer_id);
-- Create a GIN index on the entire metadata column
-CREATE INDEX idx_agents_metadata ON agents USING GIN (metadata);
+CREATE INDEX IF NOT EXISTS idx_agents_metadata ON agents USING GIN (metadata);
-- Create trigger to automatically update updated_at
-CREATE TRIGGER trg_agents_updated_at
+CREATE OR REPLACE TRIGGER trg_agents_updated_at
BEFORE UPDATE ON agents
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
-- Add comment to table
-COMMENT ON TABLE agents IS 'Stores AI agent configurations and metadata for developers';
\ No newline at end of file
+COMMENT ON TABLE agents IS 'Stores AI agent configurations and metadata for developers';
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000005_files.down.sql b/memory-store/migrations/000005_files.down.sql
new file mode 100644
index 000000000..870eac359
--- /dev/null
+++ b/memory-store/migrations/000005_files.down.sql
@@ -0,0 +1,13 @@
+BEGIN;
+
+-- Drop agent_files table and its dependencies
+DROP TABLE IF EXISTS agent_files;
+
+-- Drop user_files table and its dependencies
+DROP TABLE IF EXISTS user_files;
+
+-- Drop files table and its dependencies
+DROP TRIGGER IF EXISTS trg_files_updated_at ON files;
+DROP TABLE IF EXISTS files;
+
+COMMIT;
diff --git a/memory-store/migrations/00005_files.sql b/memory-store/migrations/000005_files.up.sql
similarity index 51%
rename from memory-store/migrations/00005_files.sql
rename to memory-store/migrations/000005_files.up.sql
index 3d8c2900b..bf368db9a 100644
--- a/memory-store/migrations/00005_files.sql
+++ b/memory-store/migrations/000005_files.up.sql
@@ -1,5 +1,7 @@
+BEGIN;
+
-- Create files table
-CREATE TABLE files (
+CREATE TABLE IF NOT EXISTS files (
developer_id UUID NOT NULL,
file_id UUID NOT NULL,
name TEXT NOT NULL CONSTRAINT ct_files_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
@@ -12,32 +14,41 @@ CREATE TABLE files (
CONSTRAINT pk_files PRIMARY KEY (developer_id, file_id)
);
--- Create sorted index on file_id (optimized for UUID v7)
-CREATE INDEX idx_files_id_sorted ON files (file_id DESC);
-
--- Create foreign key constraint and index on developer_id
-ALTER TABLE files
- ADD CONSTRAINT fk_files_developer
- FOREIGN KEY (developer_id)
- REFERENCES developers(developer_id);
+-- Create sorted index on file_id if it doesn't exist
+CREATE INDEX IF NOT EXISTS idx_files_id_sorted ON files (file_id DESC);
-CREATE INDEX idx_files_developer ON files (developer_id);
+-- Create foreign key constraint and index if they don't exist
+DO $$ BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'fk_files_developer') THEN
+ ALTER TABLE files
+ ADD CONSTRAINT fk_files_developer
+ FOREIGN KEY (developer_id)
+ REFERENCES developers(developer_id);
+ END IF;
+END $$;
--- Before creating the user_files and agent_files tables, we need to ensure that the file_id is unique for each developer
-ALTER TABLE files
- ADD CONSTRAINT uq_files_developer_id_file_id UNIQUE (developer_id, file_id);
+CREATE INDEX IF NOT EXISTS idx_files_developer ON files (developer_id);
--- Create trigger to automatically update updated_at
-CREATE TRIGGER trg_files_updated_at
- BEFORE UPDATE ON files
- FOR EACH ROW
- EXECUTE FUNCTION update_updated_at_column();
+-- Add unique constraint if it doesn't exist
+DO $$ BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_files_developer_id_file_id') THEN
+ ALTER TABLE files
+ ADD CONSTRAINT uq_files_developer_id_file_id UNIQUE (developer_id, file_id);
+ END IF;
+END $$;
--- Add comment to table
-COMMENT ON TABLE files IS 'Stores file metadata and references for developers';
+-- Create trigger if it doesn't exist
+DO $$ BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_trigger WHERE tgname = 'trg_files_updated_at') THEN
+ CREATE TRIGGER trg_files_updated_at
+ BEFORE UPDATE ON files
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+ END IF;
+END $$;
-- Create the user_files table
-CREATE TABLE user_files (
+CREATE TABLE IF NOT EXISTS user_files (
developer_id UUID NOT NULL,
user_id UUID NOT NULL,
file_id UUID NOT NULL,
@@ -46,11 +57,11 @@ CREATE TABLE user_files (
CONSTRAINT fk_user_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files(developer_id, file_id)
);
--- Indexes for efficient querying
-CREATE INDEX idx_user_files_user ON user_files (developer_id, user_id);
+-- Create index if it doesn't exist
+CREATE INDEX IF NOT EXISTS idx_user_files_user ON user_files (developer_id, user_id);
-- Create the agent_files table
-CREATE TABLE agent_files (
+CREATE TABLE IF NOT EXISTS agent_files (
developer_id UUID NOT NULL,
agent_id UUID NOT NULL,
file_id UUID NOT NULL,
@@ -59,5 +70,7 @@ CREATE TABLE agent_files (
CONSTRAINT fk_agent_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files(developer_id, file_id)
);
--- Indexes for efficient querying
-CREATE INDEX idx_agent_files_agent ON agent_files (developer_id, agent_id);
+-- Create index if it doesn't exist
+CREATE INDEX IF NOT EXISTS idx_agent_files_agent ON agent_files (developer_id, agent_id);
+
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000006_docs.down.sql b/memory-store/migrations/000006_docs.down.sql
new file mode 100644
index 000000000..50139bb87
--- /dev/null
+++ b/memory-store/migrations/000006_docs.down.sql
@@ -0,0 +1,29 @@
+BEGIN;
+
+-- Drop indexes
+DROP INDEX IF EXISTS idx_docs_content_trgm;
+DROP INDEX IF EXISTS idx_docs_title_trgm;
+DROP INDEX IF EXISTS idx_docs_search_tsv;
+DROP INDEX IF EXISTS idx_docs_metadata;
+DROP INDEX IF EXISTS idx_agent_docs_agent;
+DROP INDEX IF EXISTS idx_user_docs_user;
+DROP INDEX IF EXISTS idx_docs_developer;
+DROP INDEX IF EXISTS idx_docs_id_sorted;
+
+-- Drop triggers
+DROP TRIGGER IF EXISTS trg_docs_search_tsv ON docs;
+DROP TRIGGER IF EXISTS trg_docs_updated_at ON docs;
+
+-- Drop the constraint that depends on is_valid_language function
+ALTER TABLE IF EXISTS docs DROP CONSTRAINT IF EXISTS ct_docs_valid_language;
+
+-- Drop functions
+DROP FUNCTION IF EXISTS docs_update_search_tsv();
+DROP FUNCTION IF EXISTS is_valid_language(text);
+
+-- Drop tables (in correct order due to foreign key constraints)
+DROP TABLE IF EXISTS agent_docs;
+DROP TABLE IF EXISTS user_docs;
+DROP TABLE IF EXISTS docs;
+
+COMMIT;
diff --git a/memory-store/migrations/00006_docs.sql b/memory-store/migrations/000006_docs.up.sql
similarity index 61%
rename from memory-store/migrations/00006_docs.sql
rename to memory-store/migrations/000006_docs.up.sql
index 88c7ff2a7..c4a241e65 100644
--- a/memory-store/migrations/00006_docs.sql
+++ b/memory-store/migrations/000006_docs.up.sql
@@ -1,4 +1,6 @@
--- Create function to validate language
+BEGIN;
+
+-- Create function to validate language (make it OR REPLACE)
CREATE OR REPLACE FUNCTION is_valid_language(lang text)
RETURNS boolean AS $$
BEGIN
@@ -9,7 +11,7 @@ END;
$$ LANGUAGE plpgsql;
-- Create docs table
-CREATE TABLE docs (
+CREATE TABLE IF NOT EXISTS docs (
developer_id UUID NOT NULL,
doc_id UUID NOT NULL,
title TEXT NOT NULL,
@@ -31,28 +33,39 @@ CREATE TABLE docs (
CHECK (is_valid_language(language))
);
--- Create sorted index on doc_id (optimized for UUID v7)
-CREATE INDEX idx_docs_id_sorted ON docs (doc_id DESC);
-
--- Create foreign key constraint and index on developer_id
-ALTER TABLE docs
- ADD CONSTRAINT fk_docs_developer
- FOREIGN KEY (developer_id)
- REFERENCES developers(developer_id);
-
-CREATE INDEX idx_docs_developer ON docs (developer_id);
-
--- Create trigger to automatically update updated_at
-CREATE TRIGGER trg_docs_updated_at
- BEFORE UPDATE ON docs
- FOR EACH ROW
- EXECUTE FUNCTION update_updated_at_column();
-
--- Add comment to table
-COMMENT ON TABLE docs IS 'Stores document metadata for developers';
+-- Create sorted index on doc_id if not exists
+CREATE INDEX IF NOT EXISTS idx_docs_id_sorted ON docs (doc_id DESC);
+
+-- Create foreign key constraint if not exists (using DO block for safety)
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_constraint WHERE conname = 'fk_docs_developer'
+ ) THEN
+ ALTER TABLE docs
+ ADD CONSTRAINT fk_docs_developer
+ FOREIGN KEY (developer_id)
+ REFERENCES developers(developer_id);
+ END IF;
+END $$;
+
+CREATE INDEX IF NOT EXISTS idx_docs_developer ON docs (developer_id);
+
+-- Create trigger if not exists
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_trigger WHERE tgname = 'trg_docs_updated_at'
+ ) THEN
+ CREATE TRIGGER trg_docs_updated_at
+ BEFORE UPDATE ON docs
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+ END IF;
+END $$;
-- Create the user_docs table
-CREATE TABLE user_docs (
+CREATE TABLE IF NOT EXISTS user_docs (
developer_id UUID NOT NULL,
user_id UUID NOT NULL,
doc_id UUID NOT NULL,
@@ -62,7 +75,7 @@ CREATE TABLE user_docs (
);
-- Create the agent_docs table
-CREATE TABLE agent_docs (
+CREATE TABLE IF NOT EXISTS agent_docs (
developer_id UUID NOT NULL,
agent_id UUID NOT NULL,
doc_id UUID NOT NULL,
@@ -71,12 +84,10 @@ CREATE TABLE agent_docs (
CONSTRAINT fk_agent_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs(developer_id, doc_id)
);
--- Indexes for efficient querying
-CREATE INDEX idx_user_docs_user ON user_docs (developer_id, user_id);
-CREATE INDEX idx_agent_docs_agent ON agent_docs (developer_id, agent_id);
-
--- Create a GIN index on the metadata column for efficient searching
-CREATE INDEX idx_docs_metadata ON docs USING GIN (metadata);
+-- Create indexes if not exists
+CREATE INDEX IF NOT EXISTS idx_user_docs_user ON user_docs (developer_id, user_id);
+CREATE INDEX IF NOT EXISTS idx_agent_docs_agent ON agent_docs (developer_id, agent_id);
+CREATE INDEX IF NOT EXISTS idx_docs_metadata ON docs USING GIN (metadata);
-- Enable necessary PostgreSQL extensions
CREATE EXTENSION IF NOT EXISTS unaccent;
@@ -109,8 +120,16 @@ BEGIN
END
$$;
--- Add the column (not generated)
-ALTER TABLE docs ADD COLUMN search_tsv tsvector;
+-- Add the search_tsv column if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'docs' AND column_name = 'search_tsv'
+ ) THEN
+ ALTER TABLE docs ADD COLUMN search_tsv tsvector;
+ END IF;
+END $$;
-- Create function to update tsvector
CREATE OR REPLACE FUNCTION docs_update_search_tsv()
@@ -123,24 +142,29 @@ BEGIN
END;
$$ LANGUAGE plpgsql;
--- Create trigger
-CREATE TRIGGER trg_docs_search_tsv
- BEFORE INSERT OR UPDATE OF title, content, language
- ON docs
- FOR EACH ROW
- EXECUTE FUNCTION docs_update_search_tsv();
-
--- Create the index
-CREATE INDEX idx_docs_search_tsv ON docs USING GIN (search_tsv);
+-- Create trigger if not exists
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_trigger WHERE tgname = 'trg_docs_search_tsv'
+ ) THEN
+ CREATE TRIGGER trg_docs_search_tsv
+ BEFORE INSERT OR UPDATE OF title, content, language
+ ON docs
+ FOR EACH ROW
+ EXECUTE FUNCTION docs_update_search_tsv();
+ END IF;
+END $$;
+
+-- Create indexes if not exists
+CREATE INDEX IF NOT EXISTS idx_docs_search_tsv ON docs USING GIN (search_tsv);
+CREATE INDEX IF NOT EXISTS idx_docs_title_trgm ON docs USING GIN (title gin_trgm_ops);
+CREATE INDEX IF NOT EXISTS idx_docs_content_trgm ON docs USING GIN (content gin_trgm_ops);
-- Update existing rows (if any)
UPDATE docs SET search_tsv =
setweight(to_tsvector(language::regconfig, unaccent(coalesce(title, ''))), 'A') ||
- setweight(to_tsvector(language::regconfig, unaccent(coalesce(content, ''))), 'B');
-
--- Create GIN trigram indexes for both title and content
-CREATE INDEX idx_docs_title_trgm
-ON docs USING GIN (title gin_trgm_ops);
+ setweight(to_tsvector(language::regconfig, unaccent(coalesce(content, ''))), 'B')
+WHERE search_tsv IS NULL;
-CREATE INDEX idx_docs_content_trgm
-ON docs USING GIN (content gin_trgm_ops);
\ No newline at end of file
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000007_ann.down.sql b/memory-store/migrations/000007_ann.down.sql
new file mode 100644
index 000000000..2458c3dbd
--- /dev/null
+++ b/memory-store/migrations/000007_ann.down.sql
@@ -0,0 +1,17 @@
+BEGIN;
+
+DO $$
+DECLARE
+ vectorizer_id INTEGER;
+BEGIN
+ SELECT id INTO vectorizer_id
+ FROM ai.vectorizer
+ WHERE source_table = 'docs';
+
+ -- Drop the vectorizer if it exists
+ IF vectorizer_id IS NOT NULL THEN
+ PERFORM ai.drop_vectorizer(vectorizer_id, drop_all => true);
+ END IF;
+END $$;
+
+COMMIT;
diff --git a/memory-store/migrations/00007_ann.sql b/memory-store/migrations/000007_ann.up.sql
similarity index 98%
rename from memory-store/migrations/00007_ann.sql
rename to memory-store/migrations/000007_ann.up.sql
index 5f2157f02..0b08e9b07 100644
--- a/memory-store/migrations/00007_ann.sql
+++ b/memory-store/migrations/000007_ann.up.sql
@@ -1,5 +1,5 @@
-- Create vector similarity search index using diskann and timescale vectorizer
-select ai.create_vectorizer(
+SELECT ai.create_vectorizer(
source => 'docs',
destination => 'docs_embeddings',
embedding => ai.embedding_voyageai('voyage-3', 1024), -- need to parameterize this
diff --git a/memory-store/migrations/000008_tools.down.sql b/memory-store/migrations/000008_tools.down.sql
new file mode 100644
index 000000000..2fa3077c0
--- /dev/null
+++ b/memory-store/migrations/000008_tools.down.sql
@@ -0,0 +1,6 @@
+BEGIN;
+
+-- Drop table and all its dependent objects (indexes, constraints, triggers)
+DROP TABLE IF EXISTS tools CASCADE;
+
+COMMIT;
diff --git a/memory-store/migrations/000008_tools.up.sql b/memory-store/migrations/000008_tools.up.sql
new file mode 100644
index 000000000..bcf59def8
--- /dev/null
+++ b/memory-store/migrations/000008_tools.up.sql
@@ -0,0 +1,49 @@
+BEGIN;
+
+-- Create tools table if it doesn't exist
+CREATE TABLE IF NOT EXISTS tools (
+ developer_id UUID NOT NULL,
+ agent_id UUID NOT NULL,
+ tool_id UUID NOT NULL,
+ task_id UUID DEFAULT NULL,
+ task_version INT DEFAULT NULL,
+ type TEXT NOT NULL CONSTRAINT ct_tools_type_length CHECK (length(type) >= 1 AND length(type) <= 255),
+ name TEXT NOT NULL CONSTRAINT ct_tools_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
+ description TEXT CONSTRAINT ct_tools_description_length CHECK (description IS NULL OR length(description) <= 1000),
+ spec JSONB NOT NULL,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+ CONSTRAINT pk_tools PRIMARY KEY (developer_id, agent_id, tool_id, type, name)
+);
+
+-- Create sorted index on tool_id if it doesn't exist
+CREATE INDEX IF NOT EXISTS idx_tools_id_sorted ON tools (tool_id DESC);
+
+-- Create sorted index on task_id if it doesn't exist
+CREATE INDEX IF NOT EXISTS idx_tools_task_id_sorted ON tools (task_id DESC) WHERE task_id IS NOT NULL;
+
+-- Create foreign key constraint and index if they don't exist
+DO $$ BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_constraint WHERE conname = 'fk_tools_agent'
+ ) THEN
+ ALTER TABLE tools
+ ADD CONSTRAINT fk_tools_agent
+ FOREIGN KEY (developer_id, agent_id)
+ REFERENCES agents(developer_id, agent_id);
+ END IF;
+END $$;
+
+CREATE INDEX IF NOT EXISTS idx_tools_developer_agent ON tools (developer_id, agent_id);
+
+-- Drop the trigger if it exists, then recreate it
+DROP TRIGGER IF EXISTS trg_tools_updated_at ON tools;
+CREATE TRIGGER trg_tools_updated_at
+ BEFORE UPDATE ON tools
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+
+-- Add comment to table
+COMMENT ON TABLE tools IS 'Stores tool configurations and specifications for AI agents';
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000009_sessions.down.sql b/memory-store/migrations/000009_sessions.down.sql
new file mode 100644
index 000000000..d1c0b2911
--- /dev/null
+++ b/memory-store/migrations/000009_sessions.down.sql
@@ -0,0 +1,20 @@
+BEGIN;
+
+-- Drop triggers first
+DROP TRIGGER IF EXISTS trg_validate_participant_before_update ON session_lookup;
+DROP TRIGGER IF EXISTS trg_validate_participant_before_insert ON session_lookup;
+
+-- Drop the validation function
+DROP FUNCTION IF EXISTS validate_participant();
+
+-- Drop session_lookup table and its indexes
+DROP TABLE IF EXISTS session_lookup;
+
+-- Drop sessions table and its indexes
+DROP TRIGGER IF EXISTS trg_sessions_updated_at ON sessions;
+DROP TABLE IF EXISTS sessions CASCADE;
+
+-- Drop the enum type
+DROP TYPE IF EXISTS participant_type;
+
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000009_sessions.up.sql b/memory-store/migrations/000009_sessions.up.sql
new file mode 100644
index 000000000..30f135ed7
--- /dev/null
+++ b/memory-store/migrations/000009_sessions.up.sql
@@ -0,0 +1,115 @@
+BEGIN;
+
+-- Create sessions table if it doesn't exist
+CREATE TABLE IF NOT EXISTS sessions (
+ developer_id UUID NOT NULL,
+ session_id UUID NOT NULL,
+ situation TEXT,
+ system_template TEXT NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ -- TODO: Derived from entries
+ -- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
+ render_templates BOOLEAN NOT NULL DEFAULT true,
+ token_budget INTEGER,
+ context_overflow TEXT,
+ forward_tool_calls BOOLEAN,
+ recall_options JSONB NOT NULL DEFAULT '{}'::JSONB,
+ CONSTRAINT pk_sessions PRIMARY KEY (developer_id, session_id)
+);
+
+-- Create indexes if they don't exist
+CREATE INDEX IF NOT EXISTS idx_sessions_id_sorted ON sessions (session_id DESC);
+CREATE INDEX IF NOT EXISTS idx_sessions_metadata ON sessions USING GIN (metadata);
+
+-- Create foreign key if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_constraint WHERE conname = 'fk_sessions_developer'
+ ) THEN
+ ALTER TABLE sessions
+ ADD CONSTRAINT fk_sessions_developer
+ FOREIGN KEY (developer_id)
+ REFERENCES developers(developer_id);
+ END IF;
+END $$;
+
+-- Create trigger if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_trigger WHERE tgname = 'trg_sessions_updated_at'
+ ) THEN
+ CREATE TRIGGER trg_sessions_updated_at
+ BEFORE UPDATE ON sessions
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+ END IF;
+END $$;
+
+-- Create participant_type enum if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'participant_type') THEN
+ CREATE TYPE participant_type AS ENUM ('user', 'agent');
+ END IF;
+END $$;
+
+-- Create session_lookup table if it doesn't exist
+CREATE TABLE IF NOT EXISTS session_lookup (
+ developer_id UUID NOT NULL,
+ session_id UUID NOT NULL,
+ participant_type participant_type NOT NULL,
+ participant_id UUID NOT NULL,
+ PRIMARY KEY (developer_id, session_id, participant_type, participant_id),
+ FOREIGN KEY (developer_id, session_id) REFERENCES sessions(developer_id, session_id)
+);
+
+-- Create indexes if they don't exist
+CREATE INDEX IF NOT EXISTS idx_session_lookup_by_session ON session_lookup (developer_id, session_id);
+CREATE INDEX IF NOT EXISTS idx_session_lookup_by_participant ON session_lookup (developer_id, participant_id);
+
+-- Create or replace the validation function
+CREATE OR REPLACE FUNCTION validate_participant() RETURNS trigger AS $$
+BEGIN
+ IF NEW.participant_type = 'user' THEN
+ PERFORM 1 FROM users WHERE developer_id = NEW.developer_id AND user_id = NEW.participant_id;
+ IF NOT FOUND THEN
+ RAISE EXCEPTION 'Invalid participant_id: % for participant_type user', NEW.participant_id;
+ END IF;
+ ELSIF NEW.participant_type = 'agent' THEN
+ PERFORM 1 FROM agents WHERE developer_id = NEW.developer_id AND agent_id = NEW.participant_id;
+ IF NOT FOUND THEN
+ RAISE EXCEPTION 'Invalid participant_id: % for participant_type agent', NEW.participant_id;
+ END IF;
+ ELSE
+ RAISE EXCEPTION 'Unknown participant_type: %', NEW.participant_type;
+ END IF;
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Create triggers if they don't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_trigger WHERE tgname = 'trg_validate_participant_before_insert'
+ ) THEN
+ CREATE TRIGGER trg_validate_participant_before_insert
+ BEFORE INSERT ON session_lookup
+ FOR EACH ROW
+ EXECUTE FUNCTION validate_participant();
+ END IF;
+
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_trigger WHERE tgname = 'trg_validate_participant_before_update'
+ ) THEN
+ CREATE TRIGGER trg_validate_participant_before_update
+ BEFORE UPDATE ON session_lookup
+ FOR EACH ROW
+ EXECUTE FUNCTION validate_participant();
+ END IF;
+END $$;
+
+COMMIT;
\ No newline at end of file
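
The validate_participant trigger makes session_lookup reject participants that
don't exist for the developer; all UUIDs below are placeholders:

    INSERT INTO session_lookup (developer_id, session_id, participant_type, participant_id)
    VALUES (
        '11111111-1111-1111-1111-111111111111',
        '22222222-2222-2222-2222-222222222222',
        'user',
        '33333333-3333-3333-3333-333333333333'
    );
    -- Raises 'Invalid participant_id: ... for participant_type user'
    -- unless a matching users row exists for this developer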
diff --git a/memory-store/migrations/000010_tasks.down.sql b/memory-store/migrations/000010_tasks.down.sql
new file mode 100644
index 000000000..b7f758779
--- /dev/null
+++ b/memory-store/migrations/000010_tasks.down.sql
@@ -0,0 +1,18 @@
+BEGIN;
+
+-- Drop the foreign key constraint from tools table if it exists
+DO $$
+BEGIN
+ IF EXISTS (
+ SELECT 1
+ FROM information_schema.table_constraints
+ WHERE constraint_name = 'fk_tools_task_id'
+ ) THEN
+ ALTER TABLE tools DROP CONSTRAINT fk_tools_task_id;
+ END IF;
+END $$;
+
+-- Drop the tasks table and all its dependent objects (CASCADE will handle indexes, triggers, and constraints)
+DROP TABLE IF EXISTS tasks CASCADE;
+
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000010_tasks.up.sql b/memory-store/migrations/000010_tasks.up.sql
new file mode 100644
index 000000000..c2bfeb454
--- /dev/null
+++ b/memory-store/migrations/000010_tasks.up.sql
@@ -0,0 +1,83 @@
+BEGIN;
+
+-- Create tasks table if it doesn't exist
+CREATE TABLE IF NOT EXISTS tasks (
+ developer_id UUID NOT NULL,
+ canonical_name CITEXT NOT NULL CONSTRAINT ct_tasks_canonical_name_length CHECK (length(canonical_name) >= 1 AND length(canonical_name) <= 255),
+ agent_id UUID NOT NULL,
+ task_id UUID NOT NULL,
+ version INTEGER NOT NULL DEFAULT 1,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ name TEXT NOT NULL CONSTRAINT ct_tasks_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
+ description TEXT DEFAULT NULL CONSTRAINT ct_tasks_description_length CHECK (description IS NULL OR length(description) <= 1000),
+ input_schema JSON NOT NULL,
+ inherit_tools BOOLEAN DEFAULT FALSE,
+ workflows JSON[] DEFAULT ARRAY[]::JSON[],
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ metadata JSONB DEFAULT '{}'::JSONB,
+ CONSTRAINT pk_tasks PRIMARY KEY (developer_id, task_id),
+ CONSTRAINT uq_tasks_canonical_name_unique UNIQUE (developer_id, canonical_name),
+ CONSTRAINT uq_tasks_version_unique UNIQUE (task_id, version),
+ CONSTRAINT fk_tasks_agent
+ FOREIGN KEY (developer_id, agent_id)
+ REFERENCES agents(developer_id, agent_id),
+ CONSTRAINT ct_tasks_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$')
+);
+
+-- Create sorted index on task_id if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tasks_id_sorted') THEN
+ CREATE INDEX idx_tasks_id_sorted ON tasks (task_id DESC);
+ END IF;
+END $$;
+
+-- Create index on developer_id if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tasks_developer') THEN
+ CREATE INDEX idx_tasks_developer ON tasks (developer_id);
+ END IF;
+END $$;
+
+-- Create a GIN index on metadata if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tasks_metadata') THEN
+ CREATE INDEX idx_tasks_metadata ON tasks USING GIN (metadata);
+ END IF;
+END $$;
+
+-- Add foreign key constraint if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1
+ FROM information_schema.table_constraints
+ WHERE constraint_name = 'fk_tools_task_id'
+ ) THEN
+ ALTER TABLE tools ADD CONSTRAINT fk_tools_task_id
+ FOREIGN KEY (task_id, task_version) REFERENCES tasks(task_id, version)
+ DEFERRABLE INITIALLY DEFERRED;
+ END IF;
+END $$;
+
+-- Create trigger if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1
+ FROM pg_trigger
+ WHERE tgname = 'trg_tasks_updated_at'
+ ) THEN
+ CREATE TRIGGER trg_tasks_updated_at
+ BEFORE UPDATE ON tasks
+ FOR EACH ROW
+ EXECUTE FUNCTION update_updated_at_column();
+ END IF;
+END $$;
+
+-- Add comment to table (comments are idempotent by default)
+COMMENT ON TABLE tasks IS 'Stores tasks associated with AI agents for developers';
+
+COMMIT;
\ No newline at end of file
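
The fk_tools_task_id constraint added above is DEFERRABLE INITIALLY DEFERRED,
so within one transaction a tool may reference a task version that is inserted
afterwards; the constraint is only checked at COMMIT. A sketch with placeholder
UUIDs, assuming the referenced agent row already exists:

    BEGIN;
    INSERT INTO tools (developer_id, agent_id, tool_id, task_id, task_version, type, name, spec)
    VALUES ('11111111-1111-1111-1111-111111111111',
            '22222222-2222-2222-2222-222222222222',
            '33333333-3333-3333-3333-333333333333',
            '44444444-4444-4444-4444-444444444444', 1,
            'function', 'my_tool', '{}'::jsonb);
    INSERT INTO tasks (developer_id, canonical_name, agent_id, task_id, version, name, input_schema)
    VALUES ('11111111-1111-1111-1111-111111111111', 'my_task',
            '22222222-2222-2222-2222-222222222222',
            '44444444-4444-4444-4444-444444444444', 1,
            'My task', '{}'::json);
    COMMIT; -- fk_tools_task_id is validated here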
diff --git a/memory-store/migrations/000011_executions.down.sql b/memory-store/migrations/000011_executions.down.sql
new file mode 100644
index 000000000..e6c362d0e
--- /dev/null
+++ b/memory-store/migrations/000011_executions.down.sql
@@ -0,0 +1,5 @@
+BEGIN;
+
+DROP TABLE IF EXISTS executions CASCADE;
+
+COMMIT;
diff --git a/memory-store/migrations/00011_executions.sql b/memory-store/migrations/000011_executions.up.sql
similarity index 57%
rename from memory-store/migrations/00011_executions.sql
rename to memory-store/migrations/000011_executions.up.sql
index 031deea0e..74ab5bf97 100644
--- a/memory-store/migrations/00011_executions.sql
+++ b/memory-store/migrations/000011_executions.up.sql
@@ -1,16 +1,22 @@
--- Migration to create executions table
-CREATE TABLE executions (
+BEGIN;
+
+-- Create executions table if it doesn't exist
+CREATE TABLE IF NOT EXISTS executions (
developer_id UUID NOT NULL,
task_id UUID NOT NULL,
+ task_version INTEGER NOT NULL,
execution_id UUID NOT NULL,
input JSONB NOT NULL,
- -- TODO: These will be generated using continuous aggregates from transitions
+
+ -- NOTE: These will be generated using continuous aggregates from transitions
-- status TEXT DEFAULT 'pending',
-- output JSONB DEFAULT NULL,
-- error TEXT DEFAULT NULL,
-- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
CONSTRAINT pk_executions PRIMARY KEY (execution_id),
CONSTRAINT fk_executions_developer
FOREIGN KEY (developer_id) REFERENCES developers(developer_id),
@@ -19,13 +25,17 @@ CREATE TABLE executions (
);
-- Create sorted index on execution_id (optimized for UUID v7)
-CREATE INDEX idx_executions_execution_id_sorted ON executions (execution_id DESC);
+CREATE INDEX IF NOT EXISTS idx_executions_execution_id_sorted ON executions (execution_id DESC);
-- Create index on developer_id
-CREATE INDEX idx_executions_developer_id ON executions (developer_id);
+CREATE INDEX IF NOT EXISTS idx_executions_developer_id ON executions (developer_id);
+
+-- Create index on task_id
+CREATE INDEX IF NOT EXISTS idx_executions_task_id ON executions (task_id);
-- Create a GIN index on the metadata column
-CREATE INDEX idx_executions_metadata ON executions USING GIN (metadata);
+CREATE INDEX IF NOT EXISTS idx_executions_metadata ON executions USING GIN (metadata);
--- Add comment to table
-COMMENT ON TABLE executions IS 'Stores executions associated with AI agents for developers';
\ No newline at end of file
+-- Add comment to table (comments are idempotent by default)
+COMMENT ON TABLE executions IS 'Stores executions associated with AI agents for developers';
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000012_transitions.down.sql b/memory-store/migrations/000012_transitions.down.sql
new file mode 100644
index 000000000..590ebc901
--- /dev/null
+++ b/memory-store/migrations/000012_transitions.down.sql
@@ -0,0 +1,26 @@
+BEGIN;
+
+-- Drop the trigger and validation function first (the table must still exist
+-- for DROP TRIGGER ... ON transitions to succeed, and the function references
+-- the custom types)
+DROP TRIGGER IF EXISTS validate_transition ON transitions;
+DROP FUNCTION IF EXISTS check_valid_transition();
+
+-- Drop foreign key constraint if it exists
+ALTER TABLE IF EXISTS transitions
+    DROP CONSTRAINT IF EXISTS fk_transitions_execution;
+
+-- Drop indexes if they exist
+DROP INDEX IF EXISTS idx_transitions_metadata;
+DROP INDEX IF EXISTS idx_transitions_execution_id_sorted;
+DROP INDEX IF EXISTS idx_transitions_transition_id_sorted;
+DROP INDEX IF EXISTS idx_transitions_label;
+DROP INDEX IF EXISTS idx_transitions_next;
+DROP INDEX IF EXISTS idx_transitions_current;
+
+-- Drop the transitions table (this will also remove it from hypertables)
+DROP TABLE IF EXISTS transitions;
+
+-- Drop custom types last, now that nothing references them
+DROP TYPE IF EXISTS transition_cursor;
+DROP TYPE IF EXISTS transition_type;
+
+COMMIT;
diff --git a/memory-store/migrations/000012_transitions.up.sql b/memory-store/migrations/000012_transitions.up.sql
new file mode 100644
index 000000000..515af713c
--- /dev/null
+++ b/memory-store/migrations/000012_transitions.up.sql
@@ -0,0 +1,154 @@
+BEGIN;
+
+-- Create transition type enum if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'transition_type') THEN
+ CREATE TYPE transition_type AS ENUM (
+ 'init',
+ 'finish',
+ 'init_branch',
+ 'finish_branch',
+ 'wait',
+ 'resume',
+ 'error',
+ 'step',
+ 'cancelled'
+ );
+ END IF;
+END $$;
+
+-- Create transition cursor type if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'transition_cursor') THEN
+ CREATE TYPE transition_cursor AS (
+ workflow_name TEXT,
+ step_index INT
+ );
+ END IF;
+END $$;
+
+-- Create transitions table if it doesn't exist
+CREATE TABLE IF NOT EXISTS transitions (
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ execution_id UUID NOT NULL,
+ transition_id UUID NOT NULL,
+ type transition_type NOT NULL,
+ step_definition JSONB NOT NULL,
+ step_label TEXT DEFAULT NULL,
+ current_step transition_cursor NOT NULL,
+ next_step transition_cursor DEFAULT NULL,
+ output JSONB,
+ task_token TEXT DEFAULT NULL,
+ metadata JSONB DEFAULT '{}'::JSONB,
+ CONSTRAINT pk_transitions PRIMARY KEY (created_at, execution_id, transition_id)
+);
+
+-- Convert to hypertable if not already
+SELECT create_hypertable('transitions', by_range('created_at', INTERVAL '1 day'), if_not_exists => TRUE);
+SELECT add_dimension('transitions', by_hash('execution_id', 2), if_not_exists => TRUE);
+
+-- Create indexes if they don't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_current') THEN
+ CREATE UNIQUE INDEX idx_transitions_current ON transitions (execution_id, current_step, created_at DESC);
+ END IF;
+
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_next') THEN
+ CREATE UNIQUE INDEX idx_transitions_next ON transitions (execution_id, next_step, created_at DESC)
+ WHERE next_step IS NOT NULL;
+ END IF;
+
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_label') THEN
+ CREATE UNIQUE INDEX idx_transitions_label ON transitions (execution_id, step_label, created_at DESC)
+ WHERE step_label IS NOT NULL;
+ END IF;
+
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_transition_id_sorted') THEN
+ CREATE INDEX idx_transitions_transition_id_sorted ON transitions (transition_id DESC, created_at DESC);
+ END IF;
+
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_execution_id_sorted') THEN
+ CREATE INDEX idx_transitions_execution_id_sorted ON transitions (execution_id DESC, created_at DESC);
+ END IF;
+
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_metadata') THEN
+ CREATE INDEX idx_transitions_metadata ON transitions USING GIN (metadata);
+ END IF;
+END $$;
+
+-- Add foreign key constraint if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'fk_transitions_execution') THEN
+ ALTER TABLE transitions
+ ADD CONSTRAINT fk_transitions_execution
+ FOREIGN KEY (execution_id)
+ REFERENCES executions(execution_id);
+ END IF;
+END $$;
+
+-- Add comment to table
+COMMENT ON TABLE transitions IS 'Stores transitions associated with AI agents for developers';
+
+-- Create a trigger function that checks for valid transitions
+CREATE OR REPLACE FUNCTION check_valid_transition() RETURNS trigger AS $$
+DECLARE
+ previous_type transition_type;
+ valid_next_types transition_type[];
+BEGIN
+ -- Get the latest transition_type for this execution_id
+ SELECT t.type INTO previous_type
+ FROM transitions t
+ WHERE t.execution_id = NEW.execution_id
+ ORDER BY t.created_at DESC
+ LIMIT 1;
+
+ IF previous_type IS NULL THEN
+ -- If there is no previous transition, allow only 'init' or 'init_branch'
+ IF NEW.type NOT IN ('init', 'init_branch') THEN
+ RAISE EXCEPTION 'First transition must be init or init_branch, got %', NEW.type;
+ END IF;
+ ELSE
+ -- Define the valid_next_types array based on previous_type
+ CASE previous_type
+ WHEN 'init' THEN
+ valid_next_types := ARRAY['wait', 'error', 'step', 'cancelled', 'init_branch', 'finish'];
+ WHEN 'init_branch' THEN
+ valid_next_types := ARRAY['wait', 'error', 'step', 'cancelled', 'init_branch', 'finish_branch', 'finish'];
+ WHEN 'wait' THEN
+ valid_next_types := ARRAY['resume', 'step', 'cancelled', 'finish', 'finish_branch'];
+ WHEN 'resume' THEN
+ valid_next_types := ARRAY['wait', 'error', 'cancelled', 'step', 'finish', 'finish_branch', 'init_branch'];
+ WHEN 'step' THEN
+ valid_next_types := ARRAY['wait', 'error', 'cancelled', 'step', 'finish', 'finish_branch', 'init_branch'];
+ WHEN 'finish_branch' THEN
+ valid_next_types := ARRAY['wait', 'error', 'cancelled', 'step', 'finish', 'init_branch', 'finish_branch'];
+ WHEN 'finish' THEN
+ valid_next_types := ARRAY[]::transition_type[]; -- No valid next transitions
+ WHEN 'error' THEN
+ valid_next_types := ARRAY[]::transition_type[]; -- No valid next transitions
+ WHEN 'cancelled' THEN
+ valid_next_types := ARRAY[]::transition_type[]; -- No valid next transitions
+ ELSE
+ RAISE EXCEPTION 'Unknown previous transition type: %', previous_type;
+ END CASE;
+
+ IF NOT NEW.type = ANY(valid_next_types) THEN
+ RAISE EXCEPTION 'Invalid transition from % to %', previous_type, NEW.type;
+ END IF;
+ END IF;
+
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Create a trigger on the transitions table
+CREATE TRIGGER validate_transition
+BEFORE INSERT ON transitions
+FOR EACH ROW
+EXECUTE FUNCTION check_valid_transition();
+
+COMMIT;
\ No newline at end of file
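
The check_valid_transition trigger turns transitions into a guarded state
machine: the first row for an execution must be 'init' or 'init_branch'.
With placeholder UUIDs:

    INSERT INTO transitions (execution_id, transition_id, type, step_definition, current_step)
    VALUES ('11111111-1111-1111-1111-111111111111',
            '22222222-2222-2222-2222-222222222222',
            'step', '{}'::jsonb, ROW('main', 0)::transition_cursor);
    -- ERROR: First transition must be init or init_branch, got step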
diff --git a/memory-store/migrations/00003_users.sql b/memory-store/migrations/00003_users.sql
deleted file mode 100644
index 0d9f76ff7..000000000
--- a/memory-store/migrations/00003_users.sql
+++ /dev/null
@@ -1,34 +0,0 @@
--- Create users table
-CREATE TABLE users (
- developer_id UUID NOT NULL,
- user_id UUID NOT NULL,
- name TEXT NOT NULL,
- about TEXT,
- created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
- CONSTRAINT pk_users PRIMARY KEY (developer_id, user_id)
-);
-
--- Create sorted index on user_id (optimized for UUID v7)
-CREATE INDEX users_id_sorted_idx ON users (user_id DESC);
-
--- Create foreign key constraint and index on developer_id
-ALTER TABLE users
- ADD CONSTRAINT users_developer_id_fkey
- FOREIGN KEY (developer_id)
- REFERENCES developers(developer_id);
-
-CREATE INDEX users_developer_id_idx ON users (developer_id);
-
--- Create a GIN index on the entire metadata column
-CREATE INDEX users_metadata_gin_idx ON users USING GIN (metadata);
-
--- Create trigger to automatically update updated_at
-CREATE TRIGGER update_users_updated_at
- BEFORE UPDATE ON users
- FOR EACH ROW
- EXECUTE FUNCTION update_updated_at_column();
-
--- Add comment to table
-COMMENT ON TABLE users IS 'Stores user information linked to developers';
\ No newline at end of file
diff --git a/memory-store/migrations/00008_tools.sql b/memory-store/migrations/00008_tools.sql
deleted file mode 100644
index ec5d8590d..000000000
--- a/memory-store/migrations/00008_tools.sql
+++ /dev/null
@@ -1,33 +0,0 @@
--- Create tools table
-CREATE TABLE tools (
- developer_id UUID NOT NULL,
- agent_id UUID NOT NULL,
- tool_id UUID NOT NULL,
- type TEXT NOT NULL CONSTRAINT ct_tools_type_length CHECK (length(type) >= 1 AND length(type) <= 255),
- name TEXT NOT NULL CONSTRAINT ct_tools_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
- description TEXT CONSTRAINT ct_tools_description_length CHECK (description IS NULL OR length(description) <= 1000),
- spec JSONB NOT NULL,
- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- CONSTRAINT pk_tools PRIMARY KEY (developer_id, agent_id, tool_id)
-);
-
--- Create sorted index on tool_id (optimized for UUID v7)
-CREATE INDEX idx_tools_id_sorted ON tools (tool_id DESC);
-
--- Create foreign key constraint and index on developer_id and agent_id
-ALTER TABLE tools
- ADD CONSTRAINT fk_tools_agent
- FOREIGN KEY (developer_id, agent_id)
- REFERENCES agents(developer_id, agent_id);
-
-CREATE INDEX idx_tools_developer_agent ON tools (developer_id, agent_id);
-
--- Create trigger to automatically update updated_at
-CREATE TRIGGER trg_tools_updated_at
- BEFORE UPDATE ON tools
- FOR EACH ROW
- EXECUTE FUNCTION update_updated_at_column();
-
--- Add comment to table
-COMMENT ON TABLE tools IS 'Stores tool configurations and specifications for AI agents';
\ No newline at end of file
diff --git a/memory-store/migrations/00009_sessions.sql b/memory-store/migrations/00009_sessions.sql
deleted file mode 100644
index d79517f86..000000000
--- a/memory-store/migrations/00009_sessions.sql
+++ /dev/null
@@ -1,99 +0,0 @@
--- Create sessions table
-CREATE TABLE sessions (
- developer_id UUID NOT NULL,
- session_id UUID NOT NULL,
- situation TEXT,
- system_template TEXT NOT NULL,
- created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
- render_templates BOOLEAN NOT NULL DEFAULT true,
- token_budget INTEGER,
- context_overflow TEXT,
- forward_tool_calls BOOLEAN,
- recall_options JSONB NOT NULL DEFAULT '{}'::JSONB,
- CONSTRAINT pk_sessions PRIMARY KEY (developer_id, session_id)
-);
-
--- Create sorted index on session_id (optimized for UUID v7)
-CREATE INDEX idx_sessions_id_sorted ON sessions (session_id DESC);
-
--- Create index for updated_at since we'll sort by it
-CREATE INDEX idx_sessions_updated_at ON sessions (updated_at DESC);
-
--- Create foreign key constraint and index on developer_id
-ALTER TABLE sessions
- ADD CONSTRAINT fk_sessions_developer
- FOREIGN KEY (developer_id)
- REFERENCES developers(developer_id);
-
-CREATE INDEX idx_sessions_developer ON sessions (developer_id);
-
--- Create a GIN index on the metadata column
-CREATE INDEX idx_sessions_metadata ON sessions USING GIN (metadata);
-
--- Create trigger to automatically update updated_at
-CREATE TRIGGER trg_sessions_updated_at
- BEFORE UPDATE ON sessions
- FOR EACH ROW
- EXECUTE FUNCTION update_updated_at_column();
-
--- Add comment to table
-COMMENT ON TABLE sessions IS 'Stores chat sessions and their configurations';
-
--- Create session_lookup table with participant type enum
-DO $$
-BEGIN
- IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'participant_type') THEN
- CREATE TYPE participant_type AS ENUM ('user', 'agent');
- END IF;
-END
-$$;
-
--- Create session_lookup table without the CHECK constraint
-CREATE TABLE session_lookup (
- developer_id UUID NOT NULL,
- session_id UUID NOT NULL,
- participant_type participant_type NOT NULL,
- participant_id UUID NOT NULL,
- PRIMARY KEY (developer_id, session_id, participant_type, participant_id),
- FOREIGN KEY (developer_id, session_id) REFERENCES sessions(developer_id, session_id)
-);
-
--- Create indexes for common query patterns
-CREATE INDEX idx_session_lookup_by_session ON session_lookup (developer_id, session_id);
-CREATE INDEX idx_session_lookup_by_participant ON session_lookup (developer_id, participant_id);
-
--- Add comments to the table
-COMMENT ON TABLE session_lookup IS 'Maps sessions to their participants (users and agents)';
-
--- Create trigger function to enforce conditional foreign keys
-CREATE OR REPLACE FUNCTION validate_participant() RETURNS trigger AS $$
-BEGIN
- IF NEW.participant_type = 'user' THEN
- PERFORM 1 FROM users WHERE developer_id = NEW.developer_id AND user_id = NEW.participant_id;
- IF NOT FOUND THEN
- RAISE EXCEPTION 'Invalid participant_id: % for participant_type user', NEW.participant_id;
- END IF;
- ELSIF NEW.participant_type = 'agent' THEN
- PERFORM 1 FROM agents WHERE developer_id = NEW.developer_id AND agent_id = NEW.participant_id;
- IF NOT FOUND THEN
- RAISE EXCEPTION 'Invalid participant_id: % for participant_type agent', NEW.participant_id;
- END IF;
- ELSE
- RAISE EXCEPTION 'Unknown participant_type: %', NEW.participant_type;
- END IF;
- RETURN NEW;
-END;
-$$ LANGUAGE plpgsql;
-
--- Create triggers for INSERT and UPDATE operations
-CREATE TRIGGER trg_validate_participant_before_insert
- BEFORE INSERT ON session_lookup
- FOR EACH ROW
- EXECUTE FUNCTION validate_participant();
-
-CREATE TRIGGER trg_validate_participant_before_update
- BEFORE UPDATE ON session_lookup
- FOR EACH ROW
- EXECUTE FUNCTION validate_participant();
\ No newline at end of file
diff --git a/memory-store/migrations/00010_tasks.sql b/memory-store/migrations/00010_tasks.sql
deleted file mode 100644
index 66bd8ffc4..000000000
--- a/memory-store/migrations/00010_tasks.sql
+++ /dev/null
@@ -1,40 +0,0 @@
--- Create tasks table
-CREATE TABLE tasks (
- developer_id UUID NOT NULL,
- canonical_name CITEXT NOT NULL CONSTRAINT ct_tasks_canonical_name_length CHECK (length(canonical_name) >= 1 AND length(canonical_name) <= 255),
- agent_id UUID NOT NULL,
- task_id UUID NOT NULL,
- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- name TEXT NOT NULL CONSTRAINT ct_tasks_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
- description TEXT DEFAULT NULL CONSTRAINT ct_tasks_description_length CHECK (description IS NULL OR length(description) <= 1000),
- input_schema JSON NOT NULL,
- tools JSON[] DEFAULT ARRAY[]::JSON[],
- inherit_tools BOOLEAN DEFAULT FALSE,
- workflows JSON[] DEFAULT ARRAY[]::JSON[],
- created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- metadata JSONB DEFAULT '{}'::JSONB,
- CONSTRAINT pk_tasks PRIMARY KEY (developer_id, task_id),
- CONSTRAINT uq_tasks_canonical_name_unique UNIQUE (developer_id, canonical_name),
- CONSTRAINT fk_tasks_agent
- FOREIGN KEY (developer_id, agent_id)
- REFERENCES agents(developer_id, agent_id),
- CONSTRAINT ct_tasks_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$')
-);
-
--- Create sorted index on task_id (optimized for UUID v7)
-CREATE INDEX idx_tasks_id_sorted ON tasks (task_id DESC);
-
--- Create foreign key constraint and index on developer_id
-CREATE INDEX idx_tasks_developer ON tasks (developer_id);
-
--- Create a GIN index on the entire metadata column
-CREATE INDEX idx_tasks_metadata ON tasks USING GIN (metadata);
-
--- Create trigger to automatically update updated_at
-CREATE TRIGGER trg_tasks_updated_at
- BEFORE UPDATE ON tasks
- FOR EACH ROW
- EXECUTE FUNCTION update_updated_at_column();
-
--- Add comment to table
-COMMENT ON TABLE tasks IS 'Stores tasks associated with AI agents for developers';
\ No newline at end of file
diff --git a/memory-store/migrations/00012_transitions.sql b/memory-store/migrations/00012_transitions.sql
deleted file mode 100644
index 3bc3ea290..000000000
--- a/memory-store/migrations/00012_transitions.sql
+++ /dev/null
@@ -1,66 +0,0 @@
--- Create transition type enum
-CREATE TYPE transition_type AS ENUM (
- 'init',
- 'finish',
- 'init_branch',
- 'finish_branch',
- 'wait',
- 'resume',
- 'error',
- 'step',
- 'cancelled'
-);
-
--- Create transition cursor type
-CREATE TYPE transition_cursor AS (
- workflow_name TEXT,
- step_index INT
-);
-
--- Create transitions table
-CREATE TABLE transitions (
- created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- execution_id UUID NOT NULL,
- transition_id UUID NOT NULL,
- type transition_type NOT NULL,
- step_definition JSONB NOT NULL,
- step_label TEXT DEFAULT NULL,
- current_step transition_cursor NOT NULL,
- next_step transition_cursor DEFAULT NULL,
- output JSONB,
- task_token TEXT DEFAULT NULL,
- metadata JSONB DEFAULT '{}'::JSONB,
- CONSTRAINT pk_transitions PRIMARY KEY (created_at, execution_id, transition_id)
-);
-
--- Convert to hypertable
-SELECT create_hypertable('transitions', 'created_at');
-
--- Create unique constraint for current step
-CREATE UNIQUE INDEX idx_transitions_current ON transitions (execution_id, current_step, created_at DESC);
-
--- Create unique constraint for next step (excluding nulls)
-CREATE UNIQUE INDEX idx_transitions_next ON transitions (execution_id, next_step, created_at DESC)
-WHERE next_step IS NOT NULL;
-
--- Create unique constraint for step label (excluding nulls)
-CREATE UNIQUE INDEX idx_transitions_label ON transitions (execution_id, step_label, created_at DESC)
-WHERE step_label IS NOT NULL;
-
--- Create sorted index on transition_id (optimized for UUID v7)
-CREATE INDEX idx_transitions_transition_id_sorted ON transitions (transition_id DESC, created_at DESC);
-
--- Create sorted index on execution_id (optimized for UUID v7)
-CREATE INDEX idx_transitions_execution_id_sorted ON transitions (execution_id DESC, created_at DESC);
-
--- Create a GIN index on the metadata column
-CREATE INDEX idx_transitions_metadata ON transitions USING GIN (metadata);
-
--- Add foreign key constraint
-ALTER TABLE transitions
- ADD CONSTRAINT fk_transitions_execution
- FOREIGN KEY (execution_id)
- REFERENCES executions(execution_id);
-
--- Add comment to table
-COMMENT ON TABLE transitions IS 'Stores transitions associated with AI agents for developers';
\ No newline at end of file
From e32f4ef5d46f9248010fe0e634d1f152a8fa57f1 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sat, 14 Dec 2024 18:06:27 +0530
Subject: [PATCH 007/274] feat(memory-store): Add continuous aggregates on
executions
Signed-off-by: Diwank Singh Tomer
---
...000013_executions_continuous_view.down.sql | 13 +++
.../000013_executions_continuous_view.up.sql | 89 +++++++++++++++++++
2 files changed, 102 insertions(+)
create mode 100644 memory-store/migrations/000013_executions_continuous_view.down.sql
create mode 100644 memory-store/migrations/000013_executions_continuous_view.up.sql
diff --git a/memory-store/migrations/000013_executions_continuous_view.down.sql b/memory-store/migrations/000013_executions_continuous_view.down.sql
new file mode 100644
index 000000000..d833ca4d4
--- /dev/null
+++ b/memory-store/migrations/000013_executions_continuous_view.down.sql
@@ -0,0 +1,13 @@
+BEGIN;
+
+-- Drop the continuous aggregate policy
+SELECT remove_continuous_aggregate_policy('latest_transitions');
+
+-- Drop the views
+DROP VIEW IF EXISTS latest_executions;
+DROP MATERIALIZED VIEW IF EXISTS latest_transitions;
+
+-- Drop the helper function
+DROP FUNCTION IF EXISTS to_text(transition_type);
+
+COMMIT;
diff --git a/memory-store/migrations/000013_executions_continuous_view.up.sql b/memory-store/migrations/000013_executions_continuous_view.up.sql
new file mode 100644
index 000000000..b33530824
--- /dev/null
+++ b/memory-store/migrations/000013_executions_continuous_view.up.sql
@@ -0,0 +1,89 @@
+BEGIN;
+
+-- create a function to convert transition_type to text (needed because the ::text cast is STABLE, not IMMUTABLE, so it can't be used inside a continuous aggregate)
+create or replace function to_text(transition_type)
+RETURNS text AS
+$$
+ select $1
+$$ STRICT IMMUTABLE LANGUAGE sql;
+
+-- create a continuous view that aggregates the transitions table
+create materialized view if not exists latest_transitions
+with
+ (
+ timescaledb.continuous,
+ timescaledb.materialized_only = false
+ ) as
+select
+ time_bucket ('1 day', created_at) as bucket,
+ execution_id,
+ count(*) as total_transitions,
+ state_agg (created_at, to_text (type)) as state,
+ max(created_at) as created_at,
+ last (type, created_at) as type,
+ last (step_definition, created_at) as step_definition,
+ last (step_label, created_at) as step_label,
+ last (current_step, created_at) as current_step,
+ last (next_step, created_at) as next_step,
+ last (output, created_at) as output,
+ last (task_token, created_at) as task_token,
+ last (metadata, created_at) as metadata
+from
+ transitions
+group by
+ bucket,
+ execution_id
+with no data;
+
+SELECT
+ add_continuous_aggregate_policy (
+ 'latest_transitions',
+ start_offset => NULL,
+ end_offset => INTERVAL '10 minutes',
+ schedule_interval => INTERVAL '10 minutes'
+ );
+
+-- Create a view that combines executions with their latest transitions
+create or replace view latest_executions as
+SELECT
+ e.developer_id,
+ e.task_id,
+ e.task_version,
+ e.execution_id,
+ e.input,
+ e.metadata,
+ e.created_at,
+ lt.created_at as updated_at,
+ -- Map transition types to status using CASE statement
+ CASE lt.type::text
+ WHEN 'init' THEN 'starting'
+ WHEN 'init_branch' THEN 'running'
+ WHEN 'wait' THEN 'awaiting_input'
+ WHEN 'resume' THEN 'running'
+ WHEN 'step' THEN 'running'
+ WHEN 'finish' THEN 'succeeded'
+ WHEN 'finish_branch' THEN 'running'
+ WHEN 'error' THEN 'failed'
+ WHEN 'cancelled' THEN 'cancelled'
+ ELSE 'queued'
+ END as status,
+ lt.output,
+ -- Extract error from output if type is 'error'
+ CASE
+ WHEN lt.type::text = 'error' THEN lt.output ->> 'error'
+ ELSE NULL
+ END as error,
+ lt.total_transitions,
+ lt.current_step,
+ lt.next_step,
+ lt.step_definition,
+ lt.step_label,
+ lt.task_token,
+ lt.metadata as transition_metadata
+FROM
+ executions e,
+ latest_transitions lt
+WHERE
+ e.execution_id = lt.execution_id;
+
+COMMIT;
\ No newline at end of file
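
Downstream reads go through latest_executions, which folds each execution's
newest transition into a status column; a typical listing (the developer id is
a placeholder):

    SELECT execution_id, status, error, total_transitions, updated_at
    FROM latest_executions
    WHERE developer_id = '11111111-1111-1111-1111-111111111111'
    ORDER BY created_at DESC;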
From 9c974e8da8fe921902f0de2328a5763995e877e8 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sat, 14 Dec 2024 18:15:09 +0530
Subject: [PATCH 008/274] feat(memory-store): Add migrations for
temporal_lookup table
Signed-off-by: Diwank Singh Tomer
---
.../000014_temporal_lookup.down.sql | 5 +++++
.../migrations/000014_temporal_lookup.up.sql | 22 +++++++++++++++++++
2 files changed, 27 insertions(+)
create mode 100644 memory-store/migrations/000014_temporal_lookup.down.sql
create mode 100644 memory-store/migrations/000014_temporal_lookup.up.sql
diff --git a/memory-store/migrations/000014_temporal_lookup.down.sql b/memory-store/migrations/000014_temporal_lookup.down.sql
new file mode 100644
index 000000000..4c836f911
--- /dev/null
+++ b/memory-store/migrations/000014_temporal_lookup.down.sql
@@ -0,0 +1,5 @@
+BEGIN;
+
+DROP TABLE IF EXISTS temporal_executions_lookup;
+
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000014_temporal_lookup.up.sql b/memory-store/migrations/000014_temporal_lookup.up.sql
new file mode 100644
index 000000000..1650ab3ac
--- /dev/null
+++ b/memory-store/migrations/000014_temporal_lookup.up.sql
@@ -0,0 +1,22 @@
+BEGIN;
+
+-- Create temporal_executions_lookup table
+CREATE TABLE
+ IF NOT EXISTS temporal_executions_lookup (
+ execution_id UUID NOT NULL,
+ id TEXT NOT NULL,
+ run_id TEXT,
+ first_execution_run_id TEXT,
+ result_run_id TEXT,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ CONSTRAINT pk_temporal_executions_lookup PRIMARY KEY (execution_id, id),
+ CONSTRAINT fk_temporal_executions_lookup_execution FOREIGN KEY (execution_id) REFERENCES executions (execution_id)
+ );
+
+-- Create sorted index on execution_id (optimized for UUID v7)
+CREATE INDEX IF NOT EXISTS idx_temporal_executions_lookup_execution_id_sorted ON temporal_executions_lookup (execution_id DESC);
+
+-- Add comment to table
+COMMENT ON TABLE temporal_executions_lookup IS 'Stores temporal workflow execution lookup data for AI agent executions';
+
+COMMIT;
\ No newline at end of file
From 7afe5b281d467edbc1e1b404fb8f0b79b7ca6c09 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Sat, 14 Dec 2024 16:12:12 +0300
Subject: [PATCH 009/274] feat: Add PostgreSQL client and query decorator
---
agents-api/agents_api/clients/pg.py | 12 +++
agents-api/agents_api/env.py | 7 ++
agents-api/agents_api/models/utils.py | 119 +++++++++++++++++++++++++-
agents-api/pyproject.toml | 1 +
agents-api/uv.lock | 18 ++++
5 files changed, 153 insertions(+), 4 deletions(-)
create mode 100644 agents-api/agents_api/clients/pg.py
diff --git a/agents-api/agents_api/clients/pg.py b/agents-api/agents_api/clients/pg.py
new file mode 100644
index 000000000..debc81184
--- /dev/null
+++ b/agents-api/agents_api/clients/pg.py
@@ -0,0 +1,12 @@
+import asyncpg
+
+from ..env import db_dsn
+from ..web import app
+
+
+async def get_pg_client():
+    # Create the connection once, cache it on app state, and reuse it on
+    # subsequent calls instead of opening a new connection every time
+    client = getattr(app.state, "pg_client", None)
+    if client is None:
+        client = await asyncpg.connect(db_dsn)
+        app.state.pg_client = client
+
+    return client
diff --git a/agents-api/agents_api/env.py b/agents-api/agents_api/env.py
index 2e7173b17..48623b771 100644
--- a/agents-api/agents_api/env.py
+++ b/agents-api/agents_api/env.py
@@ -59,6 +59,13 @@
"DO_VERIFY_DEVELOPER_OWNS_RESOURCE", default=True
)
+# PostgreSQL
+# ----
+db_dsn: str = env.str(
+ "DB_DSN",
+ default="postgres://postgres:postgres@0.0.0.0:5432/postgres?sslmode=disable",
+)
+
# Auth
# ----
diff --git a/agents-api/agents_api/models/utils.py b/agents-api/agents_api/models/utils.py
index 880f7e30f..9b5e454e6 100644
--- a/agents-api/agents_api/models/utils.py
+++ b/agents-api/agents_api/models/utils.py
@@ -7,6 +7,7 @@
from uuid import UUID
import pandas as pd
+from asyncpg import Record
from fastapi import HTTPException
from httpcore import ConnectError, NetworkError, TimeoutException
from httpx import ConnectError as HttpxConnectError
@@ -457,18 +458,128 @@ async def wrapper(
return cozo_query_dec
+def pg_query(
+ func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
+ debug: bool | None = None,
+ only_on_error: bool = False,
+ timeit: bool = False,
+):
+ def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
+ """
+        Decorator for query functions that take arbitrary arguments and
+        return a (query string, variables) tuple.
+
+        The resulting wrapper additionally accepts an optional `client`
+        keyword argument, runs the query with it, and returns the fetched
+        Records.
+ """
+
+        from pprint import pprint
+
+        import sqlglot
+
+ from tenacity import (
+ retry,
+ retry_if_exception,
+ stop_after_attempt,
+ wait_exponential,
+ )
+
+ def is_resource_busy(e: Exception) -> bool:
+ return (
+ isinstance(e, HTTPException)
+ and e.status_code == 429
+ and not getattr(e, "cozo_offline", False)
+ )
+
+ @retry(
+ stop=stop_after_attempt(4),
+ wait=wait_exponential(multiplier=1, min=4, max=10),
+ retry=retry_if_exception(is_resource_busy),
+ )
+ @wraps(func)
+ async def wrapper(
+ *args: P.args, client=None, **kwargs: P.kwargs
+ ) -> list[Record]:
+ if inspect.iscoroutinefunction(func):
+ query, variables = await func(*args, **kwargs)
+ else:
+ query, variables = func(*args, **kwargs)
+
+ not only_on_error and debug and print(query)
+ not only_on_error and debug and pprint(
+ dict(
+ variables=variables,
+ )
+ )
+
+ # Run the query
+ from ..clients import pg
+
+ try:
+ client = client or await pg.get_pg_client()
+
+ start = timeit and time.perf_counter()
+            sqlglot.parse(query)  # validate the SQL early; raises ParseError if malformed
+ results: list[Record] = await client.fetch(query, *variables)
+ end = timeit and time.perf_counter()
+
+ timeit and print(f"PostgreSQL query time: {end - start:.2f} seconds")
+
+ except Exception as e:
+ if only_on_error and debug:
+ print(query)
+ pprint(variables)
+
+ debug and print(repr(e))
+ connection_error = isinstance(
+ e,
+ (
+ ConnectionError,
+ Timeout,
+ TimeoutException,
+ NetworkError,
+ RequestError,
+ ),
+ )
+
+ if connection_error:
+ exc = HTTPException(
+ status_code=429, detail="Resource busy. Please try again later."
+ )
+ raise exc from e
+
+ raise
+
+ not only_on_error and debug and pprint(
+ dict(
+ results=[dict(result.items()) for result in results],
+ )
+ )
+
+ return results
+
+    # Expose the wrapped function as an attribute of the wrapper,
+    # forwarding the __wrapped__ attribute if it already exists.
+ setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+
+ return wrapper
+
+ if func is not None and callable(func):
+ return pg_query_dec(func)
+
+ return pg_query_dec
+
+
def wrap_in_class(
cls: Type[ModelT] | Callable[..., ModelT],
one: bool = False,
transform: Callable[[dict], dict] | None = None,
_kind: str | None = None,
):
- def _return_data(df: pd.DataFrame):
+ def _return_data(rec: Record):
         # Convert the record's columns into a list of (name, value) pairs
- if _kind:
- df = df[df["_kind"] == _kind]
+ # if _kind:
+ # rec = rec[rec["_kind"] == _kind]
- data = df.to_dict(orient="records")
+ data = list(rec.items())
nonlocal transform
transform = transform or (lambda x: x)
diff --git a/agents-api/pyproject.toml b/agents-api/pyproject.toml
index f8ec61367..65ed6903c 100644
--- a/agents-api/pyproject.toml
+++ b/agents-api/pyproject.toml
@@ -51,6 +51,7 @@ dependencies = [
"xxhash~=3.5.0",
"spacy-chunks>=0.0.2",
"uuid7>=0.1.0",
+ "asyncpg>=0.30.0",
]
[dependency-groups]
diff --git a/agents-api/uv.lock b/agents-api/uv.lock
index 381d91e79..c7c27c5b4 100644
--- a/agents-api/uv.lock
+++ b/agents-api/uv.lock
@@ -15,6 +15,7 @@ dependencies = [
{ name = "anyio" },
{ name = "arrow" },
{ name = "async-lru" },
+ { name = "asyncpg" },
{ name = "beartype" },
{ name = "en-core-web-sm" },
{ name = "environs" },
@@ -82,6 +83,7 @@ requires-dist = [
{ name = "anyio", specifier = "~=4.4.0" },
{ name = "arrow", specifier = "~=1.3.0" },
{ name = "async-lru", specifier = "~=2.0.4" },
+ { name = "asyncpg", specifier = ">=0.30.0" },
{ name = "beartype", specifier = "~=0.18.5" },
{ name = "en-core-web-sm", url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0-py3-none-any.whl" },
{ name = "environs", specifier = "~=10.3.0" },
@@ -342,6 +344,22 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fa/9f/3c3503693386c4b0f245eaf5ca6198e3b28879ca0a40bde6b0e319793453/async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224", size = 6111 },
]
+[[package]]
+name = "asyncpg"
+version = "0.30.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4b/64/9d3e887bb7b01535fdbc45fbd5f0a8447539833b97ee69ecdbb7a79d0cb4/asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", size = 673162 },
+ { url = "https://files.pythonhosted.org/packages/6e/eb/8b236663f06984f212a087b3e849731f917ab80f84450e943900e8ca4052/asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", size = 637025 },
+ { url = "https://files.pythonhosted.org/packages/cc/57/2dc240bb263d58786cfaa60920779af6e8d32da63ab9ffc09f8312bd7a14/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", size = 3496243 },
+ { url = "https://files.pythonhosted.org/packages/f4/40/0ae9d061d278b10713ea9021ef6b703ec44698fe32178715a501ac696c6b/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", size = 3575059 },
+ { url = "https://files.pythonhosted.org/packages/c3/75/d6b895a35a2c6506952247640178e5f768eeb28b2e20299b6a6f1d743ba0/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", size = 3473596 },
+ { url = "https://files.pythonhosted.org/packages/c8/e7/3693392d3e168ab0aebb2d361431375bd22ffc7b4a586a0fc060d519fae7/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", size = 3641632 },
+ { url = "https://files.pythonhosted.org/packages/32/ea/15670cea95745bba3f0352341db55f506a820b21c619ee66b7d12ea7867d/asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", size = 560186 },
+ { url = "https://files.pythonhosted.org/packages/7e/6b/fe1fad5cee79ca5f5c27aed7bd95baee529c1bf8a387435c8ba4fe53d5c1/asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", size = 621064 },
+]
+
[[package]]
name = "attrs"
version = "24.2.0"
From db00fbd2fe6c493fb1b34633ef9fa2d27ad7b124 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Sat, 14 Dec 2024 16:12:29 +0300
Subject: [PATCH 010/274] feat: Reimplement get developer query
---
.../models/developer/get_developer.py | 38 +++++--------------
1 file changed, 9 insertions(+), 29 deletions(-)
diff --git a/agents-api/agents_api/models/developer/get_developer.py b/agents-api/agents_api/models/developer/get_developer.py
index 0ae5421aa..e05c000ff 100644
--- a/agents-api/agents_api/models/developer/get_developer.py
+++ b/agents-api/agents_api/models/developer/get_developer.py
@@ -12,6 +12,7 @@
from ..utils import (
cozo_query,
partialclass,
+ pg_query,
rewrap_exceptions,
verify_developer_id_query,
wrap_in_class,
@@ -38,37 +39,16 @@ def verify_developer(
}
)
@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
-@cozo_query
+@pg_query
@beartype
-def get_developer(
+async def get_developer(
*,
developer_id: UUID,
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
developer_id = str(developer_id)
+ query = "SELECT * FROM developers WHERE developer_id = $1"
- query = """
- input[developer_id] <- [[to_uuid($developer_id)]]
- ?[
- developer_id,
- email,
- active,
- tags,
- settings,
- created_at,
- updated_at,
- ] :=
- input[developer_id],
- *developers {
- developer_id,
- email,
- active,
- tags,
- settings,
- created_at,
- updated_at,
- }
-
- :limit 1
- """
-
- return (query, {"developer_id": developer_id})
+ return (
+ query,
+ [developer_id],
+ )
From 85a4e8be2fee2a7d19ff184176b11948cdec4934 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sat, 14 Dec 2024 22:57:27 +0530
Subject: [PATCH 011/274] refactor(memory-store): Reformat migrations using
sql-formatter
Signed-off-by: Diwank Singh Tomer
---
.../migrations/000001_initial.down.sql | 12 ++-
memory-store/migrations/000001_initial.up.sql | 12 ++-
.../migrations/000002_developers.up.sql | 11 ++-
memory-store/migrations/000003_users.down.sql | 4 +-
memory-store/migrations/000003_users.up.sql | 1 +
.../migrations/000004_agents.down.sql | 2 +
memory-store/migrations/000004_agents.up.sql | 36 ++++++---
memory-store/migrations/000005_files.down.sql | 1 +
memory-store/migrations/000005_files.up.sql | 23 ++++--
memory-store/migrations/000006_docs.down.sql | 18 ++++-
memory-store/migrations/000006_docs.up.sql | 48 ++++++++----
memory-store/migrations/000007_ann.up.sql | 76 ++++++++++---------
memory-store/migrations/000008_tools.up.sql | 29 ++++---
.../migrations/000009_sessions.down.sql | 4 +-
.../migrations/000009_sessions.up.sql | 22 ++++--
memory-store/migrations/000010_tasks.down.sql | 13 +++-
memory-store/migrations/000010_tasks.up.sql | 23 ++++--
.../migrations/000011_executions.up.sql | 10 +--
.../migrations/000012_transitions.down.sql | 13 +++-
.../migrations/000012_transitions.up.sql | 24 ++++--
...000013_executions_continuous_view.down.sql | 6 +-
.../000013_executions_continuous_view.up.sql | 56 +++++++-------
.../migrations/000014_temporal_lookup.up.sql | 21 +++--
23 files changed, 298 insertions(+), 167 deletions(-)
diff --git a/memory-store/migrations/000001_initial.down.sql b/memory-store/migrations/000001_initial.down.sql
index ddc44dbc8..6f5aa4b5c 100644
--- a/memory-store/migrations/000001_initial.down.sql
+++ b/memory-store/migrations/000001_initial.down.sql
@@ -1,17 +1,27 @@
+BEGIN;
+
-- Drop the update_updated_at_column function
-DROP FUNCTION IF EXISTS update_updated_at_column();
+DROP FUNCTION IF EXISTS update_updated_at_column ();
-- Drop misc extensions
DROP EXTENSION IF EXISTS "uuid-ossp" CASCADE;
+
DROP EXTENSION IF EXISTS citext CASCADE;
+
DROP EXTENSION IF EXISTS btree_gist CASCADE;
+
DROP EXTENSION IF EXISTS btree_gin CASCADE;
-- Drop timescale's pgai extensions
DROP EXTENSION IF EXISTS ai CASCADE;
+
DROP EXTENSION IF EXISTS vectorscale CASCADE;
+
DROP EXTENSION IF EXISTS vector CASCADE;
-- Drop timescaledb extensions
DROP EXTENSION IF EXISTS timescaledb_toolkit CASCADE;
+
DROP EXTENSION IF EXISTS timescaledb CASCADE;
+
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000001_initial.up.sql b/memory-store/migrations/000001_initial.up.sql
index da04e3c4b..6eba5ab6c 100644
--- a/memory-store/migrations/000001_initial.up.sql
+++ b/memory-store/migrations/000001_initial.up.sql
@@ -2,28 +2,34 @@ BEGIN;
-- init timescaledb
CREATE EXTENSION IF NOT EXISTS timescaledb CASCADE;
+
CREATE EXTENSION IF NOT EXISTS timescaledb_toolkit CASCADE;
-- add timescale's pgai extension
CREATE EXTENSION IF NOT EXISTS vector CASCADE;
+
CREATE EXTENSION IF NOT EXISTS vectorscale CASCADE;
+
CREATE EXTENSION IF NOT EXISTS ai CASCADE;
-- add misc extensions (for indexing etc)
CREATE EXTENSION IF NOT EXISTS btree_gin CASCADE;
+
CREATE EXTENSION IF NOT EXISTS btree_gist CASCADE;
+
CREATE EXTENSION IF NOT EXISTS citext CASCADE;
+
CREATE EXTENSION IF NOT EXISTS "uuid-ossp" CASCADE;
-- Create function to update the updated_at timestamp
-CREATE OR REPLACE FUNCTION update_updated_at_column()
-RETURNS TRIGGER AS $$
+CREATE
+OR REPLACE FUNCTION update_updated_at_column () RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ language 'plpgsql';
-COMMENT ON FUNCTION update_updated_at_column() IS 'Trigger function to automatically update updated_at timestamp';
+COMMENT ON FUNCTION update_updated_at_column () IS 'Trigger function to automatically update updated_at timestamp';
COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000002_developers.up.sql b/memory-store/migrations/000002_developers.up.sql
index 0802dcf6f..9ca9dca69 100644
--- a/memory-store/migrations/000002_developers.up.sql
+++ b/memory-store/migrations/000002_developers.up.sql
@@ -3,8 +3,10 @@ BEGIN;
-- Create developers table
CREATE TABLE IF NOT EXISTS developers (
developer_id UUID NOT NULL,
- email TEXT NOT NULL CONSTRAINT ct_developers_email_format CHECK (email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$'),
- active BOOLEAN NOT NULL DEFAULT true,
+ email TEXT NOT NULL CONSTRAINT ct_developers_email_format CHECK (
+ email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$'
+ ),
+ active BOOLEAN NOT NULL DEFAULT TRUE,
tags TEXT[] DEFAULT ARRAY[]::TEXT[],
settings JSONB NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
@@ -23,7 +25,9 @@ CREATE INDEX IF NOT EXISTS idx_developers_email ON developers (email);
CREATE INDEX IF NOT EXISTS idx_developers_tags ON developers USING GIN (tags);
-- Create partial index for active developers
-CREATE INDEX IF NOT EXISTS idx_developers_active ON developers (developer_id) WHERE active = true;
+CREATE INDEX IF NOT EXISTS idx_developers_active ON developers (developer_id)
+WHERE
+ active = TRUE;
-- Create trigger to automatically update updated_at
DO $$
@@ -39,4 +43,5 @@ $$;
-- Add comment to table
COMMENT ON TABLE developers IS 'Stores developer information including their settings and tags';
+
COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000003_users.down.sql b/memory-store/migrations/000003_users.down.sql
index 3b1b98648..41a27bfc4 100644
--- a/memory-store/migrations/000003_users.down.sql
+++ b/memory-store/migrations/000003_users.down.sql
@@ -5,12 +5,14 @@ DROP TRIGGER IF EXISTS update_users_updated_at ON users;
-- Drop indexes
DROP INDEX IF EXISTS users_metadata_gin_idx;
+
DROP INDEX IF EXISTS users_developer_id_idx;
+
DROP INDEX IF EXISTS users_id_sorted_idx;
-- Drop foreign key constraint
ALTER TABLE IF EXISTS users
- DROP CONSTRAINT IF EXISTS users_developer_id_fkey;
+DROP CONSTRAINT IF EXISTS users_developer_id_fkey;
-- Finally drop the table
DROP TABLE IF EXISTS users;
diff --git a/memory-store/migrations/000003_users.up.sql b/memory-store/migrations/000003_users.up.sql
index c32ff48fe..028e40ef5 100644
--- a/memory-store/migrations/000003_users.up.sql
+++ b/memory-store/migrations/000003_users.up.sql
@@ -46,4 +46,5 @@ END $$;
-- Add comment to table (comments are idempotent by default)
COMMENT ON TABLE users IS 'Stores user information linked to developers';
+
COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000004_agents.down.sql b/memory-store/migrations/000004_agents.down.sql
index 0504684fb..be81aaa30 100644
--- a/memory-store/migrations/000004_agents.down.sql
+++ b/memory-store/migrations/000004_agents.down.sql
@@ -5,7 +5,9 @@ DROP TRIGGER IF EXISTS trg_agents_updated_at ON agents;
-- Drop indexes
DROP INDEX IF EXISTS idx_agents_metadata;
+
DROP INDEX IF EXISTS idx_agents_developer;
+
DROP INDEX IF EXISTS idx_agents_id_sorted;
-- Drop table (this will automatically drop associated constraints)
diff --git a/memory-store/migrations/000004_agents.up.sql b/memory-store/migrations/000004_agents.up.sql
index 82eb9c84f..32e066f71 100644
--- a/memory-store/migrations/000004_agents.up.sql
+++ b/memory-store/migrations/000004_agents.up.sql
@@ -2,18 +2,31 @@ BEGIN;
-- Drop existing objects if they exist
DROP TRIGGER IF EXISTS trg_agents_updated_at ON agents;
+
DROP INDEX IF EXISTS idx_agents_metadata;
+
DROP INDEX IF EXISTS idx_agents_developer;
+
DROP INDEX IF EXISTS idx_agents_id_sorted;
+
DROP TABLE IF EXISTS agents;
-- Create agents table
CREATE TABLE IF NOT EXISTS agents (
developer_id UUID NOT NULL,
agent_id UUID NOT NULL,
- canonical_name citext NOT NULL CONSTRAINT ct_agents_canonical_name_length CHECK (length(canonical_name) >= 1 AND length(canonical_name) <= 255),
- name TEXT NOT NULL CONSTRAINT ct_agents_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
- about TEXT CONSTRAINT ct_agents_about_length CHECK (about IS NULL OR length(about) <= 1000),
+ canonical_name citext NOT NULL CONSTRAINT ct_agents_canonical_name_length CHECK (
+ length(canonical_name) >= 1
+ AND length(canonical_name) <= 255
+ ),
+ name TEXT NOT NULL CONSTRAINT ct_agents_name_length CHECK (
+ length(name) >= 1
+ AND length(name) <= 255
+ ),
+ about TEXT CONSTRAINT ct_agents_about_length CHECK (
+ about IS NULL
+ OR length(about) <= 1000
+ ),
instructions TEXT[] DEFAULT ARRAY[]::TEXT[],
model TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
@@ -29,11 +42,9 @@ CREATE TABLE IF NOT EXISTS agents (
CREATE INDEX IF NOT EXISTS idx_agents_id_sorted ON agents (agent_id DESC);
-- Create foreign key constraint and index on developer_id
-ALTER TABLE agents
- DROP CONSTRAINT IF EXISTS fk_agents_developer,
- ADD CONSTRAINT fk_agents_developer
- FOREIGN KEY (developer_id)
- REFERENCES developers(developer_id);
+ALTER TABLE agents
+DROP CONSTRAINT IF EXISTS fk_agents_developer,
+ADD CONSTRAINT fk_agents_developer FOREIGN KEY (developer_id) REFERENCES developers (developer_id);
CREATE INDEX IF NOT EXISTS idx_agents_developer ON agents (developer_id);
@@ -41,11 +52,12 @@ CREATE INDEX IF NOT EXISTS idx_agents_developer ON agents (developer_id);
CREATE INDEX IF NOT EXISTS idx_agents_metadata ON agents USING GIN (metadata);
-- Create trigger to automatically update updated_at
-CREATE OR REPLACE TRIGGER trg_agents_updated_at
- BEFORE UPDATE ON agents
- FOR EACH ROW
- EXECUTE FUNCTION update_updated_at_column();
+CREATE
+OR REPLACE TRIGGER trg_agents_updated_at BEFORE
+UPDATE ON agents FOR EACH ROW
+EXECUTE FUNCTION update_updated_at_column ();
-- Add comment to table
COMMENT ON TABLE agents IS 'Stores AI agent configurations and metadata for developers';
+
COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000005_files.down.sql b/memory-store/migrations/000005_files.down.sql
index 870eac359..80bf6fecd 100644
--- a/memory-store/migrations/000005_files.down.sql
+++ b/memory-store/migrations/000005_files.down.sql
@@ -8,6 +8,7 @@ DROP TABLE IF EXISTS user_files;
-- Drop files table and its dependencies
DROP TRIGGER IF EXISTS trg_files_updated_at ON files;
+
DROP TABLE IF EXISTS files;
COMMIT;
diff --git a/memory-store/migrations/000005_files.up.sql b/memory-store/migrations/000005_files.up.sql
index bf368db9a..ef4c22b3d 100644
--- a/memory-store/migrations/000005_files.up.sql
+++ b/memory-store/migrations/000005_files.up.sql
@@ -4,9 +4,18 @@ BEGIN;
CREATE TABLE IF NOT EXISTS files (
developer_id UUID NOT NULL,
file_id UUID NOT NULL,
- name TEXT NOT NULL CONSTRAINT ct_files_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
- description TEXT DEFAULT NULL CONSTRAINT ct_files_description_length CHECK (description IS NULL OR length(description) <= 1000),
- mime_type TEXT DEFAULT NULL CONSTRAINT ct_files_mime_type_length CHECK (mime_type IS NULL OR length(mime_type) <= 127),
+ name TEXT NOT NULL CONSTRAINT ct_files_name_length CHECK (
+ length(name) >= 1
+ AND length(name) <= 255
+ ),
+ description TEXT DEFAULT NULL CONSTRAINT ct_files_description_length CHECK (
+ description IS NULL
+ OR length(description) <= 1000
+ ),
+ mime_type TEXT DEFAULT NULL CONSTRAINT ct_files_mime_type_length CHECK (
+ mime_type IS NULL
+ OR length(mime_type) <= 127
+ ),
size BIGINT NOT NULL CONSTRAINT ct_files_size_positive CHECK (size > 0),
hash BYTEA NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
@@ -53,8 +62,8 @@ CREATE TABLE IF NOT EXISTS user_files (
user_id UUID NOT NULL,
file_id UUID NOT NULL,
CONSTRAINT pk_user_files PRIMARY KEY (developer_id, user_id, file_id),
- CONSTRAINT fk_user_files_user FOREIGN KEY (developer_id, user_id) REFERENCES users(developer_id, user_id),
- CONSTRAINT fk_user_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files(developer_id, file_id)
+ CONSTRAINT fk_user_files_user FOREIGN KEY (developer_id, user_id) REFERENCES users (developer_id, user_id),
+ CONSTRAINT fk_user_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id)
);
-- Create index if it doesn't exist
@@ -66,8 +75,8 @@ CREATE TABLE IF NOT EXISTS agent_files (
agent_id UUID NOT NULL,
file_id UUID NOT NULL,
CONSTRAINT pk_agent_files PRIMARY KEY (developer_id, agent_id, file_id),
- CONSTRAINT fk_agent_files_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents(developer_id, agent_id),
- CONSTRAINT fk_agent_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files(developer_id, file_id)
+ CONSTRAINT fk_agent_files_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id),
+ CONSTRAINT fk_agent_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id)
);
-- Create index if it doesn't exist
diff --git a/memory-store/migrations/000006_docs.down.sql b/memory-store/migrations/000006_docs.down.sql
index 50139bb87..468b1b483 100644
--- a/memory-store/migrations/000006_docs.down.sql
+++ b/memory-store/migrations/000006_docs.down.sql
@@ -2,28 +2,40 @@ BEGIN;
-- Drop indexes
DROP INDEX IF EXISTS idx_docs_content_trgm;
+
DROP INDEX IF EXISTS idx_docs_title_trgm;
+
DROP INDEX IF EXISTS idx_docs_search_tsv;
+
DROP INDEX IF EXISTS idx_docs_metadata;
+
DROP INDEX IF EXISTS idx_agent_docs_agent;
+
DROP INDEX IF EXISTS idx_user_docs_user;
+
DROP INDEX IF EXISTS idx_docs_developer;
+
DROP INDEX IF EXISTS idx_docs_id_sorted;
-- Drop triggers
DROP TRIGGER IF EXISTS trg_docs_search_tsv ON docs;
+
DROP TRIGGER IF EXISTS trg_docs_updated_at ON docs;
-- Drop the constraint that depends on is_valid_language function
-ALTER TABLE IF EXISTS docs DROP CONSTRAINT IF EXISTS ct_docs_valid_language;
+ALTER TABLE IF EXISTS docs
+DROP CONSTRAINT IF EXISTS ct_docs_valid_language;
-- Drop functions
-DROP FUNCTION IF EXISTS docs_update_search_tsv();
-DROP FUNCTION IF EXISTS is_valid_language(text);
+DROP FUNCTION IF EXISTS docs_update_search_tsv ();
+
+DROP FUNCTION IF EXISTS is_valid_language (text);
-- Drop tables (in correct order due to foreign key constraints)
DROP TABLE IF EXISTS agent_docs;
+
DROP TABLE IF EXISTS user_docs;
+
DROP TABLE IF EXISTS docs;
COMMIT;
diff --git a/memory-store/migrations/000006_docs.up.sql b/memory-store/migrations/000006_docs.up.sql
index c4a241e65..5b532bbef 100644
--- a/memory-store/migrations/000006_docs.up.sql
+++ b/memory-store/migrations/000006_docs.up.sql
@@ -1,8 +1,8 @@
BEGIN;
-- Create function to validate language (make it OR REPLACE)
-CREATE OR REPLACE FUNCTION is_valid_language(lang text)
-RETURNS boolean AS $$
+CREATE
+OR REPLACE FUNCTION is_valid_language (lang text) RETURNS boolean AS $$
BEGIN
RETURN EXISTS (
SELECT 1 FROM pg_ts_config WHERE cfgname::text = lang
@@ -29,8 +29,7 @@ CREATE TABLE IF NOT EXISTS docs (
CONSTRAINT ct_docs_embedding_dimensions_positive CHECK (embedding_dimensions > 0),
CONSTRAINT ct_docs_valid_modality CHECK (modality IN ('text', 'image', 'mixed')),
CONSTRAINT ct_docs_index_positive CHECK (index >= 0),
- CONSTRAINT ct_docs_valid_language
- CHECK (is_valid_language(language))
+ CONSTRAINT ct_docs_valid_language CHECK (is_valid_language (language))
);
-- Create sorted index on doc_id if not exists
@@ -70,8 +69,8 @@ CREATE TABLE IF NOT EXISTS user_docs (
user_id UUID NOT NULL,
doc_id UUID NOT NULL,
CONSTRAINT pk_user_docs PRIMARY KEY (developer_id, user_id, doc_id),
- CONSTRAINT fk_user_docs_user FOREIGN KEY (developer_id, user_id) REFERENCES users(developer_id, user_id),
- CONSTRAINT fk_user_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs(developer_id, doc_id)
+ CONSTRAINT fk_user_docs_user FOREIGN KEY (developer_id, user_id) REFERENCES users (developer_id, user_id),
+ CONSTRAINT fk_user_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id)
);
-- Create the agent_docs table
@@ -80,20 +79,26 @@ CREATE TABLE IF NOT EXISTS agent_docs (
agent_id UUID NOT NULL,
doc_id UUID NOT NULL,
CONSTRAINT pk_agent_docs PRIMARY KEY (developer_id, agent_id, doc_id),
- CONSTRAINT fk_agent_docs_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents(developer_id, agent_id),
- CONSTRAINT fk_agent_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs(developer_id, doc_id)
+ CONSTRAINT fk_agent_docs_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id),
+ CONSTRAINT fk_agent_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id)
);
-- Create indexes if not exists
CREATE INDEX IF NOT EXISTS idx_user_docs_user ON user_docs (developer_id, user_id);
+
CREATE INDEX IF NOT EXISTS idx_agent_docs_agent ON agent_docs (developer_id, agent_id);
+
CREATE INDEX IF NOT EXISTS idx_docs_metadata ON docs USING GIN (metadata);
-- Enable necessary PostgreSQL extensions
CREATE EXTENSION IF NOT EXISTS unaccent;
+
CREATE EXTENSION IF NOT EXISTS pg_trgm;
+
CREATE EXTENSION IF NOT EXISTS dict_int CASCADE;
+
CREATE EXTENSION IF NOT EXISTS dict_xsyn CASCADE;
+
CREATE EXTENSION IF NOT EXISTS fuzzystrmatch CASCADE;
-- Configure text search for all supported languages
@@ -132,8 +137,8 @@ BEGIN
END $$;
-- Create function to update tsvector
-CREATE OR REPLACE FUNCTION docs_update_search_tsv()
-RETURNS trigger AS $$
+CREATE
+OR REPLACE FUNCTION docs_update_search_tsv () RETURNS trigger AS $$
BEGIN
NEW.search_tsv :=
setweight(to_tsvector(NEW.language::regconfig, unaccent(coalesce(NEW.title, ''))), 'A') ||
@@ -158,13 +163,28 @@ END $$;
-- Create indexes if not exists
CREATE INDEX IF NOT EXISTS idx_docs_search_tsv ON docs USING GIN (search_tsv);
+
CREATE INDEX IF NOT EXISTS idx_docs_title_trgm ON docs USING GIN (title gin_trgm_ops);
+
CREATE INDEX IF NOT EXISTS idx_docs_content_trgm ON docs USING GIN (content gin_trgm_ops);
-- Update existing rows (if any)
-UPDATE docs SET search_tsv =
- setweight(to_tsvector(language::regconfig, unaccent(coalesce(title, ''))), 'A') ||
- setweight(to_tsvector(language::regconfig, unaccent(coalesce(content, ''))), 'B')
-WHERE search_tsv IS NULL;
+UPDATE docs
+SET
+ search_tsv = setweight(
+ to_tsvector(
+ language::regconfig,
+ unaccent (coalesce(title, ''))
+ ),
+ 'A'
+ ) || setweight(
+ to_tsvector(
+ language::regconfig,
+ unaccent (coalesce(content, ''))
+ ),
+ 'B'
+ )
+WHERE
+ search_tsv IS NULL;
COMMIT;
\ No newline at end of file
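Note the weighting scheme above: setweight tags title tokens 'A' and content tokens 'B', so ranked queries favor title hits over body hits. A minimal search sketch, assuming a hypothetical local DSN and the stock 'english' config standing in for the per-language configs the migration sets up:

    import psycopg

    DSN = "postgresql://localhost/memory_store"  # hypothetical connection string

    SEARCH_SQL = """
    SELECT doc_id, title,
           ts_rank(search_tsv, websearch_to_tsquery('english', %(q)s)) AS rank
    FROM docs
    WHERE search_tsv @@ websearch_to_tsquery('english', %(q)s)
    ORDER BY rank DESC
    LIMIT 10;
    """

    with psycopg.connect(DSN) as conn:
        # search_tsv is kept current by the trg_docs_search_tsv trigger above
        for doc_id, title, rank in conn.execute(SEARCH_SQL, {"q": "embeddings"}):
            print(doc_id, title, rank)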
diff --git a/memory-store/migrations/000007_ann.up.sql b/memory-store/migrations/000007_ann.up.sql
index 0b08e9b07..3cc606fde 100644
--- a/memory-store/migrations/000007_ann.up.sql
+++ b/memory-store/migrations/000007_ann.up.sql
@@ -1,37 +1,41 @@
-- Create vector similarity search index using diskann and timescale vectorizer
-SELECT ai.create_vectorizer(
- source => 'docs',
- destination => 'docs_embeddings',
- embedding => ai.embedding_voyageai('voyage-3', 1024), -- need to parameterize this
- -- actual chunking is managed by the docs table
- -- this is to prevent running out of context window
- chunking => ai.chunking_recursive_character_text_splitter(
- chunk_column => 'content',
- chunk_size => 30000, -- 30k characters ~= 7.5k tokens
- chunk_overlap => 600, -- 600 characters ~= 150 tokens
- separators => array[ -- tries separators in order
- -- markdown headers
- E'\n#',
- E'\n##',
- E'\n###',
- E'\n---',
- E'\n***',
- -- html tags
-        E'</article>', -- Split on major document sections
-        E'</div>', -- Split on div boundaries
-        E'</section>',
-        E'</p>', -- Split on paragraphs
-        E'<br>', -- Split on line breaks
- -- other separators
- E'\n\n', -- paragraphs
- '. ', '? ', '! ', '; ', -- sentences (note space after punctuation)
- E'\n', -- line breaks
- ' ' -- words (last resort)
- ]
- ),
- scheduling => ai.scheduling_timescaledb(),
- indexing => ai.indexing_diskann(),
- formatting => ai.formatting_python_template(E'Title: $title\n\n$chunk'),
- processing => ai.processing_default(),
- enqueue_existing => true
-);
\ No newline at end of file
+SELECT
+ ai.create_vectorizer (
+ source => 'docs',
+ destination => 'docs_embeddings',
+ embedding => ai.embedding_voyageai ('voyage-3', 1024), -- need to parameterize this
+ -- actual chunking is managed by the docs table
+ -- this is to prevent running out of context window
+ chunking => ai.chunking_recursive_character_text_splitter (
+ chunk_column => 'content',
+ chunk_size => 30000, -- 30k characters ~= 7.5k tokens
+ chunk_overlap => 600, -- 600 characters ~= 150 tokens
+ separators => ARRAY[ -- tries separators in order
+ -- markdown headers
+ E'\n#',
+ E'\n##',
+ E'\n###',
+ E'\n---',
+ E'\n***',
+ -- html tags
+      E'</article>', -- Split on major document sections
+      E'</div>', -- Split on div boundaries
+      E'</section>',
+      E'</p>', -- Split on paragraphs
+      E'<br>', -- Split on line breaks
+ -- other separators
+ E'\n\n', -- paragraphs
+ '. ',
+ '? ',
+ '! ',
+ '; ', -- sentences (note space after punctuation)
+ E'\n', -- line breaks
+ ' ' -- words (last resort)
+ ]
+ ),
+ scheduling => ai.scheduling_timescaledb (),
+ indexing => ai.indexing_diskann (),
+ formatting => ai.formatting_python_template (E'Title: $title\n\n$chunk'),
+ processing => ai.processing_default (),
+ enqueue_existing => TRUE
+ );
\ No newline at end of file
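The vectorizer maintains a companion relation with one embedded chunk per row. A hedged lookup sketch, assuming pgai's default naming (a docs_embeddings view exposing `chunk` and `embedding` columns), pgvector's `<=>` cosine-distance operator, and a 1024-dimension query vector to match voyage-3:

    import psycopg

    DSN = "postgresql://localhost/memory_store"  # hypothetical connection string

    def nearest_chunks(conn: psycopg.Connection, query_embedding: list[float], k: int = 5):
        sql = """
        SELECT title, chunk, embedding <=> %(qe)s::vector AS distance
        FROM docs_embeddings
        ORDER BY distance
        LIMIT %(k)s;
        """
        # str(list) yields '[0.1, 0.2, ...]', which matches pgvector's text format
        return conn.execute(sql, {"qe": str(query_embedding), "k": k}).fetchall()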
diff --git a/memory-store/migrations/000008_tools.up.sql b/memory-store/migrations/000008_tools.up.sql
index bcf59def8..159ef3688 100644
--- a/memory-store/migrations/000008_tools.up.sql
+++ b/memory-store/migrations/000008_tools.up.sql
@@ -7,13 +7,21 @@ CREATE TABLE IF NOT EXISTS tools (
tool_id UUID NOT NULL,
task_id UUID DEFAULT NULL,
task_version INT DEFAULT NULL,
- type TEXT NOT NULL CONSTRAINT ct_tools_type_length CHECK (length(type) >= 1 AND length(type) <= 255),
- name TEXT NOT NULL CONSTRAINT ct_tools_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
- description TEXT CONSTRAINT ct_tools_description_length CHECK (description IS NULL OR length(description) <= 1000),
+ type TEXT NOT NULL CONSTRAINT ct_tools_type_length CHECK (
+ length(type) >= 1
+ AND length(type) <= 255
+ ),
+ name TEXT NOT NULL CONSTRAINT ct_tools_name_length CHECK (
+ length(name) >= 1
+ AND length(name) <= 255
+ ),
+ description TEXT CONSTRAINT ct_tools_description_length CHECK (
+ description IS NULL
+ OR length(description) <= 1000
+ ),
spec JSONB NOT NULL,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
-
CONSTRAINT pk_tools PRIMARY KEY (developer_id, agent_id, tool_id, type, name)
);
@@ -21,7 +29,9 @@ CREATE TABLE IF NOT EXISTS tools (
CREATE INDEX IF NOT EXISTS idx_tools_id_sorted ON tools (tool_id DESC);
-- Create sorted index on task_id if it doesn't exist
-CREATE INDEX IF NOT EXISTS idx_tools_task_id_sorted ON tools (task_id DESC) WHERE task_id IS NOT NULL;
+CREATE INDEX IF NOT EXISTS idx_tools_task_id_sorted ON tools (task_id DESC)
+WHERE
+ task_id IS NOT NULL;
-- Create foreign key constraint and index if they don't exist
DO $$ BEGIN
@@ -39,11 +49,12 @@ CREATE INDEX IF NOT EXISTS idx_tools_developer_agent ON tools (developer_id, age
-- Drop trigger if exists and recreate
DROP TRIGGER IF EXISTS trg_tools_updated_at ON tools;
-CREATE TRIGGER trg_tools_updated_at
- BEFORE UPDATE ON tools
- FOR EACH ROW
- EXECUTE FUNCTION update_updated_at_column();
+
+CREATE TRIGGER trg_tools_updated_at BEFORE
+UPDATE ON tools FOR EACH ROW
+EXECUTE FUNCTION update_updated_at_column ();
-- Add comment to table
COMMENT ON TABLE tools IS 'Stores tool configurations and specifications for AI agents';
+
COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000009_sessions.down.sql b/memory-store/migrations/000009_sessions.down.sql
index d1c0b2911..33d535e53 100644
--- a/memory-store/migrations/000009_sessions.down.sql
+++ b/memory-store/migrations/000009_sessions.down.sql
@@ -2,16 +2,18 @@ BEGIN;
-- Drop triggers first
DROP TRIGGER IF EXISTS trg_validate_participant_before_update ON session_lookup;
+
DROP TRIGGER IF EXISTS trg_validate_participant_before_insert ON session_lookup;
-- Drop the validation function
-DROP FUNCTION IF EXISTS validate_participant();
+DROP FUNCTION IF EXISTS validate_participant ();
-- Drop session_lookup table and its indexes
DROP TABLE IF EXISTS session_lookup;
-- Drop sessions table and its indexes
DROP TRIGGER IF EXISTS trg_sessions_updated_at ON sessions;
+
DROP TABLE IF EXISTS sessions CASCADE;
-- Drop the enum type
diff --git a/memory-store/migrations/000009_sessions.up.sql b/memory-store/migrations/000009_sessions.up.sql
index 30f135ed7..71e83b7ec 100644
--- a/memory-store/migrations/000009_sessions.up.sql
+++ b/memory-store/migrations/000009_sessions.up.sql
@@ -7,19 +7,21 @@ CREATE TABLE IF NOT EXISTS sessions (
situation TEXT,
system_template TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- -- TODO: Derived from entries
+ -- NOTE: Derived from entries
-- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
- render_templates BOOLEAN NOT NULL DEFAULT true,
+ render_templates BOOLEAN NOT NULL DEFAULT TRUE,
token_budget INTEGER,
context_overflow TEXT,
forward_tool_calls BOOLEAN,
recall_options JSONB NOT NULL DEFAULT '{}'::JSONB,
- CONSTRAINT pk_sessions PRIMARY KEY (developer_id, session_id)
+ CONSTRAINT pk_sessions PRIMARY KEY (developer_id, session_id),
+ CONSTRAINT uq_sessions_session_id UNIQUE (session_id)
);
-- Create indexes if they don't exist
CREATE INDEX IF NOT EXISTS idx_sessions_id_sorted ON sessions (session_id DESC);
+
CREATE INDEX IF NOT EXISTS idx_sessions_metadata ON sessions USING GIN (metadata);
-- Create foreign key if it doesn't exist
@@ -62,16 +64,23 @@ CREATE TABLE IF NOT EXISTS session_lookup (
session_id UUID NOT NULL,
participant_type participant_type NOT NULL,
participant_id UUID NOT NULL,
- PRIMARY KEY (developer_id, session_id, participant_type, participant_id),
- FOREIGN KEY (developer_id, session_id) REFERENCES sessions(developer_id, session_id)
+ PRIMARY KEY (
+ developer_id,
+ session_id,
+ participant_type,
+ participant_id
+ ),
+ FOREIGN KEY (developer_id, session_id) REFERENCES sessions (developer_id, session_id)
);
-- Create indexes if they don't exist
CREATE INDEX IF NOT EXISTS idx_session_lookup_by_session ON session_lookup (developer_id, session_id);
+
CREATE INDEX IF NOT EXISTS idx_session_lookup_by_participant ON session_lookup (developer_id, participant_id);
-- Create or replace the validation function
-CREATE OR REPLACE FUNCTION validate_participant() RETURNS trigger AS $$
+CREATE
+OR REPLACE FUNCTION validate_participant () RETURNS trigger AS $$
BEGIN
IF NEW.participant_type = 'user' THEN
PERFORM 1 FROM users WHERE developer_id = NEW.developer_id AND user_id = NEW.participant_id;
@@ -101,7 +110,6 @@ BEGIN
FOR EACH ROW
EXECUTE FUNCTION validate_participant();
END IF;
-
IF NOT EXISTS (
SELECT 1 FROM pg_trigger WHERE tgname = 'trg_validate_participant_before_update'
) THEN
diff --git a/memory-store/migrations/000010_tasks.down.sql b/memory-store/migrations/000010_tasks.down.sql
index b7f758779..84608ea71 100644
--- a/memory-store/migrations/000010_tasks.down.sql
+++ b/memory-store/migrations/000010_tasks.down.sql
@@ -4,11 +4,16 @@ BEGIN;
DO $$
BEGIN
IF EXISTS (
- SELECT 1
- FROM information_schema.table_constraints
- WHERE constraint_name = 'fk_tools_task_id'
+ SELECT
+ 1
+ FROM
+ information_schema.table_constraints
+ WHERE
+ constraint_name = 'fk_tools_task_id'
) THEN
- ALTER TABLE tools DROP CONSTRAINT fk_tools_task_id;
+ ALTER TABLE tools
+ DROP CONSTRAINT fk_tools_task_id;
+
END IF;
END $$;
diff --git a/memory-store/migrations/000010_tasks.up.sql b/memory-store/migrations/000010_tasks.up.sql
index c2bfeb454..2ba6b7910 100644
--- a/memory-store/migrations/000010_tasks.up.sql
+++ b/memory-store/migrations/000010_tasks.up.sql
@@ -3,13 +3,22 @@ BEGIN;
-- Create tasks table if it doesn't exist
CREATE TABLE IF NOT EXISTS tasks (
developer_id UUID NOT NULL,
- canonical_name CITEXT NOT NULL CONSTRAINT ct_tasks_canonical_name_length CHECK (length(canonical_name) >= 1 AND length(canonical_name) <= 255),
+ canonical_name CITEXT NOT NULL CONSTRAINT ct_tasks_canonical_name_length CHECK (
+ length(canonical_name) >= 1
+ AND length(canonical_name) <= 255
+ ),
agent_id UUID NOT NULL,
task_id UUID NOT NULL,
- version INTEGER NOT NULL DEFAULT 1,
+ VERSION INTEGER NOT NULL DEFAULT 1,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- name TEXT NOT NULL CONSTRAINT ct_tasks_name_length CHECK (length(name) >= 1 AND length(name) <= 255),
- description TEXT DEFAULT NULL CONSTRAINT ct_tasks_description_length CHECK (description IS NULL OR length(description) <= 1000),
+ name TEXT NOT NULL CONSTRAINT ct_tasks_name_length CHECK (
+ length(name) >= 1
+ AND length(name) <= 255
+ ),
+ description TEXT DEFAULT NULL CONSTRAINT ct_tasks_description_length CHECK (
+ description IS NULL
+ OR length(description) <= 1000
+ ),
input_schema JSON NOT NULL,
inherit_tools BOOLEAN DEFAULT FALSE,
workflows JSON[] DEFAULT ARRAY[]::JSON[],
@@ -17,10 +26,8 @@ CREATE TABLE IF NOT EXISTS tasks (
metadata JSONB DEFAULT '{}'::JSONB,
CONSTRAINT pk_tasks PRIMARY KEY (developer_id, task_id),
CONSTRAINT uq_tasks_canonical_name_unique UNIQUE (developer_id, canonical_name),
- CONSTRAINT uq_tasks_version_unique UNIQUE (task_id, version),
- CONSTRAINT fk_tasks_agent
- FOREIGN KEY (developer_id, agent_id)
- REFERENCES agents(developer_id, agent_id),
+ CONSTRAINT uq_tasks_version_unique UNIQUE (task_id, VERSION),
+ CONSTRAINT fk_tasks_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id),
CONSTRAINT ct_tasks_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$')
);
diff --git a/memory-store/migrations/000011_executions.up.sql b/memory-store/migrations/000011_executions.up.sql
index 74ab5bf97..cf0666136 100644
--- a/memory-store/migrations/000011_executions.up.sql
+++ b/memory-store/migrations/000011_executions.up.sql
@@ -7,21 +7,16 @@ CREATE TABLE IF NOT EXISTS executions (
task_version INTEGER NOT NULL,
execution_id UUID NOT NULL,
input JSONB NOT NULL,
-
-- NOTE: These will be generated using continuous aggregates from transitions
-- status TEXT DEFAULT 'pending',
-- output JSONB DEFAULT NULL,
-- error TEXT DEFAULT NULL,
-- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
-
metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
-
CONSTRAINT pk_executions PRIMARY KEY (execution_id),
- CONSTRAINT fk_executions_developer
- FOREIGN KEY (developer_id) REFERENCES developers(developer_id),
- CONSTRAINT fk_executions_task
- FOREIGN KEY (developer_id, task_id) REFERENCES tasks(developer_id, task_id)
+ CONSTRAINT fk_executions_developer FOREIGN KEY (developer_id) REFERENCES developers (developer_id),
+ CONSTRAINT fk_executions_task FOREIGN KEY (developer_id, task_id) REFERENCES tasks (developer_id, task_id)
);
-- Create sorted index on execution_id (optimized for UUID v7)
@@ -38,4 +33,5 @@ CREATE INDEX IF NOT EXISTS idx_executions_metadata ON executions USING GIN (meta
-- Add comment to table (comments are idempotent by default)
COMMENT ON TABLE executions IS 'Stores executions associated with AI agents for developers';
+
COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000012_transitions.down.sql b/memory-store/migrations/000012_transitions.down.sql
index 590ebc901..faac2e308 100644
--- a/memory-store/migrations/000012_transitions.down.sql
+++ b/memory-store/migrations/000012_transitions.down.sql
@@ -1,15 +1,20 @@
BEGIN;
-- Drop foreign key constraint if exists
-ALTER TABLE IF EXISTS transitions
- DROP CONSTRAINT IF EXISTS fk_transitions_execution;
+ALTER TABLE IF EXISTS transitions
+DROP CONSTRAINT IF EXISTS fk_transitions_execution;
-- Drop indexes if they exist
DROP INDEX IF EXISTS idx_transitions_metadata;
+
DROP INDEX IF EXISTS idx_transitions_execution_id_sorted;
+
DROP INDEX IF EXISTS idx_transitions_transition_id_sorted;
+
DROP INDEX IF EXISTS idx_transitions_label;
+
DROP INDEX IF EXISTS idx_transitions_next;
+
DROP INDEX IF EXISTS idx_transitions_current;
-- Drop the transitions table (this will also remove it from hypertables)
@@ -17,10 +22,12 @@ DROP TABLE IF EXISTS transitions;
-- Drop custom types if they exist
DROP TYPE IF EXISTS transition_cursor;
+
DROP TYPE IF EXISTS transition_type;
-- Drop the trigger and function for transition validation
DROP TRIGGER IF EXISTS validate_transition ON transitions;
-DROP FUNCTION IF EXISTS check_valid_transition();
+
+DROP FUNCTION IF EXISTS check_valid_transition ();
COMMIT;
diff --git a/memory-store/migrations/000012_transitions.up.sql b/memory-store/migrations/000012_transitions.up.sql
index 515af713c..6fd7dbcd1 100644
--- a/memory-store/migrations/000012_transitions.up.sql
+++ b/memory-store/migrations/000012_transitions.up.sql
@@ -46,8 +46,19 @@ CREATE TABLE IF NOT EXISTS transitions (
);
-- Convert to hypertable if not already
-SELECT create_hypertable('transitions', by_range('created_at', INTERVAL '1 day'), if_not_exists => TRUE);
-SELECT add_dimension('transitions', by_hash('execution_id', 2), if_not_exists => TRUE);
+SELECT
+ create_hypertable (
+ 'transitions',
+ by_range ('created_at', INTERVAL '1 day'),
+ if_not_exists => TRUE
+ );
+
+SELECT
+ add_dimension (
+ 'transitions',
+ by_hash ('execution_id', 2),
+ if_not_exists => TRUE
+ );
-- Create indexes if they don't exist
DO $$
@@ -94,7 +105,8 @@ END $$;
COMMENT ON TABLE transitions IS 'Stores transitions associated with AI agents for developers';
-- Create a trigger function that checks for valid transitions
-CREATE OR REPLACE FUNCTION check_valid_transition() RETURNS trigger AS $$
+CREATE
+OR REPLACE FUNCTION check_valid_transition () RETURNS trigger AS $$
DECLARE
previous_type transition_type;
valid_next_types transition_type[];
@@ -146,9 +158,7 @@ END;
$$ LANGUAGE plpgsql;
-- Create a trigger on the transitions table
-CREATE TRIGGER validate_transition
-BEFORE INSERT ON transitions
-FOR EACH ROW
-EXECUTE FUNCTION check_valid_transition();
+CREATE TRIGGER validate_transition BEFORE INSERT ON transitions FOR EACH ROW
+EXECUTE FUNCTION check_valid_transition ();
COMMIT;
\ No newline at end of file
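Together the two calls above make transitions a two-dimensional hypertable: one-day time ranges on created_at plus two hash partitions on execution_id, so a single execution's rows cluster into a handful of chunks. A small inspection sketch using TimescaleDB's stock show_chunks helper (DSN hypothetical):

    import psycopg

    DSN = "postgresql://localhost/memory_store"  # hypothetical connection string

    with psycopg.connect(DSN) as conn:
        # each returned row names one physical chunk of the hypertable
        chunks = conn.execute("SELECT show_chunks('transitions');").fetchall()
        print(f"transitions currently spans {len(chunks)} chunk(s)")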
diff --git a/memory-store/migrations/000013_executions_continuous_view.down.sql b/memory-store/migrations/000013_executions_continuous_view.down.sql
index d833ca4d4..fcab7b023 100644
--- a/memory-store/migrations/000013_executions_continuous_view.down.sql
+++ b/memory-store/migrations/000013_executions_continuous_view.down.sql
@@ -1,13 +1,15 @@
BEGIN;
-- Drop the continuous aggregate policy
-SELECT remove_continuous_aggregate_policy('latest_transitions');
+SELECT
+ remove_continuous_aggregate_policy ('latest_transitions');
-- Drop the views
DROP VIEW IF EXISTS latest_executions;
+
DROP MATERIALIZED VIEW IF EXISTS latest_transitions;
-- Drop the helper function
-DROP FUNCTION IF EXISTS to_text(transition_type);
+DROP FUNCTION IF EXISTS to_text (transition_type);
COMMIT;
diff --git a/memory-store/migrations/000013_executions_continuous_view.up.sql b/memory-store/migrations/000013_executions_continuous_view.up.sql
index b33530824..43285efbc 100644
--- a/memory-store/migrations/000013_executions_continuous_view.up.sql
+++ b/memory-store/migrations/000013_executions_continuous_view.up.sql
@@ -1,39 +1,39 @@
BEGIN;
-- create a function to convert transition_type to text (needed because ::text is stable, not immutable)
-create or replace function to_text(transition_type)
-RETURNS text AS
-$$
+CREATE
+OR REPLACE function to_text (transition_type) RETURNS text AS $$
select $1
$$ STRICT IMMUTABLE LANGUAGE sql;
-- create a continuous view that aggregates the transitions table
-create materialized view if not exists latest_transitions
-with
+CREATE MATERIALIZED VIEW IF NOT EXISTS latest_transitions
+WITH
(
timescaledb.continuous,
- timescaledb.materialized_only = false
- ) as
-select
- time_bucket ('1 day', created_at) as bucket,
+ timescaledb.materialized_only = FALSE
+ ) AS
+SELECT
+ time_bucket ('1 day', created_at) AS bucket,
execution_id,
- count(*) as total_transitions,
- state_agg (created_at, to_text (type)) as state,
- max(created_at) as created_at,
- last (type, created_at) as type,
- last (step_definition, created_at) as step_definition,
- last (step_label, created_at) as step_label,
- last (current_step, created_at) as current_step,
- last (next_step, created_at) as next_step,
- last (output, created_at) as output,
- last (task_token, created_at) as task_token,
- last (metadata, created_at) as metadata
-from
+ count(*) AS total_transitions,
+ state_agg (created_at, to_text (type)) AS state,
+ max(created_at) AS created_at,
+ last (type, created_at) AS type,
+ last (step_definition, created_at) AS step_definition,
+ last (step_label, created_at) AS step_label,
+ last (current_step, created_at) AS current_step,
+ last (next_step, created_at) AS next_step,
+ last (output, created_at) AS output,
+ last (task_token, created_at) AS task_token,
+ last (metadata, created_at) AS metadata
+FROM
transitions
-group by
+GROUP BY
bucket,
execution_id
-with no data;
+WITH
+ no data;
SELECT
add_continuous_aggregate_policy (
@@ -44,7 +44,7 @@ SELECT
);
-- Create a view that combines executions with their latest transitions
-create or replace view latest_executions as
+CREATE OR REPLACE VIEW latest_executions AS
SELECT
e.developer_id,
e.task_id,
@@ -53,7 +53,7 @@ SELECT
e.input,
e.metadata,
e.created_at,
- lt.created_at as updated_at,
+ lt.created_at AS updated_at,
-- Map transition types to status using CASE statement
CASE lt.type::text
WHEN 'init' THEN 'starting'
@@ -66,20 +66,20 @@ SELECT
WHEN 'error' THEN 'failed'
WHEN 'cancelled' THEN 'cancelled'
ELSE 'queued'
- END as status,
+ END AS status,
lt.output,
-- Extract error from output if type is 'error'
CASE
WHEN lt.type::text = 'error' THEN lt.output ->> 'error'
ELSE NULL
- END as error,
+ END AS error,
lt.total_transitions,
lt.current_step,
lt.next_step,
lt.step_definition,
lt.step_label,
lt.task_token,
- lt.metadata as transition_metadata
+ lt.metadata AS transition_metadata
FROM
executions e,
latest_transitions lt
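Because latest_transitions is created with materialized_only = false, reads merge the materialized buckets with not-yet-aggregated rows, so latest_executions can be polled directly for a derived status. A usage sketch (connection assumed; DSN handling omitted):

    import psycopg

    def execution_status(conn: psycopg.Connection, execution_id) -> str | None:
        # `status` is the CASE mapping over the newest transition type above
        row = conn.execute(
            "SELECT status FROM latest_executions WHERE execution_id = %s;",
            (execution_id,),
        ).fetchone()
        return row[0] if row else None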
diff --git a/memory-store/migrations/000014_temporal_lookup.up.sql b/memory-store/migrations/000014_temporal_lookup.up.sql
index 1650ab3ac..724ee1340 100644
--- a/memory-store/migrations/000014_temporal_lookup.up.sql
+++ b/memory-store/migrations/000014_temporal_lookup.up.sql
@@ -1,17 +1,16 @@
BEGIN;
-- Create temporal_executions_lookup table
-CREATE TABLE
- IF NOT EXISTS temporal_executions_lookup (
- execution_id UUID NOT NULL,
- id TEXT NOT NULL,
- run_id TEXT,
- first_execution_run_id TEXT,
- result_run_id TEXT,
- created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- CONSTRAINT pk_temporal_executions_lookup PRIMARY KEY (execution_id, id),
- CONSTRAINT fk_temporal_executions_lookup_execution FOREIGN KEY (execution_id) REFERENCES executions (execution_id)
- );
+CREATE TABLE IF NOT EXISTS temporal_executions_lookup (
+ execution_id UUID NOT NULL,
+ id TEXT NOT NULL,
+ run_id TEXT,
+ first_execution_run_id TEXT,
+ result_run_id TEXT,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ CONSTRAINT pk_temporal_executions_lookup PRIMARY KEY (execution_id, id),
+ CONSTRAINT fk_temporal_executions_lookup_execution FOREIGN KEY (execution_id) REFERENCES executions (execution_id)
+);
-- Create sorted index on execution_id (optimized for UUID v7)
CREATE INDEX IF NOT EXISTS idx_temporal_executions_lookup_execution_id_sorted ON temporal_executions_lookup (execution_id DESC);
From 47519108ab3a9d678e5bfedf94742ca577e665a9 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sat, 14 Dec 2024 22:58:23 +0530
Subject: [PATCH 012/274] feat(memory-store): Add entry tables
Signed-off-by: Diwank Singh Tomer
---
.../migrations/000015_entries.down.sql | 16 ++++++
memory-store/migrations/000015_entries.up.sql | 55 +++++++++++++++++++
.../000016_entry_relations.down.sql | 12 ++++
.../migrations/000016_entry_relations.up.sql | 55 +++++++++++++++++++
4 files changed, 138 insertions(+)
create mode 100644 memory-store/migrations/000015_entries.down.sql
create mode 100644 memory-store/migrations/000015_entries.up.sql
create mode 100644 memory-store/migrations/000016_entry_relations.down.sql
create mode 100644 memory-store/migrations/000016_entry_relations.up.sql
diff --git a/memory-store/migrations/000015_entries.down.sql b/memory-store/migrations/000015_entries.down.sql
new file mode 100644
index 000000000..36ec58280
--- /dev/null
+++ b/memory-store/migrations/000015_entries.down.sql
@@ -0,0 +1,16 @@
+BEGIN;
+
+-- Drop foreign key constraint if it exists
+ALTER TABLE IF EXISTS entries
+DROP CONSTRAINT IF EXISTS fk_entries_session;
+
+-- Drop indexes
+DROP INDEX IF EXISTS idx_entries_by_session;
+
+-- Drop the hypertable (this will also drop the table)
+DROP TABLE IF EXISTS entries;
+
+-- Drop the enum type
+DROP TYPE IF EXISTS chat_role;
+
+COMMIT;
diff --git a/memory-store/migrations/000015_entries.up.sql b/memory-store/migrations/000015_entries.up.sql
new file mode 100644
index 000000000..e03573464
--- /dev/null
+++ b/memory-store/migrations/000015_entries.up.sql
@@ -0,0 +1,55 @@
+BEGIN;
+
+-- Create chat_role enum
+CREATE TYPE chat_role AS ENUM('user', 'assistant', 'tool', 'system');
+
+-- Create entries table
+CREATE TABLE IF NOT EXISTS entries (
+ session_id UUID NOT NULL,
+ entry_id UUID NOT NULL,
+ source TEXT NOT NULL,
+ role chat_role NOT NULL,
+ event_type TEXT NOT NULL DEFAULT 'message.create',
+ name TEXT,
+ content JSONB[] NOT NULL,
+ tool_call_id TEXT DEFAULT NULL,
+ tool_calls JSONB[] NOT NULL DEFAULT '{}',
+ token_count INTEGER NOT NULL,
+ model TEXT NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ timestamp TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ CONSTRAINT pk_entries PRIMARY KEY (session_id, entry_id, created_at)
+);
+
+-- Convert to hypertable if not already
+SELECT
+ create_hypertable (
+ 'entries',
+ by_range ('created_at', INTERVAL '1 day'),
+ if_not_exists => TRUE
+ );
+
+SELECT
+ add_dimension (
+ 'entries',
+ by_hash ('session_id', 2),
+ if_not_exists => TRUE
+ );
+
+-- Create indexes for efficient querying
+CREATE INDEX IF NOT EXISTS idx_entries_by_session ON entries (session_id DESC, entry_id DESC);
+
+-- Add foreign key constraint to sessions table
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_constraint WHERE conname = 'fk_entries_session'
+ ) THEN
+ ALTER TABLE entries
+ ADD CONSTRAINT fk_entries_session
+ FOREIGN KEY (session_id)
+ REFERENCES sessions(session_id);
+ END IF;
+END $$;
+
+COMMIT;
\ No newline at end of file
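Note that created_at sits inside the primary key because hypertables require the partitioning column in every unique constraint. Appending a message is then a plain insert; a sketch assuming uuid_extensions for UUIDv7 ids and psycopg's Jsonb wrapper for the JSONB[] content column (the model name is a placeholder):

    import psycopg
    from psycopg.types.json import Jsonb
    from uuid_extensions import uuid7

    def append_entry(conn: psycopg.Connection, session_id, text: str, tokens: int) -> None:
        conn.execute(
            """
            INSERT INTO entries (session_id, entry_id, source, role, content, token_count, model)
            VALUES (%s, %s, %s, %s, %s, %s, %s);
            """,
            (
                session_id,  # must already exist in sessions (fk_entries_session)
                uuid7(),
                "api_request",
                "user",
                [Jsonb({"type": "text", "text": text})],  # one element per content part
                tokens,
                "gpt-4o-mini",
            ),
        )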
diff --git a/memory-store/migrations/000016_entry_relations.down.sql b/memory-store/migrations/000016_entry_relations.down.sql
new file mode 100644
index 000000000..6d54b0c08
--- /dev/null
+++ b/memory-store/migrations/000016_entry_relations.down.sql
@@ -0,0 +1,12 @@
+BEGIN;
+
+-- Drop trigger first
+DROP TRIGGER IF EXISTS trg_enforce_leaf_nodes ON entry_relations;
+
+-- Drop function
+DROP FUNCTION IF EXISTS enforce_leaf_nodes ();
+
+-- Drop the table and its constraints
+DROP TABLE IF EXISTS entry_relations CASCADE;
+
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000016_entry_relations.up.sql b/memory-store/migrations/000016_entry_relations.up.sql
new file mode 100644
index 000000000..c61c7cd24
--- /dev/null
+++ b/memory-store/migrations/000016_entry_relations.up.sql
@@ -0,0 +1,55 @@
+BEGIN;
+
+-- Create citext extension if not exists
+CREATE EXTENSION IF NOT EXISTS citext;
+
+-- Create entry_relations table
+CREATE TABLE IF NOT EXISTS entry_relations (
+ session_id UUID NOT NULL,
+ head UUID NOT NULL,
+ relation CITEXT NOT NULL,
+ tail UUID NOT NULL,
+ is_leaf BOOLEAN NOT NULL DEFAULT FALSE,
+ CONSTRAINT pk_entry_relations PRIMARY KEY (session_id, head, relation, tail)
+);
+
+-- Add foreign key constraint to sessions table
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_constraint WHERE conname = 'fk_entry_relations_session'
+ ) THEN
+ ALTER TABLE entry_relations
+ ADD CONSTRAINT fk_entry_relations_session
+ FOREIGN KEY (session_id)
+ REFERENCES sessions(session_id);
+ END IF;
+END $$;
+
+-- Create indexes for efficient querying
+CREATE INDEX idx_entry_relations_components ON entry_relations (session_id, head, relation, tail);
+
+CREATE INDEX idx_entry_relations_leaf ON entry_relations (session_id, relation, is_leaf);
+
+CREATE
+OR REPLACE FUNCTION enforce_leaf_nodes () RETURNS TRIGGER AS $$
+BEGIN
+ IF NEW.is_leaf THEN
+ -- Ensure no other relations point to this leaf node as a head
+ IF EXISTS (
+ SELECT 1 FROM entry_relations
+ WHERE tail = NEW.head AND session_id = NEW.session_id
+ ) THEN
+ RAISE EXCEPTION 'Cannot assign relations to a leaf node.';
+ END IF;
+ END IF;
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+CREATE TRIGGER trg_enforce_leaf_nodes BEFORE INSERT
+OR
+UPDATE ON entry_relations FOR EACH ROW
+EXECUTE FUNCTION enforce_leaf_nodes ();
+
+COMMIT;
\ No newline at end of file
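The enforce_leaf_nodes trigger rejects any row flagged is_leaf whose head already has an incoming edge in the same session. A failure-path sketch (session and entry ids assumed to exist; the RAISE surfaces as psycopg.errors.RaiseException):

    import psycopg

    def link(conn, session_id, head, relation, tail, is_leaf=False):
        conn.execute(
            "INSERT INTO entry_relations (session_id, head, relation, tail, is_leaf)"
            " VALUES (%s, %s, %s, %s, %s);",
            (session_id, head, relation, tail, is_leaf),
        )

    # link(conn, sid, a, "replies_to", b)                # edge a -> b is accepted
    # link(conn, sid, b, "replies_to", c, is_leaf=True)  # rejected: b already has an
    #                                                    # incoming edge (tail = b)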
From 418a504aeb5f501303000da28eedf45f0b708435 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Sat, 14 Dec 2024 22:14:32 +0300
Subject: [PATCH 013/274] feat(memory-store): Add Agent queries
---
.gitignore | 1 +
agents-api/agents_api/autogen/Agents.py | 75 +++---
.../agents_api/queries/agent/__init__.py | 21 ++
.../agents_api/queries/agent/create_agent.py | 138 ++++++++++
.../queries/agent/create_or_update_agent.py | 114 ++++++++
.../agents_api/queries/agent/delete_agent.py | 94 +++++++
.../agents_api/queries/agent/get_agent.py | 69 +++++
.../agents_api/queries/agent/list_agents.py | 100 +++++++
.../agents_api/queries/agent/patch_agent.py | 81 ++++++
.../agents_api/queries/agent/update_agent.py | 73 +++++
agents-api/agents_api/queries/utils.py | 254 ++++++++++++++++++
agents-api/pyproject.toml | 3 +
agents-api/uv.lock | 44 +++
.../integrations/autogen/Agents.py | 75 +++---
memory-store/migrations/000007_ann.up.sql | 14 +
typespec/agents/models.tsp | 5 +-
typespec/common/scalars.tsp | 17 ++
.../@typespec/openapi3/openapi-1.0.0.yaml | 50 +++-
18 files changed, 1147 insertions(+), 81 deletions(-)
create mode 100644 agents-api/agents_api/queries/agent/__init__.py
create mode 100644 agents-api/agents_api/queries/agent/create_agent.py
create mode 100644 agents-api/agents_api/queries/agent/create_or_update_agent.py
create mode 100644 agents-api/agents_api/queries/agent/delete_agent.py
create mode 100644 agents-api/agents_api/queries/agent/get_agent.py
create mode 100644 agents-api/agents_api/queries/agent/list_agents.py
create mode 100644 agents-api/agents_api/queries/agent/patch_agent.py
create mode 100644 agents-api/agents_api/queries/agent/update_agent.py
create mode 100644 agents-api/agents_api/queries/utils.py
diff --git a/.gitignore b/.gitignore
index 0adb06f10..591aabab1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,4 @@ ngrok*
*/node_modules/
.aider*
.vscode/
+schema.sql
diff --git a/agents-api/agents_api/autogen/Agents.py b/agents-api/agents_api/autogen/Agents.py
index 5dab2c7b2..7390b6338 100644
--- a/agents-api/agents_api/autogen/Agents.py
+++ b/agents-api/agents_api/autogen/Agents.py
@@ -25,16 +25,17 @@ class Agent(BaseModel):
"""
When this resource was updated as UTC date-time
"""
- name: Annotated[
- str,
- Field(
- max_length=120,
- pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
- ),
- ] = ""
+ name: Annotated[str, Field(max_length=255, min_length=1)]
"""
Name of the agent
"""
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ Canonical name of the agent
+ """
about: str = ""
"""
About the agent
@@ -62,16 +63,17 @@ class CreateAgentRequest(BaseModel):
populate_by_name=True,
)
metadata: dict[str, Any] | None = None
- name: Annotated[
- str,
- Field(
- max_length=120,
- pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
- ),
- ] = ""
+ name: Annotated[str, Field(max_length=255, min_length=1)]
"""
Name of the agent
"""
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ Canonical name of the agent
+ """
about: str = ""
"""
About the agent
@@ -96,16 +98,17 @@ class CreateOrUpdateAgentRequest(CreateAgentRequest):
)
id: UUID
metadata: dict[str, Any] | None = None
- name: Annotated[
- str,
- Field(
- max_length=120,
- pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
- ),
- ] = ""
+ name: Annotated[str, Field(max_length=255, min_length=1)]
"""
Name of the agent
"""
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ Canonical name of the agent
+ """
about: str = ""
"""
About the agent
@@ -133,16 +136,17 @@ class PatchAgentRequest(BaseModel):
populate_by_name=True,
)
metadata: dict[str, Any] | None = None
- name: Annotated[
- str,
- Field(
- max_length=120,
- pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
- ),
- ] = ""
+ name: Annotated[str | None, Field(max_length=255, min_length=1)] = None
"""
Name of the agent
"""
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ Canonical name of the agent
+ """
about: str = ""
"""
About the agent
@@ -170,16 +174,17 @@ class UpdateAgentRequest(BaseModel):
populate_by_name=True,
)
metadata: dict[str, Any] | None = None
- name: Annotated[
- str,
- Field(
- max_length=120,
- pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
- ),
- ] = ""
+ name: Annotated[str, Field(max_length=255, min_length=1)]
"""
Name of the agent
"""
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ Canonical name of the agent
+ """
about: str = ""
"""
About the agent
diff --git a/agents-api/agents_api/queries/agent/__init__.py b/agents-api/agents_api/queries/agent/__init__.py
new file mode 100644
index 000000000..709b051ea
--- /dev/null
+++ b/agents-api/agents_api/queries/agent/__init__.py
@@ -0,0 +1,21 @@
+"""
+The `agent` module within the `queries` package provides a comprehensive suite of SQL query functions for managing agents in the PostgreSQL database. This includes:
+
+- Creating new agents
+- Updating existing agents
+- Retrieving details about specific agents
+- Listing agents with filtering and pagination
+- Deleting agents from the database
+
+Each function in this module constructs and returns SQL queries along with their parameters for database operations.
+"""
+
+# ruff: noqa: F401, F403, F405
+
+from .create_agent import create_agent
+from .create_or_update_agent import create_or_update_agent_query
+from .delete_agent import delete_agent_query
+from .get_agent import get_agent_query
+from .list_agents import list_agents_query
+from .patch_agent import patch_agent_query
+from .update_agent import update_agent_query
diff --git a/agents-api/agents_api/queries/agent/create_agent.py b/agents-api/agents_api/queries/agent/create_agent.py
new file mode 100644
index 000000000..30d73d179
--- /dev/null
+++ b/agents-api/agents_api/queries/agent/create_agent.py
@@ -0,0 +1,138 @@
+"""
+This module contains the functionality for creating agents in the PostgreSQL database.
+It includes functions to construct and execute SQL queries for inserting new agent records.
+"""
+
+from typing import Any, TypeVar
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+from pydantic import ValidationError
+from uuid_extensions import uuid7
+
+from ...autogen.openapi_model import Agent, CreateAgentRequest
+from ...metrics.counters import increase_counter
+from ..utils import (
+ generate_canonical_name,
+ pg_query,
+ partialclass,
+ rewrap_exceptions,
+ wrap_in_class,
+)
+
+ModelT = TypeVar("ModelT", bound=Any)
+T = TypeVar("T")
+
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ psycopg_errors.UniqueViolation: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="An agent with this canonical name already exists for this developer.",
+ ),
+ psycopg_errors.CheckViolation: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="The provided data violates one or more constraints. Please check the input values.",
+ ),
+ ValidationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Input validation failed. Please check the provided data.",
+ ),
+ TypeError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="A type mismatch occurred. Please review the input.",
+ ),
+ }
+)
+@wrap_in_class(
+ Agent,
+ one=True,
+ transform=lambda d: {"id": d["agent_id"], **d},
+ _kind="inserted",
+)
+@pg_query
+@increase_counter("create_agent")
+@beartype
+def create_agent(
+ *,
+ developer_id: UUID,
+ agent_id: UUID | None = None,
+ data: CreateAgentRequest,
+) -> tuple[str, dict]:
+ """
+ Constructs and executes a SQL query to create a new agent in the database.
+
+ Parameters:
+ agent_id (UUID | None): The unique identifier for the agent.
+ developer_id (UUID): The unique identifier for the developer creating the agent.
+ data (CreateAgentRequest): The data for the new agent.
+
+ Returns:
+ tuple[str, dict]: SQL query and parameters for creating the agent.
+ """
+ agent_id = agent_id or uuid7()
+
+ # Ensure instructions is a list
+ data.instructions = (
+ data.instructions
+ if isinstance(data.instructions, list)
+ else [data.instructions]
+ )
+
+ # Convert default_settings to dict if it exists
+ default_settings = data.default_settings.model_dump() if data.default_settings else None
+
+ # Set default values
+ data.metadata = data.metadata or None
+ data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
+
+ query = """
+ INSERT INTO agents (
+ developer_id,
+ agent_id,
+ canonical_name,
+ name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings
+ )
+ VALUES (
+ %(developer_id)s,
+ %(agent_id)s,
+ %(canonical_name)s,
+ %(name)s,
+ %(about)s,
+ %(instructions)s,
+ %(model)s,
+ %(metadata)s,
+ %(default_settings)s
+ )
+ RETURNING *;
+ """
+
+ params = {
+ "developer_id": developer_id,
+ "agent_id": agent_id,
+ "canonical_name": data.canonical_name,
+ "name": data.name,
+ "about": data.about,
+ "instructions": data.instructions,
+ "model": data.model,
+ "metadata": data.metadata,
+ "default_settings": default_settings,
+ }
+
+ return query, params
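A hedged usage sketch for the stack above: pg_query should execute the statement and wrap_in_class hand back a validated Agent, with canonical_name derived from the name when omitted (generate_canonical_name lives in queries/utils.py, added in this patch). Whether the call must be awaited depends on pg_query's implementation, which is not shown here:

    from uuid import uuid4

    from agents_api.autogen.openapi_model import CreateAgentRequest
    from agents_api.queries.agent import create_agent

    agent = create_agent(
        developer_id=uuid4(),  # placeholder; must match an existing developer row
        data=CreateAgentRequest(name="Support bot", model="gpt-4o-mini"),
    )
    print(agent.id, agent.canonical_name)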
diff --git a/agents-api/agents_api/queries/agent/create_or_update_agent.py b/agents-api/agents_api/queries/agent/create_or_update_agent.py
new file mode 100644
index 000000000..e403c7bcf
--- /dev/null
+++ b/agents-api/agents_api/queries/agent/create_or_update_agent.py
@@ -0,0 +1,114 @@
+"""
+This module contains the functionality for creating or updating agents in the PostgreSQL database.
+It constructs and executes SQL queries to insert a new agent or update an existing agent's details based on agent ID and developer ID.
+"""
+
+from typing import Any, TypeVar
+from uuid import UUID
+
+from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
+from fastapi import HTTPException
+from ...metrics.counters import increase_counter
+from ..utils import (
+ generate_canonical_name,
+ pg_query,
+ partialclass,
+ rewrap_exceptions,
+ wrap_in_class,
+)
+
+from beartype import beartype
+from psycopg import errors as psycopg_errors
+
+ModelT = TypeVar("ModelT", bound=Any)
+T = TypeVar("T")
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist."
+ )
+ }
+)
+@wrap_in_class(
+ Agent,
+ one=True,
+ transform=lambda d: {"id": d["agent_id"], **d},
+ _kind="inserted",
+)
+@pg_query
+@increase_counter("create_or_update_agent")
+@beartype
+def create_or_update_agent_query(
+ *,
+ agent_id: UUID,
+ developer_id: UUID,
+ data: CreateOrUpdateAgentRequest
+) -> tuple[str, dict]:
+ """
+ Constructs the SQL queries to create a new agent or update an existing agent's details.
+
+ Args:
+ agent_id (UUID): The UUID of the agent to create or update.
+ developer_id (UUID): The UUID of the developer owning the agent.
+        data (CreateOrUpdateAgentRequest): The agent fields to insert or update.
+
+    Returns:
+        tuple[str, dict]: A tuple containing the SQL query and its parameters.
+ """
+
+ # Ensure instructions is a list
+ data.instructions = (
+ data.instructions
+ if isinstance(data.instructions, list)
+ else [data.instructions]
+ )
+
+ # Convert default_settings to dict if it exists
+ default_settings = data.default_settings.model_dump() if data.default_settings else None
+
+ # Set default values
+ data.metadata = data.metadata or None
+ data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
+
+ query = """
+ INSERT INTO agents (
+ developer_id,
+ agent_id,
+ canonical_name,
+ name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings
+ )
+ VALUES (
+ %(developer_id)s,
+ %(agent_id)s,
+ %(canonical_name)s,
+ %(name)s,
+ %(about)s,
+ %(instructions)s,
+ %(model)s,
+ %(metadata)s,
+ %(default_settings)s
+ )
+ RETURNING *;
+ """
+
+ params = {
+ "developer_id": developer_id,
+ "agent_id": agent_id,
+ "canonical_name": data.canonical_name,
+ "name": data.name,
+ "about": data.about,
+ "instructions": data.instructions,
+ "model": data.model,
+ "metadata": data.metadata,
+ "default_settings": default_settings,
+ }
+
+ return (query, params)
diff --git a/agents-api/agents_api/queries/agent/delete_agent.py b/agents-api/agents_api/queries/agent/delete_agent.py
new file mode 100644
index 000000000..4bd14f8ec
--- /dev/null
+++ b/agents-api/agents_api/queries/agent/delete_agent.py
@@ -0,0 +1,94 @@
+"""
+This module contains the functionality for deleting agents from the PostgreSQL database.
+It constructs and executes SQL queries to remove agent records and associated data.
+"""
+
+from typing import Any, TypeVar
+from uuid import UUID
+
+from fastapi import HTTPException
+from ...metrics.counters import increase_counter
+from ..utils import (
+ pg_query,
+ partialclass,
+ rewrap_exceptions,
+ wrap_in_class,
+)
+from beartype import beartype
+from psycopg import errors as psycopg_errors
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
+
+ModelT = TypeVar("ModelT", bound=Any)
+T = TypeVar("T")
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist."
+ )
+ }
+ # TODO: Add more exceptions
+)
+@wrap_in_class(
+ ResourceDeletedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": UUID(d.pop("agent_id")),
+ "deleted_at": utcnow(),
+ "jobs": [],
+ },
+ _kind="deleted",
+)
+@pg_query
+@increase_counter("delete_agent")
+@beartype
+def delete_agent_query(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
+ """
+ Constructs the SQL queries to delete an agent and its related settings.
+
+ Args:
+ agent_id (UUID): The UUID of the agent to be deleted.
+ developer_id (UUID): The UUID of the developer owning the agent.
+
+ Returns:
+ tuple[list[str], dict]: A tuple containing the list of SQL queries and their parameters.
+ """
+
+ queries = [
+ """
+ -- Delete docs that were only associated with this agent
+ DELETE FROM docs
+ WHERE developer_id = %(developer_id)s
+ AND doc_id IN (
+ SELECT ad.doc_id
+ FROM agent_docs ad
+ WHERE ad.agent_id = %(agent_id)s
+ AND ad.developer_id = %(developer_id)s
+ );
+ """,
+ """
+ -- Delete agent_docs entries
+ DELETE FROM agent_docs
+ WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
+ """,
+ """
+ -- Delete tools related to the agent
+ DELETE FROM tools
+ WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
+ """,
+ """
+ -- Delete the agent
+ DELETE FROM agents
+ WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
+ """
+ ]
+
+ params = {
+ "agent_id": agent_id,
+ "developer_id": developer_id,
+ }
+
+ return (queries, params)
diff --git a/agents-api/agents_api/queries/agent/get_agent.py b/agents-api/agents_api/queries/agent/get_agent.py
new file mode 100644
index 000000000..e5368eea1
--- /dev/null
+++ b/agents-api/agents_api/queries/agent/get_agent.py
@@ -0,0 +1,69 @@
+"""
+This module contains the functionality for retrieving a single agent from the PostgreSQL database.
+It constructs and executes SQL queries to fetch agent details based on agent ID and developer ID.
+"""
+
+from typing import Any, TypeVar
+from uuid import UUID
+
+from fastapi import HTTPException
+from ...metrics.counters import increase_counter
+from ..utils import (
+ pg_query,
+ partialclass,
+ rewrap_exceptions,
+ wrap_in_class,
+)
+from beartype import beartype
+from psycopg import errors as psycopg_errors
+
+from ...autogen.openapi_model import Agent
+
+ModelT = TypeVar("ModelT", bound=Any)
+T = TypeVar("T")
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist."
+ )
+ }
+ # TODO: Add more exceptions
+)
+@wrap_in_class(Agent, one=True)
+@pg_query
+@increase_counter("get_agent")
+@beartype
+def get_agent_query(*, agent_id: UUID, developer_id: UUID) -> tuple[str, dict]:
+ """
+ Constructs the SQL query to retrieve an agent's details.
+
+ Args:
+ agent_id (UUID): The UUID of the agent to retrieve.
+ developer_id (UUID): The UUID of the developer owning the agent.
+
+ Returns:
+        tuple[str, dict]: A tuple containing the SQL query and its parameters.
+ """
+ query = """
+ SELECT
+ agent_id,
+ developer_id,
+ name,
+ canonical_name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings,
+ created_at,
+ updated_at
+ FROM
+ agents
+ WHERE
+ agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
+ """
+
+ return (query, {"agent_id": agent_id, "developer_id": developer_id})
diff --git a/agents-api/agents_api/queries/agent/list_agents.py b/agents-api/agents_api/queries/agent/list_agents.py
new file mode 100644
index 000000000..db46704cf
--- /dev/null
+++ b/agents-api/agents_api/queries/agent/list_agents.py
@@ -0,0 +1,100 @@
+"""
+This module contains the functionality for listing agents from the PostgreSQL database.
+It constructs and executes SQL queries to fetch a list of agents based on developer ID with pagination.
+"""
+
+from typing import Any, Literal, TypeVar
+from uuid import UUID
+
+from fastapi import HTTPException
+from ...metrics.counters import increase_counter
+from ..utils import (
+ pg_query,
+ partialclass,
+ rewrap_exceptions,
+ wrap_in_class,
+)
+from beartype import beartype
+from psycopg import errors as psycopg_errors
+
+from ...autogen.openapi_model import Agent
+
+ModelT = TypeVar("ModelT", bound=Any)
+T = TypeVar("T")
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist."
+ )
+ }
+ # TODO: Add more exceptions
+)
+@wrap_in_class(Agent)
+@pg_query
+@increase_counter("list_agents")
+@beartype
+def list_agents_query(
+ *,
+ developer_id: UUID,
+ limit: int = 100,
+ offset: int = 0,
+ sort_by: Literal["created_at", "updated_at"] = "created_at",
+ direction: Literal["asc", "desc"] = "desc",
+ metadata_filter: dict[str, Any] = {},
+) -> tuple[str, dict]:
+ """
+ Constructs query to list agents for a developer with pagination.
+
+ Args:
+ developer_id: UUID of the developer
+ limit: Maximum number of records to return
+ offset: Number of records to skip
+ sort_by: Field to sort by
+ direction: Sort direction ('asc' or 'desc')
+ metadata_filter: Optional metadata filters
+
+ Returns:
+ Tuple of (query, params)
+ """
+ # Validate sort direction
+ if direction.lower() not in ["asc", "desc"]:
+ raise HTTPException(status_code=400, detail="Invalid sort direction")
+
+ # Build metadata filter clause if needed
+ metadata_clause = ""
+ if metadata_filter:
+ metadata_clause = "AND metadata @> %(metadata_filter)s::jsonb"
+
+ query = f"""
+ SELECT
+ agent_id,
+ developer_id,
+ name,
+ canonical_name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings,
+ created_at,
+ updated_at
+ FROM agents
+ WHERE developer_id = %(developer_id)s
+ {metadata_clause}
+ ORDER BY {sort_by} {direction}
+ LIMIT %(limit)s OFFSET %(offset)s;
+ """
+
+ params = {
+ "developer_id": developer_id,
+ "limit": limit,
+ "offset": offset
+ }
+
+ if metadata_filter:
+ params["metadata_filter"] = metadata_filter
+
+ return query, params
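A usage sketch for the filter parameters; metadata_filter is applied with JSONB containment (@>), so {"env": "prod"} matches only agents whose metadata includes that exact key/value pair (developer_id is a placeholder):

    page = list_agents_query(
        developer_id=developer_id,
        limit=25,
        offset=0,
        sort_by="updated_at",
        direction="asc",
        metadata_filter={"env": "prod"},
    )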
diff --git a/agents-api/agents_api/queries/agent/patch_agent.py b/agents-api/agents_api/queries/agent/patch_agent.py
new file mode 100644
index 000000000..5f935d49b
--- /dev/null
+++ b/agents-api/agents_api/queries/agent/patch_agent.py
@@ -0,0 +1,81 @@
+"""
+This module contains the functionality for partially updating an agent in the PostgreSQL database.
+It constructs and executes SQL queries to update specific fields of an agent based on agent ID and developer ID.
+"""
+
+from typing import Any, TypeVar
+from uuid import UUID
+
+from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
+from fastapi import HTTPException
+from ...metrics.counters import increase_counter
+from ..utils import (
+ pg_query,
+ partialclass,
+ rewrap_exceptions,
+ wrap_in_class,
+)
+from beartype import beartype
+from psycopg import errors as psycopg_errors
+
+ModelT = TypeVar("ModelT", bound=Any)
+T = TypeVar("T")
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist."
+ )
+ }
+ # TODO: Add more exceptions
+)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {"id": d["agent_id"], **d},
+ _kind="inserted",
+)
+@pg_query
+@increase_counter("patch_agent")
+@beartype
+def patch_agent_query(
+ *,
+ agent_id: UUID,
+ developer_id: UUID,
+ data: PatchAgentRequest
+) -> tuple[str, dict]:
+ """
+ Constructs the SQL query to partially update an agent's details.
+
+ Args:
+ agent_id (UUID): The UUID of the agent to update.
+ developer_id (UUID): The UUID of the developer owning the agent.
+        data (PatchAgentRequest): The request model containing the fields to update.
+
+ Returns:
+ tuple[str, dict]: A tuple containing the SQL query and its parameters.
+ """
+ patch_fields = data.model_dump(exclude_unset=True)
+ set_clauses = []
+ params = {}
+
+ for key, value in patch_fields.items():
+ if value is not None: # Only update non-null values
+ set_clauses.append(f"{key} = %({key})s")
+ params[key] = value
+
+ set_clause = ", ".join(set_clauses)
+
+ query = f"""
+ UPDATE agents
+ SET {set_clause}
+ WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s
+ RETURNING *;
+ """
+
+ params["agent_id"] = agent_id
+ params["developer_id"] = developer_id
+
+ return (query, params)
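The PATCH path leans on Pydantic's exclude_unset=True, so only fields the caller actually sent become SET clauses; fields that were never provided are left untouched. A self-contained sketch using a trimmed-down stand-in model:

    from pydantic import BaseModel

    class PatchAgentStub(BaseModel):  # hypothetical stand-in for PatchAgentRequest
        name: str | None = None
        about: str | None = None

    data = PatchAgentStub(name="Support Bot")  # 'about' was never set

    patch_fields = data.model_dump(exclude_unset=True)
    set_clauses = [
        f"{key} = %({key})s"
        for key, value in patch_fields.items()
        if value is not None  # mirror the non-null guard above
    ]
    print(", ".join(set_clauses))  # -> "name = %(name)s"; 'about' is omitted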
diff --git a/agents-api/agents_api/queries/agent/update_agent.py b/agents-api/agents_api/queries/agent/update_agent.py
new file mode 100644
index 000000000..e26667874
--- /dev/null
+++ b/agents-api/agents_api/queries/agent/update_agent.py
@@ -0,0 +1,73 @@
+"""
+This module contains the functionality for fully updating an agent in the PostgreSQL database.
+It constructs and executes SQL queries to replace an agent's details based on agent ID and developer ID.
+"""
+
+from typing import Any, TypeVar
+from uuid import UUID
+
+from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
+from fastapi import HTTPException
+from ...metrics.counters import increase_counter
+from ..utils import (
+ pg_query,
+ partialclass,
+ rewrap_exceptions,
+ wrap_in_class,
+)
+from beartype import beartype
+from psycopg import errors as psycopg_errors
+
+ModelT = TypeVar("ModelT", bound=Any)
+T = TypeVar("T")
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist."
+ )
+ }
+ # TODO: Add more exceptions
+)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {"id": d["agent_id"], "jobs": [], **d},
+ _kind="inserted",
+)
+@pg_query
+@increase_counter("update_agent")
+@beartype
+def update_agent_query(
+ *,
+ agent_id: UUID,
+ developer_id: UUID,
+ data: UpdateAgentRequest
+) -> tuple[str, dict]:
+ """
+ Constructs the SQL query to fully update an agent's details.
+
+ Args:
+ agent_id (UUID): The UUID of the agent to update.
+ developer_id (UUID): The UUID of the developer owning the agent.
+        data (UpdateAgentRequest): The request model containing all agent fields to update.
+
+ Returns:
+ tuple[str, dict]: A tuple containing the SQL query and its parameters.
+ """
+ fields = ", ".join([f"{key} = %({key})s" for key in data.model_dump(exclude_unset=True).keys()])
+ params = {key: value for key, value in data.model_dump(exclude_unset=True).items()}
+
+ query = f"""
+ UPDATE agents
+ SET {fields}
+ WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s
+ RETURNING *;
+ """
+
+ params["agent_id"] = agent_id
+ params["developer_id"] = developer_id
+
+ return (query, params)
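One small inefficiency worth noting: update_agent_query calls data.model_dump(exclude_unset=True) twice, once for the SET clause and once for the parameters. A single-pass construction under the same assumptions would look like this (stand-in model, purely illustrative):

    from uuid import UUID, uuid4
    from pydantic import BaseModel

    class UpdateAgentStub(BaseModel):  # hypothetical stand-in for UpdateAgentRequest
        name: str
        about: str = ""

    def build_update(agent_id: UUID, developer_id: UUID, data: UpdateAgentStub):
        params = data.model_dump(exclude_unset=True)  # dump exactly once
        fields = ", ".join(f"{key} = %({key})s" for key in params)
        params |= {"agent_id": agent_id, "developer_id": developer_id}
        query = f"""
        UPDATE agents
        SET {fields}
        WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s
        RETURNING *;
        """
        return query, params

    q, p = build_update(uuid4(), uuid4(), UpdateAgentStub(name="Bot"))
    print(q, p, sep="\n")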
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
new file mode 100644
index 000000000..704085a76
--- /dev/null
+++ b/agents-api/agents_api/queries/utils.py
@@ -0,0 +1,254 @@
+import re
+import time
+from typing import Awaitable, Callable, ParamSpec, Type, TypeVar
+import inspect
+from fastapi import HTTPException
+import pandas as pd
+from pydantic import BaseModel
+from functools import partialmethod, wraps
+from asyncpg import Record
+from requests.exceptions import ConnectionError, Timeout
+from httpcore import NetworkError, TimeoutException
+from httpx import RequestError
+import sqlglot
+
+from typing import Any
+
+P = ParamSpec("P")
+T = TypeVar("T")
+ModelT = TypeVar("ModelT", bound=BaseModel)
+
+def generate_canonical_name(name: str) -> str:
+ """Convert a display name to a canonical name.
+ Example: "My Cool Agent!" -> "my_cool_agent"
+ """
+ # Remove special characters, replace spaces with underscores
+ canonical = re.sub(r"[^\w\s-]", "", name.lower())
+ canonical = re.sub(r"[-\s]+", "_", canonical)
+
+    # Ensure the result is non-empty and starts with a letter (prepend 'a_' if not);
+    # a name made up entirely of special characters would otherwise raise IndexError
+    if not canonical or not canonical[0].isalpha():
+        canonical = f"a_{canonical}"
+
+ return canonical
+
+def partialclass(cls, *args, **kwargs):
+ cls_signature = inspect.signature(cls)
+ bound = cls_signature.bind_partial(*args, **kwargs)
+
+ # The `updated=()` argument is necessary to avoid a TypeError when using @wraps for a class
+ @wraps(cls, updated=())
+ class NewCls(cls):
+ __init__ = partialmethod(cls.__init__, *bound.args, **bound.kwargs)
+
+ return NewCls
+
+
+def wrap_in_class(
+ cls: Type[ModelT] | Callable[..., ModelT],
+ one: bool = False,
+ transform: Callable[[dict], dict] | None = None,
+ _kind: str | None = None,
+):
+    def _return_data(rec: list[Record]):
+        # Convert the fetched records to a list of plain dicts
+        # if _kind:
+        #     rec = rec[rec["_kind"] == _kind]
+
+        data = [dict(r.items()) for r in rec]
+
+ nonlocal transform
+ transform = transform or (lambda x: x)
+
+ if one:
+ assert len(data) >= 1, "Expected one result, got none"
+ obj: ModelT = cls(**transform(data[0]))
+ return obj
+
+ objs: list[ModelT] = [cls(**item) for item in map(transform, data)]
+ return objs
+
+ def decorator(func: Callable[P, pd.DataFrame | Awaitable[pd.DataFrame]]):
+ @wraps(func)
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> ModelT | list[ModelT]:
+ return _return_data(func(*args, **kwargs))
+
+ @wraps(func)
+ async def async_wrapper(
+ *args: P.args, **kwargs: P.kwargs
+ ) -> ModelT | list[ModelT]:
+ return _return_data(await func(*args, **kwargs))
+
+ # Set the wrapped function as an attribute of the wrapper,
+ # forwards the __wrapped__ attribute if it exists.
+ setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+ setattr(async_wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+
+ return async_wrapper if inspect.iscoroutinefunction(func) else wrapper
+
+ return decorator
+
+
+def rewrap_exceptions(
+ mapping: dict[
+ Type[BaseException] | Callable[[BaseException], bool],
+ Type[BaseException] | Callable[[BaseException], BaseException],
+ ],
+ /,
+):
+ def _check_error(error):
+ nonlocal mapping
+
+ for check, transform in mapping.items():
+ should_catch = (
+ isinstance(error, check) if isinstance(check, type) else check(error)
+ )
+
+ if should_catch:
+ new_error = (
+ transform(str(error))
+ if isinstance(transform, type)
+ else transform(error)
+ )
+
+ setattr(new_error, "__cause__", error)
+
+ raise new_error from error
+
+ def decorator(func: Callable[P, T | Awaitable[T]]):
+ @wraps(func)
+ async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+ try:
+ result: T = await func(*args, **kwargs)
+ except BaseException as error:
+ _check_error(error)
+ raise
+
+ return result
+
+ @wraps(func)
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+ try:
+ result: T = func(*args, **kwargs)
+ except BaseException as error:
+ _check_error(error)
+ raise
+
+ return result
+
+ # Set the wrapped function as an attribute of the wrapper,
+ # forwards the __wrapped__ attribute if it exists.
+ setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+ setattr(async_wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+
+ return async_wrapper if inspect.iscoroutinefunction(func) else wrapper
+
+ return decorator
+
+def pg_query(
+ func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
+ debug: bool | None = None,
+ only_on_error: bool = False,
+ timeit: bool = False,
+):
+ def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
+ """
+ Decorator that wraps a function that takes arbitrary arguments, and
+        returns a (query string, variables) tuple.
+
+        The wrapped function should additionally take a client keyword argument
+        and then run the query using the client, returning a list of Records.
+ """
+
+ from pprint import pprint
+
+ from tenacity import (
+ retry,
+ retry_if_exception,
+ stop_after_attempt,
+ wait_exponential,
+ )
+
+ def is_resource_busy(e: Exception) -> bool:
+ return (
+ isinstance(e, HTTPException)
+ and e.status_code == 429
+ and not getattr(e, "cozo_offline", False)
+ )
+
+ @retry(
+ stop=stop_after_attempt(4),
+ wait=wait_exponential(multiplier=1, min=4, max=10),
+ retry=retry_if_exception(is_resource_busy),
+ )
+ @wraps(func)
+ async def wrapper(
+ *args: P.args, client=None, **kwargs: P.kwargs
+ ) -> list[Record]:
+ if inspect.iscoroutinefunction(func):
+ query, variables = await func(*args, **kwargs)
+ else:
+ query, variables = func(*args, **kwargs)
+
+ not only_on_error and debug and print(query)
+ not only_on_error and debug and pprint(
+ dict(
+ variables=variables,
+ )
+ )
+
+ # Run the query
+ from ..clients import pg
+
+ try:
+ client = client or await pg.get_pg_client()
+
+ start = timeit and time.perf_counter()
+                sqlglot.parse(query)  # validate the SQL before executing it
+ results: list[Record] = await client.fetch(query, *variables)
+ end = timeit and time.perf_counter()
+
+ timeit and print(f"PostgreSQL query time: {end - start:.2f} seconds")
+
+ except Exception as e:
+ if only_on_error and debug:
+ print(query)
+ pprint(variables)
+
+ debug and print(repr(e))
+ connection_error = isinstance(
+ e,
+ (
+ ConnectionError,
+ Timeout,
+ TimeoutException,
+ NetworkError,
+ RequestError,
+ ),
+ )
+
+ if connection_error:
+ exc = HTTPException(
+ status_code=429, detail="Resource busy. Please try again later."
+ )
+ raise exc from e
+
+ raise
+
+ not only_on_error and debug and pprint(
+ dict(
+ results=[dict(result.items()) for result in results],
+ )
+ )
+
+ return results
+
+ # Set the wrapped function as an attribute of the wrapper,
+ # forwards the __wrapped__ attribute if it exists.
+ setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+
+ return wrapper
+
+ if func is not None and callable(func):
+ return pg_query_dec(func)
+
+ return pg_query_dec
\ No newline at end of file
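The decorators in this file are designed to stack. Reading the agent queries bottom-up: the innermost function only builds SQL, pg_query executes it against the client, wrap_in_class converts the returned records into model instances, and rewrap_exceptions translates driver errors into HTTP errors. A dependency-free sketch of that layering (all names hypothetical, no database involved):

    from functools import wraps

    def fake_pg_query(func):
        # Stand-in for pg_query: pretend to execute the SQL, return row dicts.
        @wraps(func)
        def wrapper(*args, **kwargs):
            query, params = func(*args, **kwargs)
            return [{"agent_id": params["agent_id"], "name": "demo"}]
        return wrapper

    def fake_wrap_in_class(cls):
        # Stand-in for wrap_in_class: turn row dicts into model instances.
        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                return [cls(**row) for row in func(*args, **kwargs)]
            return wrapper
        return decorator

    class Agent:
        def __init__(self, agent_id, name):
            self.agent_id, self.name = agent_id, name

    @fake_wrap_in_class(Agent)
    @fake_pg_query
    def get_agent(*, agent_id):
        return (
            "SELECT agent_id, name FROM agents WHERE agent_id = %(agent_id)s;",
            {"agent_id": agent_id},
        )

    print(get_agent(agent_id="a1")[0].name)  # -> demo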
diff --git a/agents-api/pyproject.toml b/agents-api/pyproject.toml
index f8ec61367..cd87586ec 100644
--- a/agents-api/pyproject.toml
+++ b/agents-api/pyproject.toml
@@ -51,6 +51,9 @@ dependencies = [
"xxhash~=3.5.0",
"spacy-chunks>=0.0.2",
"uuid7>=0.1.0",
+ "psycopg>=3.2.3",
+ "asyncpg>=0.30.0",
+ "sqlglot>=26.0.0",
]
[dependency-groups]
diff --git a/agents-api/uv.lock b/agents-api/uv.lock
index 381d91e79..0c5422f0a 100644
--- a/agents-api/uv.lock
+++ b/agents-api/uv.lock
@@ -15,6 +15,7 @@ dependencies = [
{ name = "anyio" },
{ name = "arrow" },
{ name = "async-lru" },
+ { name = "asyncpg" },
{ name = "beartype" },
{ name = "en-core-web-sm" },
{ name = "environs" },
@@ -36,6 +37,7 @@ dependencies = [
{ name = "pandas" },
{ name = "prometheus-client" },
{ name = "prometheus-fastapi-instrumentator" },
+ { name = "psycopg" },
{ name = "pycozo", extra = ["embedded"] },
{ name = "pycozo-async" },
{ name = "pydantic", extra = ["email"] },
@@ -47,6 +49,7 @@ dependencies = [
{ name = "simsimd" },
{ name = "spacy" },
{ name = "spacy-chunks" },
+ { name = "sqlglot" },
{ name = "sse-starlette" },
{ name = "temporalio", extra = ["opentelemetry"] },
{ name = "tenacity" },
@@ -82,6 +85,7 @@ requires-dist = [
{ name = "anyio", specifier = "~=4.4.0" },
{ name = "arrow", specifier = "~=1.3.0" },
{ name = "async-lru", specifier = "~=2.0.4" },
+ { name = "asyncpg", specifier = ">=0.30.0" },
{ name = "beartype", specifier = "~=0.18.5" },
{ name = "en-core-web-sm", url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0-py3-none-any.whl" },
{ name = "environs", specifier = "~=10.3.0" },
@@ -103,6 +107,7 @@ requires-dist = [
{ name = "pandas", specifier = "~=2.2.2" },
{ name = "prometheus-client", specifier = "~=0.21.0" },
{ name = "prometheus-fastapi-instrumentator", specifier = "~=7.0.0" },
+ { name = "psycopg", specifier = ">=3.2.3" },
{ name = "pycozo", extras = ["embedded"], specifier = "~=0.7.6" },
{ name = "pycozo-async", specifier = "~=0.7.7" },
{ name = "pydantic", extras = ["email"], specifier = "~=2.10.2" },
@@ -114,6 +119,7 @@ requires-dist = [
{ name = "simsimd", specifier = "~=5.9.4" },
{ name = "spacy", specifier = "~=3.8.2" },
{ name = "spacy-chunks", specifier = ">=0.0.2" },
+ { name = "sqlglot", specifier = ">=26.0.0" },
{ name = "sse-starlette", specifier = "~=2.1.3" },
{ name = "temporalio", extras = ["opentelemetry"], specifier = "~=1.8" },
{ name = "tenacity", specifier = "~=9.0.0" },
@@ -342,6 +348,22 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fa/9f/3c3503693386c4b0f245eaf5ca6198e3b28879ca0a40bde6b0e319793453/async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224", size = 6111 },
]
+[[package]]
+name = "asyncpg"
+version = "0.30.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4b/64/9d3e887bb7b01535fdbc45fbd5f0a8447539833b97ee69ecdbb7a79d0cb4/asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", size = 673162 },
+ { url = "https://files.pythonhosted.org/packages/6e/eb/8b236663f06984f212a087b3e849731f917ab80f84450e943900e8ca4052/asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", size = 637025 },
+ { url = "https://files.pythonhosted.org/packages/cc/57/2dc240bb263d58786cfaa60920779af6e8d32da63ab9ffc09f8312bd7a14/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", size = 3496243 },
+ { url = "https://files.pythonhosted.org/packages/f4/40/0ae9d061d278b10713ea9021ef6b703ec44698fe32178715a501ac696c6b/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", size = 3575059 },
+ { url = "https://files.pythonhosted.org/packages/c3/75/d6b895a35a2c6506952247640178e5f768eeb28b2e20299b6a6f1d743ba0/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", size = 3473596 },
+ { url = "https://files.pythonhosted.org/packages/c8/e7/3693392d3e168ab0aebb2d361431375bd22ffc7b4a586a0fc060d519fae7/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", size = 3641632 },
+ { url = "https://files.pythonhosted.org/packages/32/ea/15670cea95745bba3f0352341db55f506a820b21c619ee66b7d12ea7867d/asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", size = 560186 },
+ { url = "https://files.pythonhosted.org/packages/7e/6b/fe1fad5cee79ca5f5c27aed7bd95baee529c1bf8a387435c8ba4fe53d5c1/asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", size = 621064 },
+]
+
[[package]]
name = "attrs"
version = "24.2.0"
@@ -2172,6 +2194,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 },
]
+[[package]]
+name = "psycopg"
+version = "3.2.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+ { name = "tzdata", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d1/ad/7ce016ae63e231575df0498d2395d15f005f05e32d3a2d439038e1bd0851/psycopg-3.2.3.tar.gz", hash = "sha256:a5764f67c27bec8bfac85764d23c534af2c27b893550377e37ce59c12aac47a2", size = 155550 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ce/21/534b8f5bd9734b7a2fcd3a16b1ee82ef6cad81a4796e95ebf4e0c6a24119/psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907", size = 197934 },
+]
+
[[package]]
name = "ptyprocess"
version = "0.7.0"
@@ -2867,6 +2902,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/33/78/d1a1a026ef3af911159398c939b1509d5c36fe524c7b644f34a5146c4e16/spacy_loggers-1.0.5-py3-none-any.whl", hash = "sha256:196284c9c446cc0cdb944005384270d775fdeaf4f494d8e269466cfa497ef645", size = 22343 },
]
+[[package]]
+name = "sqlglot"
+version = "26.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fc/9a/a815124044d598b7f6174be176f379eccd9d583e3130594c381fdfb5736f/sqlglot-26.0.0.tar.gz", hash = "sha256:eb4470e8b3aa2cff1a4ecca4cfe36658e9797ab82416e566abe12672195e1ab8", size = 19775305 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6b/1e/af60a2188773414a9fa65d0e8a32e81342cfcbedf113a19df724d2968c04/sqlglot-26.0.0-py3-none-any.whl", hash = "sha256:1ee9b285e3138c2642a5670c0dbec9afd01860246837788b0f3d228aa6aff619", size = 435457 },
+]
+
[[package]]
name = "srsly"
version = "2.4.8"
diff --git a/integrations-service/integrations/autogen/Agents.py b/integrations-service/integrations/autogen/Agents.py
index 5dab2c7b2..7390b6338 100644
--- a/integrations-service/integrations/autogen/Agents.py
+++ b/integrations-service/integrations/autogen/Agents.py
@@ -25,16 +25,17 @@ class Agent(BaseModel):
"""
When this resource was updated as UTC date-time
"""
- name: Annotated[
- str,
- Field(
- max_length=120,
- pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
- ),
- ] = ""
+ name: Annotated[str, Field(max_length=255, min_length=1)]
"""
Name of the agent
"""
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ Canonical name of the agent
+ """
about: str = ""
"""
About the agent
@@ -62,16 +63,17 @@ class CreateAgentRequest(BaseModel):
populate_by_name=True,
)
metadata: dict[str, Any] | None = None
- name: Annotated[
- str,
- Field(
- max_length=120,
- pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
- ),
- ] = ""
+ name: Annotated[str, Field(max_length=255, min_length=1)]
"""
Name of the agent
"""
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ Canonical name of the agent
+ """
about: str = ""
"""
About the agent
@@ -96,16 +98,17 @@ class CreateOrUpdateAgentRequest(CreateAgentRequest):
)
id: UUID
metadata: dict[str, Any] | None = None
- name: Annotated[
- str,
- Field(
- max_length=120,
- pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
- ),
- ] = ""
+ name: Annotated[str, Field(max_length=255, min_length=1)]
"""
Name of the agent
"""
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ Canonical name of the agent
+ """
about: str = ""
"""
About the agent
@@ -133,16 +136,17 @@ class PatchAgentRequest(BaseModel):
populate_by_name=True,
)
metadata: dict[str, Any] | None = None
- name: Annotated[
- str,
- Field(
- max_length=120,
- pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
- ),
- ] = ""
+ name: Annotated[str | None, Field(max_length=255, min_length=1)] = None
"""
Name of the agent
"""
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ Canonical name of the agent
+ """
about: str = ""
"""
About the agent
@@ -170,16 +174,17 @@ class UpdateAgentRequest(BaseModel):
populate_by_name=True,
)
metadata: dict[str, Any] | None = None
- name: Annotated[
- str,
- Field(
- max_length=120,
- pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
- ),
- ] = ""
+ name: Annotated[str, Field(max_length=255, min_length=1)]
"""
Name of the agent
"""
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ Canonical name of the agent
+ """
about: str = ""
"""
About the agent
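The generated canonical_name field carries the same constraint as the canonicalName TypeSpec scalar introduced further down in this patch. A quick sketch of how the Pydantic pattern behaves at the model boundary (model trimmed to the one field; the error type assumes Pydantic v2):

    from typing import Annotated
    from pydantic import BaseModel, Field, ValidationError

    class AgentStub(BaseModel):  # trimmed stand-in for the generated Agent model
        canonical_name: Annotated[
            str | None,
            Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
        ] = None

    AgentStub(canonical_name="my_cool_agent")   # accepted
    try:
        AgentStub(canonical_name="1_bad_name")  # must start with a letter
    except ValidationError as exc:
        print(exc.errors()[0]["type"])          # "string_pattern_mismatch" in Pydantic v2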
diff --git a/memory-store/migrations/000007_ann.up.sql b/memory-store/migrations/000007_ann.up.sql
index 3cc606fde..64d0b8f49 100644
--- a/memory-store/migrations/000007_ann.up.sql
+++ b/memory-store/migrations/000007_ann.up.sql
@@ -1,3 +1,17 @@
+-- First, drop any existing vectorizer functions and triggers
+DO $$
+BEGIN
+ -- Drop existing vectorizer triggers
+ DROP TRIGGER IF EXISTS _vectorizer_src_trg_1 ON docs;
+
+ -- Drop existing vectorizer functions
+ DROP FUNCTION IF EXISTS _vectorizer_src_trg_1();
+ DROP FUNCTION IF EXISTS _vectorizer_src_trg_1_func();
+
+ -- Drop existing vectorizer tables
+ DROP TABLE IF EXISTS docs_embeddings;
+END $$;
+
-- Create vector similarity search index using diskann and timescale vectorizer
SELECT
ai.create_vectorizer (
diff --git a/typespec/agents/models.tsp b/typespec/agents/models.tsp
index b2763e285..374383c16 100644
--- a/typespec/agents/models.tsp
+++ b/typespec/agents/models.tsp
@@ -20,7 +20,10 @@ model Agent {
...HasTimestamps;
/** Name of the agent */
- name: identifierSafeUnicode = identifierSafeUnicode("");
+ name: displayName;
+
+ /** Canonical name of the agent */
+ canonical_name?: canonicalName;
/** About the agent */
about: string = "";
diff --git a/typespec/common/scalars.tsp b/typespec/common/scalars.tsp
index c718f6289..4e8f7b186 100644
--- a/typespec/common/scalars.tsp
+++ b/typespec/common/scalars.tsp
@@ -66,3 +66,20 @@ scalar PyExpression extends string;
/** A valid jinja template. */
scalar JinjaTemplate extends string;
+
+/**
+ * For canonical names (machine-friendly identifiers)
+ * Must start with a letter and can only contain letters, numbers, and underscores
+ */
+@minLength(1)
+@maxLength(255)
+@pattern("^[a-zA-Z][a-zA-Z0-9_]*$")
+scalar canonicalName extends string;
+
+/**
+ * For display names
+ * Must be between 1 and 255 characters
+ */
+@minLength(1)
+@maxLength(255)
+scalar displayName extends string;
diff --git a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
index eb58eeef2..0a12aac74 100644
--- a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
+++ b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
@@ -1449,9 +1449,12 @@ components:
readOnly: true
name:
allOf:
- - $ref: '#/components/schemas/Common.identifierSafeUnicode'
+ - $ref: '#/components/schemas/Common.displayName'
description: Name of the agent
- default: ''
+ canonical_name:
+ allOf:
+ - $ref: '#/components/schemas/Common.canonicalName'
+ description: Canonical name of the agent
about:
type: string
description: About the agent
@@ -1485,9 +1488,12 @@ components:
additionalProperties: {}
name:
allOf:
- - $ref: '#/components/schemas/Common.identifierSafeUnicode'
+ - $ref: '#/components/schemas/Common.displayName'
description: Name of the agent
- default: ''
+ canonical_name:
+ allOf:
+ - $ref: '#/components/schemas/Common.canonicalName'
+ description: Canonical name of the agent
about:
type: string
description: About the agent
@@ -1525,9 +1531,12 @@ components:
additionalProperties: {}
name:
allOf:
- - $ref: '#/components/schemas/Common.identifierSafeUnicode'
+ - $ref: '#/components/schemas/Common.displayName'
description: Name of the agent
- default: ''
+ canonical_name:
+ allOf:
+ - $ref: '#/components/schemas/Common.canonicalName'
+ description: Canonical name of the agent
about:
type: string
description: About the agent
@@ -1558,9 +1567,12 @@ components:
additionalProperties: {}
name:
allOf:
- - $ref: '#/components/schemas/Common.identifierSafeUnicode'
+ - $ref: '#/components/schemas/Common.displayName'
description: Name of the agent
- default: ''
+ canonical_name:
+ allOf:
+ - $ref: '#/components/schemas/Common.canonicalName'
+ description: Canonical name of the agent
about:
type: string
description: About the agent
@@ -1595,9 +1607,12 @@ components:
additionalProperties: {}
name:
allOf:
- - $ref: '#/components/schemas/Common.identifierSafeUnicode'
+ - $ref: '#/components/schemas/Common.displayName'
description: Name of the agent
- default: ''
+ canonical_name:
+ allOf:
+ - $ref: '#/components/schemas/Common.canonicalName'
+ description: Canonical name of the agent
about:
type: string
description: About the agent
@@ -2706,6 +2721,21 @@ components:
description: IDs (if any) of jobs created as part of this request
default: []
readOnly: true
+ Common.canonicalName:
+ type: string
+ minLength: 1
+ maxLength: 255
+ pattern: ^[a-zA-Z][a-zA-Z0-9_]*$
+ description: |-
+ For canonical names (machine-friendly identifiers)
+ Must start with a letter and can only contain letters, numbers, and underscores
+ Common.displayName:
+ type: string
+ minLength: 1
+ maxLength: 255
+ description: |-
+ For display names
+ Must be between 1 and 255 characters
Common.identifierSafeUnicode:
type: string
maxLength: 120
From b11247f219f1d0ca8b38c4f30843216afdf5bb11 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Sat, 14 Dec 2024 19:15:56 +0000
Subject: [PATCH 014/274] refactor: Lint agents-api (CI)
---
.../agents_api/queries/agent/create_agent.py | 6 ++--
.../queries/agent/create_or_update_agent.py | 24 +++++++-------
.../agents_api/queries/agent/delete_agent.py | 18 +++++-----
.../agents_api/queries/agent/get_agent.py | 15 +++++----
.../agents_api/queries/agent/list_agents.py | 33 +++++++++----------
.../agents_api/queries/agent/patch_agent.py | 21 ++++++------
.../agents_api/queries/agent/update_agent.py | 23 ++++++-------
agents-api/agents_api/queries/utils.py | 22 +++++++------
8 files changed, 83 insertions(+), 79 deletions(-)
diff --git a/agents-api/agents_api/queries/agent/create_agent.py b/agents-api/agents_api/queries/agent/create_agent.py
index 30d73d179..52a0a22f8 100644
--- a/agents-api/agents_api/queries/agent/create_agent.py
+++ b/agents-api/agents_api/queries/agent/create_agent.py
@@ -16,8 +16,8 @@
from ...metrics.counters import increase_counter
from ..utils import (
generate_canonical_name,
- pg_query,
partialclass,
+ pg_query,
rewrap_exceptions,
wrap_in_class,
)
@@ -91,7 +91,9 @@ def create_agent(
)
# Convert default_settings to dict if it exists
- default_settings = data.default_settings.model_dump() if data.default_settings else None
+ default_settings = (
+ data.default_settings.model_dump() if data.default_settings else None
+ )
# Set default values
data.metadata = data.metadata or None
diff --git a/agents-api/agents_api/queries/agent/create_or_update_agent.py b/agents-api/agents_api/queries/agent/create_or_update_agent.py
index e403c7bcf..c93a965a5 100644
--- a/agents-api/agents_api/queries/agent/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agent/create_or_update_agent.py
@@ -6,29 +6,30 @@
from typing import Any, TypeVar
from uuid import UUID
-from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
+from beartype import beartype
from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+
+from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
generate_canonical_name,
- pg_query,
partialclass,
+ pg_query,
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-from psycopg import errors as psycopg_errors
-
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+
@rewrap_exceptions(
{
psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
+ HTTPException,
status_code=404,
- detail="The specified developer does not exist."
+ detail="The specified developer does not exist.",
)
}
)
@@ -42,10 +43,7 @@
@increase_counter("create_or_update_agent")
@beartype
def create_or_update_agent_query(
- *,
- agent_id: UUID,
- developer_id: UUID,
- data: CreateOrUpdateAgentRequest
+ *, agent_id: UUID, developer_id: UUID, data: CreateOrUpdateAgentRequest
) -> tuple[list[str], dict]:
"""
Constructs the SQL queries to create a new agent or update an existing agent's details.
@@ -67,7 +65,9 @@ def create_or_update_agent_query(
)
# Convert default_settings to dict if it exists
- default_settings = data.default_settings.model_dump() if data.default_settings else None
+ default_settings = (
+ data.default_settings.model_dump() if data.default_settings else None
+ )
# Set default values
data.metadata = data.metadata or None
diff --git a/agents-api/agents_api/queries/agent/delete_agent.py b/agents-api/agents_api/queries/agent/delete_agent.py
index 4bd14f8ec..1d01daa20 100644
--- a/agents-api/agents_api/queries/agent/delete_agent.py
+++ b/agents-api/agents_api/queries/agent/delete_agent.py
@@ -6,28 +6,30 @@
from typing import Any, TypeVar
from uuid import UUID
+from beartype import beartype
from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
from ..utils import (
- pg_query,
partialclass,
+ pg_query,
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-from psycopg import errors as psycopg_errors
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+
@rewrap_exceptions(
{
psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
+ HTTPException,
status_code=404,
- detail="The specified developer does not exist."
+ detail="The specified developer does not exist.",
)
}
# TODO: Add more exceptions
@@ -83,7 +85,7 @@ def delete_agent_query(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str]
-- Delete the agent
DELETE FROM agents
WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
- """
+ """,
]
params = {
diff --git a/agents-api/agents_api/queries/agent/get_agent.py b/agents-api/agents_api/queries/agent/get_agent.py
index e5368eea1..982849f3a 100644
--- a/agents-api/agents_api/queries/agent/get_agent.py
+++ b/agents-api/agents_api/queries/agent/get_agent.py
@@ -6,28 +6,29 @@
from typing import Any, TypeVar
from uuid import UUID
+from beartype import beartype
from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+
+from ...autogen.openapi_model import Agent
from ...metrics.counters import increase_counter
from ..utils import (
- pg_query,
partialclass,
+ pg_query,
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-from psycopg import errors as psycopg_errors
-
-from ...autogen.openapi_model import Agent
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+
@rewrap_exceptions(
{
psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
+ HTTPException,
status_code=404,
- detail="The specified developer does not exist."
+ detail="The specified developer does not exist.",
)
}
# TODO: Add more exceptions
diff --git a/agents-api/agents_api/queries/agent/list_agents.py b/agents-api/agents_api/queries/agent/list_agents.py
index db46704cf..a4332372f 100644
--- a/agents-api/agents_api/queries/agent/list_agents.py
+++ b/agents-api/agents_api/queries/agent/list_agents.py
@@ -6,28 +6,29 @@
from typing import Any, Literal, TypeVar
from uuid import UUID
+from beartype import beartype
from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+
+from ...autogen.openapi_model import Agent
from ...metrics.counters import increase_counter
from ..utils import (
- pg_query,
partialclass,
+ pg_query,
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-from psycopg import errors as psycopg_errors
-
-from ...autogen.openapi_model import Agent
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+
@rewrap_exceptions(
{
psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
+ HTTPException,
status_code=404,
- detail="The specified developer does not exist."
+ detail="The specified developer does not exist.",
)
}
# TODO: Add more exceptions
@@ -47,7 +48,7 @@ def list_agents_query(
) -> tuple[str, dict]:
"""
Constructs query to list agents for a developer with pagination.
-
+
Args:
developer_id: UUID of the developer
limit: Maximum number of records to return
@@ -55,7 +56,7 @@ def list_agents_query(
sort_by: Field to sort by
direction: Sort direction ('asc' or 'desc')
metadata_filter: Optional metadata filters
-
+
Returns:
Tuple of (query, params)
"""
@@ -67,7 +68,7 @@ def list_agents_query(
metadata_clause = ""
if metadata_filter:
metadata_clause = "AND metadata @> %(metadata_filter)s::jsonb"
-
+
query = f"""
SELECT
agent_id,
@@ -87,14 +88,10 @@ def list_agents_query(
ORDER BY {sort_by} {direction}
LIMIT %(limit)s OFFSET %(offset)s;
"""
-
- params = {
- "developer_id": developer_id,
- "limit": limit,
- "offset": offset
- }
-
+
+ params = {"developer_id": developer_id, "limit": limit, "offset": offset}
+
if metadata_filter:
params["metadata_filter"] = metadata_filter
-
+
return query, params
diff --git a/agents-api/agents_api/queries/agent/patch_agent.py b/agents-api/agents_api/queries/agent/patch_agent.py
index 5f935d49b..74be99df8 100644
--- a/agents-api/agents_api/queries/agent/patch_agent.py
+++ b/agents-api/agents_api/queries/agent/patch_agent.py
@@ -6,27 +6,29 @@
from typing import Any, TypeVar
from uuid import UUID
-from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
+from beartype import beartype
from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+
+from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
from ...metrics.counters import increase_counter
from ..utils import (
- pg_query,
partialclass,
+ pg_query,
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-from psycopg import errors as psycopg_errors
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+
@rewrap_exceptions(
{
psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
+ HTTPException,
status_code=404,
- detail="The specified developer does not exist."
+ detail="The specified developer does not exist.",
)
}
# TODO: Add more exceptions
@@ -41,10 +43,7 @@
@increase_counter("patch_agent")
@beartype
def patch_agent_query(
- *,
- agent_id: UUID,
- developer_id: UUID,
- data: PatchAgentRequest
+ *, agent_id: UUID, developer_id: UUID, data: PatchAgentRequest
) -> tuple[str, dict]:
"""
Constructs the SQL query to partially update an agent's details.
@@ -67,7 +66,7 @@ def patch_agent_query(
params[key] = value
set_clause = ", ".join(set_clauses)
-
+
query = f"""
UPDATE agents
SET {set_clause}
diff --git a/agents-api/agents_api/queries/agent/update_agent.py b/agents-api/agents_api/queries/agent/update_agent.py
index e26667874..e0ed4a46d 100644
--- a/agents-api/agents_api/queries/agent/update_agent.py
+++ b/agents-api/agents_api/queries/agent/update_agent.py
@@ -6,27 +6,29 @@
from typing import Any, TypeVar
from uuid import UUID
-from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
+from beartype import beartype
from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+
+from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
- pg_query,
partialclass,
+ pg_query,
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-from psycopg import errors as psycopg_errors
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+
@rewrap_exceptions(
{
psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
+ HTTPException,
status_code=404,
- detail="The specified developer does not exist."
+ detail="The specified developer does not exist.",
)
}
# TODO: Add more exceptions
@@ -41,10 +43,7 @@
@increase_counter("update_agent")
@beartype
def update_agent_query(
- *,
- agent_id: UUID,
- developer_id: UUID,
- data: UpdateAgentRequest
+ *, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest
) -> tuple[str, dict]:
"""
Constructs the SQL query to fully update an agent's details.
@@ -57,7 +56,9 @@ def update_agent_query(
Returns:
tuple[str, dict]: A tuple containing the SQL query and its parameters.
"""
- fields = ", ".join([f"{key} = %({key})s" for key in data.model_dump(exclude_unset=True).keys()])
+ fields = ", ".join(
+ [f"{key} = %({key})s" for key in data.model_dump(exclude_unset=True).keys()]
+ )
params = {key: value for key, value in data.model_dump(exclude_unset=True).items()}
query = f"""
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 704085a76..ba0e50fc0 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -1,23 +1,23 @@
+import inspect
import re
import time
-from typing import Awaitable, Callable, ParamSpec, Type, TypeVar
-import inspect
-from fastapi import HTTPException
-import pandas as pd
-from pydantic import BaseModel
from functools import partialmethod, wraps
+from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar
+
+import pandas as pd
+import sqlglot
from asyncpg import Record
-from requests.exceptions import ConnectionError, Timeout
+from fastapi import HTTPException
from httpcore import NetworkError, TimeoutException
from httpx import RequestError
-import sqlglot
-
-from typing import Any
+from pydantic import BaseModel
+from requests.exceptions import ConnectionError, Timeout
P = ParamSpec("P")
T = TypeVar("T")
ModelT = TypeVar("ModelT", bound=BaseModel)
+
def generate_canonical_name(name: str) -> str:
"""Convert a display name to a canonical name.
Example: "My Cool Agent!" -> "my_cool_agent"
@@ -32,6 +32,7 @@ def generate_canonical_name(name: str) -> str:
return canonical
+
def partialclass(cls, *args, **kwargs):
cls_signature = inspect.signature(cls)
bound = cls_signature.bind_partial(*args, **kwargs)
@@ -145,6 +146,7 @@ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
return decorator
+
def pg_query(
func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
debug: bool | None = None,
@@ -251,4 +253,4 @@ async def wrapper(
if func is not None and callable(func):
return pg_query_dec(func)
- return pg_query_dec
\ No newline at end of file
+ return pg_query_dec
From 94f800ea7b7e3546dfddb8d7b6e892a1e583bcff Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Sat, 14 Dec 2024 22:34:30 +0300
Subject: [PATCH 015/274] fix: Move developer queries to another directory, add
query validator
---
.../agents_api/dependencies/developer_id.py | 2 +-
agents-api/agents_api/queries/__init__.py | 0
.../{models => queries}/developer/__init__.py | 0
.../developer/get_developer.py | 4 +-
agents-api/agents_api/queries/utils.py | 687 ++++++++++++++++++
agents-api/pyproject.toml | 1 +
agents-api/uv.lock | 11 +
7 files changed, 703 insertions(+), 2 deletions(-)
create mode 100644 agents-api/agents_api/queries/__init__.py
rename agents-api/agents_api/{models => queries}/developer/__init__.py (100%)
rename agents-api/agents_api/{models => queries}/developer/get_developer.py (91%)
create mode 100644 agents-api/agents_api/queries/utils.py
diff --git a/agents-api/agents_api/dependencies/developer_id.py b/agents-api/agents_api/dependencies/developer_id.py
index e71df35d7..b97e0ddeb 100644
--- a/agents-api/agents_api/dependencies/developer_id.py
+++ b/agents-api/agents_api/dependencies/developer_id.py
@@ -5,7 +5,7 @@
from ..common.protocol.developers import Developer
from ..env import multi_tenant_mode
-from ..models.developer.get_developer import get_developer, verify_developer
+from ..queries.developer.get_developer import get_developer, verify_developer
from .exceptions import InvalidHeaderFormat
diff --git a/agents-api/agents_api/queries/__init__.py b/agents-api/agents_api/queries/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents-api/agents_api/models/developer/__init__.py b/agents-api/agents_api/queries/developer/__init__.py
similarity index 100%
rename from agents-api/agents_api/models/developer/__init__.py
rename to agents-api/agents_api/queries/developer/__init__.py
diff --git a/agents-api/agents_api/models/developer/get_developer.py b/agents-api/agents_api/queries/developer/get_developer.py
similarity index 91%
rename from agents-api/agents_api/models/developer/get_developer.py
rename to agents-api/agents_api/queries/developer/get_developer.py
index e05c000ff..0a31a6de4 100644
--- a/agents-api/agents_api/models/developer/get_developer.py
+++ b/agents-api/agents_api/queries/developer/get_developer.py
@@ -7,6 +7,7 @@
from fastapi import HTTPException
from pycozo.client import QueryException
from pydantic import ValidationError
+from sqlglot import parse_one
from ...common.protocol.developers import Developer
from ..utils import (
@@ -18,6 +19,8 @@
wrap_in_class,
)
+query = parse_one("SELECT * FROM developers WHERE developer_id = $1").sql(pretty=True)
+
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
@@ -46,7 +49,6 @@ async def get_developer(
developer_id: UUID,
) -> tuple[str, list]:
developer_id = str(developer_id)
- query = "SELECT * FROM developers WHERE developer_id = $1"
return (
query,
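Hoisting parse_one to module scope means a malformed query fails loudly at import time instead of on the first request, which is presumably the "query validator" this commit's subject refers to. The same pattern in isolation:

    from sqlglot import parse_one

    # A syntax error here raises sqlglot.errors.ParseError as soon as the
    # module is imported, rather than when the query is first executed.
    query = parse_one("SELECT * FROM developers WHERE developer_id = $1").sql(
        pretty=True
    )
    print(query)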
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
new file mode 100644
index 000000000..65c234f15
--- /dev/null
+++ b/agents-api/agents_api/queries/utils.py
@@ -0,0 +1,687 @@
+import concurrent.futures
+import inspect
+import re
+import time
+from functools import partialmethod, wraps
+from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar
+from uuid import UUID
+
+import pandas as pd
+from asyncpg import Record
+from fastapi import HTTPException
+from httpcore import ConnectError, NetworkError, TimeoutException
+from httpx import ConnectError as HttpxConnectError
+from httpx import RequestError
+from pydantic import BaseModel
+from requests.exceptions import ConnectionError, Timeout
+
+from ..common.utils.cozo import uuid_int_list_to_uuid
+from ..env import do_verify_developer, do_verify_developer_owns_resource
+
+P = ParamSpec("P")
+T = TypeVar("T")
+ModelT = TypeVar("ModelT", bound=BaseModel)
+
+
+def fix_uuid(
+ item: dict[str, Any], attr_regex: str = r"^(?:id|.*_id)$"
+) -> dict[str, Any]:
+ # find the attributes that are ids
+ id_attrs = [
+ attr for attr in item.keys() if re.match(attr_regex, attr) and item[attr]
+ ]
+
+ if not id_attrs:
+ return item
+
+ fixed = {
+ **item,
+ **{
+ attr: uuid_int_list_to_uuid(item[attr])
+ for attr in id_attrs
+ if isinstance(item[attr], list)
+ },
+ }
+
+ return fixed
+
+
+def fix_uuid_list(
+ items: list[dict[str, Any]], attr_regex: str = r"^(?:id|.*_id)$"
+) -> list[dict[str, Any]]:
+ fixed = list(map(lambda item: fix_uuid(item, attr_regex), items))
+ return fixed
+
+
+def fix_uuid_if_present(item: Any, attr_regex: str = r"^(?:id|.*_id)$") -> Any:
+ match item:
+ case [dict(), *_]:
+ return fix_uuid_list(item, attr_regex)
+
+ case dict():
+ return fix_uuid(item, attr_regex)
+
+ case _:
+ return item
+
+
+def partialclass(cls, *args, **kwargs):
+ cls_signature = inspect.signature(cls)
+ bound = cls_signature.bind_partial(*args, **kwargs)
+
+ # The `updated=()` argument is necessary to avoid a TypeError when using @wraps for a class
+ @wraps(cls, updated=())
+ class NewCls(cls):
+ __init__ = partialmethod(cls.__init__, *bound.args, **bound.kwargs)
+
+ return NewCls
+
+
+def mark_session_updated_query(developer_id: UUID | str, session_id: UUID | str) -> str:
+ return f"""
+ input[developer_id, session_id] <- [[
+ to_uuid("{str(developer_id)}"),
+ to_uuid("{str(session_id)}"),
+ ]]
+
+ ?[
+ developer_id,
+ session_id,
+ situation,
+ summary,
+ created_at,
+ metadata,
+ render_templates,
+ token_budget,
+ context_overflow,
+ updated_at,
+ ] :=
+ input[developer_id, session_id],
+ *sessions {{
+ session_id,
+ situation,
+ summary,
+ created_at,
+ metadata,
+ render_templates,
+ token_budget,
+ context_overflow,
+ @ 'END'
+ }},
+ updated_at = [floor(now()), true]
+
+ :put sessions {{
+ developer_id,
+ session_id,
+ situation,
+ summary,
+ created_at,
+ metadata,
+ render_templates,
+ token_budget,
+ context_overflow,
+ updated_at,
+ }}
+ """
+
+
+def verify_developer_id_query(developer_id: UUID | str) -> str:
+ if not do_verify_developer:
+ return "?[exists] := exists = true"
+
+ return f"""
+ matched[count(developer_id)] :=
+ *developers{{
+ developer_id,
+ }}, developer_id = to_uuid("{str(developer_id)}")
+
+ ?[exists] :=
+ matched[num],
+ exists = num > 0,
+ assert(exists, "Developer does not exist")
+
+ :limit 1
+ """
+
+
+def verify_developer_owns_resource_query(
+ developer_id: UUID | str,
+ resource: str,
+ parents: list[tuple[str, str]] | None = None,
+ **resource_id,
+) -> str:
+ if not do_verify_developer_owns_resource:
+ return "?[exists] := exists = true"
+
+ parents = parents or []
+ resource_id_key, resource_id_value = next(iter(resource_id.items()))
+
+ parents.append((resource, resource_id_key))
+ parent_keys = ["developer_id", *map(lambda x: x[1], parents)]
+
+ rule_head = f"""
+ found[count({resource_id_key})] :=
+ developer_id = to_uuid("{str(developer_id)}"),
+ {resource_id_key} = to_uuid("{str(resource_id_value)}"),
+ """
+
+ rule_body = ""
+ for parent_key, (relation, key) in zip(parent_keys, parents):
+ rule_body += f"""
+ *{relation}{{
+ {parent_key},
+ {key},
+ }},
+ """
+
+ assertion = f"""
+ ?[exists] :=
+ found[num],
+ exists = num > 0,
+ assert(exists, "Developer does not own resource {resource} with {resource_id_key} {resource_id_value}")
+
+ :limit 1
+ """
+
+ rule = rule_head + rule_body + assertion
+ return rule
+
+
+def make_cozo_json_query(fields):
+ return ", ".join(f'"{field}": {field}' for field in fields).strip()
+
+
+def cozo_query(
+ func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
+ debug: bool | None = None,
+ only_on_error: bool = False,
+ timeit: bool = False,
+):
+ def cozo_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
+ """
+ Decorator that wraps a function that takes arbitrary arguments, and
+ returns a (query string, variables) tuple.
+
+ The wrapped function should additionally take a client keyword argument
+ and then run the query using the client, returning a DataFrame.
+ """
+
+ from pprint import pprint
+
+ from tenacity import (
+ retry,
+ retry_if_exception,
+ stop_after_attempt,
+ wait_exponential,
+ )
+
+ def is_resource_busy(e: Exception) -> bool:
+ return (
+ isinstance(e, HTTPException)
+ and e.status_code == 429
+ and not getattr(e, "cozo_offline", False)
+ )
+
+ @retry(
+ stop=stop_after_attempt(4),
+ wait=wait_exponential(multiplier=1, min=4, max=10),
+ retry=retry_if_exception(is_resource_busy),
+ )
+ @wraps(func)
+ def wrapper(*args: P.args, client=None, **kwargs: P.kwargs) -> pd.DataFrame:
+ queries, variables = func(*args, **kwargs)
+
+ if isinstance(queries, str):
+ query = queries
+ else:
+ queries = [str(query) for query in queries if query]
+ query = "}\n\n{\n".join(queries)
+ query = f"{{ {query} }}"
+
+ not only_on_error and debug and print(query)
+ not only_on_error and debug and pprint(
+ dict(
+ variables=variables,
+ )
+ )
+
+ # Run the query
+ from ..clients import cozo
+
+ try:
+ client = client or cozo.get_cozo_client()
+
+ start = timeit and time.perf_counter()
+ result = client.run(query, variables)
+ end = timeit and time.perf_counter()
+
+ timeit and print(f"Cozo query time: {end - start:.2f} seconds")
+
+ except Exception as e:
+ if only_on_error and debug:
+ print(query)
+ pprint(variables)
+
+ debug and print(repr(e))
+
+ pretty_error = repr(e).lower()
+ cozo_busy = ("busy" in pretty_error) or (
+ "when executing against relation '_" in pretty_error
+ )
+ cozo_offline = isinstance(e, ConnectionError) and (
+ ("connection refused" in pretty_error)
+ or ("name or service not known" in pretty_error)
+ )
+ connection_error = isinstance(
+ e,
+ (
+ ConnectionError,
+ Timeout,
+ TimeoutException,
+ NetworkError,
+ RequestError,
+ ),
+ )
+
+ if cozo_busy or connection_error or cozo_offline:
+ exc = HTTPException(
+ status_code=429, detail="Resource busy. Please try again later."
+ )
+ exc.cozo_offline = cozo_offline
+ raise exc from e
+
+ raise
+
+ # Need to fix the UUIDs in the result
+ result = result.map(fix_uuid_if_present)
+
+ not only_on_error and debug and pprint(
+ dict(
+ result=result.to_dict(orient="records"),
+ )
+ )
+
+ return result
+
+ # Set the wrapped function as an attribute of the wrapper,
+ # forwards the __wrapped__ attribute if it exists.
+ setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+
+ return wrapper
+
+ if func is not None and callable(func):
+ return cozo_query_dec(func)
+
+ return cozo_query_dec
+
+
+def cozo_query_async(
+ func: Callable[
+ P,
+ tuple[str | list[str | None], dict]
+ | Awaitable[tuple[str | list[str | None], dict]],
+ ]
+ | None = None,
+ debug: bool | None = None,
+ only_on_error: bool = False,
+ timeit: bool = False,
+):
+ def cozo_query_dec(
+ func: Callable[
+ P, tuple[str | list[Any], dict] | Awaitable[tuple[str | list[Any], dict]]
+ ],
+ ):
+ """
+ Decorator that wraps a function that takes arbitrary arguments, and
+ returns a (query string, variables) tuple.
+
+ The wrapped function should additionally take a client keyword argument
+ and then run the query using the client, returning a DataFrame.
+ """
+
+ from pprint import pprint
+
+ from tenacity import (
+ retry,
+ retry_if_exception,
+ stop_after_attempt,
+ wait_exponential,
+ )
+
+ def is_resource_busy(e: Exception) -> bool:
+ return (
+ isinstance(e, HTTPException)
+ and e.status_code == 429
+ and not getattr(e, "cozo_offline", False)
+ )
+
+ @retry(
+ stop=stop_after_attempt(6),
+ wait=wait_exponential(multiplier=1.2, min=3, max=10),
+ retry=retry_if_exception(is_resource_busy),
+ reraise=True,
+ )
+ @wraps(func)
+ async def wrapper(
+ *args: P.args, client=None, **kwargs: P.kwargs
+ ) -> pd.DataFrame:
+ if inspect.iscoroutinefunction(func):
+ queries, variables = await func(*args, **kwargs)
+ else:
+ queries, variables = func(*args, **kwargs)
+
+ if isinstance(queries, str):
+ query = queries
+ else:
+ queries = [str(query) for query in queries if query]
+ query = "}\n\n{\n".join(queries)
+ query = f"{{ {query} }}"
+
+ not only_on_error and debug and print(query)
+ not only_on_error and debug and pprint(
+ dict(
+ variables=variables,
+ )
+ )
+
+ # Run the query
+ from ..clients import cozo
+
+ try:
+ client = client or cozo.get_async_cozo_client()
+
+ start = timeit and time.perf_counter()
+ result = await client.run(query, variables)
+ end = timeit and time.perf_counter()
+
+ timeit and print(f"Cozo query time: {end - start:.2f} seconds")
+
+ except Exception as e:
+ if only_on_error and debug:
+ print(query)
+ pprint(variables)
+
+ debug and print(repr(e))
+
+ pretty_error = repr(e).lower()
+ cozo_busy = ("busy" in pretty_error) or (
+ "when executing against relation '_" in pretty_error
+ )
+                cozo_offline = isinstance(
+                    e, (ConnectError, HttpxConnectError)
+                ) and (
+                    ("all connection attempts failed" in pretty_error)
+                    or ("name or service not known" in pretty_error)
+                )
+ connection_error = isinstance(
+ e,
+ (
+ ConnectError,
+ HttpxConnectError,
+ TimeoutException,
+ NetworkError,
+ RequestError,
+ ),
+ )
+
+ if cozo_busy or connection_error or cozo_offline:
+ exc = HTTPException(
+ status_code=429, detail="Resource busy. Please try again later."
+ )
+ exc.cozo_offline = cozo_offline
+ raise exc from e
+
+ raise
+
+ # Need to fix the UUIDs in the result
+ result = result.map(fix_uuid_if_present)
+
+ not only_on_error and debug and pprint(
+ dict(
+ result=result.to_dict(orient="records"),
+ )
+ )
+
+ return result
+
+ # Set the wrapped function as an attribute of the wrapper,
+ # forwards the __wrapped__ attribute if it exists.
+ setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+
+ return wrapper
+
+ if func is not None and callable(func):
+ return cozo_query_dec(func)
+
+ return cozo_query_dec
+
+
+def pg_query(
+ func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
+ debug: bool | None = None,
+ only_on_error: bool = False,
+ timeit: bool = False,
+):
+ def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
+ """
+ Decorator that wraps a function that takes arbitrary arguments, and
+ returns a (query string, variables) tuple.
+
+ The wrapped function should additionally take a client keyword argument
+        and then run the query using the client, returning a list of Records.
+ """
+
+ from pprint import pprint
+
+ from tenacity import (
+ retry,
+ retry_if_exception,
+ stop_after_attempt,
+ wait_exponential,
+ )
+
+ def is_resource_busy(e: Exception) -> bool:
+ return (
+ isinstance(e, HTTPException)
+ and e.status_code == 429
+ and not getattr(e, "cozo_offline", False)
+ )
+
+ @retry(
+ stop=stop_after_attempt(4),
+ wait=wait_exponential(multiplier=1, min=4, max=10),
+ retry=retry_if_exception(is_resource_busy),
+ )
+ @wraps(func)
+ async def wrapper(
+ *args: P.args, client=None, **kwargs: P.kwargs
+ ) -> list[Record]:
+ if inspect.iscoroutinefunction(func):
+ query, variables = await func(*args, **kwargs)
+ else:
+ query, variables = func(*args, **kwargs)
+
+ not only_on_error and debug and print(query)
+ not only_on_error and debug and pprint(
+ dict(
+ variables=variables,
+ )
+ )
+
+ # Run the query
+ from ..clients import pg
+
+ try:
+ client = client or await pg.get_pg_client()
+
+ start = timeit and time.perf_counter()
+ results: list[Record] = await client.fetch(query, *variables)
+ end = timeit and time.perf_counter()
+
+ timeit and print(f"PostgreSQL query time: {end - start:.2f} seconds")
+
+ except Exception as e:
+ if only_on_error and debug:
+ print(query)
+ pprint(variables)
+
+ debug and print(repr(e))
+ connection_error = isinstance(
+ e,
+ (
+ ConnectionError,
+ Timeout,
+ TimeoutException,
+ NetworkError,
+ RequestError,
+ ),
+ )
+
+ if connection_error:
+ exc = HTTPException(
+ status_code=429, detail="Resource busy. Please try again later."
+ )
+ raise exc from e
+
+ raise
+
+ not only_on_error and debug and pprint(
+ dict(
+ results=[dict(result.items()) for result in results],
+ )
+ )
+
+ return results
+
+ # Set the wrapped function as an attribute of the wrapper,
+ # forwards the __wrapped__ attribute if it exists.
+ setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+
+ return wrapper
+
+ if func is not None and callable(func):
+ return pg_query_dec(func)
+
+ return pg_query_dec
+
+
+def wrap_in_class(
+ cls: Type[ModelT] | Callable[..., ModelT],
+ one: bool = False,
+ transform: Callable[[dict], dict] | None = None,
+ _kind: str | None = None,
+):
+    def _return_data(rec: list[Record]):
+        # Convert the fetched records to a list of plain dicts
+        # if _kind:
+        #     rec = rec[rec["_kind"] == _kind]
+
+        data = [dict(r.items()) for r in rec]
+
+ nonlocal transform
+ transform = transform or (lambda x: x)
+
+ if one:
+ assert len(data) >= 1, "Expected one result, got none"
+ obj: ModelT = cls(**transform(data[0]))
+ return obj
+
+ objs: list[ModelT] = [cls(**item) for item in map(transform, data)]
+ return objs
+
+ def decorator(func: Callable[P, pd.DataFrame | Awaitable[pd.DataFrame]]):
+ @wraps(func)
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> ModelT | list[ModelT]:
+ return _return_data(func(*args, **kwargs))
+
+ @wraps(func)
+ async def async_wrapper(
+ *args: P.args, **kwargs: P.kwargs
+ ) -> ModelT | list[ModelT]:
+ return _return_data(await func(*args, **kwargs))
+
+ # Set the wrapped function as an attribute of the wrapper,
+ # forwards the __wrapped__ attribute if it exists.
+ setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+ setattr(async_wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+
+ return async_wrapper if inspect.iscoroutinefunction(func) else wrapper
+
+ return decorator
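+
+
+# Illustrative usage of wrap_in_class (a sketch; `get_user` and `User` stand in
+# for real query functions and models). The Records returned by the wrapped
+# query are transformed and parsed into the given model class, a single object
+# when one=True, otherwise a list:
+#
+#     @wrap_in_class(User, one=True, transform=lambda d: {**d, "id": d["user_id"]})
+#     @pg_query
+#     async def get_user(*, user_id: UUID) -> tuple[str, list]: ...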
+
+
+def rewrap_exceptions(
+ mapping: dict[
+ Type[BaseException] | Callable[[BaseException], bool],
+ Type[BaseException] | Callable[[BaseException], BaseException],
+ ],
+ /,
+):
+ def _check_error(error):
+ nonlocal mapping
+
+ for check, transform in mapping.items():
+ should_catch = (
+ isinstance(error, check) if isinstance(check, type) else check(error)
+ )
+
+ if should_catch:
+ new_error = (
+ transform(str(error))
+ if isinstance(transform, type)
+ else transform(error)
+ )
+
+ setattr(new_error, "__cause__", error)
+
+ raise new_error from error
+
+ def decorator(func: Callable[P, T | Awaitable[T]]):
+ @wraps(func)
+ async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+ try:
+ result: T = await func(*args, **kwargs)
+ except BaseException as error:
+ _check_error(error)
+ raise
+
+ return result
+
+ @wraps(func)
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+ try:
+ result: T = func(*args, **kwargs)
+ except BaseException as error:
+ _check_error(error)
+ raise
+
+ return result
+
+ # Set the wrapped function as an attribute of the wrapper,
+        # forwarding the __wrapped__ attribute if it exists.
+ setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+ setattr(async_wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
+
+ return async_wrapper if inspect.iscoroutinefunction(func) else wrapper
+
+ return decorator
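+
+
+# Illustrative usage of rewrap_exceptions (a sketch): keys may be exception
+# types or predicate callables, and values may be exception types (constructed
+# from the error message) or callables that build the replacement error:
+#
+#     @rewrap_exceptions({
+#         asyncpg.ForeignKeyViolationError: lambda e: HTTPException(
+#             status_code=404, detail=str(e)
+#         ),
+#     })
+#     async def create_user(...): ...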
+
+
+def run_concurrently(
+    fns: list[Callable[..., Any]],
+    *,
+    args_list: list[tuple] | None = None,
+    kwargs_list: list[dict] | None = None,
+) -> list[Any]:
+    args_list = args_list or [tuple()] * len(fns)
+    kwargs_list = kwargs_list or [dict()] * len(fns)
+
+ with concurrent.futures.ThreadPoolExecutor() as executor:
+ futures = [
+ executor.submit(fn, *args, **kwargs)
+ for fn, args, kwargs in zip(fns, args_list, kwargs_list)
+ ]
+
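+        # NOTE: as_completed yields futures in completion order, so results may
+        # not align with the order of `fns`.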
+ return [future.result() for future in concurrent.futures.as_completed(futures)]
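+
+
+# Illustrative usage of run_concurrently (a sketch; `task_a` and `task_b` are
+# hypothetical synchronous callables):
+#
+#     results = run_concurrently([task_a, task_b], args_list=[(1,), (2,)])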
diff --git a/agents-api/pyproject.toml b/agents-api/pyproject.toml
index 65ed6903c..af3c053e6 100644
--- a/agents-api/pyproject.toml
+++ b/agents-api/pyproject.toml
@@ -52,6 +52,7 @@ dependencies = [
"spacy-chunks>=0.0.2",
"uuid7>=0.1.0",
"asyncpg>=0.30.0",
+ "sqlglot>=26.0.0",
]
[dependency-groups]
diff --git a/agents-api/uv.lock b/agents-api/uv.lock
index c7c27c5b4..01a1178c4 100644
--- a/agents-api/uv.lock
+++ b/agents-api/uv.lock
@@ -48,6 +48,7 @@ dependencies = [
{ name = "simsimd" },
{ name = "spacy" },
{ name = "spacy-chunks" },
+ { name = "sqlglot" },
{ name = "sse-starlette" },
{ name = "temporalio", extra = ["opentelemetry"] },
{ name = "tenacity" },
@@ -116,6 +117,7 @@ requires-dist = [
{ name = "simsimd", specifier = "~=5.9.4" },
{ name = "spacy", specifier = "~=3.8.2" },
{ name = "spacy-chunks", specifier = ">=0.0.2" },
+ { name = "sqlglot", specifier = ">=26.0.0" },
{ name = "sse-starlette", specifier = "~=2.1.3" },
{ name = "temporalio", extras = ["opentelemetry"], specifier = "~=1.8" },
{ name = "tenacity", specifier = "~=9.0.0" },
@@ -2885,6 +2887,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/33/78/d1a1a026ef3af911159398c939b1509d5c36fe524c7b644f34a5146c4e16/spacy_loggers-1.0.5-py3-none-any.whl", hash = "sha256:196284c9c446cc0cdb944005384270d775fdeaf4f494d8e269466cfa497ef645", size = 22343 },
]
+[[package]]
+name = "sqlglot"
+version = "26.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fc/9a/a815124044d598b7f6174be176f379eccd9d583e3130594c381fdfb5736f/sqlglot-26.0.0.tar.gz", hash = "sha256:eb4470e8b3aa2cff1a4ecca4cfe36658e9797ab82416e566abe12672195e1ab8", size = 19775305 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6b/1e/af60a2188773414a9fa65d0e8a32e81342cfcbedf113a19df724d2968c04/sqlglot-26.0.0-py3-none-any.whl", hash = "sha256:1ee9b285e3138c2642a5670c0dbec9afd01860246837788b0f3d228aa6aff619", size = 435457 },
+]
+
[[package]]
name = "srsly"
version = "2.4.8"
From e84bcd66573b14fdcb53fcf981f773d4076909d0 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Sat, 14 Dec 2024 22:43:10 +0300
Subject: [PATCH 016/274] fix: Call get_developer asynchronously
---
agents-api/agents_api/activities/execute_system.py | 4 ++--
agents-api/agents_api/dependencies/developer_id.py | 6 ++++--
2 files changed, 6 insertions(+), 4 deletions(-)
diff --git a/agents-api/agents_api/activities/execute_system.py b/agents-api/agents_api/activities/execute_system.py
index ca269417d..590849080 100644
--- a/agents-api/agents_api/activities/execute_system.py
+++ b/agents-api/agents_api/activities/execute_system.py
@@ -21,7 +21,7 @@
from ..common.protocol.tasks import ExecutionInput, StepContext
from ..common.storage_handler import auto_blob_store, load_from_blob_store_if_remote
from ..env import testing
-from ..models.developer import get_developer
+from ..queries.developer import get_developer
from .utils import get_handler
# For running synchronous code in the background
@@ -94,7 +94,7 @@ async def execute_system(
# Handle chat operations
if system.operation == "chat" and system.resource == "session":
- developer = get_developer(developer_id=arguments.get("developer_id"))
+ developer = await get_developer(developer_id=arguments.get("developer_id"))
session_id = arguments.get("session_id")
x_custom_api_key = arguments.get("x_custom_api_key", None)
chat_input = ChatInput(**arguments)
diff --git a/agents-api/agents_api/dependencies/developer_id.py b/agents-api/agents_api/dependencies/developer_id.py
index b97e0ddeb..0ffc4896c 100644
--- a/agents-api/agents_api/dependencies/developer_id.py
+++ b/agents-api/agents_api/dependencies/developer_id.py
@@ -36,7 +36,9 @@ async def get_developer_data(
assert (
not x_developer_id
), "X-Developer-Id header not allowed in multi-tenant mode"
- return get_developer(developer_id=UUID("00000000-0000-0000-0000-000000000000"))
+ return await get_developer(
+ developer_id=UUID("00000000-0000-0000-0000-000000000000")
+ )
if not x_developer_id:
raise InvalidHeaderFormat("X-Developer-Id header required")
@@ -47,6 +49,6 @@ async def get_developer_data(
except ValueError as e:
raise InvalidHeaderFormat("X-Developer-Id must be a valid UUID") from e
- developer = get_developer(developer_id=x_developer_id)
+ developer = await get_developer(developer_id=x_developer_id)
return developer
From 19077873dddbf933e95c4fc21238361b40cf54dd Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Sat, 14 Dec 2024 22:50:40 +0300
Subject: [PATCH 017/274] chore: Remove pg_query from models.utils
---
agents-api/agents_api/models/utils.py | 110 --------------------------
1 file changed, 110 deletions(-)
diff --git a/agents-api/agents_api/models/utils.py b/agents-api/agents_api/models/utils.py
index 9b5e454e6..08006d1c7 100644
--- a/agents-api/agents_api/models/utils.py
+++ b/agents-api/agents_api/models/utils.py
@@ -458,116 +458,6 @@ async def wrapper(
return cozo_query_dec
-def pg_query(
- func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
- debug: bool | None = None,
- only_on_error: bool = False,
- timeit: bool = False,
-):
- def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
-        """
-        Decorator that wraps a function taking arbitrary arguments and
-        returning a (query string, variables) tuple.
-
-        The resulting wrapper additionally accepts a `client` keyword argument,
-        runs the query using that client, and returns the fetched Records.
-        """
-
- from pprint import pprint
-
- from tenacity import (
- retry,
- retry_if_exception,
- stop_after_attempt,
- wait_exponential,
- )
-
- def is_resource_busy(e: Exception) -> bool:
- return (
- isinstance(e, HTTPException)
- and e.status_code == 429
- and not getattr(e, "cozo_offline", False)
- )
-
- @retry(
- stop=stop_after_attempt(4),
- wait=wait_exponential(multiplier=1, min=4, max=10),
- retry=retry_if_exception(is_resource_busy),
- )
- @wraps(func)
- async def wrapper(
- *args: P.args, client=None, **kwargs: P.kwargs
- ) -> list[Record]:
- if inspect.iscoroutinefunction(func):
- query, variables = await func(*args, **kwargs)
- else:
- query, variables = func(*args, **kwargs)
-
- not only_on_error and debug and print(query)
- not only_on_error and debug and pprint(
- dict(
- variables=variables,
- )
- )
-
- # Run the query
- from ..clients import pg
-
- try:
- client = client or await pg.get_pg_client()
-
- start = timeit and time.perf_counter()
- sqlglot.parse()
- results: list[Record] = await client.fetch(query, *variables)
- end = timeit and time.perf_counter()
-
- timeit and print(f"PostgreSQL query time: {end - start:.2f} seconds")
-
- except Exception as e:
- if only_on_error and debug:
- print(query)
- pprint(variables)
-
- debug and print(repr(e))
- connection_error = isinstance(
- e,
- (
- ConnectionError,
- Timeout,
- TimeoutException,
- NetworkError,
- RequestError,
- ),
- )
-
- if connection_error:
- exc = HTTPException(
- status_code=429, detail="Resource busy. Please try again later."
- )
- raise exc from e
-
- raise
-
- not only_on_error and debug and pprint(
- dict(
- results=[dict(result.items()) for result in results],
- )
- )
-
- return results
-
- # Set the wrapped function as an attribute of the wrapper,
-        # forwarding the __wrapped__ attribute if it exists.
- setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
-
- return wrapper
-
- if func is not None and callable(func):
- return pg_query_dec(func)
-
- return pg_query_dec
-
-
def wrap_in_class(
cls: Type[ModelT] | Callable[..., ModelT],
one: bool = False,
From 4840195e20bdc44c1ff1633c90801fa8566f0612 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sun, 15 Dec 2024 10:59:20 +0530
Subject: [PATCH 018/274] feat(memory-store): Auto calculate tokens in entries
table
Signed-off-by: Diwank Singh Tomer
---
.../migrations/000015_entries.down.sql | 4 +++
memory-store/migrations/000015_entries.up.sql | 35 ++++++++++++++++++-
2 files changed, 38 insertions(+), 1 deletion(-)
diff --git a/memory-store/migrations/000015_entries.down.sql b/memory-store/migrations/000015_entries.down.sql
index 36ec58280..d8afbb826 100644
--- a/memory-store/migrations/000015_entries.down.sql
+++ b/memory-store/migrations/000015_entries.down.sql
@@ -1,5 +1,9 @@
BEGIN;
+DROP TRIGGER IF EXISTS trg_optimized_update_token_count_after ON entries;
+
+DROP FUNCTION IF EXISTS optimized_update_token_count_after;
+
-- Drop foreign key constraint if it exists
ALTER TABLE IF EXISTS entries
DROP CONSTRAINT IF EXISTS fk_entries_session;
diff --git a/memory-store/migrations/000015_entries.up.sql b/memory-store/migrations/000015_entries.up.sql
index e03573464..9985e4c41 100644
--- a/memory-store/migrations/000015_entries.up.sql
+++ b/memory-store/migrations/000015_entries.up.sql
@@ -14,8 +14,8 @@ CREATE TABLE IF NOT EXISTS entries (
content JSONB[] NOT NULL,
tool_call_id TEXT DEFAULT NULL,
tool_calls JSONB[] NOT NULL DEFAULT '{}',
- token_count INTEGER NOT NULL,
model TEXT NOT NULL,
+ token_count INTEGER DEFAULT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
timestamp TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_entries PRIMARY KEY (session_id, entry_id, created_at)
@@ -52,4 +52,37 @@ BEGIN
END IF;
END $$;
+-- TODO: We should consider using a timescale background job to update the token count
+-- instead of a trigger.
+-- https://docs.timescale.com/use-timescale/latest/user-defined-actions/create-and-register/
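+--
+-- A background-job variant might look like this (sketch; assumes a procedure
+-- `update_missing_token_counts` that backfills NULL token counts in batches):
+--   SELECT add_job('update_missing_token_counts', schedule_interval => INTERVAL '5 minutes');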
+CREATE
+OR REPLACE FUNCTION optimized_update_token_count_after () RETURNS TRIGGER AS $$
+DECLARE
+    calc_token_count INTEGER;
+BEGIN
+    -- Compute the token count outside the UPDATE statement for clarity and potential
+    -- optimization. The variable is named calc_token_count so it cannot collide with
+    -- the entries.token_count column inside the UPDATE below.
+    calc_token_count := cardinality(
+        ai.openai_tokenize(
+            'gpt-4o', -- FIXME: Use `NEW.model`
+            array_to_string(NEW.content::TEXT[], ' ')
+        )
+    );
+
+    -- Update only when the computed count differs; IS DISTINCT FROM (unlike <>)
+    -- also fires for the initial NULL token_count.
+    IF calc_token_count IS DISTINCT FROM NEW.token_count THEN
+        UPDATE entries
+        SET token_count = calc_token_count
+        WHERE entry_id = NEW.entry_id;
+    END IF;
+
+ RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;
+
+CREATE TRIGGER trg_optimized_update_token_count_after
+AFTER INSERT
+OR
+UPDATE ON entries FOR EACH ROW
+EXECUTE FUNCTION optimized_update_token_count_after ();
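+
+-- Note: the trigger's own UPDATE re-fires it once, but the IS DISTINCT FROM
+-- guard makes the second invocation a no-op, so the recursion is bounded.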
+
COMMIT;
\ No newline at end of file
From f4e6b4861857514c60c406b1414334d925ef8dcb Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Sun, 15 Dec 2024 00:52:53 -0500
Subject: [PATCH 019/274] feat(agents-api): add user queries
---
.../agents_api/queries/users/__init__.py | 28 +++++++
.../queries/users/create_or_update_user.py | 72 +++++++++++++++++
.../agents_api/queries/users/create_user.py | 76 +++++++++++++++++
.../agents_api/queries/users/delete_user.py | 45 +++++++++++
.../agents_api/queries/users/get_user.py | 50 ++++++++++++
.../agents_api/queries/users/list_users.py | 81 +++++++++++++++++++
.../agents_api/queries/users/patch_user.py | 73 +++++++++++++++++
.../agents_api/queries/users/update_user.py | 68 ++++++++++++++++
8 files changed, 493 insertions(+)
create mode 100644 agents-api/agents_api/queries/users/__init__.py
create mode 100644 agents-api/agents_api/queries/users/create_or_update_user.py
create mode 100644 agents-api/agents_api/queries/users/create_user.py
create mode 100644 agents-api/agents_api/queries/users/delete_user.py
create mode 100644 agents-api/agents_api/queries/users/get_user.py
create mode 100644 agents-api/agents_api/queries/users/list_users.py
create mode 100644 agents-api/agents_api/queries/users/patch_user.py
create mode 100644 agents-api/agents_api/queries/users/update_user.py
diff --git a/agents-api/agents_api/queries/users/__init__.py b/agents-api/agents_api/queries/users/__init__.py
new file mode 100644
index 000000000..4e810a4fe
--- /dev/null
+++ b/agents-api/agents_api/queries/users/__init__.py
@@ -0,0 +1,28 @@
+"""
+The `user` module within the `queries` package provides SQL query functions for managing users
+in the TimescaleDB database. This includes operations for:
+
+- Creating new users
+- Updating existing users
+- Retrieving user details
+- Listing users with filtering and pagination
+- Deleting users
+"""
+
+from .create_user import create_user
+from .create_or_update_user import create_or_update_user_query
+from .delete_user import delete_user_query
+from .get_user import get_user_query
+from .list_users import list_users_query
+from .patch_user import patch_user_query
+from .update_user import update_user_query
+
+__all__ = [
+ "create_user",
+ "create_or_update_user_query",
+ "delete_user_query",
+ "get_user_query",
+ "list_users_query",
+ "patch_user_query",
+ "update_user_query",
+]
\ No newline at end of file
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
new file mode 100644
index 000000000..a6312b243
--- /dev/null
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -0,0 +1,72 @@
+from typing import Any
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+from asyncpg import exceptions as asyncpg_exceptions
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import CreateUserRequest, User
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+
+@rewrap_exceptions({
+ asyncpg_exceptions.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+})
+@wrap_in_class(User)
+@increase_counter("create_or_update_user")
+@pg_query
+@beartype
+def create_or_update_user_query(
+ *,
+ developer_id: UUID,
+ user_id: UUID,
+ data: CreateUserRequest
+) -> tuple[str, dict]:
+ """
+ Constructs an SQL query to create or update a user.
+
+ Args:
+ developer_id (UUID): The UUID of the developer.
+ user_id (UUID): The UUID of the user.
+ data (CreateUserRequest): The user data to insert or update.
+
+ Returns:
+ tuple[str, dict]: SQL query and parameters.
+ """
+ query = parse_one("""
+ INSERT INTO users (
+ developer_id,
+ user_id,
+ name,
+ about,
+ metadata
+ )
+ VALUES (
+ %(developer_id)s,
+ %(user_id)s,
+ %(name)s,
+ %(about)s,
+ %(metadata)s
+ )
+ ON CONFLICT (developer_id, user_id) DO UPDATE SET
+ name = EXCLUDED.name,
+ about = EXCLUDED.about,
+ metadata = EXCLUDED.metadata
+ RETURNING *;
+ """).sql()
+
+ params = {
+ "developer_id": developer_id,
+ "user_id": user_id,
+ "name": data.name,
+ "about": data.about,
+ "metadata": data.metadata or {},
+ }
+
+ return query, params
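+
+
+# Illustrative call through the decorator stack (a sketch; `client` is an
+# asyncpg-style client from the pg helper, and the IDs are hypothetical):
+#
+#     users = await create_or_update_user_query(
+#         developer_id=dev_id,
+#         user_id=user_id,
+#         data=CreateUserRequest(name="Ada", about="test"),
+#         client=client,
+#     )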
diff --git a/agents-api/agents_api/queries/users/create_user.py b/agents-api/agents_api/queries/users/create_user.py
new file mode 100644
index 000000000..194d9bf03
--- /dev/null
+++ b/agents-api/agents_api/queries/users/create_user.py
@@ -0,0 +1,76 @@
+from typing import Any
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+from sqlglot import parse_one
+from pydantic import ValidationError
+from uuid_extensions import uuid7
+
+from ...autogen.openapi_model import CreateUserRequest, User
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+@rewrap_exceptions({
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ ValidationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Input validation failed. Please check the provided data.",
+ ),
+})
+@wrap_in_class(User)
+@increase_counter("create_user")
+@pg_query
+@beartype
+def create_user(
+ *,
+ developer_id: UUID,
+ user_id: UUID | None = None,
+ data: CreateUserRequest,
+) -> tuple[str, dict]:
+ """
+ Constructs the SQL query to create a new user.
+
+ Args:
+ developer_id (UUID): The UUID of the developer creating the user.
+ user_id (UUID, optional): The UUID for the new user. If None, one will be generated.
+ data (CreateUserRequest): The user data to insert.
+
+ Returns:
+ tuple[str, dict]: A tuple containing the SQL query and its parameters.
+ """
+ user_id = user_id or uuid7()
+
+ query = parse_one("""
+ INSERT INTO users (
+ developer_id,
+ user_id,
+ name,
+ about,
+ metadata
+ )
+ VALUES (
+ %(developer_id)s,
+ %(user_id)s,
+ %(name)s,
+ %(about)s,
+ %(metadata)s
+ )
+ RETURNING *;
+ """).sql()
+
+ params = {
+ "developer_id": developer_id,
+ "user_id": user_id,
+ "name": data.name,
+ "about": data.about,
+ "metadata": data.metadata or {},
+ }
+
+ return query, params
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
new file mode 100644
index 000000000..551129f00
--- /dev/null
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -0,0 +1,45 @@
+from typing import Any
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+@rewrap_exceptions({
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+})
+@wrap_in_class(ResourceDeletedResponse, one=True)
+@increase_counter("delete_user")
+@pg_query
+@beartype
+def delete_user_query(*, developer_id: UUID, user_id: UUID) -> tuple[list[str], dict]:
+ """
+ Constructs optimized SQL queries to delete a user and related data.
+ Uses primary key for efficient deletion.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ user_id (UUID): The user's UUID
+
+ Returns:
+ tuple[list[str], dict]: List of SQL queries and parameters
+ """
+ query = parse_one("""
+ BEGIN;
+ DELETE FROM user_files WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s;
+ DELETE FROM user_docs WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s;
+ DELETE FROM users WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s
+ RETURNING user_id as id, developer_id;
+ COMMIT;
+ """).sql()
+
+ return [query], {"developer_id": developer_id, "user_id": user_id}
\ No newline at end of file
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
new file mode 100644
index 000000000..3982ea46e
--- /dev/null
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -0,0 +1,50 @@
+from typing import Any
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import User
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+@rewrap_exceptions({
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+})
+@wrap_in_class(User, one=True)
+@increase_counter("get_user")
+@pg_query
+@beartype
+def get_user_query(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
+ """
+ Constructs an optimized SQL query to retrieve a user's details.
+ Uses the primary key index (developer_id, user_id) for efficient lookup.
+
+ Args:
+ developer_id (UUID): The UUID of the developer.
+ user_id (UUID): The UUID of the user to retrieve.
+
+ Returns:
+ tuple[str, dict]: SQL query and parameters.
+ """
+ query = parse_one("""
+ SELECT
+ user_id as id,
+ developer_id,
+ name,
+ about,
+ metadata,
+ created_at,
+ updated_at
+ FROM users
+ WHERE developer_id = %(developer_id)s
+ AND user_id = %(user_id)s;
+ """).sql()
+
+ return query, {"developer_id": developer_id, "user_id": user_id}
\ No newline at end of file
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
new file mode 100644
index 000000000..312299082
--- /dev/null
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -0,0 +1,81 @@
+from typing import Any, Literal
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import User
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+@rewrap_exceptions({
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+})
+@wrap_in_class(User)
+@increase_counter("list_users")
+@pg_query
+@beartype
+def list_users_query(
+ *,
+ developer_id: UUID,
+ limit: int = 100,
+ offset: int = 0,
+ sort_by: Literal["created_at", "updated_at"] = "created_at",
+ direction: Literal["asc", "desc"] = "desc",
+ metadata_filter: dict | None = None,
+) -> tuple[str, dict]:
+ """
+ Constructs an optimized SQL query for listing users with pagination and filtering.
+ Uses indexes on developer_id and metadata for efficient querying.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ limit (int): Maximum number of records to return
+ offset (int): Number of records to skip
+ sort_by (str): Field to sort by
+ direction (str): Sort direction
+ metadata_filter (dict, optional): Metadata-based filters
+
+ Returns:
+ tuple[str, dict]: SQL query and parameters
+ """
+ if limit < 1 or limit > 1000:
+ raise HTTPException(status_code=400, detail="Limit must be between 1 and 1000")
+ if offset < 0:
+ raise HTTPException(status_code=400, detail="Offset must be non-negative")
+
+ metadata_clause = ""
+ params = {
+ "developer_id": developer_id,
+ "limit": limit,
+ "offset": offset
+ }
+
+ if metadata_filter:
+ metadata_clause = "AND metadata @> %(metadata_filter)s"
+ params["metadata_filter"] = metadata_filter
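+        # `@>` is the JSONB containment operator: a row matches when its
+        # metadata contains every key/value pair in the filter document.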
+
+ query = parse_one(f"""
+ SELECT
+ user_id as id,
+ developer_id,
+ name,
+ about,
+ metadata,
+ created_at,
+ updated_at
+ FROM users
+ WHERE developer_id = %(developer_id)s
+ {metadata_clause}
+ ORDER BY {sort_by} {direction}
+ LIMIT %(limit)s
+ OFFSET %(offset)s;
+ """).sql()
+
+ return query, params
\ No newline at end of file
diff --git a/agents-api/agents_api/queries/users/patch_user.py b/agents-api/agents_api/queries/users/patch_user.py
new file mode 100644
index 000000000..468b38b00
--- /dev/null
+++ b/agents-api/agents_api/queries/users/patch_user.py
@@ -0,0 +1,73 @@
+from typing import Any
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import PatchUserRequest, ResourceUpdatedResponse
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+@rewrap_exceptions({
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+})
+@wrap_in_class(ResourceUpdatedResponse, one=True)
+@increase_counter("patch_user")
+@pg_query
+@beartype
+def patch_user_query(
+ *,
+ developer_id: UUID,
+ user_id: UUID,
+ data: PatchUserRequest
+) -> tuple[str, dict]:
+ """
+ Constructs an optimized SQL query for partial user updates.
+ Uses primary key for efficient update and jsonb_merge for metadata.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ user_id (UUID): The user's UUID
+ data (PatchUserRequest): Partial update data
+
+ Returns:
+ tuple[str, dict]: SQL query and parameters
+ """
+ update_parts = []
+ params = {
+ "developer_id": developer_id,
+ "user_id": user_id,
+ }
+
+ if data.name is not None:
+ update_parts.append("name = %(name)s")
+ params["name"] = data.name
+ if data.about is not None:
+ update_parts.append("about = %(about)s")
+ params["about"] = data.about
+ if data.metadata is not None:
+ update_parts.append("metadata = metadata || %(metadata)s")
+ params["metadata"] = data.metadata
+
+ query = parse_one(f"""
+ UPDATE users
+ SET {", ".join(update_parts)}
+ WHERE developer_id = %(developer_id)s
+ AND user_id = %(user_id)s
+ RETURNING
+ user_id as id,
+ developer_id,
+ name,
+ about,
+ metadata,
+ created_at,
+ updated_at;
+ """).sql()
+
+ return query, params
\ No newline at end of file
diff --git a/agents-api/agents_api/queries/users/update_user.py b/agents-api/agents_api/queries/users/update_user.py
new file mode 100644
index 000000000..ed33e3e42
--- /dev/null
+++ b/agents-api/agents_api/queries/users/update_user.py
@@ -0,0 +1,68 @@
+from typing import Any
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+from psycopg import errors as psycopg_errors
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import UpdateUserRequest, ResourceUpdatedResponse
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+@rewrap_exceptions({
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+})
+@wrap_in_class(ResourceUpdatedResponse, one=True)
+@increase_counter("update_user")
+@pg_query
+@beartype
+def update_user_query(
+ *,
+ developer_id: UUID,
+ user_id: UUID,
+ data: UpdateUserRequest
+) -> tuple[str, dict]:
+ """
+ Constructs an optimized SQL query to update a user's details.
+ Uses primary key for efficient update.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ user_id (UUID): The user's UUID
+ data (UpdateUserRequest): Updated user data
+
+ Returns:
+ tuple[str, dict]: SQL query and parameters
+ """
+ query = parse_one("""
+ UPDATE users
+ SET
+ name = %(name)s,
+ about = %(about)s,
+ metadata = %(metadata)s
+ WHERE developer_id = %(developer_id)s
+ AND user_id = %(user_id)s
+ RETURNING
+ user_id as id,
+ developer_id,
+ name,
+ about,
+ metadata,
+ created_at,
+ updated_at;
+ """).sql()
+
+ params = {
+ "developer_id": developer_id,
+ "user_id": user_id,
+ "name": data.name,
+ "about": data.about,
+ "metadata": data.metadata or {},
+ }
+
+ return query, params
\ No newline at end of file
From 55500d97223c10913b751bc781003259a12b784e Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Sun, 15 Dec 2024 06:07:50 +0000
Subject: [PATCH 020/274] refactor: Lint agents-api (CI)
---
.../agents_api/queries/users/__init__.py | 6 ++--
.../queries/users/create_or_update_user.py | 23 +++++++-------
.../agents_api/queries/users/create_user.py | 31 ++++++++++---------
.../agents_api/queries/users/delete_user.py | 19 +++++++-----
.../agents_api/queries/users/get_user.py | 19 +++++++-----
.../agents_api/queries/users/list_users.py | 25 +++++++--------
.../agents_api/queries/users/patch_user.py | 24 +++++++-------
.../agents_api/queries/users/update_user.py | 26 ++++++++--------
8 files changed, 90 insertions(+), 83 deletions(-)
diff --git a/agents-api/agents_api/queries/users/__init__.py b/agents-api/agents_api/queries/users/__init__.py
index 4e810a4fe..d7988279e 100644
--- a/agents-api/agents_api/queries/users/__init__.py
+++ b/agents-api/agents_api/queries/users/__init__.py
@@ -1,5 +1,5 @@
"""
-The `user` module within the `queries` package provides SQL query functions for managing users
+The `user` module within the `queries` package provides SQL query functions for managing users
in the TimescaleDB database. This includes operations for:
- Creating new users
@@ -9,8 +9,8 @@
- Deleting users
"""
-from .create_user import create_user
from .create_or_update_user import create_or_update_user_query
+from .create_user import create_user
from .delete_user import delete_user_query
from .get_user import get_user_query
from .list_users import list_users_query
@@ -25,4 +25,4 @@
"list_users_query",
"patch_user_query",
"update_user_query",
-]
\ No newline at end of file
+]
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
index a6312b243..67182d047 100644
--- a/agents-api/agents_api/queries/users/create_or_update_user.py
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -1,9 +1,9 @@
from typing import Any
from uuid import UUID
+from asyncpg import exceptions as asyncpg_exceptions
from beartype import beartype
from fastapi import HTTPException
-from asyncpg import exceptions as asyncpg_exceptions
from sqlglot import parse_one
from ...autogen.openapi_model import CreateUserRequest, User
@@ -11,22 +11,21 @@
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-@rewrap_exceptions({
- asyncpg_exceptions.ForeignKeyViolationError: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
-})
+@rewrap_exceptions(
+ {
+ asyncpg_exceptions.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
@wrap_in_class(User)
@increase_counter("create_or_update_user")
@pg_query
@beartype
def create_or_update_user_query(
- *,
- developer_id: UUID,
- user_id: UUID,
- data: CreateUserRequest
+ *, developer_id: UUID, user_id: UUID, data: CreateUserRequest
) -> tuple[str, dict]:
"""
Constructs an SQL query to create or update a user.
diff --git a/agents-api/agents_api/queries/users/create_user.py b/agents-api/agents_api/queries/users/create_user.py
index 194d9bf03..0f979ebdd 100644
--- a/agents-api/agents_api/queries/users/create_user.py
+++ b/agents-api/agents_api/queries/users/create_user.py
@@ -4,26 +4,29 @@
from beartype import beartype
from fastapi import HTTPException
from psycopg import errors as psycopg_errors
-from sqlglot import parse_one
from pydantic import ValidationError
+from sqlglot import parse_one
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateUserRequest, User
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-@rewrap_exceptions({
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data.",
- ),
-})
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ ValidationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Input validation failed. Please check the provided data.",
+ ),
+ }
+)
@wrap_in_class(User)
@increase_counter("create_user")
@pg_query
@@ -46,7 +49,7 @@ def create_user(
tuple[str, dict]: A tuple containing the SQL query and its parameters.
"""
user_id = user_id or uuid7()
-
+
query = parse_one("""
INSERT INTO users (
developer_id,
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
index 551129f00..2dfb0b156 100644
--- a/agents-api/agents_api/queries/users/delete_user.py
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -10,13 +10,16 @@
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-@rewrap_exceptions({
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
-})
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
@wrap_in_class(ResourceDeletedResponse, one=True)
@increase_counter("delete_user")
@pg_query
@@ -42,4 +45,4 @@ def delete_user_query(*, developer_id: UUID, user_id: UUID) -> tuple[list[str],
COMMIT;
""").sql()
- return [query], {"developer_id": developer_id, "user_id": user_id}
\ No newline at end of file
+ return [query], {"developer_id": developer_id, "user_id": user_id}
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index 3982ea46e..bccf70ad2 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -10,13 +10,16 @@
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-@rewrap_exceptions({
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
-})
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
@wrap_in_class(User, one=True)
@increase_counter("get_user")
@pg_query
@@ -47,4 +50,4 @@ def get_user_query(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
AND user_id = %(user_id)s;
""").sql()
- return query, {"developer_id": developer_id, "user_id": user_id}
\ No newline at end of file
+ return query, {"developer_id": developer_id, "user_id": user_id}
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
index 312299082..3c8a3690c 100644
--- a/agents-api/agents_api/queries/users/list_users.py
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -10,13 +10,16 @@
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-@rewrap_exceptions({
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
-})
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
@wrap_in_class(User)
@increase_counter("list_users")
@pg_query
@@ -51,11 +54,7 @@ def list_users_query(
raise HTTPException(status_code=400, detail="Offset must be non-negative")
metadata_clause = ""
- params = {
- "developer_id": developer_id,
- "limit": limit,
- "offset": offset
- }
+ params = {"developer_id": developer_id, "limit": limit, "offset": offset}
if metadata_filter:
metadata_clause = "AND metadata @> %(metadata_filter)s"
@@ -78,4 +77,4 @@ def list_users_query(
OFFSET %(offset)s;
""").sql()
- return query, params
\ No newline at end of file
+ return query, params
diff --git a/agents-api/agents_api/queries/users/patch_user.py b/agents-api/agents_api/queries/users/patch_user.py
index 468b38b00..40c6aff4d 100644
--- a/agents-api/agents_api/queries/users/patch_user.py
+++ b/agents-api/agents_api/queries/users/patch_user.py
@@ -10,22 +10,22 @@
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-@rewrap_exceptions({
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
-})
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
@wrap_in_class(ResourceUpdatedResponse, one=True)
@increase_counter("patch_user")
@pg_query
@beartype
def patch_user_query(
- *,
- developer_id: UUID,
- user_id: UUID,
- data: PatchUserRequest
+ *, developer_id: UUID, user_id: UUID, data: PatchUserRequest
) -> tuple[str, dict]:
"""
Constructs an optimized SQL query for partial user updates.
@@ -70,4 +70,4 @@ def patch_user_query(
updated_at;
""").sql()
- return query, params
\ No newline at end of file
+ return query, params
diff --git a/agents-api/agents_api/queries/users/update_user.py b/agents-api/agents_api/queries/users/update_user.py
index ed33e3e42..58f7ae8b2 100644
--- a/agents-api/agents_api/queries/users/update_user.py
+++ b/agents-api/agents_api/queries/users/update_user.py
@@ -6,26 +6,26 @@
from psycopg import errors as psycopg_errors
from sqlglot import parse_one
-from ...autogen.openapi_model import UpdateUserRequest, ResourceUpdatedResponse
+from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateUserRequest
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-@rewrap_exceptions({
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
-})
+
+@rewrap_exceptions(
+ {
+ psycopg_errors.ForeignKeyViolation: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
@wrap_in_class(ResourceUpdatedResponse, one=True)
@increase_counter("update_user")
@pg_query
@beartype
def update_user_query(
- *,
- developer_id: UUID,
- user_id: UUID,
- data: UpdateUserRequest
+ *, developer_id: UUID, user_id: UUID, data: UpdateUserRequest
) -> tuple[str, dict]:
"""
Constructs an optimized SQL query to update a user's details.
@@ -65,4 +65,4 @@ def update_user_query(
"metadata": data.metadata or {},
}
- return query, params
\ No newline at end of file
+ return query, params
From afc51abae47f5c65933c362bc43ceab3c9d82701 Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Mon, 16 Dec 2024 00:02:32 -0500
Subject: [PATCH 021/274] fix(queries-user): major bug fixes, refactoring, and
 an initial user SQL test suite
---
.../agents_api/queries/users/__init__.py | 24 +--
.../queries/users/create_or_update_user.py | 79 +++++---
.../agents_api/queries/users/create_user.py | 65 ++++---
.../agents_api/queries/users/delete_user.py | 49 +++--
.../agents_api/queries/users/get_user.py | 52 +++--
.../agents_api/queries/users/list_users.py | 64 ++++---
.../agents_api/queries/users/patch_user.py | 55 ++++--
.../agents_api/queries/users/update_user.py | 61 +++---
agents-api/tests/test_user_sql.py | 178 ++++++++++++++++++
agents-api/uv.lock | 15 --
10 files changed, 467 insertions(+), 175 deletions(-)
create mode 100644 agents-api/tests/test_user_sql.py
diff --git a/agents-api/agents_api/queries/users/__init__.py b/agents-api/agents_api/queries/users/__init__.py
index d7988279e..26eb37377 100644
--- a/agents-api/agents_api/queries/users/__init__.py
+++ b/agents-api/agents_api/queries/users/__init__.py
@@ -9,20 +9,20 @@
- Deleting users
"""
-from .create_or_update_user import create_or_update_user_query
+from .create_or_update_user import create_or_update_user
from .create_user import create_user
-from .delete_user import delete_user_query
-from .get_user import get_user_query
-from .list_users import list_users_query
-from .patch_user import patch_user_query
-from .update_user import update_user_query
+from .get_user import get_user
+from .list_users import list_users
+from .patch_user import patch_user
+from .update_user import update_user
+from .delete_user import delete_user
__all__ = [
"create_user",
- "create_or_update_user_query",
- "delete_user_query",
- "get_user_query",
- "list_users_query",
- "patch_user_query",
- "update_user_query",
+ "create_or_update_user",
+ "delete_user",
+ "get_user",
+ "list_users",
+ "patch_user",
+ "update_user",
]
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
index 67182d047..b579e8de0 100644
--- a/agents-api/agents_api/queries/users/create_or_update_user.py
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -1,30 +1,72 @@
-from typing import Any
from uuid import UUID
-from asyncpg import exceptions as asyncpg_exceptions
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import CreateUserRequest, User
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+# Use COALESCE for metadata so callers need not special-case a None value
+raw_query = """
+INSERT INTO users (
+ developer_id,
+ user_id,
+ name,
+ about,
+ metadata
+)
+VALUES (
+ %(developer_id)s,
+ %(user_id)s,
+ %(name)s,
+ %(about)s,
+ COALESCE(%(metadata)s, '{}'::jsonb)
+)
+ON CONFLICT (developer_id, user_id) DO UPDATE SET
+ name = EXCLUDED.name,
+ about = EXCLUDED.about,
+ metadata = EXCLUDED.metadata
+RETURNING *;
+"""
+
+# Normalize and qualify the query against the table schema
+query = optimize(
+ parse_one(raw_query),
+ schema={
+ "users": {
+ "developer_id": "UUID",
+ "user_id": "UUID",
+ "name": "STRING",
+ "about": "STRING",
+ "metadata": "JSONB",
+ }
+ },
+).sql(pretty=True)
+
@rewrap_exceptions(
{
- asyncpg_exceptions.ForeignKeyViolationError: partialclass(
+ asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
- )
+ ),
+ asyncpg.UniqueViolationError: partialclass( # Add handling for potential race conditions
+ HTTPException,
+ status_code=409,
+ detail="A user with this ID already exists.",
+ ),
}
)
@wrap_in_class(User)
@increase_counter("create_or_update_user")
@pg_query
@beartype
-def create_or_update_user_query(
+def create_or_update_user(
*, developer_id: UUID, user_id: UUID, data: CreateUserRequest
) -> tuple[str, dict]:
"""
@@ -37,35 +79,16 @@ def create_or_update_user_query(
Returns:
tuple[str, dict]: SQL query and parameters.
- """
- query = parse_one("""
- INSERT INTO users (
- developer_id,
- user_id,
- name,
- about,
- metadata
- )
- VALUES (
- %(developer_id)s,
- %(user_id)s,
- %(name)s,
- %(about)s,
- %(metadata)s
- )
- ON CONFLICT (developer_id, user_id) DO UPDATE SET
- name = EXCLUDED.name,
- about = EXCLUDED.about,
- metadata = EXCLUDED.metadata
- RETURNING *;
- """).sql()
+ Raises:
+ HTTPException: If developer doesn't exist (404) or on unique constraint violation (409)
+ """
params = {
"developer_id": developer_id,
"user_id": user_id,
"name": data.name,
"about": data.about,
- "metadata": data.metadata or {},
+ "metadata": data.metadata, # Let COALESCE handle None case in SQL
}
return query, params
diff --git a/agents-api/agents_api/queries/users/create_user.py b/agents-api/agents_api/queries/users/create_user.py
index 0f979ebdd..691c43500 100644
--- a/agents-api/agents_api/queries/users/create_user.py
+++ b/agents-api/agents_api/queries/users/create_user.py
@@ -1,29 +1,60 @@
-from typing import Any
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
-from pydantic import ValidationError
-from sqlglot import parse_one
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateUserRequest, User
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+# Define the raw SQL query outside the function
+raw_query = """
+INSERT INTO users (
+ developer_id,
+ user_id,
+ name,
+ about,
+ metadata
+)
+VALUES (
+ %(developer_id)s,
+ %(user_id)s,
+ %(name)s,
+ %(about)s,
+ %(metadata)s
+)
+RETURNING *;
+"""
+
+# Parse and optimize the query
+query = optimize(
+ parse_one(raw_query),
+ schema={
+ "users": {
+ "developer_id": "UUID",
+ "user_id": "UUID",
+ "name": "STRING",
+ "about": "STRING",
+ "metadata": "JSONB",
+ }
+ },
+).sql(pretty=True)
+
@rewrap_exceptions(
{
- psycopg_errors.ForeignKeyViolation: partialclass(
+ asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
),
- ValidationError: partialclass(
+ asyncpg.NullValueNoIndicatorParameterError: partialclass(
HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data.",
+ status_code=404,
+ detail="The specified developer does not exist.",
),
}
)
@@ -50,24 +81,6 @@ def create_user(
"""
user_id = user_id or uuid7()
- query = parse_one("""
- INSERT INTO users (
- developer_id,
- user_id,
- name,
- about,
- metadata
- )
- VALUES (
- %(developer_id)s,
- %(user_id)s,
- %(name)s,
- %(about)s,
- %(metadata)s
- )
- RETURNING *;
- """).sql()
-
params = {
"developer_id": developer_id,
"user_id": user_id,
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
index 2dfb0b156..a21a4b9d9 100644
--- a/agents-api/agents_api/queries/users/delete_user.py
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -1,19 +1,44 @@
-from typing import Any
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceDeletedResponse
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+# Define the raw SQL query outside the function
+raw_query = """
+WITH deleted_data AS (
+ DELETE FROM user_files
+ WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s
+),
+deleted_docs AS (
+ DELETE FROM user_docs
+ WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s
+)
+DELETE FROM users
+WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s
+RETURNING user_id as id, developer_id;
+"""
+
+# Parse and optimize the query
+query = optimize(
+ parse_one(raw_query),
+ schema={
+ "user_files": {"developer_id": "UUID", "user_id": "UUID"},
+ "user_docs": {"developer_id": "UUID", "user_id": "UUID"},
+ "users": {"developer_id": "UUID", "user_id": "UUID"},
+ },
+).sql(pretty=True)
+
@rewrap_exceptions(
{
- psycopg_errors.ForeignKeyViolation: partialclass(
+ asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
@@ -24,9 +49,9 @@
@increase_counter("delete_user")
@pg_query
@beartype
-def delete_user_query(*, developer_id: UUID, user_id: UUID) -> tuple[list[str], dict]:
+def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
"""
- Constructs optimized SQL queries to delete a user and related data.
+ Constructs optimized SQL query to delete a user and related data.
Uses primary key for efficient deletion.
Args:
@@ -34,15 +59,7 @@ def delete_user_query(*, developer_id: UUID, user_id: UUID) -> tuple[list[str],
user_id (UUID): The user's UUID
Returns:
- tuple[list[str], dict]: List of SQL queries and parameters
+ tuple[str, dict]: SQL query and parameters
"""
- query = parse_one("""
- BEGIN;
- DELETE FROM user_files WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s;
- DELETE FROM user_docs WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s;
- DELETE FROM users WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s
- RETURNING user_id as id, developer_id;
- COMMIT;
- """).sql()
-
- return [query], {"developer_id": developer_id, "user_id": user_id}
+
+ return query, {"developer_id": developer_id, "user_id": user_id}
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index bccf70ad2..ca5627701 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -1,19 +1,50 @@
-from typing import Any
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import User
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+# Define the raw SQL query outside the function
+raw_query = """
+SELECT
+ user_id as id,
+ developer_id,
+ name,
+ about,
+ metadata,
+ created_at,
+ updated_at
+FROM users
+WHERE developer_id = %(developer_id)s
+AND user_id = %(user_id)s;
+"""
+
+# Parse and optimize the query
+query = optimize(
+ parse_one(raw_query),
+ schema={
+ "users": {
+ "developer_id": "UUID",
+ "user_id": "UUID",
+ "name": "STRING",
+ "about": "STRING",
+ "metadata": "JSONB",
+ "created_at": "TIMESTAMP",
+ "updated_at": "TIMESTAMP",
+ }
+ },
+).sql(pretty=True)
+
@rewrap_exceptions(
{
- psycopg_errors.ForeignKeyViolation: partialclass(
+ asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
@@ -24,7 +55,7 @@
@increase_counter("get_user")
@pg_query
@beartype
-def get_user_query(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
+def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
"""
Constructs an optimized SQL query to retrieve a user's details.
Uses the primary key index (developer_id, user_id) for efficient lookup.
@@ -36,18 +67,5 @@ def get_user_query(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
Returns:
tuple[str, dict]: SQL query and parameters.
"""
- query = parse_one("""
- SELECT
- user_id as id,
- developer_id,
- name,
- about,
- metadata,
- created_at,
- updated_at
- FROM users
- WHERE developer_id = %(developer_id)s
- AND user_id = %(user_id)s;
- """).sql()
return query, {"developer_id": developer_id, "user_id": user_id}
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
index 3c8a3690c..e6f854410 100644
--- a/agents-api/agents_api/queries/users/list_users.py
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -1,19 +1,54 @@
-from typing import Any, Literal
+from typing import Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
-from sqlglot import parse_one
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import User
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+# Define the raw SQL query outside the function
+raw_query = """
+SELECT
+ user_id as id,
+ developer_id,
+ name,
+ about,
+ metadata,
+ created_at,
+ updated_at
+FROM users
+WHERE developer_id = %(developer_id)s
+ {metadata_clause}
+ AND deleted_at IS NULL
+ORDER BY {sort_by} {direction} NULLS LAST
+LIMIT %(limit)s
+OFFSET %(offset)s;
+"""
+
+# Parse and optimize the query
+query_template = optimize(
+ parse_one(raw_query),
+ schema={
+ "users": {
+ "developer_id": "UUID",
+ "user_id": "UUID",
+ "name": "STRING",
+ "about": "STRING",
+ "metadata": "JSONB",
+ "created_at": "TIMESTAMP",
+ "updated_at": "TIMESTAMP",
+ }
+ },
+).sql(pretty=True)
+
@rewrap_exceptions(
{
- psycopg_errors.ForeignKeyViolation: partialclass(
+ asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
@@ -24,7 +59,7 @@
@increase_counter("list_users")
@pg_query
@beartype
-def list_users_query(
+def list_users(
*,
developer_id: UUID,
limit: int = 100,
@@ -60,21 +95,8 @@ def list_users_query(
metadata_clause = "AND metadata @> %(metadata_filter)s"
params["metadata_filter"] = metadata_filter
- query = parse_one(f"""
- SELECT
- user_id as id,
- developer_id,
- name,
- about,
- metadata,
- created_at,
- updated_at
- FROM users
- WHERE developer_id = %(developer_id)s
- {metadata_clause}
- ORDER BY {sort_by} {direction}
- LIMIT %(limit)s
- OFFSET %(offset)s;
- """).sql()
+ query = query_template.format(
+ metadata_clause=metadata_clause, sort_by=sort_by, direction=direction
+ )
return query, params
diff --git a/agents-api/agents_api/queries/users/patch_user.py b/agents-api/agents_api/queries/users/patch_user.py
index 40c6aff4d..d491b8e84 100644
--- a/agents-api/agents_api/queries/users/patch_user.py
+++ b/agents-api/agents_api/queries/users/patch_user.py
@@ -1,19 +1,51 @@
-from typing import Any
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import PatchUserRequest, ResourceUpdatedResponse
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+# Define the raw SQL query outside the function
+raw_query = """
+UPDATE users
+SET {update_parts}
+WHERE developer_id = %(developer_id)s
+AND user_id = %(user_id)s
+RETURNING
+ user_id as id,
+ developer_id,
+ name,
+ about,
+ metadata,
+ created_at,
+ updated_at;
+"""
+
+# Parse and optimize the query
+query_template = optimize(
+ parse_one(raw_query),
+ schema={
+ "users": {
+ "developer_id": "UUID",
+ "user_id": "UUID",
+ "name": "STRING",
+ "about": "STRING",
+ "metadata": "JSONB",
+ "created_at": "TIMESTAMP",
+ "updated_at": "TIMESTAMP",
+ }
+ },
+).sql(pretty=True)
+
@rewrap_exceptions(
{
- psycopg_errors.ForeignKeyViolation: partialclass(
+ asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
@@ -24,7 +56,7 @@
@increase_counter("patch_user")
@pg_query
@beartype
-def patch_user_query(
+def patch_user(
*, developer_id: UUID, user_id: UUID, data: PatchUserRequest
) -> tuple[str, dict]:
"""
@@ -55,19 +87,6 @@ def patch_user_query(
update_parts.append("metadata = metadata || %(metadata)s")
params["metadata"] = data.metadata
- query = parse_one(f"""
- UPDATE users
- SET {", ".join(update_parts)}
- WHERE developer_id = %(developer_id)s
- AND user_id = %(user_id)s
- RETURNING
- user_id as id,
- developer_id,
- name,
- about,
- metadata,
- created_at,
- updated_at;
- """).sql()
+ query = query_template.format(update_parts=", ".join(update_parts))
return query, params
diff --git a/agents-api/agents_api/queries/users/update_user.py b/agents-api/agents_api/queries/users/update_user.py
index 58f7ae8b2..9e622e40d 100644
--- a/agents-api/agents_api/queries/users/update_user.py
+++ b/agents-api/agents_api/queries/users/update_user.py
@@ -1,19 +1,54 @@
-from typing import Any
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateUserRequest
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+# Define the raw SQL query outside the function
+raw_query = """
+UPDATE users
+SET
+ name = %(name)s,
+ about = %(about)s,
+ metadata = %(metadata)s
+WHERE developer_id = %(developer_id)s
+AND user_id = %(user_id)s
+RETURNING
+ user_id as id,
+ developer_id,
+ name,
+ about,
+ metadata,
+ created_at,
+ updated_at;
+"""
+
+# Parse and optimize the query
+query = optimize(
+ parse_one(raw_query),
+ schema={
+ "users": {
+ "developer_id": "UUID",
+ "user_id": "UUID",
+ "name": "STRING",
+ "about": "STRING",
+ "metadata": "JSONB",
+ "created_at": "TIMESTAMP",
+ "updated_at": "TIMESTAMP",
+ }
+ },
+).sql(pretty=True)
+
@rewrap_exceptions(
{
- psycopg_errors.ForeignKeyViolation: partialclass(
+ asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
@@ -24,7 +59,7 @@
@increase_counter("update_user")
@pg_query
@beartype
-def update_user_query(
+def update_user(
*, developer_id: UUID, user_id: UUID, data: UpdateUserRequest
) -> tuple[str, dict]:
"""
@@ -39,24 +74,6 @@ def update_user_query(
Returns:
tuple[str, dict]: SQL query and parameters
"""
- query = parse_one("""
- UPDATE users
- SET
- name = %(name)s,
- about = %(about)s,
- metadata = %(metadata)s
- WHERE developer_id = %(developer_id)s
- AND user_id = %(user_id)s
- RETURNING
- user_id as id,
- developer_id,
- name,
- about,
- metadata,
- created_at,
- updated_at;
- """).sql()
-
params = {
"developer_id": developer_id,
"user_id": user_id,
diff --git a/agents-api/tests/test_user_sql.py b/agents-api/tests/test_user_sql.py
new file mode 100644
index 000000000..50b6d096b
--- /dev/null
+++ b/agents-api/tests/test_user_sql.py
@@ -0,0 +1,178 @@
+"""
+This module contains tests for SQL query generation functions in the users module.
+Tests run the generated queries against a test PostgreSQL client fixture.
+"""
+
+from uuid import UUID
+
+from uuid_extensions import uuid7
+from ward import raises, test
+
+from agents_api.autogen.openapi_model import (
+ CreateOrUpdateUserRequest,
+ CreateUserRequest,
+ PatchUserRequest,
+ ResourceUpdatedResponse,
+ UpdateUserRequest,
+ User,
+)
+from agents_api.queries.users import (
+ create_or_update_user,
+ create_user,
+ delete_user,
+ get_user,
+ list_users,
+ patch_user,
+ update_user,
+)
+from tests.fixtures import pg_client, test_developer_id, test_user
+
+# Test UUIDs for consistent testing
+TEST_DEVELOPER_ID = UUID("123e4567-e89b-12d3-a456-426614174000")
+TEST_USER_ID = UUID("987e6543-e21b-12d3-a456-426614174000")
+
+
+@test("model: create user sql")
+def _(client=pg_client, developer_id=test_developer_id):
+ """Test that a user can be successfully created."""
+
+ create_user(
+ developer_id=developer_id,
+ data=CreateUserRequest(
+ name="test user",
+ about="test user about",
+ ),
+ client=client,
+ )
+
+
+@test("model: create or update user sql")
+def _(client=pg_client, developer_id=test_developer_id):
+ """Test that a user can be successfully created or updated."""
+
+ create_or_update_user(
+ developer_id=developer_id,
+ user_id=uuid7(),
+ data=CreateOrUpdateUserRequest(
+ name="test user",
+ about="test user about",
+ ),
+ client=client,
+ )
+
+
+@test("model: update user sql")
+def _(client=pg_client, developer_id=test_developer_id, user=test_user):
+ """Test that an existing user's information can be successfully updated."""
+
+ # Update the user; the assertions below check that 'updated_at' exceeds 'created_at', indicating a successful update.
+ update_result = update_user(
+ user_id=user.id,
+ developer_id=developer_id,
+ data=UpdateUserRequest(
+ name="updated user",
+ about="updated user about",
+ ),
+ client=client,
+ )
+
+ assert update_result is not None
+ assert isinstance(update_result, ResourceUpdatedResponse)
+ assert update_result.updated_at > user.created_at
+
+
+@test("model: get user not exists sql")
+def _(client=pg_client, developer_id=test_developer_id):
+ """Test that retrieving a non-existent user returns an empty result."""
+
+ user_id = uuid7()
+
+ # Retrieving a non-existent user should raise an exception.
+ try:
+ get_user(
+ user_id=user_id,
+ developer_id=developer_id,
+ client=client,
+ )
+ except Exception:
+ pass
+ else:
+ assert (
+ False
+ ), "Expected an exception to be raised when retrieving a non-existent user."
+
+
+@test("model: get user exists sql")
+def _(client=pg_client, developer_id=test_developer_id, user=test_user):
+ """Test that retrieving an existing user returns the correct user information."""
+
+ result = get_user(
+ user_id=user.id,
+ developer_id=developer_id,
+ client=client,
+ )
+
+ assert result is not None
+ assert isinstance(result, User)
+
+
+@test("model: list users sql")
+def _(client=pg_client, developer_id=test_developer_id):
+ """Test that listing users returns a collection of user information."""
+
+ result = list_users(
+ developer_id=developer_id,
+ client=client,
+ )
+
+ assert isinstance(result, list)
+ assert len(result) >= 1
+ assert all(isinstance(user, User) for user in result)
+
+
+@test("model: patch user sql")
+def _(client=pg_client, developer_id=test_developer_id, user=test_user):
+ """Test that a user can be successfully patched."""
+
+ patch_result = patch_user(
+ developer_id=developer_id,
+ user_id=user.id,
+ data=PatchUserRequest(
+ name="patched user",
+ about="patched user about",
+ metadata={"test": "metadata"},
+ ),
+ client=client,
+ )
+
+ assert patch_result is not None
+ assert isinstance(patch_result, ResourceUpdatedResponse)
+ assert patch_result.updated_at > user.created_at
+
+
+@test("model: delete user sql")
+def _(client=pg_client, developer_id=test_developer_id, user=test_user):
+ """Test that a user can be successfully deleted."""
+
+ delete_result = delete_user(
+ developer_id=developer_id,
+ user_id=user.id,
+ client=client,
+ )
+
+ assert delete_result is not None
+ assert isinstance(delete_result, ResourceUpdatedResponse)
+
+ # Verify the user no longer exists
+ try:
+ get_user(
+ developer_id=developer_id,
+ user_id=user.id,
+ client=client,
+ )
+ except Exception:
+ pass
+ else:
+ assert (
+ False
+ ), "Expected an exception to be raised when retrieving a deleted user."
diff --git a/agents-api/uv.lock b/agents-api/uv.lock
index 0c5422f0a..01a1178c4 100644
--- a/agents-api/uv.lock
+++ b/agents-api/uv.lock
@@ -37,7 +37,6 @@ dependencies = [
{ name = "pandas" },
{ name = "prometheus-client" },
{ name = "prometheus-fastapi-instrumentator" },
- { name = "psycopg" },
{ name = "pycozo", extra = ["embedded"] },
{ name = "pycozo-async" },
{ name = "pydantic", extra = ["email"] },
@@ -107,7 +106,6 @@ requires-dist = [
{ name = "pandas", specifier = "~=2.2.2" },
{ name = "prometheus-client", specifier = "~=0.21.0" },
{ name = "prometheus-fastapi-instrumentator", specifier = "~=7.0.0" },
- { name = "psycopg", specifier = ">=3.2.3" },
{ name = "pycozo", extras = ["embedded"], specifier = "~=0.7.6" },
{ name = "pycozo-async", specifier = "~=0.7.7" },
{ name = "pydantic", extras = ["email"], specifier = "~=2.10.2" },
@@ -2194,19 +2192,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 },
]
-[[package]]
-name = "psycopg"
-version = "3.2.3"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "typing-extensions" },
- { name = "tzdata", marker = "sys_platform == 'win32'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/d1/ad/7ce016ae63e231575df0498d2395d15f005f05e32d3a2d439038e1bd0851/psycopg-3.2.3.tar.gz", hash = "sha256:a5764f67c27bec8bfac85764d23c534af2c27b893550377e37ce59c12aac47a2", size = 155550 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/ce/21/534b8f5bd9734b7a2fcd3a16b1ee82ef6cad81a4796e95ebf4e0c6a24119/psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907", size = 197934 },
-]
-
[[package]]
name = "ptyprocess"
version = "0.7.0"
From f2f3912cc40de4b3c4106def50a347efe15be177 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Mon, 16 Dec 2024 05:04:14 +0000
Subject: [PATCH 022/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/users/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/agents-api/agents_api/queries/users/__init__.py b/agents-api/agents_api/queries/users/__init__.py
index 26eb37377..fb878c1a6 100644
--- a/agents-api/agents_api/queries/users/__init__.py
+++ b/agents-api/agents_api/queries/users/__init__.py
@@ -11,11 +11,11 @@
from .create_or_update_user import create_or_update_user
from .create_user import create_user
+from .delete_user import delete_user
from .get_user import get_user
from .list_users import list_users
from .patch_user import patch_user
from .update_user import update_user
-from .delete_user import delete_user
__all__ = [
"create_user",
From 7ea5574bd4c7c693adf4e81c637a30da0538d41c Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Mon, 16 Dec 2024 10:11:36 +0300
Subject: [PATCH 023/274] chore: Remove unused stuff
---
agents-api/agents_api/queries/utils.py | 443 +------------------------
1 file changed, 3 insertions(+), 440 deletions(-)
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 65c234f15..19a4c8d45 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -1,70 +1,24 @@
import concurrent.futures
import inspect
-import re
import time
from functools import partialmethod, wraps
from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar
-from uuid import UUID
import pandas as pd
from asyncpg import Record
from fastapi import HTTPException
-from httpcore import ConnectError, NetworkError, TimeoutException
-from httpx import ConnectError as HttpxConnectError
+from httpcore import NetworkError, TimeoutException
from httpx import RequestError
from pydantic import BaseModel
from requests.exceptions import ConnectionError, Timeout
from ..common.utils.cozo import uuid_int_list_to_uuid
-from ..env import do_verify_developer, do_verify_developer_owns_resource
P = ParamSpec("P")
T = TypeVar("T")
ModelT = TypeVar("ModelT", bound=BaseModel)
-def fix_uuid(
- item: dict[str, Any], attr_regex: str = r"^(?:id|.*_id)$"
-) -> dict[str, Any]:
- # find the attributes that are ids
- id_attrs = [
- attr for attr in item.keys() if re.match(attr_regex, attr) and item[attr]
- ]
-
- if not id_attrs:
- return item
-
- fixed = {
- **item,
- **{
- attr: uuid_int_list_to_uuid(item[attr])
- for attr in id_attrs
- if isinstance(item[attr], list)
- },
- }
-
- return fixed
-
-
-def fix_uuid_list(
- items: list[dict[str, Any]], attr_regex: str = r"^(?:id|.*_id)$"
-) -> list[dict[str, Any]]:
- fixed = list(map(lambda item: fix_uuid(item, attr_regex), items))
- return fixed
-
-
-def fix_uuid_if_present(item: Any, attr_regex: str = r"^(?:id|.*_id)$") -> Any:
- match item:
- case [dict(), *_]:
- return fix_uuid_list(item, attr_regex)
-
- case dict():
- return fix_uuid(item, attr_regex)
-
- case _:
- return item
-
-
def partialclass(cls, *args, **kwargs):
cls_signature = inspect.signature(cls)
bound = cls_signature.bind_partial(*args, **kwargs)
@@ -77,387 +31,6 @@ class NewCls(cls):
return NewCls
-def mark_session_updated_query(developer_id: UUID | str, session_id: UUID | str) -> str:
- return f"""
- input[developer_id, session_id] <- [[
- to_uuid("{str(developer_id)}"),
- to_uuid("{str(session_id)}"),
- ]]
-
- ?[
- developer_id,
- session_id,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- updated_at,
- ] :=
- input[developer_id, session_id],
- *sessions {{
- session_id,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- @ 'END'
- }},
- updated_at = [floor(now()), true]
-
- :put sessions {{
- developer_id,
- session_id,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- updated_at,
- }}
- """
-
-
-def verify_developer_id_query(developer_id: UUID | str) -> str:
- if not do_verify_developer:
- return "?[exists] := exists = true"
-
- return f"""
- matched[count(developer_id)] :=
- *developers{{
- developer_id,
- }}, developer_id = to_uuid("{str(developer_id)}")
-
- ?[exists] :=
- matched[num],
- exists = num > 0,
- assert(exists, "Developer does not exist")
-
- :limit 1
- """
-
-
-def verify_developer_owns_resource_query(
- developer_id: UUID | str,
- resource: str,
- parents: list[tuple[str, str]] | None = None,
- **resource_id,
-) -> str:
- if not do_verify_developer_owns_resource:
- return "?[exists] := exists = true"
-
- parents = parents or []
- resource_id_key, resource_id_value = next(iter(resource_id.items()))
-
- parents.append((resource, resource_id_key))
- parent_keys = ["developer_id", *map(lambda x: x[1], parents)]
-
- rule_head = f"""
- found[count({resource_id_key})] :=
- developer_id = to_uuid("{str(developer_id)}"),
- {resource_id_key} = to_uuid("{str(resource_id_value)}"),
- """
-
- rule_body = ""
- for parent_key, (relation, key) in zip(parent_keys, parents):
- rule_body += f"""
- *{relation}{{
- {parent_key},
- {key},
- }},
- """
-
- assertion = f"""
- ?[exists] :=
- found[num],
- exists = num > 0,
- assert(exists, "Developer does not own resource {resource} with {resource_id_key} {resource_id_value}")
-
- :limit 1
- """
-
- rule = rule_head + rule_body + assertion
- return rule
-
-
-def make_cozo_json_query(fields):
- return ", ".join(f'"{field}": {field}' for field in fields).strip()
-
-
-def cozo_query(
- func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
- debug: bool | None = None,
- only_on_error: bool = False,
- timeit: bool = False,
-):
- def cozo_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
- """
- Decorator that wraps a function that takes arbitrary arguments, and
- returns a (query string, variables) tuple.
-
- The wrapped function should additionally take a client keyword argument
- and then run the query using the client, returning a DataFrame.
- """
-
- from pprint import pprint
-
- from tenacity import (
- retry,
- retry_if_exception,
- stop_after_attempt,
- wait_exponential,
- )
-
- def is_resource_busy(e: Exception) -> bool:
- return (
- isinstance(e, HTTPException)
- and e.status_code == 429
- and not getattr(e, "cozo_offline", False)
- )
-
- @retry(
- stop=stop_after_attempt(4),
- wait=wait_exponential(multiplier=1, min=4, max=10),
- retry=retry_if_exception(is_resource_busy),
- )
- @wraps(func)
- def wrapper(*args: P.args, client=None, **kwargs: P.kwargs) -> pd.DataFrame:
- queries, variables = func(*args, **kwargs)
-
- if isinstance(queries, str):
- query = queries
- else:
- queries = [str(query) for query in queries if query]
- query = "}\n\n{\n".join(queries)
- query = f"{{ {query} }}"
-
- not only_on_error and debug and print(query)
- not only_on_error and debug and pprint(
- dict(
- variables=variables,
- )
- )
-
- # Run the query
- from ..clients import cozo
-
- try:
- client = client or cozo.get_cozo_client()
-
- start = timeit and time.perf_counter()
- result = client.run(query, variables)
- end = timeit and time.perf_counter()
-
- timeit and print(f"Cozo query time: {end - start:.2f} seconds")
-
- except Exception as e:
- if only_on_error and debug:
- print(query)
- pprint(variables)
-
- debug and print(repr(e))
-
- pretty_error = repr(e).lower()
- cozo_busy = ("busy" in pretty_error) or (
- "when executing against relation '_" in pretty_error
- )
- cozo_offline = isinstance(e, ConnectionError) and (
- ("connection refused" in pretty_error)
- or ("name or service not known" in pretty_error)
- )
- connection_error = isinstance(
- e,
- (
- ConnectionError,
- Timeout,
- TimeoutException,
- NetworkError,
- RequestError,
- ),
- )
-
- if cozo_busy or connection_error or cozo_offline:
- exc = HTTPException(
- status_code=429, detail="Resource busy. Please try again later."
- )
- exc.cozo_offline = cozo_offline
- raise exc from e
-
- raise
-
- # Need to fix the UUIDs in the result
- result = result.map(fix_uuid_if_present)
-
- not only_on_error and debug and pprint(
- dict(
- result=result.to_dict(orient="records"),
- )
- )
-
- return result
-
- # Set the wrapped function as an attribute of the wrapper,
- # forwards the __wrapped__ attribute if it exists.
- setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
-
- return wrapper
-
- if func is not None and callable(func):
- return cozo_query_dec(func)
-
- return cozo_query_dec
-
-
-def cozo_query_async(
- func: Callable[
- P,
- tuple[str | list[str | None], dict]
- | Awaitable[tuple[str | list[str | None], dict]],
- ]
- | None = None,
- debug: bool | None = None,
- only_on_error: bool = False,
- timeit: bool = False,
-):
- def cozo_query_dec(
- func: Callable[
- P, tuple[str | list[Any], dict] | Awaitable[tuple[str | list[Any], dict]]
- ],
- ):
- """
- Decorator that wraps a function that takes arbitrary arguments, and
- returns a (query string, variables) tuple.
-
- The wrapped function should additionally take a client keyword argument
- and then run the query using the client, returning a DataFrame.
- """
-
- from pprint import pprint
-
- from tenacity import (
- retry,
- retry_if_exception,
- stop_after_attempt,
- wait_exponential,
- )
-
- def is_resource_busy(e: Exception) -> bool:
- return (
- isinstance(e, HTTPException)
- and e.status_code == 429
- and not getattr(e, "cozo_offline", False)
- )
-
- @retry(
- stop=stop_after_attempt(6),
- wait=wait_exponential(multiplier=1.2, min=3, max=10),
- retry=retry_if_exception(is_resource_busy),
- reraise=True,
- )
- @wraps(func)
- async def wrapper(
- *args: P.args, client=None, **kwargs: P.kwargs
- ) -> pd.DataFrame:
- if inspect.iscoroutinefunction(func):
- queries, variables = await func(*args, **kwargs)
- else:
- queries, variables = func(*args, **kwargs)
-
- if isinstance(queries, str):
- query = queries
- else:
- queries = [str(query) for query in queries if query]
- query = "}\n\n{\n".join(queries)
- query = f"{{ {query} }}"
-
- not only_on_error and debug and print(query)
- not only_on_error and debug and pprint(
- dict(
- variables=variables,
- )
- )
-
- # Run the query
- from ..clients import cozo
-
- try:
- client = client or cozo.get_async_cozo_client()
-
- start = timeit and time.perf_counter()
- result = await client.run(query, variables)
- end = timeit and time.perf_counter()
-
- timeit and print(f"Cozo query time: {end - start:.2f} seconds")
-
- except Exception as e:
- if only_on_error and debug:
- print(query)
- pprint(variables)
-
- debug and print(repr(e))
-
- pretty_error = repr(e).lower()
- cozo_busy = ("busy" in pretty_error) or (
- "when executing against relation '_" in pretty_error
- )
- cozo_offline = (
- isinstance(e, ConnectError)
- or isinstance(e, HttpxConnectError)
- and (
- ("all connection attempts failed" in pretty_error)
- or ("name or service not known" in pretty_error)
- )
- )
- connection_error = isinstance(
- e,
- (
- ConnectError,
- HttpxConnectError,
- TimeoutException,
- NetworkError,
- RequestError,
- ),
- )
-
- if cozo_busy or connection_error or cozo_offline:
- exc = HTTPException(
- status_code=429, detail="Resource busy. Please try again later."
- )
- exc.cozo_offline = cozo_offline
- raise exc from e
-
- raise
-
- # Need to fix the UUIDs in the result
- result = result.map(fix_uuid_if_present)
-
- not only_on_error and debug and pprint(
- dict(
- result=result.to_dict(orient="records"),
- )
- )
-
- return result
-
- # Set the wrapped function as an attribute of the wrapper,
- # forwards the __wrapped__ attribute if it exists.
- setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
-
- return wrapper
-
- if func is not None and callable(func):
- return cozo_query_dec(func)
-
- return cozo_query_dec
-
-
def pg_query(
func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
debug: bool | None = None,
@@ -482,26 +55,16 @@ def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
wait_exponential,
)
- def is_resource_busy(e: Exception) -> bool:
- return (
- isinstance(e, HTTPException)
- and e.status_code == 429
- and not getattr(e, "cozo_offline", False)
- )
-
@retry(
stop=stop_after_attempt(4),
wait=wait_exponential(multiplier=1, min=4, max=10),
- retry=retry_if_exception(is_resource_busy),
+ # retry=retry_if_exception(is_resource_busy),
)
@wraps(func)
async def wrapper(
*args: P.args, client=None, **kwargs: P.kwargs
) -> list[Record]:
- if inspect.iscoroutinefunction(func):
- query, variables = await func(*args, **kwargs)
- else:
- query, variables = func(*args, **kwargs)
+ query, variables = await func(*args, **kwargs)
not only_on_error and debug and print(query)
not only_on_error and debug and pprint(
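
With the Cozo paths gone, pg_query reduces to a thin decorator: the wrapped function builds (query, variables) and the wrapper executes them against an asyncpg connection. A stripped-down sketch under that assumption (names are illustrative; the real wrapper also handles retries, debug printing, and error mapping):

    from functools import wraps
    from typing import Any, Callable

    from asyncpg import Record

    def pg_query_sketch(func: Callable[..., tuple[str, list[Any]]]):
        @wraps(func)
        async def wrapper(*args, client=None, **kwargs) -> list[Record]:
            query, variables = func(*args, **kwargs)
            # Assumes $1..$n positional placeholders, as in the later patches.
            return await client.fetch(query, *variables)
        return wrapper
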
From 9b5ce34a4344d3050a09efd341d68e0e8ac705d0 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Mon, 16 Dec 2024 10:27:45 +0300
Subject: [PATCH 024/274] feat: Add retriable error
---
agents-api/agents_api/queries/utils.py | 14 ++------------
1 file changed, 2 insertions(+), 12 deletions(-)
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 19a4c8d45..05c479120 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -1,5 +1,6 @@
import concurrent.futures
import inspect
+import socket
import time
from functools import partialmethod, wraps
from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar
@@ -7,12 +8,7 @@
import pandas as pd
from asyncpg import Record
from fastapi import HTTPException
-from httpcore import NetworkError, TimeoutException
-from httpx import RequestError
from pydantic import BaseModel
-from requests.exceptions import ConnectionError, Timeout
-
-from ..common.utils.cozo import uuid_int_list_to_uuid
P = ParamSpec("P")
T = TypeVar("T")
@@ -93,13 +89,7 @@ async def wrapper(
debug and print(repr(e))
connection_error = isinstance(
e,
- (
- ConnectionError,
- Timeout,
- TimeoutException,
- NetworkError,
- RequestError,
- ),
+ (socket.gaierror),
)
if connection_error:
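
Patch 024 narrows the retriable failures down to DNS resolution errors (socket.gaierror). A hedged sketch of pairing that check with a tenacity retry policy like the one the wrapper used before:

    import socket

    from tenacity import (
        retry,
        retry_if_exception_type,
        stop_after_attempt,
        wait_exponential,
    )

    @retry(
        stop=stop_after_attempt(4),
        wait=wait_exponential(multiplier=1, min=4, max=10),
        retry=retry_if_exception_type(socket.gaierror),
        reraise=True,
    )
    async def connect_with_retry(connect):
        # `connect` is any zero-argument coroutine factory, e.g. a function
        # returning asyncpg.connect(dsn).
        return await connect()
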
From a0dad7b4cd8e62a1033852fd9952dc4468fc75c9 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Mon, 16 Dec 2024 10:27:59 +0300
Subject: [PATCH 025/274] chore: Remove unused stuff
---
.../queries/developer/get_developer.py | 25 +++++--------------
1 file changed, 6 insertions(+), 19 deletions(-)
diff --git a/agents-api/agents_api/queries/developer/get_developer.py b/agents-api/agents_api/queries/developer/get_developer.py
index 0a31a6de4..f0b9a89eb 100644
--- a/agents-api/agents_api/queries/developer/get_developer.py
+++ b/agents-api/agents_api/queries/developer/get_developer.py
@@ -11,11 +11,8 @@
from ...common.protocol.developers import Developer
from ..utils import (
- cozo_query,
- partialclass,
pg_query,
rewrap_exceptions,
- verify_developer_id_query,
wrap_in_class,
)
@@ -25,22 +22,12 @@
T = TypeVar("T")
-@rewrap_exceptions({QueryException: partialclass(HTTPException, status_code=401)})
-@cozo_query
-@beartype
-def verify_developer(
- *,
- developer_id: UUID,
-) -> tuple[str, dict]:
- return (verify_developer_id_query(developer_id), {})
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=403),
- ValidationError: partialclass(HTTPException, status_code=500),
- }
-)
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=403),
+# ValidationError: partialclass(HTTPException, status_code=500),
+# }
+# )
@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
@pg_query
@beartype
From 45d60e9150ea8a02bdf9abc641c854ea622f4d35 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Mon, 16 Dec 2024 18:22:27 +0300
Subject: [PATCH 026/274] fix(agents-api): wip
---
agents-api/agents_api/clients/pg.py | 8 ++++++++
.../agents_api/queries/agent/create_agent.py | 6 +++---
.../queries/agent/create_or_update_agent.py | 6 +++---
.../agents_api/queries/agent/delete_agent.py | 2 +-
.../agents_api/queries/agent/get_agent.py | 2 +-
.../agents_api/queries/agent/list_agents.py | 2 +-
.../agents_api/queries/agent/patch_agent.py | 2 +-
.../agents_api/queries/agent/update_agent.py | 2 +-
.../queries/developer/get_developer.py | 3 +++
agents-api/agents_api/queries/utils.py | 17 +++++++++--------
memory-store/migrations/000007_ann.up.sql | 14 --------------
11 files changed, 31 insertions(+), 33 deletions(-)
diff --git a/agents-api/agents_api/clients/pg.py b/agents-api/agents_api/clients/pg.py
index debc81184..639429076 100644
--- a/agents-api/agents_api/clients/pg.py
+++ b/agents-api/agents_api/clients/pg.py
@@ -1,12 +1,20 @@
import asyncpg
+import json
from ..env import db_dsn
from ..web import app
async def get_pg_client():
+ # TODO: Create a postgres connection pool
client = getattr(app.state, "pg_client", await asyncpg.connect(db_dsn))
if not hasattr(app.state, "pg_client"):
+ await client.set_type_codec(
+ "jsonb",
+ encoder=json.dumps,
+ decoder=json.loads,
+ schema="pg_catalog",
+ )
app.state.pg_client = client
return client
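
set_type_codec registers Python-side JSON (de)serialization for Postgres jsonb, so dicts can be bound and fetched directly. A runnable sketch of the same setup in isolation, assuming a reachable DSN:

    import asyncio
    import json

    import asyncpg

    async def main(dsn: str) -> None:
        conn = await asyncpg.connect(dsn)
        await conn.set_type_codec(
            "jsonb",
            encoder=json.dumps,   # dict -> jsonb going in
            decoder=json.loads,   # jsonb -> dict coming out
            schema="pg_catalog",
        )
        row = await conn.fetchrow("SELECT $1::jsonb AS meta", {"test": "metadata"})
        assert row["meta"] == {"test": "metadata"}
        await conn.close()

    # asyncio.run(main("postgresql://user:pass@localhost:5432/db"))
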
diff --git a/agents-api/agents_api/queries/agent/create_agent.py b/agents-api/agents_api/queries/agent/create_agent.py
index 52a0a22f8..46dc453f9 100644
--- a/agents-api/agents_api/queries/agent/create_agent.py
+++ b/agents-api/agents_api/queries/agent/create_agent.py
@@ -15,7 +15,7 @@
from ...autogen.openapi_model import Agent, CreateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
- generate_canonical_name,
+ # generate_canonical_name,
partialclass,
pg_query,
rewrap_exceptions,
@@ -62,7 +62,7 @@
_kind="inserted",
)
@pg_query
-@increase_counter("create_agent")
+# @increase_counter("create_agent")
@beartype
def create_agent(
*,
@@ -97,7 +97,7 @@ def create_agent(
# Set default values
data.metadata = data.metadata or None
- data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
+ # data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
query = """
INSERT INTO agents (
diff --git a/agents-api/agents_api/queries/agent/create_or_update_agent.py b/agents-api/agents_api/queries/agent/create_or_update_agent.py
index c93a965a5..261508237 100644
--- a/agents-api/agents_api/queries/agent/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agent/create_or_update_agent.py
@@ -13,7 +13,7 @@
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
- generate_canonical_name,
+ # generate_canonical_name,
partialclass,
pg_query,
rewrap_exceptions,
@@ -40,7 +40,7 @@
_kind="inserted",
)
@pg_query
-@increase_counter("create_or_update_agent")
+# @increase_counter("create_or_update_agent1")
@beartype
def create_or_update_agent_query(
*, agent_id: UUID, developer_id: UUID, data: CreateOrUpdateAgentRequest
@@ -71,7 +71,7 @@ def create_or_update_agent_query(
# Set default values
data.metadata = data.metadata or None
- data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
+ # data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
query = """
INSERT INTO agents (
diff --git a/agents-api/agents_api/queries/agent/delete_agent.py b/agents-api/agents_api/queries/agent/delete_agent.py
index 1d01daa20..cad3d774f 100644
--- a/agents-api/agents_api/queries/agent/delete_agent.py
+++ b/agents-api/agents_api/queries/agent/delete_agent.py
@@ -45,7 +45,7 @@
_kind="deleted",
)
@pg_query
-@increase_counter("delete_agent")
+# @increase_counter("delete_agent1")
@beartype
def delete_agent_query(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
"""
diff --git a/agents-api/agents_api/queries/agent/get_agent.py b/agents-api/agents_api/queries/agent/get_agent.py
index 982849f3a..9061db7cf 100644
--- a/agents-api/agents_api/queries/agent/get_agent.py
+++ b/agents-api/agents_api/queries/agent/get_agent.py
@@ -35,7 +35,7 @@
)
@wrap_in_class(Agent, one=True)
@pg_query
-@increase_counter("get_agent")
+# @increase_counter("get_agent1")
@beartype
def get_agent_query(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
"""
diff --git a/agents-api/agents_api/queries/agent/list_agents.py b/agents-api/agents_api/queries/agent/list_agents.py
index a4332372f..62aed6536 100644
--- a/agents-api/agents_api/queries/agent/list_agents.py
+++ b/agents-api/agents_api/queries/agent/list_agents.py
@@ -35,7 +35,7 @@
)
@wrap_in_class(Agent)
@pg_query
-@increase_counter("list_agents")
+# @increase_counter("list_agents1")
@beartype
def list_agents_query(
*,
diff --git a/agents-api/agents_api/queries/agent/patch_agent.py b/agents-api/agents_api/queries/agent/patch_agent.py
index 74be99df8..c418f5c26 100644
--- a/agents-api/agents_api/queries/agent/patch_agent.py
+++ b/agents-api/agents_api/queries/agent/patch_agent.py
@@ -40,7 +40,7 @@
_kind="inserted",
)
@pg_query
-@increase_counter("patch_agent")
+# @increase_counter("patch_agent1")
@beartype
def patch_agent_query(
*, agent_id: UUID, developer_id: UUID, data: PatchAgentRequest
diff --git a/agents-api/agents_api/queries/agent/update_agent.py b/agents-api/agents_api/queries/agent/update_agent.py
index e0ed4a46d..4e38adfac 100644
--- a/agents-api/agents_api/queries/agent/update_agent.py
+++ b/agents-api/agents_api/queries/agent/update_agent.py
@@ -40,7 +40,7 @@
_kind="inserted",
)
@pg_query
-@increase_counter("update_agent")
+# @increase_counter("update_agent1")
@beartype
def update_agent_query(
*, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest
diff --git a/agents-api/agents_api/queries/developer/get_developer.py b/agents-api/agents_api/queries/developer/get_developer.py
index f0b9a89eb..a6db40ada 100644
--- a/agents-api/agents_api/queries/developer/get_developer.py
+++ b/agents-api/agents_api/queries/developer/get_developer.py
@@ -16,6 +16,9 @@
wrap_in_class,
)
+# TODO: Add verify_developer
+# verify_developer = None
+
query = parse_one("SELECT * FROM developers WHERE developer_id = $1").sql(pretty=True)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 05c479120..aba5eca06 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -50,12 +50,13 @@ def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
stop_after_attempt,
wait_exponential,
)
-
- @retry(
- stop=stop_after_attempt(4),
- wait=wait_exponential(multiplier=1, min=4, max=10),
- # retry=retry_if_exception(is_resource_busy),
- )
+
+ # TODO: Remove all tenacity decorators
+ # @retry(
+ # stop=stop_after_attempt(4),
+ # wait=wait_exponential(multiplier=1, min=4, max=10),
+ # # retry=retry_if_exception(is_resource_busy),
+ # )
@wraps(func)
async def wrapper(
*args: P.args, client=None, **kwargs: P.kwargs
@@ -126,12 +127,12 @@ def wrap_in_class(
transform: Callable[[dict], dict] | None = None,
_kind: str | None = None,
):
- def _return_data(rec: Record):
+ def _return_data(rec: list[Record]):
# Convert df to list of dicts
# if _kind:
# rec = rec[rec["_kind"] == _kind]
- data = list(rec.items())
+ data = [dict(r.items()) for r in rec]
nonlocal transform
transform = transform or (lambda x: x)
diff --git a/memory-store/migrations/000007_ann.up.sql b/memory-store/migrations/000007_ann.up.sql
index 64d0b8f49..3cc606fde 100644
--- a/memory-store/migrations/000007_ann.up.sql
+++ b/memory-store/migrations/000007_ann.up.sql
@@ -1,17 +1,3 @@
--- First, drop any existing vectorizer functions and triggers
-DO $$
-BEGIN
- -- Drop existing vectorizer triggers
- DROP TRIGGER IF EXISTS _vectorizer_src_trg_1 ON docs;
-
- -- Drop existing vectorizer functions
- DROP FUNCTION IF EXISTS _vectorizer_src_trg_1();
- DROP FUNCTION IF EXISTS _vectorizer_src_trg_1_func();
-
- -- Drop existing vectorizer tables
- DROP TABLE IF EXISTS docs_embeddings;
-END $$;
-
-- Create vector similarity search index using diskann and timescale vectorizer
SELECT
ai.create_vectorizer (
From 4e42b3d6558a7be3284329a358c66cf1675bc942 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Mon, 16 Dec 2024 15:23:37 +0000
Subject: [PATCH 027/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/clients/pg.py | 3 ++-
agents-api/agents_api/queries/utils.py | 2 +-
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/agents-api/agents_api/clients/pg.py b/agents-api/agents_api/clients/pg.py
index 639429076..987eb1178 100644
--- a/agents-api/agents_api/clients/pg.py
+++ b/agents-api/agents_api/clients/pg.py
@@ -1,6 +1,7 @@
-import asyncpg
import json
+import asyncpg
+
from ..env import db_dsn
from ..web import app
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index aba5eca06..bd23453d2 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -50,7 +50,7 @@ def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
stop_after_attempt,
wait_exponential,
)
-
+
# TODO: Remove all tenacity decorators
# @retry(
# stop=stop_after_attempt(4),
From 6c37070954948802067309dc482c29ca99a7cd3d Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Mon, 16 Dec 2024 13:20:17 -0500
Subject: [PATCH 028/274] chore: update the user queries from named to
positional arguments
---
.../queries/users/create_or_update_user.py | 29 +++++----
.../agents_api/queries/users/create_user.py | 29 +++++----
.../agents_api/queries/users/delete_user.py | 11 ++--
.../agents_api/queries/users/get_user.py | 9 ++-
.../agents_api/queries/users/list_users.py | 61 +++++++++++--------
.../agents_api/queries/users/patch_user.py | 44 ++++++-------
.../agents_api/queries/users/update_user.py | 29 +++++----
7 files changed, 119 insertions(+), 93 deletions(-)
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
index b579e8de0..1a7eddd26 100644
--- a/agents-api/agents_api/queries/users/create_or_update_user.py
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -20,11 +20,11 @@
metadata
)
VALUES (
- %(developer_id)s,
- %(user_id)s,
- %(name)s,
- %(about)s,
- COALESCE(%(metadata)s, '{}'::jsonb)
+ $1,
+ $2,
+ $3,
+ $4,
+ COALESCE($5, '{}'::jsonb)
)
ON CONFLICT (developer_id, user_id) DO UPDATE SET
name = EXCLUDED.name,
@@ -83,12 +83,15 @@ def create_or_update_user(
Raises:
HTTPException: If developer doesn't exist (404) or on unique constraint violation (409)
"""
- params = {
- "developer_id": developer_id,
- "user_id": user_id,
- "name": data.name,
- "about": data.about,
- "metadata": data.metadata, # Let COALESCE handle None case in SQL
- }
+ params = [
+ developer_id,
+ user_id,
+ data.name,
+ data.about,
+ data.metadata, # Let COALESCE handle None case in SQL
+ ]
- return query, params
+ return (
+ query,
+ params,
+ )
diff --git a/agents-api/agents_api/queries/users/create_user.py b/agents-api/agents_api/queries/users/create_user.py
index 691c43500..5b396ab5f 100644
--- a/agents-api/agents_api/queries/users/create_user.py
+++ b/agents-api/agents_api/queries/users/create_user.py
@@ -20,11 +20,11 @@
metadata
)
VALUES (
- %(developer_id)s,
- %(user_id)s,
- %(name)s,
- %(about)s,
- %(metadata)s
+ $1,
+ $2,
+ $3,
+ $4,
+ $5
)
RETURNING *;
"""
@@ -81,12 +81,15 @@ def create_user(
"""
user_id = user_id or uuid7()
- params = {
- "developer_id": developer_id,
- "user_id": user_id,
- "name": data.name,
- "about": data.about,
- "metadata": data.metadata or {},
- }
+ params = [
+ developer_id,
+ user_id,
+ data.name,
+ data.about,
+ data.metadata or {},
+ ]
- return query, params
+ return (
+ query,
+ params,
+ )
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
index a21a4b9d9..8ca2202f0 100644
--- a/agents-api/agents_api/queries/users/delete_user.py
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -14,14 +14,14 @@
raw_query = """
WITH deleted_data AS (
DELETE FROM user_files
- WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s
+ WHERE developer_id = $1 AND user_id = $2
),
deleted_docs AS (
DELETE FROM user_docs
- WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s
+ WHERE developer_id = $1 AND user_id = $2
)
DELETE FROM users
-WHERE developer_id = %(developer_id)s AND user_id = %(user_id)s
+WHERE developer_id = $1 AND user_id = $2
RETURNING user_id as id, developer_id;
"""
@@ -62,4 +62,7 @@ def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
tuple[str, dict]: SQL query and parameters
"""
- return query, {"developer_id": developer_id, "user_id": user_id}
+ return (
+ query,
+ [developer_id, user_id],
+ )
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index ca5627701..d6a895013 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -21,8 +21,8 @@
created_at,
updated_at
FROM users
-WHERE developer_id = %(developer_id)s
-AND user_id = %(user_id)s;
+WHERE developer_id = $1
+AND user_id = $2;
"""
# Parse and optimize the query
@@ -68,4 +68,7 @@ def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
tuple[str, dict]: SQL query and parameters.
"""
- return query, {"developer_id": developer_id, "user_id": user_id}
+ return (
+ query,
+ [developer_id, user_id],
+ )
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
index e6f854410..34488ad9a 100644
--- a/agents-api/agents_api/queries/users/list_users.py
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -12,25 +12,33 @@
# Define the raw SQL query outside the function
raw_query = """
-SELECT
- user_id as id,
- developer_id,
- name,
- about,
- metadata,
- created_at,
- updated_at
-FROM users
-WHERE developer_id = %(developer_id)s
- {metadata_clause}
- AND deleted_at IS NULL
-ORDER BY {sort_by} {direction} NULLS LAST
-LIMIT %(limit)s
-OFFSET %(offset)s;
+WITH filtered_users AS (
+ SELECT
+ user_id as id,
+ developer_id,
+ name,
+ about,
+ metadata,
+ created_at,
+ updated_at
+ FROM users
+ WHERE developer_id = $1
+ AND deleted_at IS NULL
+ AND ($4::jsonb IS NULL OR metadata @> $4)
+)
+SELECT *
+FROM filtered_users
+ORDER BY
+ CASE WHEN $5 = 'created_at' AND $6 = 'asc' THEN created_at END ASC NULLS LAST,
+ CASE WHEN $5 = 'created_at' AND $6 = 'desc' THEN created_at END DESC NULLS LAST,
+ CASE WHEN $5 = 'updated_at' AND $6 = 'asc' THEN updated_at END ASC NULLS LAST,
+ CASE WHEN $5 = 'updated_at' AND $6 = 'desc' THEN updated_at END DESC NULLS LAST
+LIMIT $2
+OFFSET $3;
"""
# Parse and optimize the query
-query_template = optimize(
+query = optimize(
parse_one(raw_query),
schema={
"users": {
@@ -88,15 +96,16 @@ def list_users(
if offset < 0:
raise HTTPException(status_code=400, detail="Offset must be non-negative")
- metadata_clause = ""
- params = {"developer_id": developer_id, "limit": limit, "offset": offset}
-
- if metadata_filter:
- metadata_clause = "AND metadata @> %(metadata_filter)s"
- params["metadata_filter"] = metadata_filter
+ params = [
+ developer_id,
+ limit,
+ offset,
+ metadata_filter, # Will be NULL if not provided
+ sort_by,
+ direction,
+ ]
- query = query_template.format(
- metadata_clause=metadata_clause, sort_by=sort_by, direction=direction
+ return (
+ query,
+ params,
)
-
- return query, params
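
The CASE WHEN ordering above trades the earlier format()-based identifier interpolation for bound parameters: $5 and $6 select the sort branch at execution time, so no SQL text is assembled per request. A sketch of calling it through asyncpg (assuming raw_query is the CTE query defined above):

    async def list_users_sketch(conn, developer_id, *, limit=100, offset=0,
                                metadata_filter=None, sort_by="created_at",
                                direction="desc"):
        # $1..$6 bind positionally; a NULL $4 disables the metadata filter.
        return await conn.fetch(
            raw_query,
            developer_id, limit, offset, metadata_filter, sort_by, direction,
        )
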
diff --git a/agents-api/agents_api/queries/users/patch_user.py b/agents-api/agents_api/queries/users/patch_user.py
index d491b8e84..1a1e91f60 100644
--- a/agents-api/agents_api/queries/users/patch_user.py
+++ b/agents-api/agents_api/queries/users/patch_user.py
@@ -13,9 +13,21 @@
# Define the raw SQL query outside the function
raw_query = """
UPDATE users
-SET {update_parts}
-WHERE developer_id = %(developer_id)s
-AND user_id = %(user_id)s
+SET
+ name = CASE
+ WHEN $3::text IS NOT NULL THEN $3
+ ELSE name
+ END,
+ about = CASE
+ WHEN $4::text IS NOT NULL THEN $4
+ ELSE about
+ END,
+ metadata = CASE
+ WHEN $5::jsonb IS NOT NULL THEN metadata || $5
+ ELSE metadata
+ END
+WHERE developer_id = $1
+AND user_id = $2
RETURNING
user_id as id,
developer_id,
@@ -27,7 +39,7 @@
"""
# Parse and optimize the query
-query_template = optimize(
+query = optimize(
parse_one(raw_query),
schema={
"users": {
@@ -71,22 +83,12 @@ def patch_user(
Returns:
tuple[str, dict]: SQL query and parameters
"""
- update_parts = []
- params = {
- "developer_id": developer_id,
- "user_id": user_id,
- }
-
- if data.name is not None:
- update_parts.append("name = %(name)s")
- params["name"] = data.name
- if data.about is not None:
- update_parts.append("about = %(about)s")
- params["about"] = data.about
- if data.metadata is not None:
- update_parts.append("metadata = metadata || %(metadata)s")
- params["metadata"] = data.metadata
-
- query = query_template.format(update_parts=", ".join(update_parts))
+ params = [
+ developer_id,
+ user_id,
+ data.name, # Will be NULL if not provided
+ data.about, # Will be NULL if not provided
+ data.metadata, # Will be NULL if not provided
+ ]
return query, params
diff --git a/agents-api/agents_api/queries/users/update_user.py b/agents-api/agents_api/queries/users/update_user.py
index 9e622e40d..082784775 100644
--- a/agents-api/agents_api/queries/users/update_user.py
+++ b/agents-api/agents_api/queries/users/update_user.py
@@ -14,11 +14,11 @@
raw_query = """
UPDATE users
SET
- name = %(name)s,
- about = %(about)s,
- metadata = %(metadata)s
-WHERE developer_id = %(developer_id)s
-AND user_id = %(user_id)s
+ name = $3,
+ about = $4,
+ metadata = $5
+WHERE developer_id = $1
+AND user_id = $2
RETURNING
user_id as id,
developer_id,
@@ -74,12 +74,15 @@ def update_user(
Returns:
tuple[str, dict]: SQL query and parameters
"""
- params = {
- "developer_id": developer_id,
- "user_id": user_id,
- "name": data.name,
- "about": data.about,
- "metadata": data.metadata or {},
- }
+ params = [
+ developer_id,
+ user_id,
+ data.name,
+ data.about,
+ data.metadata or {},
+ ]
- return query, params
+ return (
+ query,
+ params,
+ )
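
After this patch every user query takes a positional parameter list. For patch_user, optional fields ride along as NULLs and the CASE WHEN ... IS NOT NULL guards in the SQL decide whether to overwrite. A sketch of that flow (assuming `query` is the patch_user statement above):

    async def patch_user_sketch(conn, developer_id, user_id, *, name=None,
                                about=None, metadata=None):
        params = [developer_id, user_id, name, about, metadata]
        # asyncpg binds $1..$5 positionally; Python None becomes SQL NULL,
        # which the CASE WHEN guards read as "leave this column unchanged".
        return await conn.fetchrow(query, *params)
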
From 22c6be5e0c98acf85226469b2e56fa032790ac65 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Tue, 17 Dec 2024 00:36:10 +0530
Subject: [PATCH 029/274] wip: Make poe test work
Signed-off-by: Diwank Singh Tomer
---
agents-api/.gitignore | 2 -
agents-api/agents_api/clients/cozo.py | 29 -
agents-api/agents_api/clients/pg.py | 4 +-
.../agents_api/dependencies/developer_id.py | 2 +-
.../queries/{agent => agents}/__init__.py | 0
.../queries/{agent => agents}/create_agent.py | 0
.../create_or_update_agent.py | 0
.../queries/{agent => agents}/delete_agent.py | 0
.../queries/{agent => agents}/get_agent.py | 0
.../queries/{agent => agents}/list_agents.py | 0
.../queries/{agent => agents}/patch_agent.py | 0
.../queries/{agent => agents}/update_agent.py | 0
.../{developer => developers}/__init__.py | 0
.../get_developer.py | 3 +-
.../agents_api/queries/users/create_user.py | 3 +-
.../agents_api/queries/users/get_user.py | 15 +-
.../agents_api/queries/users/list_users.py | 20 +-
agents-api/agents_api/queries/utils.py | 6 +-
agents-api/agents_api/web.py | 9 +-
agents-api/pyproject.toml | 6 +-
agents-api/tests/fixtures.py | 679 ++--
.../tests/sample_tasks/test_find_selector.py | 250 +-
agents-api/tests/test_activities.py | 112 +-
agents-api/tests/test_agent_queries.py | 326 +-
agents-api/tests/test_agent_routes.py | 344 +-
agents-api/tests/test_chat_routes.py | 354 +-
agents-api/tests/test_developer_queries.py | 55 +-
agents-api/tests/test_docs_queries.py | 326 +-
agents-api/tests/test_docs_routes.py | 506 +--
agents-api/tests/test_entry_queries.py | 402 +--
agents-api/tests/test_execution_queries.py | 308 +-
agents-api/tests/test_execution_workflow.py | 2874 ++++++++---------
agents-api/tests/test_files_queries.py | 114 +-
agents-api/tests/test_files_routes.py | 132 +-
agents-api/tests/test_session_queries.py | 320 +-
agents-api/tests/test_sessions.py | 54 +-
agents-api/tests/test_task_queries.py | 320 +-
agents-api/tests/test_task_routes.py | 336 +-
agents-api/tests/test_tool_queries.py | 340 +-
agents-api/tests/test_user_queries.py | 295 +-
agents-api/tests/test_user_routes.py | 270 +-
agents-api/tests/test_user_sql.py | 178 -
agents-api/tests/test_workflow_routes.py | 270 +-
agents-api/tests/utils.py | 26 +
agents-api/uv.lock | 98 +-
memory-store/docker-compose.yml | 40 +-
46 files changed, 4565 insertions(+), 4863 deletions(-)
delete mode 100644 agents-api/agents_api/clients/cozo.py
rename agents-api/agents_api/queries/{agent => agents}/__init__.py (100%)
rename agents-api/agents_api/queries/{agent => agents}/create_agent.py (100%)
rename agents-api/agents_api/queries/{agent => agents}/create_or_update_agent.py (100%)
rename agents-api/agents_api/queries/{agent => agents}/delete_agent.py (100%)
rename agents-api/agents_api/queries/{agent => agents}/get_agent.py (100%)
rename agents-api/agents_api/queries/{agent => agents}/list_agents.py (100%)
rename agents-api/agents_api/queries/{agent => agents}/patch_agent.py (100%)
rename agents-api/agents_api/queries/{agent => agents}/update_agent.py (100%)
rename agents-api/agents_api/queries/{developer => developers}/__init__.py (100%)
rename agents-api/agents_api/queries/{developer => developers}/get_developer.py (94%)
delete mode 100644 agents-api/tests/test_user_sql.py
diff --git a/agents-api/.gitignore b/agents-api/.gitignore
index 651078450..c2e19f143 100644
--- a/agents-api/.gitignore
+++ b/agents-api/.gitignore
@@ -1,6 +1,4 @@
# Local database files
-cozo*
-.cozo*
temporal.db
*.bak
*.dat
diff --git a/agents-api/agents_api/clients/cozo.py b/agents-api/agents_api/clients/cozo.py
deleted file mode 100644
index 285bae8b2..000000000
--- a/agents-api/agents_api/clients/cozo.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from typing import Dict
-
-from pycozo.client import Client
-from pycozo_async import Client as AsyncClient
-
-from ..env import cozo_auth, cozo_host
-from ..web import app
-
-options: Dict[str, str] = {"host": cozo_host}
-if cozo_auth:
- options.update({"auth": cozo_auth})
-
-
-def get_cozo_client() -> Client:
- client = getattr(app.state, "cozo_client", Client("http", options=options))
- if not hasattr(app.state, "cozo_client"):
- app.state.cozo_client = client
-
- return client
-
-
-def get_async_cozo_client() -> AsyncClient:
- client = getattr(
- app.state, "async_cozo_client", AsyncClient("http", options=options)
- )
- if not hasattr(app.state, "async_cozo_client"):
- app.state.async_cozo_client = client
-
- return client
diff --git a/agents-api/agents_api/clients/pg.py b/agents-api/agents_api/clients/pg.py
index 987eb1178..ddef570f9 100644
--- a/agents-api/agents_api/clients/pg.py
+++ b/agents-api/agents_api/clients/pg.py
@@ -6,9 +6,9 @@
from ..web import app
-async def get_pg_client():
+async def get_pg_client(dsn: str = db_dsn):
# TODO: Create a postgres connection pool
- client = getattr(app.state, "pg_client", await asyncpg.connect(db_dsn))
+ client = getattr(app.state, "pg_client", await asyncpg.connect(dsn))
if not hasattr(app.state, "pg_client"):
await client.set_type_codec(
"jsonb",
diff --git a/agents-api/agents_api/dependencies/developer_id.py b/agents-api/agents_api/dependencies/developer_id.py
index 0ffc4896c..ffd048dd9 100644
--- a/agents-api/agents_api/dependencies/developer_id.py
+++ b/agents-api/agents_api/dependencies/developer_id.py
@@ -5,7 +5,7 @@
from ..common.protocol.developers import Developer
from ..env import multi_tenant_mode
-from ..queries.developer.get_developer import get_developer, verify_developer
+from ..queries.developers.get_developer import get_developer, verify_developer
from .exceptions import InvalidHeaderFormat
diff --git a/agents-api/agents_api/queries/agent/__init__.py b/agents-api/agents_api/queries/agents/__init__.py
similarity index 100%
rename from agents-api/agents_api/queries/agent/__init__.py
rename to agents-api/agents_api/queries/agents/__init__.py
diff --git a/agents-api/agents_api/queries/agent/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
similarity index 100%
rename from agents-api/agents_api/queries/agent/create_agent.py
rename to agents-api/agents_api/queries/agents/create_agent.py
diff --git a/agents-api/agents_api/queries/agent/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
similarity index 100%
rename from agents-api/agents_api/queries/agent/create_or_update_agent.py
rename to agents-api/agents_api/queries/agents/create_or_update_agent.py
diff --git a/agents-api/agents_api/queries/agent/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
similarity index 100%
rename from agents-api/agents_api/queries/agent/delete_agent.py
rename to agents-api/agents_api/queries/agents/delete_agent.py
diff --git a/agents-api/agents_api/queries/agent/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
similarity index 100%
rename from agents-api/agents_api/queries/agent/get_agent.py
rename to agents-api/agents_api/queries/agents/get_agent.py
diff --git a/agents-api/agents_api/queries/agent/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
similarity index 100%
rename from agents-api/agents_api/queries/agent/list_agents.py
rename to agents-api/agents_api/queries/agents/list_agents.py
diff --git a/agents-api/agents_api/queries/agent/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
similarity index 100%
rename from agents-api/agents_api/queries/agent/patch_agent.py
rename to agents-api/agents_api/queries/agents/patch_agent.py
diff --git a/agents-api/agents_api/queries/agent/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
similarity index 100%
rename from agents-api/agents_api/queries/agent/update_agent.py
rename to agents-api/agents_api/queries/agents/update_agent.py
diff --git a/agents-api/agents_api/queries/developer/__init__.py b/agents-api/agents_api/queries/developers/__init__.py
similarity index 100%
rename from agents-api/agents_api/queries/developer/__init__.py
rename to agents-api/agents_api/queries/developers/__init__.py
diff --git a/agents-api/agents_api/queries/developer/get_developer.py b/agents-api/agents_api/queries/developers/get_developer.py
similarity index 94%
rename from agents-api/agents_api/queries/developer/get_developer.py
rename to agents-api/agents_api/queries/developers/get_developer.py
index a6db40ada..38302ab3b 100644
--- a/agents-api/agents_api/queries/developer/get_developer.py
+++ b/agents-api/agents_api/queries/developers/get_developer.py
@@ -5,7 +5,6 @@
from beartype import beartype
from fastapi import HTTPException
-from pycozo.client import QueryException
from pydantic import ValidationError
from sqlglot import parse_one
@@ -17,7 +16,7 @@
)
# TODO: Add verify_developer
-# verify_developer = None
+verify_developer = None
query = parse_one("SELECT * FROM developers WHERE developer_id = $1").sql(pretty=True)
diff --git a/agents-api/agents_api/queries/users/create_user.py b/agents-api/agents_api/queries/users/create_user.py
index 5b396ab5f..edd9720f6 100644
--- a/agents-api/agents_api/queries/users/create_user.py
+++ b/agents-api/agents_api/queries/users/create_user.py
@@ -3,7 +3,8 @@
import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from sqlglot import optimize, parse_one
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateUserRequest, User
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index d6a895013..946b92f6c 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -26,20 +26,7 @@
"""
# Parse and optimize the query
-query = optimize(
- parse_one(raw_query),
- schema={
- "users": {
- "developer_id": "UUID",
- "user_id": "UUID",
- "name": "STRING",
- "about": "STRING",
- "metadata": "JSONB",
- "created_at": "TIMESTAMP",
- "updated_at": "TIMESTAMP",
- }
- },
-).sql(pretty=True)
+query = parse_one(raw_query).sql(pretty=True)
@rewrap_exceptions(
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
index 34488ad9a..d4930b3f8 100644
--- a/agents-api/agents_api/queries/users/list_users.py
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -4,7 +4,8 @@
import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from sqlglot import optimize, parse_one
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import User
from ...metrics.counters import increase_counter
@@ -38,20 +39,7 @@
"""
# Parse and optimize the query
-query = optimize(
- parse_one(raw_query),
- schema={
- "users": {
- "developer_id": "UUID",
- "user_id": "UUID",
- "name": "STRING",
- "about": "STRING",
- "metadata": "JSONB",
- "created_at": "TIMESTAMP",
- "updated_at": "TIMESTAMP",
- }
- },
-).sql(pretty=True)
+# query = parse_one(raw_query).sql(pretty=True)
@rewrap_exceptions(
@@ -106,6 +94,6 @@ def list_users(
]
return (
- query,
+ raw_query,
params,
)
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index bd23453d2..a68ab2fe8 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -128,17 +128,13 @@ def wrap_in_class(
_kind: str | None = None,
):
def _return_data(rec: list[Record]):
- # Convert df to list of dicts
- # if _kind:
- # rec = rec[rec["_kind"] == _kind]
-
data = [dict(r.items()) for r in rec]
nonlocal transform
transform = transform or (lambda x: x)
if one:
- assert len(data) >= 1, "Expected one result, got none"
+ assert len(data) == 1, "Expected exactly one result"
obj: ModelT = cls(**transform(data[0]))
return obj
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index 8e2e7da54..737a63426 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -15,7 +15,6 @@
from fastapi.responses import JSONResponse
from litellm.exceptions import APIError
from prometheus_fastapi_instrumentator import Instrumentator
-from pycozo.client import QueryException
from pydantic import ValidationError
from scalar_fastapi import get_scalar_api_reference
from temporalio.service import RPCError
@@ -134,10 +133,10 @@ def register_exceptions(app: FastAPI) -> None:
RequestValidationError,
make_exception_handler(status.HTTP_422_UNPROCESSABLE_ENTITY),
)
- app.add_exception_handler(
- QueryException,
- make_exception_handler(status.HTTP_500_INTERNAL_SERVER_ERROR),
- )
+ # app.add_exception_handler(
+ # QueryException,
+ # make_exception_handler(status.HTTP_500_INTERNAL_SERVER_ERROR),
+ # )
# TODO: Auth logic should be moved into global middleware _per router_
diff --git a/agents-api/pyproject.toml b/agents-api/pyproject.toml
index af3c053e6..f02876443 100644
--- a/agents-api/pyproject.toml
+++ b/agents-api/pyproject.toml
@@ -31,8 +31,6 @@ dependencies = [
"pandas~=2.2.2",
"prometheus-client~=0.21.0",
"prometheus-fastapi-instrumentator~=7.0.0",
- "pycozo-async~=0.7.7",
- "pycozo[embedded]~=0.7.6",
"pydantic-partial~=0.5.5",
"pydantic[email]~=2.10.2",
"python-box~=7.2.0",
@@ -57,7 +55,6 @@ dependencies = [
[dependency-groups]
dev = [
- "cozo-migrate>=0.2.4",
"datamodel-code-generator>=0.26.3",
"ipython>=8.30.0",
"ipywidgets>=8.1.5",
@@ -69,12 +66,13 @@ dev = [
"pyright>=1.1.389",
"pytype>=2024.10.11",
"ruff>=0.8.1",
+ "testcontainers[postgres]>=4.9.0",
"ward>=0.68.0b0",
]
[tool.setuptools]
py-modules = [
- "agents_api"
+ "agents_api",
]
[tool.uv.sources]
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 231a40b75..fdf04822c 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,10 +1,7 @@
import time
from uuid import UUID
-from cozo_migrate.api import apply, init
from fastapi.testclient import TestClient
-from pycozo import Client as CozoClient
-from pycozo_async import Client as AsyncCozoClient
from temporalio.client import WorkflowHandle
from uuid_extensions import uuid7
from ward import fixture
@@ -21,128 +18,75 @@
CreateUserRequest,
)
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
-from agents_api.models.agent.create_agent import create_agent
-from agents_api.models.agent.delete_agent import delete_agent
-from agents_api.models.developer.get_developer import get_developer
-from agents_api.models.docs.create_doc import create_doc
-from agents_api.models.docs.delete_doc import delete_doc
-from agents_api.models.execution.create_execution import create_execution
-from agents_api.models.execution.create_execution_transition import (
- create_execution_transition,
-)
-from agents_api.models.execution.create_temporal_lookup import create_temporal_lookup
-from agents_api.models.files.create_file import create_file
-from agents_api.models.files.delete_file import delete_file
-from agents_api.models.session.create_session import create_session
-from agents_api.models.session.delete_session import delete_session
-from agents_api.models.task.create_task import create_task
-from agents_api.models.task.delete_task import delete_task
-from agents_api.models.tools.create_tools import create_tools
-from agents_api.models.tools.delete_tool import delete_tool
-from agents_api.models.user.create_user import create_user
-from agents_api.models.user.delete_user import delete_user
-from agents_api.web import app
-from tests.utils import (
+
+# from agents_api.queries.agents.create_agent import create_agent
+# from agents_api.queries.agents.delete_agent import delete_agent
+from agents_api.queries.developers.get_developer import get_developer
+
+# from agents_api.queries.docs.create_doc import create_doc
+# from agents_api.queries.docs.delete_doc import delete_doc
+# from agents_api.queries.execution.create_execution import create_execution
+# from agents_api.queries.execution.create_execution_transition import (
+# create_execution_transition,
+# )
+# from agents_api.queries.execution.create_temporal_lookup import create_temporal_lookup
+# from agents_api.queries.files.create_file import create_file
+# from agents_api.queries.files.delete_file import delete_file
+# from agents_api.queries.session.create_session import create_session
+# from agents_api.queries.session.delete_session import delete_session
+# from agents_api.queries.task.create_task import create_task
+# from agents_api.queries.task.delete_task import delete_task
+# from agents_api.queries.tools.create_tools import create_tools
+# from agents_api.queries.tools.delete_tool import delete_tool
+from agents_api.queries.users.create_user import create_user
+from agents_api.queries.users.delete_user import delete_user
+# from agents_api.web import app
+from .utils import (
patch_embed_acompletion as patch_embed_acompletion_ctx,
+ patch_pg_client,
)
-from tests.utils import (
+from .utils import (
patch_s3_client,
)
EMBEDDING_SIZE: int = 1024
-
-@fixture(scope="global")
-def cozo_client(migrations_dir: str = "./migrations"):
- # Create a new client for each test
- # and initialize the schema.
- client = CozoClient()
-
- setattr(app.state, "cozo_client", client)
-
- init(client)
- apply(client, migrations_dir=migrations_dir, all_=True)
-
- return client
-
-
@fixture(scope="global")
-def cozo_clients_with_migrations(sync_client=cozo_client):
- async_client = AsyncCozoClient()
- async_client.embedded = sync_client.embedded
- setattr(app.state, "async_cozo_client", async_client)
-
- return sync_client, async_client
-
-
-@fixture(scope="global")
-def async_cozo_client(migrations_dir: str = "./migrations"):
- # Create a new client for each test
- # and initialize the schema.
- client = AsyncCozoClient()
- migrations_client = CozoClient()
- setattr(migrations_client, "embedded", client.embedded)
-
- setattr(app.state, "async_cozo_client", client)
-
- init(migrations_client)
- apply(migrations_client, migrations_dir=migrations_dir, all_=True)
-
- return client
-
+async def pg_client():
+ async with patch_pg_client() as pg_client:
+ yield pg_client
@fixture(scope="global")
-def test_developer_id(cozo_client=cozo_client):
+def test_developer_id():
if not multi_tenant_mode:
yield UUID(int=0)
return
developer_id = uuid7()
- cozo_client.run(
- f"""
- ?[developer_id, email, settings] <- [["{str(developer_id)}", "developers@julep.ai", {{}}]]
- :insert developers {{ developer_id, email, settings }}
- """
- )
-
yield developer_id
- cozo_client.run(
- f"""
- ?[developer_id, email] <- [["{str(developer_id)}", "developers@julep.ai"]]
- :delete developers {{ developer_id, email }}
- """
- )
+# @fixture(scope="global")
+# def test_file(client=pg_client, developer_id=test_developer_id):
+# file = create_file(
+# developer_id=developer_id,
+# data=CreateFileRequest(
+# name="Hello",
+# description="World",
+# mime_type="text/plain",
+# content="eyJzYW1wbGUiOiAidGVzdCJ9",
+# ),
+# client=client,
+# )
-
-@fixture(scope="global")
-def test_file(client=cozo_client, developer_id=test_developer_id):
- file = create_file(
- developer_id=developer_id,
- data=CreateFileRequest(
- name="Hello",
- description="World",
- mime_type="text/plain",
- content="eyJzYW1wbGUiOiAidGVzdCJ9",
- ),
- client=client,
- )
-
- yield file
-
- delete_file(
- developer_id=developer_id,
- file_id=file.id,
- client=client,
- )
+# yield file
@fixture(scope="global")
-def test_developer(cozo_client=cozo_client, developer_id=test_developer_id):
- return get_developer(
+async def test_developer(pg_client=pg_client, developer_id=test_developer_id):
+ return await get_developer(
developer_id=developer_id,
- client=cozo_client,
+ client=pg_client,
)
@@ -154,323 +98,250 @@ def patch_embed_acompletion():
yield embed, acompletion
-@fixture(scope="global")
-def test_agent(cozo_client=cozo_client, developer_id=test_developer_id):
- agent = create_agent(
- developer_id=developer_id,
- data=CreateAgentRequest(
- model="gpt-4o-mini",
- name="test agent",
- about="test agent about",
- metadata={"test": "test"},
- ),
- client=cozo_client,
- )
+# @fixture(scope="global")
+# def test_agent(pg_client=pg_client, developer_id=test_developer_id):
+# agent = create_agent(
+# developer_id=developer_id,
+# data=CreateAgentRequest(
+# model="gpt-4o-mini",
+# name="test agent",
+# about="test agent about",
+# metadata={"test": "test"},
+# ),
+# client=pg_client,
+# )
- yield agent
-
- delete_agent(
- developer_id=developer_id,
- agent_id=agent.id,
- client=cozo_client,
- )
+# yield agent
@fixture(scope="global")
-def test_user(cozo_client=cozo_client, developer_id=test_developer_id):
+def test_user(pg_client=pg_client, developer_id=test_developer_id):
user = create_user(
developer_id=developer_id,
data=CreateUserRequest(
name="test user",
about="test user about",
),
- client=cozo_client,
+ client=pg_client,
)
yield user
- delete_user(
- developer_id=developer_id,
- user_id=user.id,
- client=cozo_client,
- )
-
-
-@fixture(scope="global")
-def test_session(
- cozo_client=cozo_client,
- developer_id=test_developer_id,
- test_user=test_user,
- test_agent=test_agent,
-):
- session = create_session(
- developer_id=developer_id,
- data=CreateSessionRequest(
- agent=test_agent.id, user=test_user.id, metadata={"test": "test"}
- ),
- client=cozo_client,
- )
-
- yield session
-
- delete_session(
- developer_id=developer_id,
- session_id=session.id,
- client=cozo_client,
- )
-
-
-@fixture(scope="global")
-def test_doc(
- client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- doc = create_doc(
- developer_id=developer_id,
- owner_type="agent",
- owner_id=agent.id,
- data=CreateDocRequest(title="Hello", content=["World"]),
- client=client,
- )
-
- time.sleep(0.5)
-
- yield doc
-
- delete_doc(
- developer_id=developer_id,
- doc_id=doc.id,
- owner_type="agent",
- owner_id=agent.id,
- client=client,
- )
-
-
-@fixture(scope="global")
-def test_user_doc(
- client=cozo_client,
- developer_id=test_developer_id,
- user=test_user,
-):
- doc = create_doc(
- developer_id=developer_id,
- owner_type="user",
- owner_id=user.id,
- data=CreateDocRequest(title="Hello", content=["World"]),
- client=client,
- )
-
- time.sleep(0.5)
-
- yield doc
-
- delete_doc(
- developer_id=developer_id,
- doc_id=doc.id,
- owner_type="user",
- owner_id=user.id,
- client=client,
- )
-
-
-@fixture(scope="global")
-def test_task(
- client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [{"evaluate": {"hello": '"world"'}}],
- }
- ),
- client=client,
- )
-
- yield task
-
- delete_task(
- developer_id=developer_id,
- task_id=task.id,
- client=client,
- )
-
-
-@fixture(scope="global")
-def test_execution(
- client=cozo_client,
- developer_id=test_developer_id,
- task=test_task,
-):
- workflow_handle = WorkflowHandle(
- client=None,
- id="blah",
- )
-
- execution = create_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=CreateExecutionRequest(input={"test": "test"}),
- client=client,
- )
- create_temporal_lookup(
- developer_id=developer_id,
- execution_id=execution.id,
- workflow_handle=workflow_handle,
- client=client,
- )
-
- yield execution
-
- client.run(
- f"""
- ?[execution_id] <- ["{str(execution.id)}"]
- :delete executions {{ execution_id }}
- """
- )
-
-
-@fixture(scope="test")
-def test_execution_started(
- client=cozo_client,
- developer_id=test_developer_id,
- task=test_task,
-):
- workflow_handle = WorkflowHandle(
- client=None,
- id="blah",
- )
-
- execution = create_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=CreateExecutionRequest(input={"test": "test"}),
- client=client,
- )
- create_temporal_lookup(
- developer_id=developer_id,
- execution_id=execution.id,
- workflow_handle=workflow_handle,
- client=client,
- )
-
- # Start the execution
- create_execution_transition(
- developer_id=developer_id,
- task_id=task.id,
- execution_id=execution.id,
- data=CreateTransitionRequest(
- type="init",
- output={},
- current={"workflow": "main", "step": 0},
- next={"workflow": "main", "step": 0},
- ),
- update_execution_status=True,
- client=client,
- )
-
- yield execution
-
- client.run(
- f"""
- ?[execution_id, task_id] <- [[to_uuid("{str(execution.id)}"), to_uuid("{str(task.id)}")]]
- :delete executions {{ execution_id, task_id }}
- """
- )
-
-
-@fixture(scope="global")
-def test_transition(
- client=cozo_client,
- developer_id=test_developer_id,
- execution=test_execution,
-):
- transition = create_execution_transition(
- developer_id=developer_id,
- execution_id=execution.id,
- data=CreateTransitionRequest(
- type="step",
- output={},
- current={"workflow": "main", "step": 0},
- next={"workflow": "wf1", "step": 1},
- ),
- client=client,
- )
-
- yield transition
-
- client.run(
- f"""
- ?[transition_id] <- ["{str(transition.id)}"]
- :delete transitions {{ transition_id }}
- """
- )
-
-
-@fixture(scope="global")
-def test_tool(
- client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- function = {
- "description": "A function that prints hello world",
- "parameters": {"type": "object", "properties": {}},
- }
-
- tool = {
- "function": function,
- "name": "hello_world1",
- "type": "function",
- }
-
- [tool, *_] = create_tools(
- developer_id=developer_id,
- agent_id=agent.id,
- data=[CreateToolRequest(**tool)],
- client=client,
- )
-
- yield tool
-
- delete_tool(
- developer_id=developer_id,
- agent_id=agent.id,
- tool_id=tool.id,
- client=client,
- )
-
-
-@fixture(scope="global")
-def client(cozo_client=cozo_client):
- client = TestClient(app=app)
- app.state.cozo_client = cozo_client
-
- return client
-
-
-@fixture(scope="global")
-def make_request(client=client, developer_id=test_developer_id):
- def _make_request(method, url, **kwargs):
- headers = kwargs.pop("headers", {})
- headers = {
- **headers,
- api_key_header_name: api_key,
- }
-
- if multi_tenant_mode:
- headers["X-Developer-Id"] = str(developer_id)
-
- return client.request(method, url, headers=headers, **kwargs)
- return _make_request
+# @fixture(scope="global")
+# def test_session(
+# pg_client=pg_client,
+# developer_id=test_developer_id,
+# test_user=test_user,
+# test_agent=test_agent,
+# ):
+# session = create_session(
+# developer_id=developer_id,
+# data=CreateSessionRequest(
+# agent=test_agent.id, user=test_user.id, metadata={"test": "test"}
+# ),
+# client=pg_client,
+# )
+
+# yield session
+
+
+# @fixture(scope="global")
+# def test_doc(
+# client=pg_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# doc = create_doc(
+# developer_id=developer_id,
+# owner_type="agent",
+# owner_id=agent.id,
+# data=CreateDocRequest(title="Hello", content=["World"]),
+# client=client,
+# )
+
+# yield doc
+
+
+# @fixture(scope="global")
+# def test_user_doc(
+# client=pg_client,
+# developer_id=test_developer_id,
+# user=test_user,
+# ):
+# doc = create_doc(
+# developer_id=developer_id,
+# owner_type="user",
+# owner_id=user.id,
+# data=CreateDocRequest(title="Hello", content=["World"]),
+# client=client,
+# )
+
+# yield doc
+
+
+# @fixture(scope="global")
+# def test_task(
+# client=pg_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [{"evaluate": {"hello": '"world"'}}],
+# }
+# ),
+# client=client,
+# )
+
+# yield task
+
+
+# @fixture(scope="global")
+# def test_execution(
+# client=pg_client,
+# developer_id=test_developer_id,
+# task=test_task,
+# ):
+# workflow_handle = WorkflowHandle(
+# client=None,
+# id="blah",
+# )
+
+# execution = create_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=CreateExecutionRequest(input={"test": "test"}),
+# client=client,
+# )
+# create_temporal_lookup(
+# developer_id=developer_id,
+# execution_id=execution.id,
+# workflow_handle=workflow_handle,
+# client=client,
+# )
+
+# yield execution
+
+
+# @fixture(scope="test")
+# def test_execution_started(
+# client=pg_client,
+# developer_id=test_developer_id,
+# task=test_task,
+# ):
+# workflow_handle = WorkflowHandle(
+# client=None,
+# id="blah",
+# )
+
+# execution = create_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=CreateExecutionRequest(input={"test": "test"}),
+# client=client,
+# )
+# create_temporal_lookup(
+# developer_id=developer_id,
+# execution_id=execution.id,
+# workflow_handle=workflow_handle,
+# client=client,
+# )
+
+# # Start the execution
+# create_execution_transition(
+# developer_id=developer_id,
+# task_id=task.id,
+# execution_id=execution.id,
+# data=CreateTransitionRequest(
+# type="init",
+# output={},
+# current={"workflow": "main", "step": 0},
+# next={"workflow": "main", "step": 0},
+# ),
+# update_execution_status=True,
+# client=client,
+# )
+
+# yield execution
+
+
+# @fixture(scope="global")
+# def test_transition(
+# client=pg_client,
+# developer_id=test_developer_id,
+# execution=test_execution,
+# ):
+# transition = create_execution_transition(
+# developer_id=developer_id,
+# execution_id=execution.id,
+# data=CreateTransitionRequest(
+# type="step",
+# output={},
+# current={"workflow": "main", "step": 0},
+# next={"workflow": "wf1", "step": 1},
+# ),
+# client=client,
+# )
+
+# yield transition
+
+
+# @fixture(scope="global")
+# def test_tool(
+# client=pg_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# function = {
+# "description": "A function that prints hello world",
+# "parameters": {"type": "object", "properties": {}},
+# }
+
+# tool = {
+# "function": function,
+# "name": "hello_world1",
+# "type": "function",
+# }
+
+# [tool, *_] = create_tools(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=[CreateToolRequest(**tool)],
+# client=client,
+# )
+
+# yield tool
+
+
+# @fixture(scope="global")
+# def client(pg_client=pg_client):
+# client = TestClient(app=app)
+#     app.state.pg_client = pg_client
+
+# return client
+
+# @fixture(scope="global")
+# def make_request(client=client, developer_id=test_developer_id):
+# def _make_request(method, url, **kwargs):
+# headers = kwargs.pop("headers", {})
+# headers = {
+# **headers,
+# api_key_header_name: api_key,
+# }
+
+# if multi_tenant_mode:
+# headers["X-Developer-Id"] = str(developer_id)
+
+# return client.request(method, url, headers=headers, **kwargs)
+
+# return _make_request
@fixture(scope="global")
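
Note: the new pg_client fixture leans on patch_pg_client from tests/utils.py,
which is not shown in this patch. A minimal sketch of what it could look like
using the newly added testcontainers[postgres] dev dependency (the helper name
is from the import above; everything else here is an assumption):

    from contextlib import asynccontextmanager

    import asyncpg
    from testcontainers.postgres import PostgresContainer

    @asynccontextmanager
    async def patch_pg_client():
        # Throwaway Postgres for the test session via testcontainers.
        with PostgresContainer("postgres:16") as postgres:
            # testcontainers returns a SQLAlchemy-style URL; asyncpg
            # expects a plain postgresql:// DSN.
            dsn = postgres.get_connection_url().replace(
                "postgresql+psycopg2", "postgresql"
            )
            client = await asyncpg.connect(dsn)
            try:
                yield client
            finally:
                await client.close()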
diff --git a/agents-api/tests/sample_tasks/test_find_selector.py b/agents-api/tests/sample_tasks/test_find_selector.py
index 616d4cd38..beaa18613 100644
--- a/agents-api/tests/sample_tasks/test_find_selector.py
+++ b/agents-api/tests/sample_tasks/test_find_selector.py
@@ -1,125 +1,125 @@
-# Tests for task queries
-import os
-
-from uuid_extensions import uuid7
-from ward import raises, test
-
-from ..fixtures import cozo_client, test_agent, test_developer_id
-from ..utils import patch_embed_acompletion, patch_http_client_with_temporal
-
-this_dir = os.path.dirname(__file__)
-
-
-@test("workflow sample: find-selector create task")
-async def _(
- cozo_client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- agent_id = str(agent.id)
- task_id = str(uuid7())
-
- with (
- patch_embed_acompletion(),
- open(f"{this_dir}/find_selector.yaml", "r") as sample_file,
- ):
- task_def = sample_file.read()
-
- async with patch_http_client_with_temporal(
- cozo_client=cozo_client, developer_id=developer_id
- ) as (
- make_request,
- _,
- ):
- make_request(
- method="POST",
- url=f"/agents/{agent_id}/tasks/{task_id}",
- headers={"Content-Type": "application/x-yaml"},
- data=task_def,
- ).raise_for_status()
-
-
-@test("workflow sample: find-selector start with bad input should fail")
-async def _(
- cozo_client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- agent_id = str(agent.id)
- task_id = str(uuid7())
-
- with (
- patch_embed_acompletion(),
- open(f"{this_dir}/find_selector.yaml", "r") as sample_file,
- ):
- task_def = sample_file.read()
-
- async with patch_http_client_with_temporal(
- cozo_client=cozo_client, developer_id=developer_id
- ) as (
- make_request,
- temporal_client,
- ):
- make_request(
- method="POST",
- url=f"/agents/{agent_id}/tasks/{task_id}",
- headers={"Content-Type": "application/x-yaml"},
- data=task_def,
- ).raise_for_status()
-
- execution_data = dict(input={"test": "input"})
-
- with raises(BaseException):
- make_request(
- method="POST",
- url=f"/tasks/{task_id}/executions",
- json=execution_data,
- ).raise_for_status()
-
-
-@test("workflow sample: find-selector start with correct input")
-async def _(
- cozo_client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- agent_id = str(agent.id)
- task_id = str(uuid7())
-
- with (
- patch_embed_acompletion(
- output={"role": "assistant", "content": "found: true\nvalue: 'Gaga'"}
- ),
- open(f"{this_dir}/find_selector.yaml", "r") as sample_file,
- ):
- task_def = sample_file.read()
-
- async with patch_http_client_with_temporal(
- cozo_client=cozo_client, developer_id=developer_id
- ) as (
- make_request,
- temporal_client,
- ):
- make_request(
- method="POST",
- url=f"/agents/{agent_id}/tasks/{task_id}",
- headers={"Content-Type": "application/x-yaml"},
- data=task_def,
- ).raise_for_status()
-
- input = dict(
- screenshot_base64="iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA",
- network_requests=[{"request": {}, "response": {"body": "Lady Gaga"}}],
- parameters=["name"],
- )
- execution_data = dict(input=input)
-
- execution_created = make_request(
- method="POST",
- url=f"/tasks/{task_id}/executions",
- json=execution_data,
- ).json()
-
- handle = temporal_client.get_workflow_handle(execution_created["jobs"][0])
-
- await handle.result()
+# # Tests for task queries
+# import os
+
+# from uuid_extensions import uuid7
+# from ward import raises, test
+
+# from ..fixtures import cozo_client, test_agent, test_developer_id
+# from ..utils import patch_embed_acompletion, patch_http_client_with_temporal
+
+# this_dir = os.path.dirname(__file__)
+
+
+# @test("workflow sample: find-selector create task")
+# async def _(
+# cozo_client=cozo_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# agent_id = str(agent.id)
+# task_id = str(uuid7())
+
+# with (
+# patch_embed_acompletion(),
+# open(f"{this_dir}/find_selector.yaml", "r") as sample_file,
+# ):
+# task_def = sample_file.read()
+
+# async with patch_http_client_with_temporal(
+# cozo_client=cozo_client, developer_id=developer_id
+# ) as (
+# make_request,
+# _,
+# ):
+# make_request(
+# method="POST",
+# url=f"/agents/{agent_id}/tasks/{task_id}",
+# headers={"Content-Type": "application/x-yaml"},
+# data=task_def,
+# ).raise_for_status()
+
+
+# @test("workflow sample: find-selector start with bad input should fail")
+# async def _(
+# cozo_client=cozo_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# agent_id = str(agent.id)
+# task_id = str(uuid7())
+
+# with (
+# patch_embed_acompletion(),
+# open(f"{this_dir}/find_selector.yaml", "r") as sample_file,
+# ):
+# task_def = sample_file.read()
+
+# async with patch_http_client_with_temporal(
+# cozo_client=cozo_client, developer_id=developer_id
+# ) as (
+# make_request,
+# temporal_client,
+# ):
+# make_request(
+# method="POST",
+# url=f"/agents/{agent_id}/tasks/{task_id}",
+# headers={"Content-Type": "application/x-yaml"},
+# data=task_def,
+# ).raise_for_status()
+
+# execution_data = dict(input={"test": "input"})
+
+# with raises(BaseException):
+# make_request(
+# method="POST",
+# url=f"/tasks/{task_id}/executions",
+# json=execution_data,
+# ).raise_for_status()
+
+
+# @test("workflow sample: find-selector start with correct input")
+# async def _(
+# cozo_client=cozo_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# agent_id = str(agent.id)
+# task_id = str(uuid7())
+
+# with (
+# patch_embed_acompletion(
+# output={"role": "assistant", "content": "found: true\nvalue: 'Gaga'"}
+# ),
+# open(f"{this_dir}/find_selector.yaml", "r") as sample_file,
+# ):
+# task_def = sample_file.read()
+
+# async with patch_http_client_with_temporal(
+# cozo_client=cozo_client, developer_id=developer_id
+# ) as (
+# make_request,
+# temporal_client,
+# ):
+# make_request(
+# method="POST",
+# url=f"/agents/{agent_id}/tasks/{task_id}",
+# headers={"Content-Type": "application/x-yaml"},
+# data=task_def,
+# ).raise_for_status()
+
+# input = dict(
+# screenshot_base64="iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA",
+# network_requests=[{"request": {}, "response": {"body": "Lady Gaga"}}],
+# parameters=["name"],
+# )
+# execution_data = dict(input=input)
+
+# execution_created = make_request(
+# method="POST",
+# url=f"/tasks/{task_id}/executions",
+# json=execution_data,
+# ).json()
+
+# handle = temporal_client.get_workflow_handle(execution_created["jobs"][0])
+
+# await handle.result()
diff --git a/agents-api/tests/test_activities.py b/agents-api/tests/test_activities.py
index d81e30038..b657a3047 100644
--- a/agents-api/tests/test_activities.py
+++ b/agents-api/tests/test_activities.py
@@ -1,56 +1,56 @@
-from uuid_extensions import uuid7
-from ward import test
-
-from agents_api.activities.embed_docs import embed_docs
-from agents_api.activities.types import EmbedDocsPayload
-from agents_api.clients import temporal
-from agents_api.env import temporal_task_queue
-from agents_api.workflows.demo import DemoWorkflow
-from agents_api.workflows.task_execution.helpers import DEFAULT_RETRY_POLICY
-
-from .fixtures import (
- cozo_client,
- test_developer_id,
- test_doc,
-)
-from .utils import patch_testing_temporal
-
-
-@test("activity: call direct embed_docs")
-async def _(
- cozo_client=cozo_client,
- developer_id=test_developer_id,
- doc=test_doc,
-):
- title = "title"
- content = ["content 1"]
- include_title = True
-
- await embed_docs(
- EmbedDocsPayload(
- developer_id=developer_id,
- doc_id=doc.id,
- title=title,
- content=content,
- include_title=include_title,
- embed_instruction=None,
- ),
- cozo_client,
- )
-
-
-@test("activity: call demo workflow via temporal client")
-async def _():
- async with patch_testing_temporal() as (_, mock_get_client):
- client = await temporal.get_client()
-
- result = await client.execute_workflow(
- DemoWorkflow.run,
- args=[1, 2],
- id=str(uuid7()),
- task_queue=temporal_task_queue,
- retry_policy=DEFAULT_RETRY_POLICY,
- )
-
- assert result == 3
- mock_get_client.assert_called_once()
+# from uuid_extensions import uuid7
+# from ward import test
+
+# from agents_api.activities.embed_docs import embed_docs
+# from agents_api.activities.types import EmbedDocsPayload
+# from agents_api.clients import temporal
+# from agents_api.env import temporal_task_queue
+# from agents_api.workflows.demo import DemoWorkflow
+# from agents_api.workflows.task_execution.helpers import DEFAULT_RETRY_POLICY
+
+# from .fixtures import (
+# cozo_client,
+# test_developer_id,
+# test_doc,
+# )
+# from .utils import patch_testing_temporal
+
+
+# @test("activity: call direct embed_docs")
+# async def _(
+# cozo_client=cozo_client,
+# developer_id=test_developer_id,
+# doc=test_doc,
+# ):
+# title = "title"
+# content = ["content 1"]
+# include_title = True
+
+# await embed_docs(
+# EmbedDocsPayload(
+# developer_id=developer_id,
+# doc_id=doc.id,
+# title=title,
+# content=content,
+# include_title=include_title,
+# embed_instruction=None,
+# ),
+# cozo_client,
+# )
+
+
+# @test("activity: call demo workflow via temporal client")
+# async def _():
+# async with patch_testing_temporal() as (_, mock_get_client):
+# client = await temporal.get_client()
+
+# result = await client.execute_workflow(
+# DemoWorkflow.run,
+# args=[1, 2],
+# id=str(uuid7()),
+# task_queue=temporal_task_queue,
+# retry_policy=DEFAULT_RETRY_POLICY,
+# )
+
+# assert result == 3
+# mock_get_client.assert_called_once()
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index f4a2a0c12..f079642b3 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,163 +1,163 @@
-# Tests for agent queries
-
-from uuid_extensions import uuid7
-from ward import raises, test
-
-from agents_api.autogen.openapi_model import (
- Agent,
- CreateAgentRequest,
- CreateOrUpdateAgentRequest,
- PatchAgentRequest,
- ResourceUpdatedResponse,
- UpdateAgentRequest,
-)
-from agents_api.models.agent.create_agent import create_agent
-from agents_api.models.agent.create_or_update_agent import create_or_update_agent
-from agents_api.models.agent.delete_agent import delete_agent
-from agents_api.models.agent.get_agent import get_agent
-from agents_api.models.agent.list_agents import list_agents
-from agents_api.models.agent.patch_agent import patch_agent
-from agents_api.models.agent.update_agent import update_agent
-from tests.fixtures import cozo_client, test_agent, test_developer_id
-
-
-@test("model: create agent")
-def _(client=cozo_client, developer_id=test_developer_id):
- create_agent(
- developer_id=developer_id,
- data=CreateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- ),
- client=client,
- )
-
-
-@test("model: create agent with instructions")
-def _(client=cozo_client, developer_id=test_developer_id):
- create_agent(
- developer_id=developer_id,
- data=CreateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- ),
- client=client,
- )
-
-
-@test("model: create or update agent")
-def _(client=cozo_client, developer_id=test_developer_id):
- create_or_update_agent(
- developer_id=developer_id,
- agent_id=uuid7(),
- data=CreateOrUpdateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- ),
- client=client,
- )
-
-
-@test("model: get agent not exists")
-def _(client=cozo_client, developer_id=test_developer_id):
- agent_id = uuid7()
-
- with raises(Exception):
- get_agent(agent_id=agent_id, developer_id=developer_id, client=client)
-
-
-@test("model: get agent exists")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- result = get_agent(agent_id=agent.id, developer_id=developer_id, client=client)
-
- assert result is not None
- assert isinstance(result, Agent)
-
-
-@test("model: delete agent")
-def _(client=cozo_client, developer_id=test_developer_id):
- temp_agent = create_agent(
- developer_id=developer_id,
- data=CreateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- ),
- client=client,
- )
-
- # Delete the agent
- delete_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
-
- # Check that the agent is deleted
- with raises(Exception):
- get_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
-
-
-@test("model: update agent")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- result = update_agent(
- agent_id=agent.id,
- developer_id=developer_id,
- data=UpdateAgentRequest(
- name="updated agent",
- about="updated agent about",
- model="gpt-4o-mini",
- default_settings={"temperature": 1.0},
- metadata={"hello": "world"},
- ),
- client=client,
- )
-
- assert result is not None
- assert isinstance(result, ResourceUpdatedResponse)
-
- agent = get_agent(
- agent_id=agent.id,
- developer_id=developer_id,
- client=client,
- )
-
- assert "test" not in agent.metadata
-
-
-@test("model: patch agent")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- result = patch_agent(
- agent_id=agent.id,
- developer_id=developer_id,
- data=PatchAgentRequest(
- name="patched agent",
- about="patched agent about",
- default_settings={"temperature": 1.0},
- metadata={"something": "else"},
- ),
- client=client,
- )
-
- assert result is not None
- assert isinstance(result, ResourceUpdatedResponse)
-
- agent = get_agent(
- agent_id=agent.id,
- developer_id=developer_id,
- client=client,
- )
-
- assert "hello" in agent.metadata
-
-
-@test("model: list agents")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- """Tests listing all agents associated with a developer in the database. Verifies that the correct list of agents is retrieved."""
-
- result = list_agents(developer_id=developer_id, client=client)
-
- assert isinstance(result, list)
- assert all(isinstance(agent, Agent) for agent in result)
+# # Tests for agent queries
+
+# from uuid_extensions import uuid7
+# from ward import raises, test
+
+# from agents_api.autogen.openapi_model import (
+# Agent,
+# CreateAgentRequest,
+# CreateOrUpdateAgentRequest,
+# PatchAgentRequest,
+# ResourceUpdatedResponse,
+# UpdateAgentRequest,
+# )
+# from agents_api.queries.agent.create_agent import create_agent
+# from agents_api.queries.agent.create_or_update_agent import create_or_update_agent
+# from agents_api.queries.agent.delete_agent import delete_agent
+# from agents_api.queries.agent.get_agent import get_agent
+# from agents_api.queries.agent.list_agents import list_agents
+# from agents_api.queries.agent.patch_agent import patch_agent
+# from agents_api.queries.agent.update_agent import update_agent
+# from tests.fixtures import cozo_client, test_agent, test_developer_id
+
+
+# @test("query: create agent")
+# def _(client=cozo_client, developer_id=test_developer_id):
+# create_agent(
+# developer_id=developer_id,
+# data=CreateAgentRequest(
+# name="test agent",
+# about="test agent about",
+# model="gpt-4o-mini",
+# ),
+# client=client,
+# )
+
+
+# @test("query: create agent with instructions")
+# def _(client=cozo_client, developer_id=test_developer_id):
+# create_agent(
+# developer_id=developer_id,
+# data=CreateAgentRequest(
+# name="test agent",
+# about="test agent about",
+# model="gpt-4o-mini",
+# instructions=["test instruction"],
+# ),
+# client=client,
+# )
+
+
+# @test("query: create or update agent")
+# def _(client=cozo_client, developer_id=test_developer_id):
+# create_or_update_agent(
+# developer_id=developer_id,
+# agent_id=uuid7(),
+# data=CreateOrUpdateAgentRequest(
+# name="test agent",
+# about="test agent about",
+# model="gpt-4o-mini",
+# instructions=["test instruction"],
+# ),
+# client=client,
+# )
+
+
+# @test("query: get agent not exists")
+# def _(client=cozo_client, developer_id=test_developer_id):
+# agent_id = uuid7()
+
+# with raises(Exception):
+# get_agent(agent_id=agent_id, developer_id=developer_id, client=client)
+
+
+# @test("query: get agent exists")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# result = get_agent(agent_id=agent.id, developer_id=developer_id, client=client)
+
+# assert result is not None
+# assert isinstance(result, Agent)
+
+
+# @test("query: delete agent")
+# def _(client=cozo_client, developer_id=test_developer_id):
+# temp_agent = create_agent(
+# developer_id=developer_id,
+# data=CreateAgentRequest(
+# name="test agent",
+# about="test agent about",
+# model="gpt-4o-mini",
+# instructions=["test instruction"],
+# ),
+# client=client,
+# )
+
+# # Delete the agent
+# delete_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
+
+# # Check that the agent is deleted
+# with raises(Exception):
+# get_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
+
+
+# @test("query: update agent")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# result = update_agent(
+# agent_id=agent.id,
+# developer_id=developer_id,
+# data=UpdateAgentRequest(
+# name="updated agent",
+# about="updated agent about",
+# model="gpt-4o-mini",
+# default_settings={"temperature": 1.0},
+# metadata={"hello": "world"},
+# ),
+# client=client,
+# )
+
+# assert result is not None
+# assert isinstance(result, ResourceUpdatedResponse)
+
+# agent = get_agent(
+# agent_id=agent.id,
+# developer_id=developer_id,
+# client=client,
+# )
+
+# assert "test" not in agent.metadata
+
+
+# @test("query: patch agent")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# result = patch_agent(
+# agent_id=agent.id,
+# developer_id=developer_id,
+# data=PatchAgentRequest(
+# name="patched agent",
+# about="patched agent about",
+# default_settings={"temperature": 1.0},
+# metadata={"something": "else"},
+# ),
+# client=client,
+# )
+
+# assert result is not None
+# assert isinstance(result, ResourceUpdatedResponse)
+
+# agent = get_agent(
+# agent_id=agent.id,
+# developer_id=developer_id,
+# client=client,
+# )
+
+# assert "hello" in agent.metadata
+
+
+# @test("query: list agents")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# """Tests listing all agents associated with a developer in the database. Verifies that the correct list of agents is retrieved."""
+
+# result = list_agents(developer_id=developer_id, client=client)
+
+# assert isinstance(result, list)
+# assert all(isinstance(agent, Agent) for agent in result)
diff --git a/agents-api/tests/test_agent_routes.py b/agents-api/tests/test_agent_routes.py
index ecab7c1e4..95e8e7558 100644
--- a/agents-api/tests/test_agent_routes.py
+++ b/agents-api/tests/test_agent_routes.py
@@ -1,230 +1,230 @@
-# Tests for agent queries
+# # Tests for agent queries
-from uuid_extensions import uuid7
-from ward import test
+# from uuid_extensions import uuid7
+# from ward import test
-from tests.fixtures import client, make_request, test_agent
+# from tests.fixtures import client, make_request, test_agent
-@test("route: unauthorized should fail")
-def _(client=client):
- data = dict(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- )
+# @test("route: unauthorized should fail")
+# def _(client=client):
+# data = dict(
+# name="test agent",
+# about="test agent about",
+# model="gpt-4o-mini",
+# )
- response = client.request(
- method="POST",
- url="/agents",
- json=data,
- )
+# response = client.request(
+# method="POST",
+# url="/agents",
+# json=data,
+# )
- assert response.status_code == 403
+# assert response.status_code == 403
-@test("route: create agent")
-def _(make_request=make_request):
- data = dict(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- )
+# @test("route: create agent")
+# def _(make_request=make_request):
+# data = dict(
+# name="test agent",
+# about="test agent about",
+# model="gpt-4o-mini",
+# )
- response = make_request(
- method="POST",
- url="/agents",
- json=data,
- )
+# response = make_request(
+# method="POST",
+# url="/agents",
+# json=data,
+# )
- assert response.status_code == 201
+# assert response.status_code == 201
-@test("route: create agent with instructions")
-def _(make_request=make_request):
- data = dict(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- )
+# @test("route: create agent with instructions")
+# def _(make_request=make_request):
+# data = dict(
+# name="test agent",
+# about="test agent about",
+# model="gpt-4o-mini",
+# instructions=["test instruction"],
+# )
- response = make_request(
- method="POST",
- url="/agents",
- json=data,
- )
+# response = make_request(
+# method="POST",
+# url="/agents",
+# json=data,
+# )
- assert response.status_code == 201
+# assert response.status_code == 201
-@test("route: create or update agent")
-def _(make_request=make_request):
- agent_id = str(uuid7())
+# @test("route: create or update agent")
+# def _(make_request=make_request):
+# agent_id = str(uuid7())
- data = dict(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- )
+# data = dict(
+# name="test agent",
+# about="test agent about",
+# model="gpt-4o-mini",
+# instructions=["test instruction"],
+# )
- response = make_request(
- method="POST",
- url=f"/agents/{agent_id}",
- json=data,
- )
+# response = make_request(
+# method="POST",
+# url=f"/agents/{agent_id}",
+# json=data,
+# )
- assert response.status_code == 201
+# assert response.status_code == 201
-@test("route: get agent not exists")
-def _(make_request=make_request):
- agent_id = str(uuid7())
+# @test("route: get agent not exists")
+# def _(make_request=make_request):
+# agent_id = str(uuid7())
- response = make_request(
- method="GET",
- url=f"/agents/{agent_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/agents/{agent_id}",
+# )
- assert response.status_code == 404
+# assert response.status_code == 404
-@test("route: get agent exists")
-def _(make_request=make_request, agent=test_agent):
- agent_id = str(agent.id)
+# @test("route: get agent exists")
+# def _(make_request=make_request, agent=test_agent):
+# agent_id = str(agent.id)
- response = make_request(
- method="GET",
- url=f"/agents/{agent_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/agents/{agent_id}",
+# )
- assert response.status_code != 404
+# assert response.status_code != 404
-@test("route: delete agent")
-def _(make_request=make_request):
- data = dict(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- )
+# @test("route: delete agent")
+# def _(make_request=make_request):
+# data = dict(
+# name="test agent",
+# about="test agent about",
+# model="gpt-4o-mini",
+# instructions=["test instruction"],
+# )
- response = make_request(
- method="POST",
- url="/agents",
- json=data,
- )
- agent_id = response.json()["id"]
+# response = make_request(
+# method="POST",
+# url="/agents",
+# json=data,
+# )
+# agent_id = response.json()["id"]
- response = make_request(
- method="DELETE",
- url=f"/agents/{agent_id}",
- )
+# response = make_request(
+# method="DELETE",
+# url=f"/agents/{agent_id}",
+# )
- assert response.status_code == 202
+# assert response.status_code == 202
- response = make_request(
- method="GET",
- url=f"/agents/{agent_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/agents/{agent_id}",
+# )
- assert response.status_code == 404
+# assert response.status_code == 404
-@test("route: update agent")
-def _(make_request=make_request, agent=test_agent):
- data = dict(
- name="updated agent",
- about="updated agent about",
- default_settings={"temperature": 1.0},
- model="gpt-4o-mini",
- metadata={"hello": "world"},
- )
+# @test("route: update agent")
+# def _(make_request=make_request, agent=test_agent):
+# data = dict(
+# name="updated agent",
+# about="updated agent about",
+# default_settings={"temperature": 1.0},
+# model="gpt-4o-mini",
+# metadata={"hello": "world"},
+# )
- agent_id = str(agent.id)
- response = make_request(
- method="PUT",
- url=f"/agents/{agent_id}",
- json=data,
- )
+# agent_id = str(agent.id)
+# response = make_request(
+# method="PUT",
+# url=f"/agents/{agent_id}",
+# json=data,
+# )
- assert response.status_code == 200
+# assert response.status_code == 200
- agent_id = response.json()["id"]
+# agent_id = response.json()["id"]
- response = make_request(
- method="GET",
- url=f"/agents/{agent_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/agents/{agent_id}",
+# )
- assert response.status_code == 200
- agent = response.json()
+# assert response.status_code == 200
+# agent = response.json()
- assert "test" not in agent["metadata"]
+# assert "test" not in agent["metadata"]
-@test("route: patch agent")
-def _(make_request=make_request, agent=test_agent):
- agent_id = str(agent.id)
+# @test("route: patch agent")
+# def _(make_request=make_request, agent=test_agent):
+# agent_id = str(agent.id)
- data = dict(
- name="patched agent",
- about="patched agent about",
- default_settings={"temperature": 1.0},
- metadata={"something": "else"},
- )
+# data = dict(
+# name="patched agent",
+# about="patched agent about",
+# default_settings={"temperature": 1.0},
+# metadata={"something": "else"},
+# )
- response = make_request(
- method="PATCH",
- url=f"/agents/{agent_id}",
- json=data,
- )
+# response = make_request(
+# method="PATCH",
+# url=f"/agents/{agent_id}",
+# json=data,
+# )
- assert response.status_code == 200
+# assert response.status_code == 200
- agent_id = response.json()["id"]
+# agent_id = response.json()["id"]
- response = make_request(
- method="GET",
- url=f"/agents/{agent_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/agents/{agent_id}",
+# )
- assert response.status_code == 200
- agent = response.json()
+# assert response.status_code == 200
+# agent = response.json()
- assert "hello" in agent["metadata"]
+# assert "hello" in agent["metadata"]
-@test("route: list agents")
-def _(make_request=make_request):
- response = make_request(
- method="GET",
- url="/agents",
- )
-
- assert response.status_code == 200
- response = response.json()
- agents = response["items"]
+# @test("route: list agents")
+# def _(make_request=make_request):
+# response = make_request(
+# method="GET",
+# url="/agents",
+# )
+
+# assert response.status_code == 200
+# response = response.json()
+# agents = response["items"]
- assert isinstance(agents, list)
- assert len(agents) > 0
+# assert isinstance(agents, list)
+# assert len(agents) > 0
-@test("route: list agents with metadata filter")
-def _(make_request=make_request):
- response = make_request(
- method="GET",
- url="/agents",
- params={
- "metadata_filter": {"test": "test"},
- },
- )
+# @test("route: list agents with metadata filter")
+# def _(make_request=make_request):
+# response = make_request(
+# method="GET",
+# url="/agents",
+# params={
+# "metadata_filter": {"test": "test"},
+# },
+# )
- assert response.status_code == 200
- response = response.json()
- agents = response["items"]
+# assert response.status_code == 200
+# response = response.json()
+# agents = response["items"]
- assert isinstance(agents, list)
- assert len(agents) > 0
+# assert isinstance(agents, list)
+# assert len(agents) > 0
diff --git a/agents-api/tests/test_chat_routes.py b/agents-api/tests/test_chat_routes.py
index 4838efcd5..6be130eb3 100644
--- a/agents-api/tests/test_chat_routes.py
+++ b/agents-api/tests/test_chat_routes.py
@@ -1,177 +1,177 @@
-# Tests for session queries
-
-from ward import test
-
-from agents_api.autogen.openapi_model import ChatInput, CreateSessionRequest
-from agents_api.clients import litellm
-from agents_api.common.protocol.sessions import ChatContext
-from agents_api.models.chat.gather_messages import gather_messages
-from agents_api.models.chat.prepare_chat_context import prepare_chat_context
-from agents_api.models.session.create_session import create_session
-from tests.fixtures import (
- cozo_client,
- make_request,
- patch_embed_acompletion,
- test_agent,
- test_developer,
- test_developer_id,
- test_session,
- test_tool,
- test_user,
-)
-
-
-@test("chat: check that patching libs works")
-async def _(
- _=patch_embed_acompletion,
-):
- assert (await litellm.acompletion(model="gpt-4o-mini", messages=[])).id == "fake_id"
- assert (await litellm.aembedding())[0][
- 0
- ] == 1.0 # pytype: disable=missing-parameter
-
-
-@test("chat: check that non-recall gather_messages works")
-async def _(
- developer=test_developer,
- client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
- session=test_session,
- tool=test_tool,
- user=test_user,
- mocks=patch_embed_acompletion,
-):
- (embed, _) = mocks
-
- chat_context = prepare_chat_context(
- developer_id=developer_id,
- session_id=session.id,
- client=client,
- )
-
- session_id = session.id
-
- messages = [{"role": "user", "content": "hello"}]
-
- past_messages, doc_references = await gather_messages(
- developer=developer,
- session_id=session_id,
- chat_context=chat_context,
- chat_input=ChatInput(messages=messages, recall=False),
- )
-
- assert isinstance(past_messages, list)
- assert len(past_messages) >= 0
- assert isinstance(doc_references, list)
- assert len(doc_references) == 0
-
- # Check that embed was not called
- embed.assert_not_called()
-
-
-@test("chat: check that gather_messages works")
-async def _(
- developer=test_developer,
- client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
- # session=test_session,
- tool=test_tool,
- user=test_user,
- mocks=patch_embed_acompletion,
-):
- session = create_session(
- developer_id=developer_id,
- data=CreateSessionRequest(
- agent=agent.id,
- situation="test session about",
- recall_options={
- "mode": "text",
- "num_search_messages": 10,
- "max_query_length": 1001,
- },
- ),
- client=client,
- )
-
- (embed, _) = mocks
-
- chat_context = prepare_chat_context(
- developer_id=developer_id,
- session_id=session.id,
- client=client,
- )
-
- session_id = session.id
-
- messages = [{"role": "user", "content": "hello"}]
-
- past_messages, doc_references = await gather_messages(
- developer=developer,
- session_id=session_id,
- chat_context=chat_context,
- chat_input=ChatInput(messages=messages, recall=True),
- )
-
- assert isinstance(past_messages, list)
- assert isinstance(doc_references, list)
-
- # Check that embed was called at least once
- embed.assert_called()
-
-
-@test("chat: check that chat route calls both mocks")
-async def _(
- make_request=make_request,
- developer_id=test_developer_id,
- agent=test_agent,
- mocks=patch_embed_acompletion,
- client=cozo_client,
-):
- session = create_session(
- developer_id=developer_id,
- data=CreateSessionRequest(
- agent=agent.id,
- situation="test session about",
- recall_options={
- "mode": "vector",
- "num_search_messages": 5,
- "max_query_length": 1001,
- },
- ),
- client=client,
- )
-
- (embed, acompletion) = mocks
-
- response = make_request(
- method="POST",
- url=f"/sessions/{session.id}/chat",
- json={"messages": [{"role": "user", "content": "hello"}]},
- )
-
- response.raise_for_status()
-
- # Check that both mocks were called at least once
- embed.assert_called()
- acompletion.assert_called()
-
-
-@test("model: prepare chat context")
-def _(
- client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
- session=test_session,
- tool=test_tool,
- user=test_user,
-):
- context = prepare_chat_context(
- developer_id=developer_id,
- session_id=session.id,
- client=client,
- )
-
- assert isinstance(context, ChatContext)
- assert len(context.toolsets) > 0
+# # Tests for session queries
+
+# from ward import test
+
+# from agents_api.autogen.openapi_model import ChatInput, CreateSessionRequest
+# from agents_api.clients import litellm
+# from agents_api.common.protocol.sessions import ChatContext
+# from agents_api.queries.chat.gather_messages import gather_messages
+# from agents_api.queries.chat.prepare_chat_context import prepare_chat_context
+# from agents_api.queries.session.create_session import create_session
+# from tests.fixtures import (
+# cozo_client,
+# make_request,
+# patch_embed_acompletion,
+# test_agent,
+# test_developer,
+# test_developer_id,
+# test_session,
+# test_tool,
+# test_user,
+# )
+
+
+# @test("chat: check that patching libs works")
+# async def _(
+# _=patch_embed_acompletion,
+# ):
+# assert (await litellm.acompletion(model="gpt-4o-mini", messages=[])).id == "fake_id"
+# assert (await litellm.aembedding())[0][
+# 0
+# ] == 1.0 # pytype: disable=missing-parameter
+
+
+# @test("chat: check that non-recall gather_messages works")
+# async def _(
+# developer=test_developer,
+# client=cozo_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# session=test_session,
+# tool=test_tool,
+# user=test_user,
+# mocks=patch_embed_acompletion,
+# ):
+# (embed, _) = mocks
+
+# chat_context = prepare_chat_context(
+# developer_id=developer_id,
+# session_id=session.id,
+# client=client,
+# )
+
+# session_id = session.id
+
+# messages = [{"role": "user", "content": "hello"}]
+
+# past_messages, doc_references = await gather_messages(
+# developer=developer,
+# session_id=session_id,
+# chat_context=chat_context,
+# chat_input=ChatInput(messages=messages, recall=False),
+# )
+
+# assert isinstance(past_messages, list)
+# assert len(past_messages) >= 0
+# assert isinstance(doc_references, list)
+# assert len(doc_references) == 0
+
+# # Check that embed was not called
+# embed.assert_not_called()
+
+
+# @test("chat: check that gather_messages works")
+# async def _(
+# developer=test_developer,
+# client=cozo_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# # session=test_session,
+# tool=test_tool,
+# user=test_user,
+# mocks=patch_embed_acompletion,
+# ):
+# session = create_session(
+# developer_id=developer_id,
+# data=CreateSessionRequest(
+# agent=agent.id,
+# situation="test session about",
+# recall_options={
+# "mode": "text",
+# "num_search_messages": 10,
+# "max_query_length": 1001,
+# },
+# ),
+# client=client,
+# )
+
+# (embed, _) = mocks
+
+# chat_context = prepare_chat_context(
+# developer_id=developer_id,
+# session_id=session.id,
+# client=client,
+# )
+
+# session_id = session.id
+
+# messages = [{"role": "user", "content": "hello"}]
+
+# past_messages, doc_references = await gather_messages(
+# developer=developer,
+# session_id=session_id,
+# chat_context=chat_context,
+# chat_input=ChatInput(messages=messages, recall=True),
+# )
+
+# assert isinstance(past_messages, list)
+# assert isinstance(doc_references, list)
+
+# # Check that embed was called at least once
+# embed.assert_called()
+
+
+# @test("chat: check that chat route calls both mocks")
+# async def _(
+# make_request=make_request,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# mocks=patch_embed_acompletion,
+# client=cozo_client,
+# ):
+# session = create_session(
+# developer_id=developer_id,
+# data=CreateSessionRequest(
+# agent=agent.id,
+# situation="test session about",
+# recall_options={
+# "mode": "vector",
+# "num_search_messages": 5,
+# "max_query_length": 1001,
+# },
+# ),
+# client=client,
+# )
+
+# (embed, acompletion) = mocks
+
+# response = make_request(
+# method="POST",
+# url=f"/sessions/{session.id}/chat",
+# json={"messages": [{"role": "user", "content": "hello"}]},
+# )
+
+# response.raise_for_status()
+
+# # Check that both mocks were called at least once
+# embed.assert_called()
+# acompletion.assert_called()
+
+
+# @test("query: prepare chat context")
+# def _(
+# client=cozo_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# session=test_session,
+# tool=test_tool,
+# user=test_user,
+# ):
+# context = prepare_chat_context(
+# developer_id=developer_id,
+# session_id=session.id,
+# client=client,
+# )
+
+# assert isinstance(context, ChatContext)
+# assert len(context.toolsets) > 0
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index 734afdd65..adba5ddd1 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -4,33 +4,42 @@
from ward import raises, test
from agents_api.common.protocol.developers import Developer
-from agents_api.models.developer.get_developer import get_developer, verify_developer
-from tests.fixtures import cozo_client, test_developer_id
+from agents_api.queries.developers.get_developer import get_developer # , verify_developer
+from .fixtures import pg_client, test_developer_id
-@test("model: get developer")
-def _(client=cozo_client, developer_id=test_developer_id):
- developer = get_developer(
- developer_id=developer_id,
- client=client,
- )
+@test("query: get developer not exists")
+async def _(client=pg_client):
+    with raises(Exception):
+        await get_developer(
+            developer_id=uuid7(),
+            client=client,
+        )
- assert isinstance(developer, Developer)
- assert developer.id
+# @test("query: get developer")
+# def _(client=pg_client, developer_id=test_developer_id):
+# developer = get_developer(
+# developer_id=developer_id,
+# client=client,
+# )
-@test("model: verify developer exists")
-def _(client=cozo_client, developer_id=test_developer_id):
- verify_developer(
- developer_id=developer_id,
- client=client,
- )
+# assert isinstance(developer, Developer)
+# assert developer.id
-@test("model: verify developer not exists")
-def _(client=cozo_client):
- with raises(Exception):
- verify_developer(
- developer_id=uuid7(),
- client=client,
- )
+# @test("query: verify developer exists")
+# def _(client=pg_client, developer_id=test_developer_id):
+# verify_developer(
+# developer_id=developer_id,
+# client=client,
+# )
+
+
+# @test("query: verify developer not exists")
+# def _(client=pg_client):
+# with raises(Exception):
+# verify_developer(
+# developer_id=uuid7(),
+# client=client,
+# )
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index a7fa7868a..f2ff2c786 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -1,163 +1,163 @@
-# Tests for entry queries
-
-import asyncio
-
-from ward import test
-
-from agents_api.autogen.openapi_model import CreateDocRequest
-from agents_api.models.docs.create_doc import create_doc
-from agents_api.models.docs.delete_doc import delete_doc
-from agents_api.models.docs.embed_snippets import embed_snippets
-from agents_api.models.docs.get_doc import get_doc
-from agents_api.models.docs.list_docs import list_docs
-from agents_api.models.docs.search_docs_by_embedding import search_docs_by_embedding
-from agents_api.models.docs.search_docs_by_text import search_docs_by_text
-from tests.fixtures import (
- EMBEDDING_SIZE,
- cozo_client,
- test_agent,
- test_developer_id,
- test_doc,
- test_user,
-)
-
-
-@test("model: create docs")
-def _(
- client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user
-):
- create_doc(
- developer_id=developer_id,
- owner_type="agent",
- owner_id=agent.id,
- data=CreateDocRequest(title="Hello", content=["World"]),
- client=client,
- )
-
- create_doc(
- developer_id=developer_id,
- owner_type="user",
- owner_id=user.id,
- data=CreateDocRequest(title="Hello", content=["World"]),
- client=client,
- )
-
-
-@test("model: get docs")
-def _(client=cozo_client, doc=test_doc, developer_id=test_developer_id):
- get_doc(
- developer_id=developer_id,
- doc_id=doc.id,
- client=client,
- )
-
-
-@test("model: delete doc")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- doc = create_doc(
- developer_id=developer_id,
- owner_type="agent",
- owner_id=agent.id,
- data=CreateDocRequest(title="Hello", content=["World"]),
- client=client,
- )
-
- delete_doc(
- developer_id=developer_id,
- doc_id=doc.id,
- owner_type="agent",
- owner_id=agent.id,
- client=client,
- )
-
-
-@test("model: list docs")
-def _(
- client=cozo_client, developer_id=test_developer_id, doc=test_doc, agent=test_agent
-):
- result = list_docs(
- developer_id=developer_id,
- owner_type="agent",
- owner_id=agent.id,
- client=client,
- include_without_embeddings=True,
- )
-
- assert len(result) >= 1
-
-
-@test("model: search docs by text")
-async def _(client=cozo_client, agent=test_agent, developer_id=test_developer_id):
- create_doc(
- developer_id=developer_id,
- owner_type="agent",
- owner_id=agent.id,
- data=CreateDocRequest(
- title="Hello", content=["The world is a funny little thing"]
- ),
- client=client,
- )
-
- await asyncio.sleep(1)
-
- result = search_docs_by_text(
- developer_id=developer_id,
- owners=[("agent", agent.id)],
- query="funny",
- client=client,
- )
-
- assert len(result) >= 1
- assert result[0].metadata is not None
-
-
-@test("model: search docs by embedding")
-async def _(client=cozo_client, agent=test_agent, developer_id=test_developer_id):
- doc = create_doc(
- developer_id=developer_id,
- owner_type="agent",
- owner_id=agent.id,
- data=CreateDocRequest(title="Hello", content=["World"]),
- client=client,
- )
-
- ### Add embedding to the snippet
- embed_snippets(
- developer_id=developer_id,
- doc_id=doc.id,
- snippet_indices=[0],
- embeddings=[[1.0] * EMBEDDING_SIZE],
- client=client,
- )
-
- await asyncio.sleep(1)
-
- ### Search
- query_embedding = [0.99] * EMBEDDING_SIZE
-
- result = search_docs_by_embedding(
- developer_id=developer_id,
- owners=[("agent", agent.id)],
- query_embedding=query_embedding,
- client=client,
- )
-
- assert len(result) >= 1
- assert result[0].metadata is not None
-
-
-@test("model: embed snippets")
-def _(client=cozo_client, developer_id=test_developer_id, doc=test_doc):
- snippet_indices = [0]
- embeddings = [[1.0] * EMBEDDING_SIZE]
-
- result = embed_snippets(
- developer_id=developer_id,
- doc_id=doc.id,
- snippet_indices=snippet_indices,
- embeddings=embeddings,
- client=client,
- )
-
- assert result is not None
- assert result.id == doc.id
+# # Tests for docs queries
+
+# import asyncio
+
+# from ward import test
+
+# from agents_api.autogen.openapi_model import CreateDocRequest
+# from agents_api.queries.docs.create_doc import create_doc
+# from agents_api.queries.docs.delete_doc import delete_doc
+# from agents_api.queries.docs.embed_snippets import embed_snippets
+# from agents_api.queries.docs.get_doc import get_doc
+# from agents_api.queries.docs.list_docs import list_docs
+# from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
+# from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
+# from tests.fixtures import (
+# EMBEDDING_SIZE,
+# cozo_client,
+# test_agent,
+# test_developer_id,
+# test_doc,
+# test_user,
+# )
+
+
+# @test("query: create docs")
+# def _(
+# client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user
+# ):
+# create_doc(
+# developer_id=developer_id,
+# owner_type="agent",
+# owner_id=agent.id,
+# data=CreateDocRequest(title="Hello", content=["World"]),
+# client=client,
+# )
+
+# create_doc(
+# developer_id=developer_id,
+# owner_type="user",
+# owner_id=user.id,
+# data=CreateDocRequest(title="Hello", content=["World"]),
+# client=client,
+# )
+
+
+# @test("query: get docs")
+# def _(client=cozo_client, doc=test_doc, developer_id=test_developer_id):
+# get_doc(
+# developer_id=developer_id,
+# doc_id=doc.id,
+# client=client,
+# )
+
+
+# @test("query: delete doc")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# doc = create_doc(
+# developer_id=developer_id,
+# owner_type="agent",
+# owner_id=agent.id,
+# data=CreateDocRequest(title="Hello", content=["World"]),
+# client=client,
+# )
+
+# delete_doc(
+# developer_id=developer_id,
+# doc_id=doc.id,
+# owner_type="agent",
+# owner_id=agent.id,
+# client=client,
+# )
+
+
+# @test("query: list docs")
+# def _(
+# client=cozo_client, developer_id=test_developer_id, doc=test_doc, agent=test_agent
+# ):
+# result = list_docs(
+# developer_id=developer_id,
+# owner_type="agent",
+# owner_id=agent.id,
+# client=client,
+# include_without_embeddings=True,
+# )
+
+# assert len(result) >= 1
+
+
+# @test("query: search docs by text")
+# async def _(client=cozo_client, agent=test_agent, developer_id=test_developer_id):
+# create_doc(
+# developer_id=developer_id,
+# owner_type="agent",
+# owner_id=agent.id,
+# data=CreateDocRequest(
+# title="Hello", content=["The world is a funny little thing"]
+# ),
+# client=client,
+# )
+
+# await asyncio.sleep(1)
+
+# result = search_docs_by_text(
+# developer_id=developer_id,
+# owners=[("agent", agent.id)],
+# query="funny",
+# client=client,
+# )
+
+# assert len(result) >= 1
+# assert result[0].metadata is not None
+
+
+# @test("query: search docs by embedding")
+# async def _(client=cozo_client, agent=test_agent, developer_id=test_developer_id):
+# doc = create_doc(
+# developer_id=developer_id,
+# owner_type="agent",
+# owner_id=agent.id,
+# data=CreateDocRequest(title="Hello", content=["World"]),
+# client=client,
+# )
+
+# ### Add embedding to the snippet
+# embed_snippets(
+# developer_id=developer_id,
+# doc_id=doc.id,
+# snippet_indices=[0],
+# embeddings=[[1.0] * EMBEDDING_SIZE],
+# client=client,
+# )
+
+# await asyncio.sleep(1)
+
+# ### Search
+# query_embedding = [0.99] * EMBEDDING_SIZE
+
+# result = search_docs_by_embedding(
+# developer_id=developer_id,
+# owners=[("agent", agent.id)],
+# query_embedding=query_embedding,
+# client=client,
+# )
+
+# assert len(result) >= 1
+# assert result[0].metadata is not None
+
+
+# @test("query: embed snippets")
+# def _(client=cozo_client, developer_id=test_developer_id, doc=test_doc):
+# snippet_indices = [0]
+# embeddings = [[1.0] * EMBEDDING_SIZE]
+
+# result = embed_snippets(
+# developer_id=developer_id,
+# doc_id=doc.id,
+# snippet_indices=snippet_indices,
+# embeddings=embeddings,
+# client=client,
+# )
+
+# assert result is not None
+# assert result.id == doc.id
diff --git a/agents-api/tests/test_docs_routes.py b/agents-api/tests/test_docs_routes.py
index 89a14a41c..a33f30108 100644
--- a/agents-api/tests/test_docs_routes.py
+++ b/agents-api/tests/test_docs_routes.py
@@ -1,261 +1,261 @@
-import time
-
-from ward import skip, test
-
-from tests.fixtures import (
- make_request,
- patch_embed_acompletion,
- test_agent,
- test_doc,
- test_user,
- test_user_doc,
-)
-from tests.utils import patch_testing_temporal
-
-
-@test("route: create user doc")
-async def _(make_request=make_request, user=test_user):
- async with patch_testing_temporal():
- data = dict(
- title="Test User Doc",
- content=["This is a test user document."],
- )
-
- response = make_request(
- method="POST",
- url=f"/users/{user.id}/docs",
- json=data,
- )
-
- assert response.status_code == 201
-
- result = response.json()
- assert len(result["jobs"]) > 0
+# import time
+
+# from ward import skip, test
+
+# from tests.fixtures import (
+# make_request,
+# patch_embed_acompletion,
+# test_agent,
+# test_doc,
+# test_user,
+# test_user_doc,
+# )
+# from tests.utils import patch_testing_temporal
+
+
+# @test("route: create user doc")
+# async def _(make_request=make_request, user=test_user):
+# async with patch_testing_temporal():
+# data = dict(
+# title="Test User Doc",
+# content=["This is a test user document."],
+# )
+
+# response = make_request(
+# method="POST",
+# url=f"/users/{user.id}/docs",
+# json=data,
+# )
+
+# assert response.status_code == 201
+
+# result = response.json()
+# assert len(result["jobs"]) > 0
-@test("route: create agent doc")
-async def _(make_request=make_request, agent=test_agent):
- async with patch_testing_temporal():
- data = dict(
- title="Test Agent Doc",
- content=["This is a test agent document."],
- )
-
- response = make_request(
- method="POST",
- url=f"/agents/{agent.id}/docs",
- json=data,
- )
-
- assert response.status_code == 201
-
- result = response.json()
- assert len(result["jobs"]) > 0
+# @test("route: create agent doc")
+# async def _(make_request=make_request, agent=test_agent):
+# async with patch_testing_temporal():
+# data = dict(
+# title="Test Agent Doc",
+# content=["This is a test agent document."],
+# )
+
+# response = make_request(
+# method="POST",
+# url=f"/agents/{agent.id}/docs",
+# json=data,
+# )
+
+# assert response.status_code == 201
+
+# result = response.json()
+# assert len(result["jobs"]) > 0
-@test("route: delete doc")
-async def _(make_request=make_request, agent=test_agent):
- async with patch_testing_temporal():
- data = dict(
- title="Test Agent Doc",
- content=["This is a test agent document."],
- )
-
- response = make_request(
- method="POST",
- url=f"/agents/{agent.id}/docs",
- json=data,
- )
- doc_id = response.json()["id"]
-
- response = make_request(
- method="DELETE",
- url=f"/agents/{agent.id}/docs/{doc_id}",
- )
-
- assert response.status_code == 202
-
- response = make_request(
- method="GET",
- url=f"/docs/{doc_id}",
- )
-
- assert response.status_code == 404
-
-
-@test("route: get doc")
-async def _(make_request=make_request, agent=test_agent):
- async with patch_testing_temporal():
- data = dict(
- title="Test Agent Doc",
- content=["This is a test agent document."],
- )
-
- response = make_request(
- method="POST",
- url=f"/agents/{agent.id}/docs",
- json=data,
- )
- doc_id = response.json()["id"]
-
- response = make_request(
- method="GET",
- url=f"/docs/{doc_id}",
- )
-
- assert response.status_code == 200
-
-
-@test("route: list user docs")
-def _(make_request=make_request, user=test_user):
- response = make_request(
- method="GET",
- url=f"/users/{user.id}/docs",
- )
-
- assert response.status_code == 200
- response = response.json()
- docs = response["items"]
-
- assert isinstance(docs, list)
+# @test("route: delete doc")
+# async def _(make_request=make_request, agent=test_agent):
+# async with patch_testing_temporal():
+# data = dict(
+# title="Test Agent Doc",
+# content=["This is a test agent document."],
+# )
+
+# response = make_request(
+# method="POST",
+# url=f"/agents/{agent.id}/docs",
+# json=data,
+# )
+# doc_id = response.json()["id"]
+
+# response = make_request(
+# method="DELETE",
+# url=f"/agents/{agent.id}/docs/{doc_id}",
+# )
+
+# assert response.status_code == 202
+
+# response = make_request(
+# method="GET",
+# url=f"/docs/{doc_id}",
+# )
+
+# assert response.status_code == 404
+
+
+# @test("route: get doc")
+# async def _(make_request=make_request, agent=test_agent):
+# async with patch_testing_temporal():
+# data = dict(
+# title="Test Agent Doc",
+# content=["This is a test agent document."],
+# )
+
+# response = make_request(
+# method="POST",
+# url=f"/agents/{agent.id}/docs",
+# json=data,
+# )
+# doc_id = response.json()["id"]
+
+# response = make_request(
+# method="GET",
+# url=f"/docs/{doc_id}",
+# )
+
+# assert response.status_code == 200
+
+
+# @test("route: list user docs")
+# def _(make_request=make_request, user=test_user):
+# response = make_request(
+# method="GET",
+# url=f"/users/{user.id}/docs",
+# )
+
+# assert response.status_code == 200
+# response = response.json()
+# docs = response["items"]
+
+# assert isinstance(docs, list)
-@test("route: list agent docs")
-def _(make_request=make_request, agent=test_agent):
- response = make_request(
- method="GET",
- url=f"/agents/{agent.id}/docs",
- )
-
- assert response.status_code == 200
- response = response.json()
- docs = response["items"]
-
- assert isinstance(docs, list)
-
-
-@test("route: list user docs with metadata filter")
-def _(make_request=make_request, user=test_user):
- response = make_request(
- method="GET",
- url=f"/users/{user.id}/docs",
- params={
- "metadata_filter": {"test": "test"},
- },
- )
-
- assert response.status_code == 200
- response = response.json()
- docs = response["items"]
-
- assert isinstance(docs, list)
-
-
-@test("route: list agent docs with metadata filter")
-def _(make_request=make_request, agent=test_agent):
- response = make_request(
- method="GET",
- url=f"/agents/{agent.id}/docs",
- params={
- "metadata_filter": {"test": "test"},
- },
- )
-
- assert response.status_code == 200
- response = response.json()
- docs = response["items"]
-
- assert isinstance(docs, list)
-
-
-# TODO: Fix this test. It fails sometimes and sometimes not.
-@test("route: search agent docs")
-async def _(make_request=make_request, agent=test_agent, doc=test_doc):
- time.sleep(0.5)
- search_params = dict(
- text=doc.content[0],
- limit=1,
- )
-
- response = make_request(
- method="POST",
- url=f"/agents/{agent.id}/search",
- json=search_params,
- )
-
- assert response.status_code == 200
- response = response.json()
- docs = response["docs"]
-
- assert isinstance(docs, list)
- assert len(docs) >= 1
-
-
-# FIXME: This test is failing because the search is not returning the expected results
-@skip("Fails randomly on CI")
-@test("route: search user docs")
-async def _(make_request=make_request, user=test_user, doc=test_user_doc):
- time.sleep(0.5)
- search_params = dict(
- text=doc.content[0],
- limit=1,
- )
+# @test("route: list agent docs")
+# def _(make_request=make_request, agent=test_agent):
+# response = make_request(
+# method="GET",
+# url=f"/agents/{agent.id}/docs",
+# )
+
+# assert response.status_code == 200
+# response = response.json()
+# docs = response["items"]
+
+# assert isinstance(docs, list)
+
+
+# @test("route: list user docs with metadata filter")
+# def _(make_request=make_request, user=test_user):
+# response = make_request(
+# method="GET",
+# url=f"/users/{user.id}/docs",
+# params={
+# "metadata_filter": {"test": "test"},
+# },
+# )
+
+# assert response.status_code == 200
+# response = response.json()
+# docs = response["items"]
+
+# assert isinstance(docs, list)
+
+
+# @test("route: list agent docs with metadata filter")
+# def _(make_request=make_request, agent=test_agent):
+# response = make_request(
+# method="GET",
+# url=f"/agents/{agent.id}/docs",
+# params={
+# "metadata_filter": {"test": "test"},
+# },
+# )
+
+# assert response.status_code == 200
+# response = response.json()
+# docs = response["items"]
+
+# assert isinstance(docs, list)
+
+
+# # TODO: Fix this test. It fails intermittently.
+# @test("route: search agent docs")
+# async def _(make_request=make_request, agent=test_agent, doc=test_doc):
+# time.sleep(0.5)
+# search_params = dict(
+# text=doc.content[0],
+# limit=1,
+# )
+
+# response = make_request(
+# method="POST",
+# url=f"/agents/{agent.id}/search",
+# json=search_params,
+# )
+
+# assert response.status_code == 200
+# response = response.json()
+# docs = response["docs"]
+
+# assert isinstance(docs, list)
+# assert len(docs) >= 1
+
+
+# # FIXME: This test is failing because the search is not returning the expected results
+# @skip("Fails randomly on CI")
+# @test("route: search user docs")
+# async def _(make_request=make_request, user=test_user, doc=test_user_doc):
+# time.sleep(0.5)
+# search_params = dict(
+# text=doc.content[0],
+# limit=1,
+# )
- response = make_request(
- method="POST",
- url=f"/users/{user.id}/search",
- json=search_params,
- )
-
- assert response.status_code == 200
- response = response.json()
- docs = response["docs"]
-
- assert isinstance(docs, list)
+# response = make_request(
+# method="POST",
+# url=f"/users/{user.id}/search",
+# json=search_params,
+# )
+
+# assert response.status_code == 200
+# response = response.json()
+# docs = response["docs"]
+
+# assert isinstance(docs, list)
- assert len(docs) >= 1
-
-
-@test("route: search agent docs hybrid with mmr")
-async def _(make_request=make_request, agent=test_agent, doc=test_doc):
- time.sleep(0.5)
-
- EMBEDDING_SIZE = 1024
- search_params = dict(
- text=doc.content[0],
- vector=[1.0] * EMBEDDING_SIZE,
- mmr_strength=0.5,
- limit=1,
- )
-
- response = make_request(
- method="POST",
- url=f"/agents/{agent.id}/search",
- json=search_params,
- )
-
- assert response.status_code == 200
- response = response.json()
- docs = response["docs"]
-
- assert isinstance(docs, list)
- assert len(docs) >= 1
-
-
-@test("routes: embed route")
-async def _(
- make_request=make_request,
- mocks=patch_embed_acompletion,
-):
- (embed, _) = mocks
-
- response = make_request(
- method="POST",
- url="/embed",
- json={"text": "blah blah"},
- )
-
- result = response.json()
- assert "vectors" in result
-
- embed.assert_called()
+# assert len(docs) >= 1
+
+
+# @test("route: search agent docs hybrid with mmr")
+# async def _(make_request=make_request, agent=test_agent, doc=test_doc):
+# time.sleep(0.5)
+
+# EMBEDDING_SIZE = 1024
+# search_params = dict(
+# text=doc.content[0],
+# vector=[1.0] * EMBEDDING_SIZE,
+# mmr_strength=0.5,
+# limit=1,
+# )
+
+# response = make_request(
+# method="POST",
+# url=f"/agents/{agent.id}/search",
+# json=search_params,
+# )
+
+# assert response.status_code == 200
+# response = response.json()
+# docs = response["docs"]
+
+# assert isinstance(docs, list)
+# assert len(docs) >= 1
+
+
+# @test("routes: embed route")
+# async def _(
+# make_request=make_request,
+# mocks=patch_embed_acompletion,
+# ):
+# (embed, _) = mocks
+
+# response = make_request(
+# method="POST",
+# url="/embed",
+# json={"text": "blah blah"},
+# )
+
+# result = response.json()
+# assert "vectors" in result
+
+# embed.assert_called()
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index a3c93f465..220b8d232 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -1,201 +1,201 @@
-"""
-This module contains tests for entry queries against the CozoDB database.
-It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
-"""
-
-# Tests for entry queries
-
-import time
-
-from ward import test
-
-from agents_api.autogen.openapi_model import CreateEntryRequest
-from agents_api.models.entry.create_entries import create_entries
-from agents_api.models.entry.delete_entries import delete_entries
-from agents_api.models.entry.get_history import get_history
-from agents_api.models.entry.list_entries import list_entries
-from agents_api.models.session.get_session import get_session
-from tests.fixtures import cozo_client, test_developer_id, test_session
-
-MODEL = "gpt-4o-mini"
-
-
-@test("model: create entry")
-def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
- """
- Tests the addition of a new entry to the database.
- Verifies that the entry can be successfully added using the create_entries function.
- """
-
- test_entry = CreateEntryRequest.from_model_input(
- model=MODEL,
- role="user",
- source="internal",
- content="test entry content",
- )
-
- create_entries(
- developer_id=developer_id,
- session_id=session.id,
- data=[test_entry],
- mark_session_as_updated=False,
- client=client,
- )
-
-
-@test("model: create entry, update session")
-def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
- """
- Tests the addition of a new entry to the database.
- Verifies that the entry can be successfully added using the create_entries function.
- """
-
- test_entry = CreateEntryRequest.from_model_input(
- model=MODEL,
- role="user",
- source="internal",
- content="test entry content",
- )
-
- # TODO: We should make sessions.updated_at also a updated_at_ms field to avoid this sleep
- time.sleep(1)
-
- create_entries(
- developer_id=developer_id,
- session_id=session.id,
- data=[test_entry],
- mark_session_as_updated=True,
- client=client,
- )
-
- updated_session = get_session(
- developer_id=developer_id,
- session_id=session.id,
- client=client,
- )
-
- assert updated_session.updated_at > session.updated_at
-
-
-@test("model: get entries")
-def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
- """
- Tests the retrieval of entries from the database.
- Verifies that entries matching specific criteria can be successfully retrieved.
- """
-
- test_entry = CreateEntryRequest.from_model_input(
- model=MODEL,
- role="user",
- source="api_request",
- content="test entry content",
- )
-
- internal_entry = CreateEntryRequest.from_model_input(
- model=MODEL,
- role="user",
- content="test entry content",
- source="internal",
- )
-
- create_entries(
- developer_id=developer_id,
- session_id=session.id,
- data=[test_entry, internal_entry],
- client=client,
- )
-
- result = list_entries(
- developer_id=developer_id,
- session_id=session.id,
- client=client,
- )
-
- # Asserts that only one entry is retrieved, matching the session_id.
- assert len(result) == 1
-
-
-@test("model: get history")
-def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
- """
- Tests the retrieval of entries from the database.
- Verifies that entries matching specific criteria can be successfully retrieved.
- """
-
- test_entry = CreateEntryRequest.from_model_input(
- model=MODEL,
- role="user",
- source="api_request",
- content="test entry content",
- )
-
- internal_entry = CreateEntryRequest.from_model_input(
- model=MODEL,
- role="user",
- content="test entry content",
- source="internal",
- )
-
- create_entries(
- developer_id=developer_id,
- session_id=session.id,
- data=[test_entry, internal_entry],
- client=client,
- )
-
- result = get_history(
- developer_id=developer_id,
- session_id=session.id,
- client=client,
- )
-
- # Asserts that only one entry is retrieved, matching the session_id.
- assert len(result.entries) > 0
- assert result.entries[0].id
-
-
-@test("model: delete entries")
-def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
- """
- Tests the deletion of entries from the database.
- Verifies that entries can be successfully deleted using the delete_entries function.
- """
-
- test_entry = CreateEntryRequest.from_model_input(
- model=MODEL,
- role="user",
- source="api_request",
- content="test entry content",
- )
-
- internal_entry = CreateEntryRequest.from_model_input(
- model=MODEL,
- role="user",
- content="internal entry content",
- source="internal",
- )
-
- created_entries = create_entries(
- developer_id=developer_id,
- session_id=session.id,
- data=[test_entry, internal_entry],
- client=client,
- )
-
- entry_ids = [entry.id for entry in created_entries]
-
- delete_entries(
- developer_id=developer_id,
- session_id=session.id,
- entry_ids=entry_ids,
- client=client,
- )
-
- result = list_entries(
- developer_id=developer_id,
- session_id=session.id,
- client=client,
- )
-
- # Asserts that no entries are retrieved after deletion.
- assert all(id not in [entry.id for entry in result] for id in entry_ids)
+# """
+# This module contains tests for entry queries against the CozoDB database.
+# It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
+# """
+
+# # Tests for entry queries
+
+# import time
+
+# from ward import test
+
+# from agents_api.autogen.openapi_model import CreateEntryRequest
+# from agents_api.queries.entry.create_entries import create_entries
+# from agents_api.queries.entry.delete_entries import delete_entries
+# from agents_api.queries.entry.get_history import get_history
+# from agents_api.queries.entry.list_entries import list_entries
+# from agents_api.queries.session.get_session import get_session
+# from tests.fixtures import cozo_client, test_developer_id, test_session
+
+# MODEL = "gpt-4o-mini"
+
+
+# @test("query: create entry")
+# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
+# """
+# Tests the addition of a new entry to the database.
+# Verifies that the entry can be successfully added using the create_entries function.
+# """
+
+# test_entry = CreateEntryRequest.from_model_input(
+# model=MODEL,
+# role="user",
+# source="internal",
+# content="test entry content",
+# )
+
+# create_entries(
+# developer_id=developer_id,
+# session_id=session.id,
+# data=[test_entry],
+# mark_session_as_updated=False,
+# client=client,
+# )
+
+
+# @test("query: create entry, update session")
+# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
+# """
+# Tests the addition of a new entry to the database.
+# Verifies that the entry can be successfully added using the create_entries function.
+# """
+
+# test_entry = CreateEntryRequest.from_model_input(
+# model=MODEL,
+# role="user",
+# source="internal",
+# content="test entry content",
+# )
+
+# # TODO: We should make sessions.updated_at also an updated_at_ms field to avoid this sleep
+# time.sleep(1)
+
+# create_entries(
+# developer_id=developer_id,
+# session_id=session.id,
+# data=[test_entry],
+# mark_session_as_updated=True,
+# client=client,
+# )
+
+# updated_session = get_session(
+# developer_id=developer_id,
+# session_id=session.id,
+# client=client,
+# )
+
+# assert updated_session.updated_at > session.updated_at
+
+
+# @test("query: get entries")
+# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
+# """
+# Tests the retrieval of entries from the database.
+# Verifies that entries matching specific criteria can be successfully retrieved.
+# """
+
+# test_entry = CreateEntryRequest.from_model_input(
+# model=MODEL,
+# role="user",
+# source="api_request",
+# content="test entry content",
+# )
+
+# internal_entry = CreateEntryRequest.from_model_input(
+# model=MODEL,
+# role="user",
+# content="test entry content",
+# source="internal",
+# )
+
+# create_entries(
+# developer_id=developer_id,
+# session_id=session.id,
+# data=[test_entry, internal_entry],
+# client=client,
+# )
+
+# result = list_entries(
+# developer_id=developer_id,
+# session_id=session.id,
+# client=client,
+# )
+
+# # Asserts that only one entry is retrieved, matching the session_id.
+# assert len(result) == 1
+
+
+# @test("query: get history")
+# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
+# """
+# Tests the retrieval of entries from the database.
+# Verifies that entries matching specific criteria can be successfully retrieved.
+# """
+
+# test_entry = CreateEntryRequest.from_model_input(
+# model=MODEL,
+# role="user",
+# source="api_request",
+# content="test entry content",
+# )
+
+# internal_entry = CreateEntryRequest.from_model_input(
+# model=MODEL,
+# role="user",
+# content="test entry content",
+# source="internal",
+# )
+
+# create_entries(
+# developer_id=developer_id,
+# session_id=session.id,
+# data=[test_entry, internal_entry],
+# client=client,
+# )
+
+# result = get_history(
+# developer_id=developer_id,
+# session_id=session.id,
+# client=client,
+# )
+
+# # Asserts that at least one entry with an id is retrieved for the session.
+# assert len(result.entries) > 0
+# assert result.entries[0].id
+
+
+# @test("query: delete entries")
+# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
+# """
+# Tests the deletion of entries from the database.
+# Verifies that entries can be successfully deleted using the delete_entries function.
+# """
+
+# test_entry = CreateEntryRequest.from_model_input(
+# model=MODEL,
+# role="user",
+# source="api_request",
+# content="test entry content",
+# )
+
+# internal_entry = CreateEntryRequest.from_model_input(
+# model=MODEL,
+# role="user",
+# content="internal entry content",
+# source="internal",
+# )
+
+# created_entries = create_entries(
+# developer_id=developer_id,
+# session_id=session.id,
+# data=[test_entry, internal_entry],
+# client=client,
+# )
+
+# entry_ids = [entry.id for entry in created_entries]
+
+# delete_entries(
+# developer_id=developer_id,
+# session_id=session.id,
+# entry_ids=entry_ids,
+# client=client,
+# )
+
+# result = list_entries(
+# developer_id=developer_id,
+# session_id=session.id,
+# client=client,
+# )
+
+# # Asserts that no entries are retrieved after deletion.
+# assert all(id not in [entry.id for entry in result] for id in entry_ids)
diff --git a/agents-api/tests/test_execution_queries.py b/agents-api/tests/test_execution_queries.py
index 9e75b3cda..ac8251905 100644
--- a/agents-api/tests/test_execution_queries.py
+++ b/agents-api/tests/test_execution_queries.py
@@ -1,154 +1,154 @@
-# Tests for execution queries
-
-from temporalio.client import WorkflowHandle
-from ward import test
-
-from agents_api.autogen.openapi_model import (
- CreateExecutionRequest,
- CreateTransitionRequest,
- Execution,
-)
-from agents_api.models.execution.count_executions import count_executions
-from agents_api.models.execution.create_execution import create_execution
-from agents_api.models.execution.create_execution_transition import (
- create_execution_transition,
-)
-from agents_api.models.execution.create_temporal_lookup import create_temporal_lookup
-from agents_api.models.execution.get_execution import get_execution
-from agents_api.models.execution.list_executions import list_executions
-from agents_api.models.execution.lookup_temporal_data import lookup_temporal_data
-from tests.fixtures import (
- cozo_client,
- test_developer_id,
- test_execution,
- test_execution_started,
- test_task,
-)
-
-MODEL = "gpt-4o-mini-mini"
-
-
-@test("model: create execution")
-def _(client=cozo_client, developer_id=test_developer_id, task=test_task):
- workflow_handle = WorkflowHandle(
- client=None,
- id="blah",
- )
-
- execution = create_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=CreateExecutionRequest(input={"test": "test"}),
- client=client,
- )
-
- create_temporal_lookup(
- developer_id=developer_id,
- execution_id=execution.id,
- workflow_handle=workflow_handle,
- client=client,
- )
-
-
-@test("model: get execution")
-def _(client=cozo_client, developer_id=test_developer_id, execution=test_execution):
- result = get_execution(
- execution_id=execution.id,
- client=client,
- )
-
- assert result is not None
- assert isinstance(result, Execution)
- assert result.status == "queued"
-
-
-@test("model: lookup temporal id")
-def _(client=cozo_client, developer_id=test_developer_id, execution=test_execution):
- result = lookup_temporal_data(
- execution_id=execution.id,
- developer_id=developer_id,
- client=client,
- )
-
- assert result is not None
- assert result["id"]
-
-
-@test("model: list executions")
-def _(
- client=cozo_client,
- developer_id=test_developer_id,
- execution=test_execution,
- task=test_task,
-):
- result = list_executions(
- developer_id=developer_id,
- task_id=task.id,
- client=client,
- )
-
- assert isinstance(result, list)
- assert len(result) >= 1
- assert result[0].status == "queued"
-
-
-@test("model: count executions")
-def _(
- client=cozo_client,
- developer_id=test_developer_id,
- execution=test_execution,
- task=test_task,
-):
- result = count_executions(
- developer_id=developer_id,
- task_id=task.id,
- client=client,
- )
-
- assert isinstance(result, dict)
- assert result["count"] > 0
-
-
-@test("model: create execution transition")
-def _(client=cozo_client, developer_id=test_developer_id, execution=test_execution):
- result = create_execution_transition(
- developer_id=developer_id,
- execution_id=execution.id,
- data=CreateTransitionRequest(
- type="step",
- output={"result": "test"},
- current={"workflow": "main", "step": 0},
- next={"workflow": "main", "step": 1},
- ),
- client=client,
- )
-
- assert result is not None
- assert result.type == "step"
- assert result.output == {"result": "test"}
-
-
-@test("model: create execution transition with execution update")
-def _(
- client=cozo_client,
- developer_id=test_developer_id,
- task=test_task,
- execution=test_execution_started,
-):
- result = create_execution_transition(
- developer_id=developer_id,
- execution_id=execution.id,
- data=CreateTransitionRequest(
- type="cancelled",
- output={"result": "test"},
- current={"workflow": "main", "step": 0},
- next=None,
- ),
- task_id=task.id,
- update_execution_status=True,
- client=client,
- )
-
- assert result is not None
- assert result.type == "cancelled"
- assert result.output == {"result": "test"}
+# # Tests for execution queries
+
+# from temporalio.client import WorkflowHandle
+# from ward import test
+
+# from agents_api.autogen.openapi_model import (
+# CreateExecutionRequest,
+# CreateTransitionRequest,
+# Execution,
+# )
+# from agents_api.queries.execution.count_executions import count_executions
+# from agents_api.queries.execution.create_execution import create_execution
+# from agents_api.queries.execution.create_execution_transition import (
+# create_execution_transition,
+# )
+# from agents_api.queries.execution.create_temporal_lookup import create_temporal_lookup
+# from agents_api.queries.execution.get_execution import get_execution
+# from agents_api.queries.execution.list_executions import list_executions
+# from agents_api.queries.execution.lookup_temporal_data import lookup_temporal_data
+# from tests.fixtures import (
+# cozo_client,
+# test_developer_id,
+# test_execution,
+# test_execution_started,
+# test_task,
+# )
+
+# MODEL = "gpt-4o-mini-mini"
+
+
+# @test("query: create execution")
+# def _(client=cozo_client, developer_id=test_developer_id, task=test_task):
+# workflow_handle = WorkflowHandle(
+# client=None,
+# id="blah",
+# )
+
+# execution = create_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=CreateExecutionRequest(input={"test": "test"}),
+# client=client,
+# )
+
+# create_temporal_lookup(
+# developer_id=developer_id,
+# execution_id=execution.id,
+# workflow_handle=workflow_handle,
+# client=client,
+# )
+
+
+# @test("query: get execution")
+# def _(client=cozo_client, developer_id=test_developer_id, execution=test_execution):
+# result = get_execution(
+# execution_id=execution.id,
+# client=client,
+# )
+
+# assert result is not None
+# assert isinstance(result, Execution)
+# assert result.status == "queued"
+
+
+# @test("query: lookup temporal id")
+# def _(client=cozo_client, developer_id=test_developer_id, execution=test_execution):
+# result = lookup_temporal_data(
+# execution_id=execution.id,
+# developer_id=developer_id,
+# client=client,
+# )
+
+# assert result is not None
+# assert result["id"]
+
+
+# @test("query: list executions")
+# def _(
+# client=cozo_client,
+# developer_id=test_developer_id,
+# execution=test_execution,
+# task=test_task,
+# ):
+# result = list_executions(
+# developer_id=developer_id,
+# task_id=task.id,
+# client=client,
+# )
+
+# assert isinstance(result, list)
+# assert len(result) >= 1
+# assert result[0].status == "queued"
+
+
+# @test("query: count executions")
+# def _(
+# client=cozo_client,
+# developer_id=test_developer_id,
+# execution=test_execution,
+# task=test_task,
+# ):
+# result = count_executions(
+# developer_id=developer_id,
+# task_id=task.id,
+# client=client,
+# )
+
+# assert isinstance(result, dict)
+# assert result["count"] > 0
+
+
+# @test("query: create execution transition")
+# def _(client=cozo_client, developer_id=test_developer_id, execution=test_execution):
+# result = create_execution_transition(
+# developer_id=developer_id,
+# execution_id=execution.id,
+# data=CreateTransitionRequest(
+# type="step",
+# output={"result": "test"},
+# current={"workflow": "main", "step": 0},
+# next={"workflow": "main", "step": 1},
+# ),
+# client=client,
+# )
+
+# assert result is not None
+# assert result.type == "step"
+# assert result.output == {"result": "test"}
+
+
+# @test("query: create execution transition with execution update")
+# def _(
+# client=cozo_client,
+# developer_id=test_developer_id,
+# task=test_task,
+# execution=test_execution_started,
+# ):
+# result = create_execution_transition(
+# developer_id=developer_id,
+# execution_id=execution.id,
+# data=CreateTransitionRequest(
+# type="cancelled",
+# output={"result": "test"},
+# current={"workflow": "main", "step": 0},
+# next=None,
+# ),
+# task_id=task.id,
+# update_execution_status=True,
+# client=client,
+# )
+
+# assert result is not None
+# assert result.type == "cancelled"
+# assert result.output == {"result": "test"}
diff --git a/agents-api/tests/test_execution_workflow.py b/agents-api/tests/test_execution_workflow.py
index ae440ff02..935d51526 100644
--- a/agents-api/tests/test_execution_workflow.py
+++ b/agents-api/tests/test_execution_workflow.py
@@ -1,1437 +1,1437 @@
-# Tests for task queries
-
-import asyncio
-import json
-from unittest.mock import patch
-
-import yaml
-from google.protobuf.json_format import MessageToDict
-from litellm.types.utils import Choices, ModelResponse
-from ward import raises, skip, test
-
-from agents_api.autogen.openapi_model import (
- CreateExecutionRequest,
- CreateTaskRequest,
-)
-from agents_api.models.task.create_task import create_task
-from agents_api.routers.tasks.create_task_execution import start_execution
-from tests.fixtures import (
- cozo_client,
- cozo_clients_with_migrations,
- test_agent,
- test_developer_id,
-)
-from tests.utils import patch_integration_service, patch_testing_temporal
-
-EMBEDDING_SIZE: int = 1024
-
-
-@test("workflow: evaluate step single")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [{"evaluate": {"hello": '"world"'}}],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["hello"] == "world"
-
-
-@test("workflow: evaluate step multiple")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- {"evaluate": {"hello": '"nope"'}},
- {"evaluate": {"hello": '"world"'}},
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["hello"] == "world"
-
-
-@test("workflow: variable access in expressions")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- # Testing that we can access the input
- {"evaluate": {"hello": '_["test"]'}},
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["hello"] == data.input["test"]
-
-
-@test("workflow: yield step")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "other_workflow": [
- # Testing that we can access the input
- {"evaluate": {"hello": '_["test"]'}},
- ],
- "main": [
- # Testing that we can access the input
- {
- "workflow": "other_workflow",
- "arguments": {"test": '_["test"]'},
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["hello"] == data.input["test"]
-
-
-@test("workflow: sleep step")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "other_workflow": [
- # Testing that we can access the input
- {"evaluate": {"hello": '_["test"]'}},
- {"sleep": {"days": 5}},
- ],
- "main": [
- # Testing that we can access the input
- {
- "workflow": "other_workflow",
- "arguments": {"test": '_["test"]'},
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["hello"] == data.input["test"]
-
-
-@test("workflow: return step direct")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- # Testing that we can access the input
- {"evaluate": {"hello": '_["test"]'}},
- {"return": {"value": '_["hello"]'}},
- {"return": {"value": '"banana"'}},
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["value"] == data.input["test"]
-
-
-@test("workflow: return step nested")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "other_workflow": [
- # Testing that we can access the input
- {"evaluate": {"hello": '_["test"]'}},
- {"return": {"value": '_["hello"]'}},
- {"return": {"value": '"banana"'}},
- ],
- "main": [
- # Testing that we can access the input
- {
- "workflow": "other_workflow",
- "arguments": {"test": '_["test"]'},
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["value"] == data.input["test"]
-
-
-@test("workflow: log step")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "other_workflow": [
- # Testing that we can access the input
- {"evaluate": {"hello": '_["test"]'}},
- {"log": "{{_.hello}}"},
- ],
- "main": [
- # Testing that we can access the input
- {
- "workflow": "other_workflow",
- "arguments": {"test": '_["test"]'},
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["hello"] == data.input["test"]
-
-
-@test("workflow: log step expression fail")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "other_workflow": [
- # Testing that we can access the input
- {"evaluate": {"hello": '_["test"]'}},
- {
- "log": '{{_["hell"].strip()}}'
- }, # <--- The "hell" key does not exist
- ],
- "main": [
- # Testing that we can access the input
- {
- "workflow": "other_workflow",
- "arguments": {"test": '_["test"]'},
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- with raises(BaseException):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["hello"] == data.input["test"]
-
-
-@test("workflow: system call - list agents")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "Test system tool task",
- "description": "List agents using system call",
- "input_schema": {"type": "object"},
- "tools": [
- {
- "name": "list_agents",
- "description": "List all agents",
- "type": "system",
- "system": {"resource": "agent", "operation": "list"},
- },
- ],
- "main": [
- {
- "tool": "list_agents",
- "arguments": {
- "limit": "10",
- },
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert isinstance(result, list)
- # Result's length should be less than or equal to the limit
- assert len(result) <= 10
- # Check if all items are agent dictionaries
- assert all(isinstance(agent, dict) for agent in result)
- # Check if each agent has an 'id' field
- assert all("id" in agent for agent in result)
-
-
-@test("workflow: tool call api_call")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "tools": [
- {
- "type": "api_call",
- "name": "hello",
- "api_call": {
- "method": "GET",
- "url": "https://httpbin.org/get",
- },
- }
- ],
- "main": [
- {
- "tool": "hello",
- "arguments": {
- "params": {"test": "_.test"},
- },
- },
- {
- "evaluate": {"hello": "_.json.args.test"},
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["hello"] == data.input["test"]
-
-
-@test("workflow: tool call api_call test retry")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
- status_codes_to_retry = ",".join(str(code) for code in (408, 429, 503, 504))
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "tools": [
- {
- "type": "api_call",
- "name": "hello",
- "api_call": {
- "method": "GET",
- "url": f"https://httpbin.org/status/{status_codes_to_retry}",
- },
- }
- ],
- "main": [
- {
- "tool": "hello",
- "arguments": {
- "params": {"test": "_.test"},
- },
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- mock_run_task_execution_workflow.assert_called_once()
-
- # Let it run for a bit
- result_coroutine = handle.result()
- task = asyncio.create_task(result_coroutine)
- try:
- await asyncio.wait_for(task, timeout=10)
- except BaseException:
- task.cancel()
-
- # Get the history
- history = await handle.fetch_history()
- events = [MessageToDict(e) for e in history.events]
- assert len(events) > 0
-
- # NOTE: super janky but works
- events_strings = [json.dumps(event) for event in events]
- num_retries = len(
- [event for event in events_strings if "execute_api_call" in event]
- )
-
- assert num_retries >= 2
-
-
-@test("workflow: tool call integration dummy")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "tools": [
- {
- "type": "integration",
- "name": "hello",
- "integration": {
- "provider": "dummy",
- },
- }
- ],
- "main": [
- {
- "tool": "hello",
- "arguments": {"test": "_.test"},
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["test"] == data.input["test"]
-
-
-@skip("integration service patch not working")
-@test("workflow: tool call integration mocked weather")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "tools": [
- {
- "type": "integration",
- "name": "get_weather",
- "integration": {
- "provider": "weather",
- "setup": {"openweathermap_api_key": "test"},
- "arguments": {"test": "fake"},
- },
- }
- ],
- "main": [
- {
- "tool": "get_weather",
- "arguments": {"location": "_.test"},
- },
- ],
- }
- ),
- client=client,
- )
-
- expected_output = {"temperature": 20, "humidity": 60}
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- with patch_integration_service(expected_output) as mock_integration_service:
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
- mock_integration_service.assert_called_once()
-
- result = await handle.result()
- assert result == expected_output
-
-
-@test("workflow: wait for input step start")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- {"wait_for_input": {"info": {"hi": '"bye"'}}},
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- # Let it run for a bit
- result_coroutine = handle.result()
- task = asyncio.create_task(result_coroutine)
- try:
- await asyncio.wait_for(task, timeout=3)
- except asyncio.TimeoutError:
- task.cancel()
-
- # Get the history
- history = await handle.fetch_history()
- events = [MessageToDict(e) for e in history.events]
- assert len(events) > 0
-
- activities_scheduled = [
- event.get("activityTaskScheduledEventAttributes", {})
- .get("activityType", {})
- .get("name")
- for event in events
- if "ACTIVITY_TASK_SCHEDULED" in event["eventType"]
- ]
- activities_scheduled = [
- activity for activity in activities_scheduled if activity
- ]
-
- assert "wait_for_input_step" in activities_scheduled
-
-
-@test("workflow: foreach wait for input step start")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- {
- "foreach": {
- "in": "'a b c'.split()",
- "do": {"wait_for_input": {"info": {"hi": '"bye"'}}},
- },
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
- mock_run_task_execution_workflow.assert_called_once()
-
- # Let it run for a bit
- result_coroutine = handle.result()
- task = asyncio.create_task(result_coroutine)
- try:
- await asyncio.wait_for(task, timeout=3)
- except asyncio.TimeoutError:
- task.cancel()
-
- # Get the history
- history = await handle.fetch_history()
- events = [MessageToDict(e) for e in history.events]
- assert len(events) > 0
-
- activities_scheduled = [
- event.get("activityTaskScheduledEventAttributes", {})
- .get("activityType", {})
- .get("name")
- for event in events
- if "ACTIVITY_TASK_SCHEDULED" in event["eventType"]
- ]
- activities_scheduled = [
- activity for activity in activities_scheduled if activity
- ]
-
- assert "for_each_step" in activities_scheduled
-
-
-@test("workflow: if-else step")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task_def = CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- {
- "if": "False",
- "then": {"evaluate": {"hello": '"world"'}},
- "else": {"evaluate": {"hello": "random.randint(0, 10)"}},
- },
- ],
- }
- )
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=task_def,
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
-
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["hello"] in [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
-
-
-@test("workflow: switch step")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- {
- "switch": [
- {
- "case": "False",
- "then": {"evaluate": {"hello": '"bubbles"'}},
- },
- {
- "case": "True",
- "then": {"evaluate": {"hello": '"world"'}},
- },
- {
- "case": "True",
- "then": {"evaluate": {"hello": '"bye"'}},
- },
- ]
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
-
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result["hello"] == "world"
-
-
-@test("workflow: for each step")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- {
- "foreach": {
- "in": "'a b c'.split()",
- "do": {"evaluate": {"hello": '"world"'}},
- },
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
-
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result[0]["hello"] == "world"
-
-
-@test("workflow: map reduce step")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- map_step = {
- "over": "'a b c'.split()",
- "map": {
- "evaluate": {"res": "_"},
- },
- }
-
- task_def = {
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [map_step],
- }
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(**task_def),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
-
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert [r["res"] for r in result] == ["a", "b", "c"]
-
-
-for p in [1, 3, 5]:
-
- @test(f"workflow: map reduce step parallel (parallelism={p})")
- async def _(
- client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
- ):
- data = CreateExecutionRequest(input={"test": "input"})
-
- map_step = {
- "over": "'a b c d'.split()",
- "map": {
- "evaluate": {"res": "_ + '!'"},
- },
- "parallelism": p,
- }
-
- task_def = {
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [map_step],
- }
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(**task_def),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
-
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert [r["res"] for r in result] == [
- "a!",
- "b!",
- "c!",
- "d!",
- ]
-
-
-@test("workflow: prompt step (python expression)")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- mock_model_response = ModelResponse(
- id="fake_id",
- choices=[Choices(message={"role": "assistant", "content": "Hello, world!"})],
- created=0,
- object="text_completion",
- )
-
- with patch("agents_api.clients.litellm.acompletion") as acompletion:
- acompletion.return_value = mock_model_response
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- {
- "prompt": "$_ [{'role': 'user', 'content': _.test}]",
- "settings": {},
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
-
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- result = result["choices"][0]["message"]
- assert result["content"] == "Hello, world!"
- assert result["role"] == "assistant"
-
-
-@test("workflow: prompt step")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- mock_model_response = ModelResponse(
- id="fake_id",
- choices=[Choices(message={"role": "assistant", "content": "Hello, world!"})],
- created=0,
- object="text_completion",
- )
-
- with patch("agents_api.clients.litellm.acompletion") as acompletion:
- acompletion.return_value = mock_model_response
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- {
- "prompt": [
- {
- "role": "user",
- "content": "message",
- },
- ],
- "settings": {},
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
-
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- result = result["choices"][0]["message"]
- assert result["content"] == "Hello, world!"
- assert result["role"] == "assistant"
-
-
-@test("workflow: prompt step unwrap")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- mock_model_response = ModelResponse(
- id="fake_id",
- choices=[Choices(message={"role": "assistant", "content": "Hello, world!"})],
- created=0,
- object="text_completion",
- )
-
- with patch("agents_api.clients.litellm.acompletion") as acompletion:
- acompletion.return_value = mock_model_response
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- {
- "prompt": [
- {
- "role": "user",
- "content": "message",
- },
- ],
- "unwrap": True,
- "settings": {},
- },
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
-
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result == "Hello, world!"
-
-
-@test("workflow: set and get steps")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- data = CreateExecutionRequest(input={"test": "input"})
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [
- {"set": {"test_key": '"test_value"'}},
- {"get": "test_key"},
- ],
- }
- ),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
-
- mock_run_task_execution_workflow.assert_called_once()
-
- result = await handle.result()
- assert result == "test_value"
-
-
-@test("workflow: execute yaml task")
-async def _(
- clients=cozo_clients_with_migrations,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- client, _ = clients
- mock_model_response = ModelResponse(
- id="fake_id",
- choices=[
- Choices(
- message={"role": "assistant", "content": "found: true\nvalue: 'Gaga'"}
- )
- ],
- created=0,
- object="text_completion",
- )
-
- with (
- patch("agents_api.clients.litellm.acompletion") as acompletion,
- open("./tests/sample_tasks/find_selector.yaml", "r") as task_file,
- ):
- input = dict(
- screenshot_base64="iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA",
- network_requests=[{"request": {}, "response": {"body": "Lady Gaga"}}],
- parameters=["name"],
- )
- task_definition = yaml.safe_load(task_file)
- acompletion.return_value = mock_model_response
- data = CreateExecutionRequest(input=input)
-
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(**task_definition),
- client=client,
- )
-
- async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
- execution, handle = await start_execution(
- developer_id=developer_id,
- task_id=task.id,
- data=data,
- client=client,
- )
-
- assert handle is not None
- assert execution.task_id == task.id
- assert execution.input == data.input
-
- mock_run_task_execution_workflow.assert_called_once()
-
- await handle.result()
+# # Tests for task queries
+
+# import asyncio
+# import json
+# from unittest.mock import patch
+
+# import yaml
+# from google.protobuf.json_format import MessageToDict
+# from litellm.types.utils import Choices, ModelResponse
+# from ward import raises, skip, test
+
+# from agents_api.autogen.openapi_model import (
+# CreateExecutionRequest,
+# CreateTaskRequest,
+# )
+# from agents_api.queries.task.create_task import create_task
+# from agents_api.routers.tasks.create_task_execution import start_execution
+# from tests.fixtures import (
+# cozo_client,
+# cozo_clients_with_migrations,
+# test_agent,
+# test_developer_id,
+# )
+# from tests.utils import patch_integration_service, patch_testing_temporal
+
+# EMBEDDING_SIZE: int = 1024
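+
+# # All of the disabled tests below share one skeleton: create a task for the
+# # test agent, start an execution against the patched Temporal runtime, then
+# # assert on the handle's result or event history. A minimal sketch of that
+# # skeleton (an illustrative helper, not part of the suite; assumes the
+# # fixtures and helpers keep their current signatures):
+
+# async def run_task(client, developer_id, agent, main: list[dict]):
+#     # Hypothetical helper showing the shared shape of the tests below.
+#     data = CreateExecutionRequest(input={"test": "input"})
+#     task = create_task(
+#         developer_id=developer_id,
+#         agent_id=agent.id,
+#         data=CreateTaskRequest(
+#             name="test task",
+#             description="test task about",
+#             input_schema={"type": "object", "additionalProperties": True},
+#             main=main,
+#         ),
+#         client=client,
+#     )
+#     async with patch_testing_temporal() as (_, mock_workflow):
+#         execution, handle = await start_execution(
+#             developer_id=developer_id, task_id=task.id, data=data, client=client
+#         )
+#         assert execution.task_id == task.id
+#         mock_workflow.assert_called_once()
+#         return await handle.result()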
+
+
+# @test("workflow: evaluate step single")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [{"evaluate": {"hello": '"world"'}}],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["hello"] == "world"
+
+
+# @test("workflow: evaluate step multiple")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# {"evaluate": {"hello": '"nope"'}},
+# {"evaluate": {"hello": '"world"'}},
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["hello"] == "world"
+
+
+# @test("workflow: variable access in expressions")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# # Testing that we can access the input
+# {"evaluate": {"hello": '_["test"]'}},
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["hello"] == data.input["test"]
+
+
+# @test("workflow: yield step")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "other_workflow": [
+# # Testing that we can access the input
+# {"evaluate": {"hello": '_["test"]'}},
+# ],
+# "main": [
+# # Testing that we can access the input
+# {
+# "workflow": "other_workflow",
+# "arguments": {"test": '_["test"]'},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["hello"] == data.input["test"]
+
+
+# @test("workflow: sleep step")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "other_workflow": [
+# # Testing that we can access the input
+# {"evaluate": {"hello": '_["test"]'}},
+# {"sleep": {"days": 5}},
+# ],
+# "main": [
+# # Testing that we can access the input
+# {
+# "workflow": "other_workflow",
+# "arguments": {"test": '_["test"]'},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["hello"] == data.input["test"]
+
+
+# @test("workflow: return step direct")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# # Testing that we can access the input
+# {"evaluate": {"hello": '_["test"]'}},
+# {"return": {"value": '_["hello"]'}},
+# {"return": {"value": '"banana"'}},
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["value"] == data.input["test"]
+
+
+# @test("workflow: return step nested")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "other_workflow": [
+# # Testing that we can access the input
+# {"evaluate": {"hello": '_["test"]'}},
+# {"return": {"value": '_["hello"]'}},
+# {"return": {"value": '"banana"'}},
+# ],
+# "main": [
+# # Testing that we can access the input
+# {
+# "workflow": "other_workflow",
+# "arguments": {"test": '_["test"]'},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["value"] == data.input["test"]
+
+
+# @test("workflow: log step")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "other_workflow": [
+# # Testing that we can access the input
+# {"evaluate": {"hello": '_["test"]'}},
+# {"log": "{{_.hello}}"},
+# ],
+# "main": [
+# # Testing that we can access the input
+# {
+# "workflow": "other_workflow",
+# "arguments": {"test": '_["test"]'},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["hello"] == data.input["test"]
+
+
+# @test("workflow: log step expression fail")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "other_workflow": [
+# # Testing that we can access the input
+# {"evaluate": {"hello": '_["test"]'}},
+# {
+# "log": '{{_["hell"].strip()}}'
+# }, # <--- The "hell" key does not exist
+# ],
+# "main": [
+# # Testing that we can access the input
+# {
+# "workflow": "other_workflow",
+# "arguments": {"test": '_["test"]'},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# with raises(BaseException):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["hello"] == data.input["test"]
+
+
+# @test("workflow: system call - list agents")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "Test system tool task",
+# "description": "List agents using system call",
+# "input_schema": {"type": "object"},
+# "tools": [
+# {
+# "name": "list_agents",
+# "description": "List all agents",
+# "type": "system",
+# "system": {"resource": "agent", "operation": "list"},
+# },
+# ],
+# "main": [
+# {
+# "tool": "list_agents",
+# "arguments": {
+# "limit": "10",
+# },
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert isinstance(result, list)
+# # Result's length should be less than or equal to the limit
+# assert len(result) <= 10
+# # Check if all items are agent dictionaries
+# assert all(isinstance(agent, dict) for agent in result)
+# # Check if each agent has an 'id' field
+# assert all("id" in agent for agent in result)
+
+
+# @test("workflow: tool call api_call")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "tools": [
+# {
+# "type": "api_call",
+# "name": "hello",
+# "api_call": {
+# "method": "GET",
+# "url": "https://httpbin.org/get",
+# },
+# }
+# ],
+# "main": [
+# {
+# "tool": "hello",
+# "arguments": {
+# "params": {"test": "_.test"},
+# },
+# },
+# {
+# "evaluate": {"hello": "_.json.args.test"},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["hello"] == data.input["test"]
+
+
+# @test("workflow: tool call api_call test retry")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+# status_codes_to_retry = ",".join(str(code) for code in (408, 429, 503, 504))
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "tools": [
+# {
+# "type": "api_call",
+# "name": "hello",
+# "api_call": {
+# "method": "GET",
+# "url": f"https://httpbin.org/status/{status_codes_to_retry}",
+# },
+# }
+# ],
+# "main": [
+# {
+# "tool": "hello",
+# "arguments": {
+# "params": {"test": "_.test"},
+# },
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# mock_run_task_execution_workflow.assert_called_once()
+
+# # Let it run for a bit
+# result_coroutine = handle.result()
+# task = asyncio.create_task(result_coroutine)
+# try:
+# await asyncio.wait_for(task, timeout=10)
+# except BaseException:
+# task.cancel()
+
+# # Get the history
+# history = await handle.fetch_history()
+# events = [MessageToDict(e) for e in history.events]
+# assert len(events) > 0
+
+# # NOTE: crude, but it works: substring-match the activity name in each serialized event
+# events_strings = [json.dumps(event) for event in events]
+# num_retries = len(
+# [event for event in events_strings if "execute_api_call" in event]
+# )
+
+# assert num_retries >= 2
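+
+# # A sturdier count than substring-matching the serialized events would
+# # filter on the scheduled activity name directly (a sketch):
+
+# num_retries = len(
+#     [
+#         event
+#         for event in events
+#         if "ACTIVITY_TASK_SCHEDULED" in event.get("eventType", "")
+#         and event.get("activityTaskScheduledEventAttributes", {})
+#         .get("activityType", {})
+#         .get("name")
+#         == "execute_api_call"
+#     ]
+# )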
+
+
+# @test("workflow: tool call integration dummy")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "tools": [
+# {
+# "type": "integration",
+# "name": "hello",
+# "integration": {
+# "provider": "dummy",
+# },
+# }
+# ],
+# "main": [
+# {
+# "tool": "hello",
+# "arguments": {"test": "_.test"},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["test"] == data.input["test"]
+
+
+# @skip("integration service patch not working")
+# @test("workflow: tool call integration mocked weather")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "tools": [
+# {
+# "type": "integration",
+# "name": "get_weather",
+# "integration": {
+# "provider": "weather",
+# "setup": {"openweathermap_api_key": "test"},
+# "arguments": {"test": "fake"},
+# },
+# }
+# ],
+# "main": [
+# {
+# "tool": "get_weather",
+# "arguments": {"location": "_.test"},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# expected_output = {"temperature": 20, "humidity": 60}
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# with patch_integration_service(expected_output) as mock_integration_service:
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+# mock_integration_service.assert_called_once()
+
+# result = await handle.result()
+# assert result == expected_output
+
+
+# @test("workflow: wait for input step start")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# {"wait_for_input": {"info": {"hi": '"bye"'}}},
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# # Let it run for a bit
+# result_coroutine = handle.result()
+# task = asyncio.create_task(result_coroutine)
+# try:
+# await asyncio.wait_for(task, timeout=3)
+# except asyncio.TimeoutError:
+# task.cancel()
+
+# # Get the history
+# history = await handle.fetch_history()
+# events = [MessageToDict(e) for e in history.events]
+# assert len(events) > 0
+
+# activities_scheduled = [
+# event.get("activityTaskScheduledEventAttributes", {})
+# .get("activityType", {})
+# .get("name")
+# for event in events
+# if "ACTIVITY_TASK_SCHEDULED" in event["eventType"]
+# ]
+# activities_scheduled = [
+# activity for activity in activities_scheduled if activity
+# ]
+
+# assert "wait_for_input_step" in activities_scheduled
+
+
+# @test("workflow: foreach wait for input step start")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# {
+# "foreach": {
+# "in": "'a b c'.split()",
+# "do": {"wait_for_input": {"info": {"hi": '"bye"'}}},
+# },
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+# mock_run_task_execution_workflow.assert_called_once()
+
+# # Let it run for a bit
+# result_coroutine = handle.result()
+# task = asyncio.create_task(result_coroutine)
+# try:
+# await asyncio.wait_for(task, timeout=3)
+# except asyncio.TimeoutError:
+# task.cancel()
+
+# # Get the history
+# history = await handle.fetch_history()
+# events = [MessageToDict(e) for e in history.events]
+# assert len(events) > 0
+
+# activities_scheduled = [
+# event.get("activityTaskScheduledEventAttributes", {})
+# .get("activityType", {})
+# .get("name")
+# for event in events
+# if "ACTIVITY_TASK_SCHEDULED" in event["eventType"]
+# ]
+# activities_scheduled = [
+# activity for activity in activities_scheduled if activity
+# ]
+
+# assert "for_each_step" in activities_scheduled
+
+
+# @test("workflow: if-else step")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task_def = CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# {
+# "if": "False",
+# "then": {"evaluate": {"hello": '"world"'}},
+# "else": {"evaluate": {"hello": "random.randint(0, 10)"}},
+# },
+# ],
+# }
+# )
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=task_def,
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["hello"] in [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+
+
+# @test("workflow: switch step")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# {
+# "switch": [
+# {
+# "case": "False",
+# "then": {"evaluate": {"hello": '"bubbles"'}},
+# },
+# {
+# "case": "True",
+# "then": {"evaluate": {"hello": '"world"'}},
+# },
+# {
+# "case": "True",
+# "then": {"evaluate": {"hello": '"bye"'}},
+# },
+# ]
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result["hello"] == "world"
+
+
+# @test("workflow: for each step")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# {
+# "foreach": {
+# "in": "'a b c'.split()",
+# "do": {"evaluate": {"hello": '"world"'}},
+# },
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result[0]["hello"] == "world"
+
+
+# @test("workflow: map reduce step")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# map_step = {
+# "over": "'a b c'.split()",
+# "map": {
+# "evaluate": {"res": "_"},
+# },
+# }
+
+# task_def = {
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [map_step],
+# }
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(**task_def),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert [r["res"] for r in result] == ["a", "b", "c"]
+
+
+# for p in [1, 3, 5]:
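+#     # NOTE: each generated test body reads the module-level `p` only when
+#     # ward runs it, after this loop has finished, so all three tests would
+#     # see p == 5. Binding the value as a keyword default (e.g. adding `p=p`
+#     # to the signature below) pins it per iteration.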
+
+# @test(f"workflow: map reduce step parallel (parallelism={p})")
+# async def _(
+# client=cozo_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# map_step = {
+# "over": "'a b c d'.split()",
+# "map": {
+# "evaluate": {"res": "_ + '!'"},
+# },
+# "parallelism": p,
+# }
+
+# task_def = {
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [map_step],
+# }
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(**task_def),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert [r["res"] for r in result] == [
+# "a!",
+# "b!",
+# "c!",
+# "d!",
+# ]
+
+
+# @test("workflow: prompt step (python expression)")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# mock_model_response = ModelResponse(
+# id="fake_id",
+# choices=[Choices(message={"role": "assistant", "content": "Hello, world!"})],
+# created=0,
+# object="text_completion",
+# )
+
+# with patch("agents_api.clients.litellm.acompletion") as acompletion:
+# acompletion.return_value = mock_model_response
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# {
+# "prompt": "$_ [{'role': 'user', 'content': _.test}]",
+# "settings": {},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# result = result["choices"][0]["message"]
+# assert result["content"] == "Hello, world!"
+# assert result["role"] == "assistant"
+
+
+# @test("workflow: prompt step")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# mock_model_response = ModelResponse(
+# id="fake_id",
+# choices=[Choices(message={"role": "assistant", "content": "Hello, world!"})],
+# created=0,
+# object="text_completion",
+# )
+
+# with patch("agents_api.clients.litellm.acompletion") as acompletion:
+# acompletion.return_value = mock_model_response
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# {
+# "prompt": [
+# {
+# "role": "user",
+# "content": "message",
+# },
+# ],
+# "settings": {},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# result = result["choices"][0]["message"]
+# assert result["content"] == "Hello, world!"
+# assert result["role"] == "assistant"
+
+
+# @test("workflow: prompt step unwrap")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# mock_model_response = ModelResponse(
+# id="fake_id",
+# choices=[Choices(message={"role": "assistant", "content": "Hello, world!"})],
+# created=0,
+# object="text_completion",
+# )
+
+# with patch("agents_api.clients.litellm.acompletion") as acompletion:
+# acompletion.return_value = mock_model_response
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# {
+# "prompt": [
+# {
+# "role": "user",
+# "content": "message",
+# },
+# ],
+# "unwrap": True,
+# "settings": {},
+# },
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result == "Hello, world!"
+
+
+# @test("workflow: set and get steps")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# data = CreateExecutionRequest(input={"test": "input"})
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [
+# {"set": {"test_key": '"test_value"'}},
+# {"get": "test_key"},
+# ],
+# }
+# ),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+
+# mock_run_task_execution_workflow.assert_called_once()
+
+# result = await handle.result()
+# assert result == "test_value"
+
+
+# @test("workflow: execute yaml task")
+# async def _(
+# clients=cozo_clients_with_migrations,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# client, _ = clients
+# mock_model_response = ModelResponse(
+# id="fake_id",
+# choices=[
+# Choices(
+# message={"role": "assistant", "content": "found: true\nvalue: 'Gaga'"}
+# )
+# ],
+# created=0,
+# object="text_completion",
+# )
+
+# with (
+# patch("agents_api.clients.litellm.acompletion") as acompletion,
+# open("./tests/sample_tasks/find_selector.yaml", "r") as task_file,
+# ):
+# input = dict(
+# screenshot_base64="iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA",
+# network_requests=[{"request": {}, "response": {"body": "Lady Gaga"}}],
+# parameters=["name"],
+# )
+# task_definition = yaml.safe_load(task_file)
+# acompletion.return_value = mock_model_response
+# data = CreateExecutionRequest(input=input)
+
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(**task_definition),
+# client=client,
+# )
+
+# async with patch_testing_temporal() as (_, mock_run_task_execution_workflow):
+# execution, handle = await start_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=data,
+# client=client,
+# )
+
+# assert handle is not None
+# assert execution.task_id == task.id
+# assert execution.input == data.input
+
+# mock_run_task_execution_workflow.assert_called_once()
+
+# await handle.result()
diff --git a/agents-api/tests/test_files_queries.py b/agents-api/tests/test_files_queries.py
index 712a083ca..367fcccd4 100644
--- a/agents-api/tests/test_files_queries.py
+++ b/agents-api/tests/test_files_queries.py
@@ -1,57 +1,57 @@
-# Tests for entry queries
-
-
-from ward import test
-
-from agents_api.autogen.openapi_model import CreateFileRequest
-from agents_api.models.files.create_file import create_file
-from agents_api.models.files.delete_file import delete_file
-from agents_api.models.files.get_file import get_file
-from tests.fixtures import (
- cozo_client,
- test_developer_id,
- test_file,
-)
-
-
-@test("model: create file")
-def _(client=cozo_client, developer_id=test_developer_id):
- create_file(
- developer_id=developer_id,
- data=CreateFileRequest(
- name="Hello",
- description="World",
- mime_type="text/plain",
- content="eyJzYW1wbGUiOiAidGVzdCJ9",
- ),
- client=client,
- )
-
-
-@test("model: get file")
-def _(client=cozo_client, file=test_file, developer_id=test_developer_id):
- get_file(
- developer_id=developer_id,
- file_id=file.id,
- client=client,
- )
-
-
-@test("model: delete file")
-def _(client=cozo_client, developer_id=test_developer_id):
- file = create_file(
- developer_id=developer_id,
- data=CreateFileRequest(
- name="Hello",
- description="World",
- mime_type="text/plain",
- content="eyJzYW1wbGUiOiAidGVzdCJ9",
- ),
- client=client,
- )
-
- delete_file(
- developer_id=developer_id,
- file_id=file.id,
- client=client,
- )
+# # Tests for file queries
+
+
+# from ward import test
+
+# from agents_api.autogen.openapi_model import CreateFileRequest
+# from agents_api.queries.files.create_file import create_file
+# from agents_api.queries.files.delete_file import delete_file
+# from agents_api.queries.files.get_file import get_file
+# from tests.fixtures import (
+# cozo_client,
+# test_developer_id,
+# test_file,
+# )
+
+
+# @test("query: create file")
+# def _(client=cozo_client, developer_id=test_developer_id):
+# create_file(
+# developer_id=developer_id,
+# data=CreateFileRequest(
+# name="Hello",
+# description="World",
+# mime_type="text/plain",
+# content="eyJzYW1wbGUiOiAidGVzdCJ9",
+# ),
+# client=client,
+# )
+
+
+# @test("query: get file")
+# def _(client=cozo_client, file=test_file, developer_id=test_developer_id):
+# get_file(
+# developer_id=developer_id,
+# file_id=file.id,
+# client=client,
+# )
+
+
+# @test("query: delete file")
+# def _(client=cozo_client, developer_id=test_developer_id):
+# file = create_file(
+# developer_id=developer_id,
+# data=CreateFileRequest(
+# name="Hello",
+# description="World",
+# mime_type="text/plain",
+# content="eyJzYW1wbGUiOiAidGVzdCJ9",
+# ),
+# client=client,
+# )
+
+# delete_file(
+# developer_id=developer_id,
+# file_id=file.id,
+# client=client,
+# )
diff --git a/agents-api/tests/test_files_routes.py b/agents-api/tests/test_files_routes.py
index 662612ff5..004cab74c 100644
--- a/agents-api/tests/test_files_routes.py
+++ b/agents-api/tests/test_files_routes.py
@@ -1,88 +1,88 @@
-import base64
-import hashlib
+# import base64
+# import hashlib
-from ward import test
+# from ward import test
-from tests.fixtures import make_request, s3_client
+# from tests.fixtures import make_request, s3_client
-@test("route: create file")
-async def _(make_request=make_request, s3_client=s3_client):
- data = dict(
- name="Test File",
- description="This is a test file.",
- mime_type="text/plain",
- content="eyJzYW1wbGUiOiAidGVzdCJ9",
- )
+# @test("route: create file")
+# async def _(make_request=make_request, s3_client=s3_client):
+# data = dict(
+# name="Test File",
+# description="This is a test file.",
+# mime_type="text/plain",
+# content="eyJzYW1wbGUiOiAidGVzdCJ9",
+# )
- response = make_request(
- method="POST",
- url="/files",
- json=data,
- )
+# response = make_request(
+# method="POST",
+# url="/files",
+# json=data,
+# )
- assert response.status_code == 201
+# assert response.status_code == 201
-@test("route: delete file")
-async def _(make_request=make_request, s3_client=s3_client):
- data = dict(
- name="Test File",
- description="This is a test file.",
- mime_type="text/plain",
- content="eyJzYW1wbGUiOiAidGVzdCJ9",
- )
+# @test("route: delete file")
+# async def _(make_request=make_request, s3_client=s3_client):
+# data = dict(
+# name="Test File",
+# description="This is a test file.",
+# mime_type="text/plain",
+# content="eyJzYW1wbGUiOiAidGVzdCJ9",
+# )
- response = make_request(
- method="POST",
- url="/files",
- json=data,
- )
+# response = make_request(
+# method="POST",
+# url="/files",
+# json=data,
+# )
- file_id = response.json()["id"]
+# file_id = response.json()["id"]
- response = make_request(
- method="DELETE",
- url=f"/files/{file_id}",
- )
+# response = make_request(
+# method="DELETE",
+# url=f"/files/{file_id}",
+# )
- assert response.status_code == 202
+# assert response.status_code == 202
- response = make_request(
- method="GET",
- url=f"/files/{file_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/files/{file_id}",
+# )
- assert response.status_code == 404
+# assert response.status_code == 404
-@test("route: get file")
-async def _(make_request=make_request, s3_client=s3_client):
- data = dict(
- name="Test File",
- description="This is a test file.",
- mime_type="text/plain",
- content="eyJzYW1wbGUiOiAidGVzdCJ9",
- )
+# @test("route: get file")
+# async def _(make_request=make_request, s3_client=s3_client):
+# data = dict(
+# name="Test File",
+# description="This is a test file.",
+# mime_type="text/plain",
+# content="eyJzYW1wbGUiOiAidGVzdCJ9",
+# )
- response = make_request(
- method="POST",
- url="/files",
- json=data,
- )
+# response = make_request(
+# method="POST",
+# url="/files",
+# json=data,
+# )
- file_id = response.json()["id"]
- content_bytes = base64.b64decode(data["content"])
- expected_hash = hashlib.sha256(content_bytes).hexdigest()
+# file_id = response.json()["id"]
+# content_bytes = base64.b64decode(data["content"])
+# expected_hash = hashlib.sha256(content_bytes).hexdigest()
- response = make_request(
- method="GET",
- url=f"/files/{file_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/files/{file_id}",
+# )
- assert response.status_code == 200
+# assert response.status_code == 200
- result = response.json()
+# result = response.json()
- # Decode base64 content and compute its SHA-256 hash
- assert result["hash"] == expected_hash
+# # The returned hash should match the SHA-256 of the decoded content
+# assert result["hash"] == expected_hash
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index d59ac9250..e8ec40367 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -1,160 +1,160 @@
-# Tests for session queries
-
-from uuid_extensions import uuid7
-from ward import test
-
-from agents_api.autogen.openapi_model import (
- CreateOrUpdateSessionRequest,
- CreateSessionRequest,
- Session,
-)
-from agents_api.models.session.count_sessions import count_sessions
-from agents_api.models.session.create_or_update_session import create_or_update_session
-from agents_api.models.session.create_session import create_session
-from agents_api.models.session.delete_session import delete_session
-from agents_api.models.session.get_session import get_session
-from agents_api.models.session.list_sessions import list_sessions
-from tests.fixtures import (
- cozo_client,
- test_agent,
- test_developer_id,
- test_session,
- test_user,
-)
-
-MODEL = "gpt-4o-mini"
-
-
-@test("model: create session")
-def _(
- client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user
-):
- create_session(
- developer_id=developer_id,
- data=CreateSessionRequest(
- users=[user.id],
- agents=[agent.id],
- situation="test session about",
- ),
- client=client,
- )
-
-
-@test("model: create session no user")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- create_session(
- developer_id=developer_id,
- data=CreateSessionRequest(
- agents=[agent.id],
- situation="test session about",
- ),
- client=client,
- )
-
-
-@test("model: get session not exists")
-def _(client=cozo_client, developer_id=test_developer_id):
- session_id = uuid7()
-
- try:
- get_session(
- session_id=session_id,
- developer_id=developer_id,
- client=client,
- )
- except Exception:
- pass
- else:
- assert False, "Session should not exist"
-
-
-@test("model: get session exists")
-def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
- result = get_session(
- session_id=session.id,
- developer_id=developer_id,
- client=client,
- )
-
- assert result is not None
- assert isinstance(result, Session)
-
-
-@test("model: delete session")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- session = create_session(
- developer_id=developer_id,
- data=CreateSessionRequest(
- agent=agent.id,
- situation="test session about",
- ),
- client=client,
- )
-
- delete_session(
- session_id=session.id,
- developer_id=developer_id,
- client=client,
- )
-
- try:
- get_session(
- session_id=session.id,
- developer_id=developer_id,
- client=client,
- )
- except Exception:
- pass
-
- else:
- assert False, "Session should not exist"
-
-
-@test("model: list sessions")
-def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
- result = list_sessions(
- developer_id=developer_id,
- client=client,
- )
-
- assert isinstance(result, list)
- assert len(result) > 0
-
-
-@test("model: count sessions")
-def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
- result = count_sessions(
- developer_id=developer_id,
- client=client,
- )
-
- assert isinstance(result, dict)
- assert result["count"] > 0
-
-
-@test("model: create or update session")
-def _(
- client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user
-):
- session_id = uuid7()
-
- create_or_update_session(
- session_id=session_id,
- developer_id=developer_id,
- data=CreateOrUpdateSessionRequest(
- users=[user.id],
- agents=[agent.id],
- situation="test session about",
- ),
- client=client,
- )
-
- result = get_session(
- session_id=session_id,
- developer_id=developer_id,
- client=client,
- )
-
- assert result is not None
- assert isinstance(result, Session)
- assert result.id == session_id
+# # Tests for session queries
+
+# from uuid_extensions import uuid7
+# from ward import test
+
+# from agents_api.autogen.openapi_model import (
+# CreateOrUpdateSessionRequest,
+# CreateSessionRequest,
+# Session,
+# )
+# from agents_api.queries.session.count_sessions import count_sessions
+# from agents_api.queries.session.create_or_update_session import create_or_update_session
+# from agents_api.queries.session.create_session import create_session
+# from agents_api.queries.session.delete_session import delete_session
+# from agents_api.queries.session.get_session import get_session
+# from agents_api.queries.session.list_sessions import list_sessions
+# from tests.fixtures import (
+# cozo_client,
+# test_agent,
+# test_developer_id,
+# test_session,
+# test_user,
+# )
+
+# MODEL = "gpt-4o-mini"
+
+
+# @test("query: create session")
+# def _(
+# client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user
+# ):
+# create_session(
+# developer_id=developer_id,
+# data=CreateSessionRequest(
+# users=[user.id],
+# agents=[agent.id],
+# situation="test session about",
+# ),
+# client=client,
+# )
+
+
+# @test("query: create session no user")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# create_session(
+# developer_id=developer_id,
+# data=CreateSessionRequest(
+# agents=[agent.id],
+# situation="test session about",
+# ),
+# client=client,
+# )
+
+
+# @test("query: get session not exists")
+# def _(client=cozo_client, developer_id=test_developer_id):
+# session_id = uuid7()
+
+# try:
+# get_session(
+# session_id=session_id,
+# developer_id=developer_id,
+# client=client,
+# )
+# except Exception:
+# pass
+# else:
+# assert False, "Session should not exist"
+
+
+# @test("query: get session exists")
+# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
+# result = get_session(
+# session_id=session.id,
+# developer_id=developer_id,
+# client=client,
+# )
+
+# assert result is not None
+# assert isinstance(result, Session)
+
+
+# @test("query: delete session")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# session = create_session(
+# developer_id=developer_id,
+# data=CreateSessionRequest(
+# agent=agent.id,
+# situation="test session about",
+# ),
+# client=client,
+# )
+
+# delete_session(
+# session_id=session.id,
+# developer_id=developer_id,
+# client=client,
+# )
+
+# try:
+# get_session(
+# session_id=session.id,
+# developer_id=developer_id,
+# client=client,
+# )
+# except Exception:
+# pass
+
+# else:
+# assert False, "Session should not exist"
+
+
+# @test("query: list sessions")
+# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
+# result = list_sessions(
+# developer_id=developer_id,
+# client=client,
+# )
+
+# assert isinstance(result, list)
+# assert len(result) > 0
+
+
+# @test("query: count sessions")
+# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
+# result = count_sessions(
+# developer_id=developer_id,
+# client=client,
+# )
+
+# assert isinstance(result, dict)
+# assert result["count"] > 0
+
+
+# @test("query: create or update session")
+# def _(
+# client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user
+# ):
+# session_id = uuid7()
+
+# create_or_update_session(
+# session_id=session_id,
+# developer_id=developer_id,
+# data=CreateOrUpdateSessionRequest(
+# users=[user.id],
+# agents=[agent.id],
+# situation="test session about",
+# ),
+# client=client,
+# )
+
+# result = get_session(
+# session_id=session_id,
+# developer_id=developer_id,
+# client=client,
+# )
+
+# assert result is not None
+# assert isinstance(result, Session)
+# assert result.id == session_id
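Several of the commented-out tests above assert "lookup should fail" via a try/except/else block. ward's `raises` helper (already imported in the reworked test_user_queries.py later in this series) states the same intent more directly. A minimal sketch, assuming the fixtures and the `agents_api.queries.session` module path shown in the commented code above:

    from uuid_extensions import uuid7
    from ward import raises, test

    from agents_api.queries.session.get_session import get_session
    from tests.fixtures import cozo_client, test_developer_id

    @test("query: get session not exists")
    def _(client=cozo_client, developer_id=test_developer_id):
        # raises() fails the test unless the body raises the given exception type
        with raises(Exception):
            get_session(
                session_id=uuid7(),
                developer_id=developer_id,
                client=client,
            )
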
diff --git a/agents-api/tests/test_sessions.py b/agents-api/tests/test_sessions.py
index b25a8a706..2a406aebb 100644
--- a/agents-api/tests/test_sessions.py
+++ b/agents-api/tests/test_sessions.py
@@ -1,36 +1,36 @@
-from ward import test
+# from ward import test
-from tests.fixtures import make_request
+# from tests.fixtures import make_request
-@test("model: list sessions")
-def _(make_request=make_request):
- response = make_request(
- method="GET",
- url="/sessions",
- )
+# @test("query: list sessions")
+# def _(make_request=make_request):
+# response = make_request(
+# method="GET",
+# url="/sessions",
+# )
- assert response.status_code == 200
- response = response.json()
- sessions = response["items"]
+# assert response.status_code == 200
+# response = response.json()
+# sessions = response["items"]
- assert isinstance(sessions, list)
- assert len(sessions) > 0
+# assert isinstance(sessions, list)
+# assert len(sessions) > 0
-@test("model: list sessions with metadata filter")
-def _(make_request=make_request):
- response = make_request(
- method="GET",
- url="/sessions",
- params={
- "metadata_filter": {"test": "test"},
- },
- )
+# @test("query: list sessions with metadata filter")
+# def _(make_request=make_request):
+# response = make_request(
+# method="GET",
+# url="/sessions",
+# params={
+# "metadata_filter": {"test": "test"},
+# },
+# )
- assert response.status_code == 200
- response = response.json()
- sessions = response["items"]
+# assert response.status_code == 200
+# response = response.json()
+# sessions = response["items"]
- assert isinstance(sessions, list)
- assert len(sessions) > 0
+# assert isinstance(sessions, list)
+# assert len(sessions) > 0
diff --git a/agents-api/tests/test_task_queries.py b/agents-api/tests/test_task_queries.py
index 85c38ba81..1a9fcd544 100644
--- a/agents-api/tests/test_task_queries.py
+++ b/agents-api/tests/test_task_queries.py
@@ -1,160 +1,160 @@
-# Tests for task queries
-
-from uuid_extensions import uuid7
-from ward import test
-
-from agents_api.autogen.openapi_model import (
- CreateTaskRequest,
- ResourceUpdatedResponse,
- Task,
- UpdateTaskRequest,
-)
-from agents_api.models.task.create_or_update_task import create_or_update_task
-from agents_api.models.task.create_task import create_task
-from agents_api.models.task.delete_task import delete_task
-from agents_api.models.task.get_task import get_task
-from agents_api.models.task.list_tasks import list_tasks
-from agents_api.models.task.update_task import update_task
-from tests.fixtures import cozo_client, test_agent, test_developer_id, test_task
-
-
-@test("model: create task")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- task_id = uuid7()
-
- create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- task_id=task_id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [{"evaluate": {"hi": "_"}}],
- }
- ),
- client=client,
- )
-
-
-@test("model: create or update task")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- task_id = uuid7()
-
- create_or_update_task(
- developer_id=developer_id,
- agent_id=agent.id,
- task_id=task_id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [{"evaluate": {"hi": "_"}}],
- }
- ),
- client=client,
- )
-
-
-@test("model: get task not exists")
-def _(client=cozo_client, developer_id=test_developer_id):
- task_id = uuid7()
-
- try:
- get_task(
- developer_id=developer_id,
- task_id=task_id,
- client=client,
- )
- except Exception:
- pass
- else:
- assert False, "Task should not exist"
-
-
-@test("model: get task exists")
-def _(client=cozo_client, developer_id=test_developer_id, task=test_task):
- result = get_task(
- developer_id=developer_id,
- task_id=task.id,
- client=client,
- )
-
- assert result is not None
- assert isinstance(result, Task)
-
-
-@test("model: delete task")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- task = create_task(
- developer_id=developer_id,
- agent_id=agent.id,
- data=CreateTaskRequest(
- **{
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [{"evaluate": {"hi": "_"}}],
- }
- ),
- client=client,
- )
-
- delete_task(
- developer_id=developer_id,
- agent_id=agent.id,
- task_id=task.id,
- client=client,
- )
-
- try:
- get_task(
- developer_id=developer_id,
- task_id=task.id,
- client=client,
- )
- except Exception:
- pass
-
- else:
- assert False, "Task should not exist"
-
-
-@test("model: update task")
-def _(
- client=cozo_client, developer_id=test_developer_id, agent=test_agent, task=test_task
-):
- result = update_task(
- developer_id=developer_id,
- task_id=task.id,
- agent_id=agent.id,
- data=UpdateTaskRequest(
- **{
- "name": "updated task",
- "description": "updated task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [{"evaluate": {"hi": "_"}}],
- }
- ),
- client=client,
- )
-
- assert result is not None
- assert isinstance(result, ResourceUpdatedResponse)
-
-
-@test("model: list tasks")
-def _(
- client=cozo_client, developer_id=test_developer_id, task=test_task, agent=test_agent
-):
- result = list_tasks(
- developer_id=developer_id,
- agent_id=agent.id,
- client=client,
- )
-
- assert isinstance(result, list)
- assert len(result) > 0
- assert all(isinstance(task, Task) for task in result)
+# # Tests for task queries
+
+# from uuid_extensions import uuid7
+# from ward import test
+
+# from agents_api.autogen.openapi_model import (
+# CreateTaskRequest,
+# ResourceUpdatedResponse,
+# Task,
+# UpdateTaskRequest,
+# )
+# from agents_api.queries.task.create_or_update_task import create_or_update_task
+# from agents_api.queries.task.create_task import create_task
+# from agents_api.queries.task.delete_task import delete_task
+# from agents_api.queries.task.get_task import get_task
+# from agents_api.queries.task.list_tasks import list_tasks
+# from agents_api.queries.task.update_task import update_task
+# from tests.fixtures import cozo_client, test_agent, test_developer_id, test_task
+
+
+# @test("query: create task")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# task_id = uuid7()
+
+# create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# task_id=task_id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [{"evaluate": {"hi": "_"}}],
+# }
+# ),
+# client=client,
+# )
+
+
+# @test("query: create or update task")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# task_id = uuid7()
+
+# create_or_update_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# task_id=task_id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [{"evaluate": {"hi": "_"}}],
+# }
+# ),
+# client=client,
+# )
+
+
+# @test("query: get task not exists")
+# def _(client=cozo_client, developer_id=test_developer_id):
+# task_id = uuid7()
+
+# try:
+# get_task(
+# developer_id=developer_id,
+# task_id=task_id,
+# client=client,
+# )
+# except Exception:
+# pass
+# else:
+# assert False, "Task should not exist"
+
+
+# @test("query: get task exists")
+# def _(client=cozo_client, developer_id=test_developer_id, task=test_task):
+# result = get_task(
+# developer_id=developer_id,
+# task_id=task.id,
+# client=client,
+# )
+
+# assert result is not None
+# assert isinstance(result, Task)
+
+
+# @test("query: delete task")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# task = create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [{"evaluate": {"hi": "_"}}],
+# }
+# ),
+# client=client,
+# )
+
+# delete_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# task_id=task.id,
+# client=client,
+# )
+
+# try:
+# get_task(
+# developer_id=developer_id,
+# task_id=task.id,
+# client=client,
+# )
+# except Exception:
+# pass
+
+# else:
+# assert False, "Task should not exist"
+
+
+# @test("query: update task")
+# def _(
+# client=cozo_client, developer_id=test_developer_id, agent=test_agent, task=test_task
+# ):
+# result = update_task(
+# developer_id=developer_id,
+# task_id=task.id,
+# agent_id=agent.id,
+# data=UpdateTaskRequest(
+# **{
+# "name": "updated task",
+# "description": "updated task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [{"evaluate": {"hi": "_"}}],
+# }
+# ),
+# client=client,
+# )
+
+# assert result is not None
+# assert isinstance(result, ResourceUpdatedResponse)
+
+
+# @test("query: list tasks")
+# def _(
+# client=cozo_client, developer_id=test_developer_id, task=test_task, agent=test_agent
+# ):
+# result = list_tasks(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# client=client,
+# )
+
+# assert isinstance(result, list)
+# assert len(result) > 0
+# assert all(isinstance(task, Task) for task in result)
diff --git a/agents-api/tests/test_task_routes.py b/agents-api/tests/test_task_routes.py
index 6f758c852..61ffa6a09 100644
--- a/agents-api/tests/test_task_routes.py
+++ b/agents-api/tests/test_task_routes.py
@@ -1,174 +1,65 @@
-# Tests for task routes
-
-from uuid_extensions import uuid7
-from ward import test
-
-from tests.fixtures import (
- client,
- make_request,
- test_agent,
- test_execution,
- test_task,
-)
-from tests.utils import patch_testing_temporal
-
-
-@test("route: unauthorized should fail")
-def _(client=client, agent=test_agent):
- data = dict(
- name="test user",
- main=[
- {
- "kind_": "evaluate",
- "evaluate": {
- "additionalProp1": "value1",
- },
- }
- ],
- )
-
- response = client.request(
- method="POST",
- url=f"/agents/{str(agent.id)}/tasks",
- data=data,
- )
-
- assert response.status_code == 403
-
-
-@test("route: create task")
-def _(make_request=make_request, agent=test_agent):
- data = dict(
- name="test user",
- main=[
- {
- "kind_": "evaluate",
- "evaluate": {
- "additionalProp1": "value1",
- },
- }
- ],
- )
-
- response = make_request(
- method="POST",
- url=f"/agents/{str(agent.id)}/tasks",
- json=data,
- )
-
- assert response.status_code == 201
-
-
-@test("route: create task execution")
-async def _(make_request=make_request, task=test_task):
- data = dict(
- input={},
- metadata={},
- )
-
- async with patch_testing_temporal():
- response = make_request(
- method="POST",
- url=f"/tasks/{str(task.id)}/executions",
- json=data,
- )
-
- assert response.status_code == 201
-
-
-@test("route: get execution not exists")
-def _(make_request=make_request):
- execution_id = str(uuid7())
-
- response = make_request(
- method="GET",
- url=f"/executions/{execution_id}",
- )
-
- assert response.status_code == 404
-
-
-@test("route: get execution exists")
-def _(make_request=make_request, execution=test_execution):
- response = make_request(
- method="GET",
- url=f"/executions/{str(execution.id)}",
- )
-
- assert response.status_code == 200
-
-
-@test("route: get task not exists")
-def _(make_request=make_request):
- task_id = str(uuid7())
-
- response = make_request(
- method="GET",
- url=f"/tasks/{task_id}",
- )
-
- assert response.status_code == 400
+# # Tests for task routes
+# from uuid_extensions import uuid7
+# from ward import test
-@test("route: get task exists")
-def _(make_request=make_request, task=test_task):
- response = make_request(
- method="GET",
- url=f"/tasks/{str(task.id)}",
- )
+# from tests.fixtures import (
+# client,
+# make_request,
+# test_agent,
+# test_execution,
+# test_task,
+# )
+# from tests.utils import patch_testing_temporal
- assert response.status_code == 200
-
-# FIXME: This test is failing
-# @test("route: list execution transitions")
-# def _(make_request=make_request, execution=test_execution, transition=test_transition):
-# response = make_request(
-# method="GET",
-# url=f"/executions/{str(execution.id)}/transitions",
+# @test("route: unauthorized should fail")
+# def _(client=client, agent=test_agent):
+# data = dict(
+# name="test user",
+# main=[
+# {
+# "kind_": "evaluate",
+# "evaluate": {
+# "additionalProp1": "value1",
+# },
+# }
+# ],
# )
-# assert response.status_code == 200
-# response = response.json()
-# transitions = response["items"]
-
-# assert isinstance(transitions, list)
-# assert len(transitions) > 0
-
-
-@test("route: list task executions")
-def _(make_request=make_request, execution=test_execution):
- response = make_request(
- method="GET",
- url=f"/tasks/{str(execution.task_id)}/executions",
- )
-
- assert response.status_code == 200
- response = response.json()
- executions = response["items"]
-
- assert isinstance(executions, list)
- assert len(executions) > 0
+# response = client.request(
+# method="POST",
+# url=f"/agents/{str(agent.id)}/tasks",
+# data=data,
+# )
+# assert response.status_code == 403
-@test("route: list tasks")
-def _(make_request=make_request, agent=test_agent):
- response = make_request(
- method="GET",
- url=f"/agents/{str(agent.id)}/tasks",
- )
- assert response.status_code == 200
- response = response.json()
- tasks = response["items"]
+# @test("route: create task")
+# def _(make_request=make_request, agent=test_agent):
+# data = dict(
+# name="test user",
+# main=[
+# {
+# "kind_": "evaluate",
+# "evaluate": {
+# "additionalProp1": "value1",
+# },
+# }
+# ],
+# )
- assert isinstance(tasks, list)
- assert len(tasks) > 0
+# response = make_request(
+# method="POST",
+# url=f"/agents/{str(agent.id)}/tasks",
+# json=data,
+# )
+# assert response.status_code == 201
-# FIXME: This test is failing
-# @test("route: patch execution")
+# @test("route: create task execution")
# async def _(make_request=make_request, task=test_task):
# data = dict(
# input={},
@@ -182,28 +73,137 @@ def _(make_request=make_request, agent=test_agent):
# json=data,
# )
-# execution = response.json()
+# assert response.status_code == 201
-# data = dict(
-# status="running",
+
+# @test("route: get execution not exists")
+# def _(make_request=make_request):
+# execution_id = str(uuid7())
+
+# response = make_request(
+# method="GET",
+# url=f"/executions/{execution_id}",
# )
+# assert response.status_code == 404
+
+
+# @test("route: get execution exists")
+# def _(make_request=make_request, execution=test_execution):
# response = make_request(
-# method="PATCH",
-# url=f"/tasks/{str(task.id)}/executions/{str(execution['id'])}",
-# json=data,
+# method="GET",
+# url=f"/executions/{str(execution.id)}",
# )
# assert response.status_code == 200
-# execution_id = response.json()["id"]
+
+# @test("route: get task not exists")
+# def _(make_request=make_request):
+# task_id = str(uuid7())
# response = make_request(
# method="GET",
-# url=f"/executions/{execution_id}",
+# url=f"/tasks/{task_id}",
+# )
+
+# assert response.status_code == 400
+
+
+# @test("route: get task exists")
+# def _(make_request=make_request, task=test_task):
+# response = make_request(
+# method="GET",
+# url=f"/tasks/{str(task.id)}",
+# )
+
+# assert response.status_code == 200
+
+
+# # FIXME: This test is failing
+# # @test("route: list execution transitions")
+# # def _(make_request=make_request, execution=test_execution, transition=test_transition):
+# # response = make_request(
+# # method="GET",
+# # url=f"/executions/{str(execution.id)}/transitions",
+# # )
+
+# # assert response.status_code == 200
+# # response = response.json()
+# # transitions = response["items"]
+
+# # assert isinstance(transitions, list)
+# # assert len(transitions) > 0
+
+
+# @test("route: list task executions")
+# def _(make_request=make_request, execution=test_execution):
+# response = make_request(
+# method="GET",
+# url=f"/tasks/{str(execution.task_id)}/executions",
+# )
+
+# assert response.status_code == 200
+# response = response.json()
+# executions = response["items"]
+
+# assert isinstance(executions, list)
+# assert len(executions) > 0
+
+
+# @test("route: list tasks")
+# def _(make_request=make_request, agent=test_agent):
+# response = make_request(
+# method="GET",
+# url=f"/agents/{str(agent.id)}/tasks",
# )
# assert response.status_code == 200
-# execution = response.json()
+# response = response.json()
+# tasks = response["items"]
+
+# assert isinstance(tasks, list)
+# assert len(tasks) > 0
+
+
+# # FIXME: This test is failing
+
+# # @test("route: patch execution")
+# # async def _(make_request=make_request, task=test_task):
+# # data = dict(
+# # input={},
+# # metadata={},
+# # )
+
+# # async with patch_testing_temporal():
+# # response = make_request(
+# # method="POST",
+# # url=f"/tasks/{str(task.id)}/executions",
+# # json=data,
+# # )
+
+# # execution = response.json()
+
+# # data = dict(
+# # status="running",
+# # )
+
+# # response = make_request(
+# # method="PATCH",
+# # url=f"/tasks/{str(task.id)}/executions/{str(execution['id'])}",
+# # json=data,
+# # )
+
+# # assert response.status_code == 200
+
+# # execution_id = response.json()["id"]
+
+# # response = make_request(
+# # method="GET",
+# # url=f"/executions/{execution_id}",
+# # )
+
+# # assert response.status_code == 200
+# # execution = response.json()
-# assert execution["status"] == "running"
+# # assert execution["status"] == "running"
diff --git a/agents-api/tests/test_tool_queries.py b/agents-api/tests/test_tool_queries.py
index b41125aaf..f6f4bac47 100644
--- a/agents-api/tests/test_tool_queries.py
+++ b/agents-api/tests/test_tool_queries.py
@@ -1,170 +1,170 @@
-# Tests for tool queries
-
-from ward import test
-
-from agents_api.autogen.openapi_model import (
- CreateToolRequest,
- PatchToolRequest,
- Tool,
- UpdateToolRequest,
-)
-from agents_api.models.tools.create_tools import create_tools
-from agents_api.models.tools.delete_tool import delete_tool
-from agents_api.models.tools.get_tool import get_tool
-from agents_api.models.tools.list_tools import list_tools
-from agents_api.models.tools.patch_tool import patch_tool
-from agents_api.models.tools.update_tool import update_tool
-from tests.fixtures import cozo_client, test_agent, test_developer_id, test_tool
-
-
-@test("model: create tool")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- function = {
- "name": "hello_world",
- "description": "A function that prints hello world",
- "parameters": {"type": "object", "properties": {}},
- }
-
- tool = {
- "function": function,
- "name": "hello_world",
- "type": "function",
- }
-
- result = create_tools(
- developer_id=developer_id,
- agent_id=agent.id,
- data=[CreateToolRequest(**tool)],
- client=client,
- )
-
- assert result is not None
- assert isinstance(result[0], Tool)
-
-
-@test("model: delete tool")
-def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
- function = {
- "name": "temp_temp",
- "description": "A function that prints hello world",
- "parameters": {"type": "object", "properties": {}},
- }
-
- tool = {
- "function": function,
- "name": "temp_temp",
- "type": "function",
- }
-
- [tool, *_] = create_tools(
- developer_id=developer_id,
- agent_id=agent.id,
- data=[CreateToolRequest(**tool)],
- client=client,
- )
-
- result = delete_tool(
- developer_id=developer_id,
- agent_id=agent.id,
- tool_id=tool.id,
- client=client,
- )
-
- assert result is not None
-
-
-@test("model: get tool")
-def _(
- client=cozo_client, developer_id=test_developer_id, tool=test_tool, agent=test_agent
-):
- result = get_tool(
- developer_id=developer_id,
- agent_id=agent.id,
- tool_id=tool.id,
- client=client,
- )
-
- assert result is not None
-
-
-@test("model: list tools")
-def _(
- client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool
-):
- result = list_tools(
- developer_id=developer_id,
- agent_id=agent.id,
- client=client,
- )
-
- assert result is not None
- assert all(isinstance(tool, Tool) for tool in result)
-
-
-@test("model: patch tool")
-def _(
- client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool
-):
- patch_data = PatchToolRequest(
- **{
- "name": "patched_tool",
- "function": {
- "description": "A patched function that prints hello world",
- },
- }
- )
-
- result = patch_tool(
- developer_id=developer_id,
- agent_id=agent.id,
- tool_id=tool.id,
- data=patch_data,
- client=client,
- )
-
- assert result is not None
-
- tool = get_tool(
- developer_id=developer_id,
- agent_id=agent.id,
- tool_id=tool.id,
- client=client,
- )
-
- assert tool.name == "patched_tool"
- assert tool.function.description == "A patched function that prints hello world"
- assert tool.function.parameters
-
-
-@test("model: update tool")
-def _(
- client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool
-):
- update_data = UpdateToolRequest(
- name="updated_tool",
- description="An updated description",
- type="function",
- function={
- "description": "An updated function that prints hello world",
- },
- )
-
- result = update_tool(
- developer_id=developer_id,
- agent_id=agent.id,
- tool_id=tool.id,
- data=update_data,
- client=client,
- )
-
- assert result is not None
-
- tool = get_tool(
- developer_id=developer_id,
- agent_id=agent.id,
- tool_id=tool.id,
- client=client,
- )
-
- assert tool.name == "updated_tool"
- assert not tool.function.parameters
+# # Tests for tool queries
+
+# from ward import test
+
+# from agents_api.autogen.openapi_model import (
+# CreateToolRequest,
+# PatchToolRequest,
+# Tool,
+# UpdateToolRequest,
+# )
+# from agents_api.queries.tools.create_tools import create_tools
+# from agents_api.queries.tools.delete_tool import delete_tool
+# from agents_api.queries.tools.get_tool import get_tool
+# from agents_api.queries.tools.list_tools import list_tools
+# from agents_api.queries.tools.patch_tool import patch_tool
+# from agents_api.queries.tools.update_tool import update_tool
+# from tests.fixtures import cozo_client, test_agent, test_developer_id, test_tool
+
+
+# @test("query: create tool")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# function = {
+# "name": "hello_world",
+# "description": "A function that prints hello world",
+# "parameters": {"type": "object", "properties": {}},
+# }
+
+# tool = {
+# "function": function,
+# "name": "hello_world",
+# "type": "function",
+# }
+
+# result = create_tools(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=[CreateToolRequest(**tool)],
+# client=client,
+# )
+
+# assert result is not None
+# assert isinstance(result[0], Tool)
+
+
+# @test("query: delete tool")
+# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
+# function = {
+# "name": "temp_temp",
+# "description": "A function that prints hello world",
+# "parameters": {"type": "object", "properties": {}},
+# }
+
+# tool = {
+# "function": function,
+# "name": "temp_temp",
+# "type": "function",
+# }
+
+# [tool, *_] = create_tools(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=[CreateToolRequest(**tool)],
+# client=client,
+# )
+
+# result = delete_tool(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# tool_id=tool.id,
+# client=client,
+# )
+
+# assert result is not None
+
+
+# @test("query: get tool")
+# def _(
+# client=cozo_client, developer_id=test_developer_id, tool=test_tool, agent=test_agent
+# ):
+# result = get_tool(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# tool_id=tool.id,
+# client=client,
+# )
+
+# assert result is not None
+
+
+# @test("query: list tools")
+# def _(
+# client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool
+# ):
+# result = list_tools(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# client=client,
+# )
+
+# assert result is not None
+# assert all(isinstance(tool, Tool) for tool in result)
+
+
+# @test("query: patch tool")
+# def _(
+# client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool
+# ):
+# patch_data = PatchToolRequest(
+# **{
+# "name": "patched_tool",
+# "function": {
+# "description": "A patched function that prints hello world",
+# },
+# }
+# )
+
+# result = patch_tool(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# tool_id=tool.id,
+# data=patch_data,
+# client=client,
+# )
+
+# assert result is not None
+
+# tool = get_tool(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# tool_id=tool.id,
+# client=client,
+# )
+
+# assert tool.name == "patched_tool"
+# assert tool.function.description == "A patched function that prints hello world"
+# assert tool.function.parameters
+
+
+# @test("query: update tool")
+# def _(
+# client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool
+# ):
+# update_data = UpdateToolRequest(
+# name="updated_tool",
+# description="An updated description",
+# type="function",
+# function={
+# "description": "An updated function that prints hello world",
+# },
+# )
+
+# result = update_tool(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# tool_id=tool.id,
+# data=update_data,
+# client=client,
+# )
+
+# assert result is not None
+
+# tool = get_tool(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# tool_id=tool.id,
+# client=client,
+# )
+
+# assert tool.name == "updated_tool"
+# assert not tool.function.parameters
diff --git a/agents-api/tests/test_user_queries.py b/agents-api/tests/test_user_queries.py
index abdc597ea..7ba25b358 100644
--- a/agents-api/tests/test_user_queries.py
+++ b/agents-api/tests/test_user_queries.py
@@ -1,117 +1,178 @@
-# This module contains tests for user-related queries against the 'cozodb' database. It includes tests for creating, updating, and retrieving user information.
-# Tests for user queries
-
-from uuid_extensions import uuid7
-from ward import test
-
-from agents_api.autogen.openapi_model import (
- CreateOrUpdateUserRequest,
- CreateUserRequest,
- ResourceUpdatedResponse,
- UpdateUserRequest,
- User,
-)
-from agents_api.models.user.create_or_update_user import create_or_update_user
-from agents_api.models.user.create_user import create_user
-from agents_api.models.user.get_user import get_user
-from agents_api.models.user.list_users import list_users
-from agents_api.models.user.update_user import update_user
-from tests.fixtures import cozo_client, test_developer_id, test_user
-
-
-@test("model: create user")
-def _(client=cozo_client, developer_id=test_developer_id):
- """Test that a user can be successfully created."""
-
- create_user(
- developer_id=developer_id,
- data=CreateUserRequest(
- name="test user",
- about="test user about",
- ),
- client=client,
- )
-
-
-@test("model: create or update user")
-def _(client=cozo_client, developer_id=test_developer_id):
- """Test that a user can be successfully created or updated."""
-
- create_or_update_user(
- developer_id=developer_id,
- user_id=uuid7(),
- data=CreateOrUpdateUserRequest(
- name="test user",
- about="test user about",
- ),
- client=client,
- )
-
-
-@test("model: update user")
-def _(client=cozo_client, developer_id=test_developer_id, user=test_user):
- """Test that an existing user's information can be successfully updated."""
-
- # Verify that the 'updated_at' timestamp is greater than the 'created_at' timestamp, indicating a successful update.
- update_result = update_user(
- user_id=user.id,
- developer_id=developer_id,
- data=UpdateUserRequest(
- name="updated user",
- about="updated user about",
- ),
- client=client,
- )
-
- assert update_result is not None
- assert isinstance(update_result, ResourceUpdatedResponse)
- assert update_result.updated_at > user.created_at
-
-
-@test("model: get user not exists")
-def _(client=cozo_client, developer_id=test_developer_id):
- """Test that retrieving a non-existent user returns an empty result."""
-
- user_id = uuid7()
-
- # Ensure that the query for an existing user returns exactly one result.
- try:
- get_user(
- user_id=user_id,
- developer_id=developer_id,
- client=client,
- )
- except Exception:
- pass
- else:
- assert (
- False
- ), "Expected an exception to be raised when retrieving a non-existent user."
-
-
-@test("model: get user exists")
-def _(client=cozo_client, developer_id=test_developer_id, user=test_user):
- """Test that retrieving an existing user returns the correct user information."""
-
- result = get_user(
- user_id=user.id,
- developer_id=developer_id,
- client=client,
- )
-
- assert result is not None
- assert isinstance(result, User)
-
-
-@test("model: list users")
-def _(client=cozo_client, developer_id=test_developer_id, user=test_user):
- """Test that listing users returns a collection of user information."""
-
- result = list_users(
- developer_id=developer_id,
- client=client,
- )
-
- assert isinstance(result, list)
- assert len(result) >= 1
- assert all(isinstance(user, User) for user in result)
+# """
+# This module contains tests for SQL query generation functions in the users module.
+# Tests verify the SQL queries without actually executing them against a database.
+# """
+
+# from uuid import UUID
+
+# from uuid_extensions import uuid7
+# from ward import raises, test
+
+# from agents_api.autogen.openapi_model import (
+# CreateOrUpdateUserRequest,
+# CreateUserRequest,
+# PatchUserRequest,
+# ResourceUpdatedResponse,
+# UpdateUserRequest,
+# User,
+# )
+# from agents_api.queries.users import (
+# create_or_update_user,
+# create_user,
+# delete_user,
+# get_user,
+# list_users,
+# patch_user,
+# update_user,
+# )
+# from tests.fixtures import pg_client, test_developer_id, test_user
+
+# # Test UUIDs for consistent testing
+# TEST_DEVELOPER_ID = UUID("123e4567-e89b-12d3-a456-426614174000")
+# TEST_USER_ID = UUID("987e6543-e21b-12d3-a456-426614174000")
+
+
+# @test("query: create user sql")
+# def _(client=pg_client, developer_id=test_developer_id):
+# """Test that a user can be successfully created."""
+
+# create_user(
+# developer_id=developer_id,
+# data=CreateUserRequest(
+# name="test user",
+# about="test user about",
+# ),
+# client=client,
+# )
+
+
+# @test("query: create or update user sql")
+# def _(client=pg_client, developer_id=test_developer_id):
+# """Test that a user can be successfully created or updated."""
+
+# create_or_update_user(
+# developer_id=developer_id,
+# user_id=uuid7(),
+# data=CreateOrUpdateUserRequest(
+# name="test user",
+# about="test user about",
+# ),
+# client=client,
+# )
+
+
+# @test("query: update user sql")
+# def _(client=pg_client, developer_id=test_developer_id, user=test_user):
+# """Test that an existing user's information can be successfully updated."""
+
+# # Verify that the 'updated_at' timestamp is greater than the 'created_at' timestamp, indicating a successful update.
+# update_result = update_user(
+# user_id=user.id,
+# developer_id=developer_id,
+# data=UpdateUserRequest(
+# name="updated user",
+# about="updated user about",
+# ),
+# client=client,
+# )
+
+# assert update_result is not None
+# assert isinstance(update_result, ResourceUpdatedResponse)
+# assert update_result.updated_at > user.created_at
+
+
+# @test("query: get user not exists sql")
+# def _(client=pg_client, developer_id=test_developer_id):
+# """Test that retrieving a non-existent user returns an empty result."""
+
+# user_id = uuid7()
+
+#     # Ensure that the query for a non-existent user raises instead of returning a row.
+# try:
+# get_user(
+# user_id=user_id,
+# developer_id=developer_id,
+# client=client,
+# )
+# except Exception:
+# pass
+# else:
+# assert (
+# False
+# ), "Expected an exception to be raised when retrieving a non-existent user."
+
+
+# @test("query: get user exists sql")
+# def _(client=pg_client, developer_id=test_developer_id, user=test_user):
+# """Test that retrieving an existing user returns the correct user information."""
+
+# result = get_user(
+# user_id=user.id,
+# developer_id=developer_id,
+# client=client,
+# )
+
+# assert result is not None
+# assert isinstance(result, User)
+
+
+# @test("query: list users sql")
+# def _(client=pg_client, developer_id=test_developer_id):
+# """Test that listing users returns a collection of user information."""
+
+# result = list_users(
+# developer_id=developer_id,
+# client=client,
+# )
+
+# assert isinstance(result, list)
+# assert len(result) >= 1
+# assert all(isinstance(user, User) for user in result)
+
+
+# @test("query: patch user sql")
+# def _(client=pg_client, developer_id=test_developer_id, user=test_user):
+# """Test that a user can be successfully patched."""
+
+# patch_result = patch_user(
+# developer_id=developer_id,
+# user_id=user.id,
+# data=PatchUserRequest(
+# name="patched user",
+# about="patched user about",
+# metadata={"test": "metadata"},
+# ),
+# client=client,
+# )
+
+# assert patch_result is not None
+# assert isinstance(patch_result, ResourceUpdatedResponse)
+# assert patch_result.updated_at > user.created_at
+
+
+# @test("query: delete user sql")
+# def _(client=pg_client, developer_id=test_developer_id, user=test_user):
+# """Test that a user can be successfully deleted."""
+
+# delete_result = delete_user(
+# developer_id=developer_id,
+# user_id=user.id,
+# client=client,
+# )
+
+# assert delete_result is not None
+# assert isinstance(delete_result, ResourceUpdatedResponse)
+
+# # Verify the user no longer exists
+# try:
+# get_user(
+# developer_id=developer_id,
+# user_id=user.id,
+# client=client,
+# )
+# except Exception:
+# pass
+# else:
+# assert (
+# False
+# ), "Expected an exception to be raised when retrieving a deleted user."
diff --git a/agents-api/tests/test_user_routes.py b/agents-api/tests/test_user_routes.py
index a0696ed51..35f3b8fc7 100644
--- a/agents-api/tests/test_user_routes.py
+++ b/agents-api/tests/test_user_routes.py
@@ -1,185 +1,185 @@
-# Tests for user routes
+# # Tests for user routes
-from uuid_extensions import uuid7
-from ward import test
+# from uuid_extensions import uuid7
+# from ward import test
-from tests.fixtures import client, make_request, test_user
+# from tests.fixtures import client, make_request, test_user
-@test("route: unauthorized should fail")
-def _(client=client):
- data = dict(
- name="test user",
- about="test user about",
- )
+# @test("route: unauthorized should fail")
+# def _(client=client):
+# data = dict(
+# name="test user",
+# about="test user about",
+# )
- response = client.request(
- method="POST",
- url="/users",
- data=data,
- )
+# response = client.request(
+# method="POST",
+# url="/users",
+# data=data,
+# )
- assert response.status_code == 403
+# assert response.status_code == 403
-@test("route: create user")
-def _(make_request=make_request):
- data = dict(
- name="test user",
- about="test user about",
- )
+# @test("route: create user")
+# def _(make_request=make_request):
+# data = dict(
+# name="test user",
+# about="test user about",
+# )
- response = make_request(
- method="POST",
- url="/users",
- json=data,
- )
+# response = make_request(
+# method="POST",
+# url="/users",
+# json=data,
+# )
- assert response.status_code == 201
+# assert response.status_code == 201
-@test("route: get user not exists")
-def _(make_request=make_request):
- user_id = str(uuid7())
+# @test("route: get user not exists")
+# def _(make_request=make_request):
+# user_id = str(uuid7())
- response = make_request(
- method="GET",
- url=f"/users/{user_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/users/{user_id}",
+# )
- assert response.status_code == 404
+# assert response.status_code == 404
-@test("route: get user exists")
-def _(make_request=make_request, user=test_user):
- user_id = str(user.id)
+# @test("route: get user exists")
+# def _(make_request=make_request, user=test_user):
+# user_id = str(user.id)
- response = make_request(
- method="GET",
- url=f"/users/{user_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/users/{user_id}",
+# )
- assert response.status_code != 404
+# assert response.status_code != 404
-@test("route: delete user")
-def _(make_request=make_request):
- data = dict(
- name="test user",
- about="test user about",
- )
+# @test("route: delete user")
+# def _(make_request=make_request):
+# data = dict(
+# name="test user",
+# about="test user about",
+# )
- response = make_request(
- method="POST",
- url="/users",
- json=data,
- )
- user_id = response.json()["id"]
+# response = make_request(
+# method="POST",
+# url="/users",
+# json=data,
+# )
+# user_id = response.json()["id"]
- response = make_request(
- method="DELETE",
- url=f"/users/{user_id}",
- )
+# response = make_request(
+# method="DELETE",
+# url=f"/users/{user_id}",
+# )
- assert response.status_code == 202
+# assert response.status_code == 202
- response = make_request(
- method="GET",
- url=f"/users/{user_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/users/{user_id}",
+# )
- assert response.status_code == 404
+# assert response.status_code == 404
-@test("route: update user")
-def _(make_request=make_request, user=test_user):
- data = dict(
- name="updated user",
- about="updated user about",
- )
+# @test("route: update user")
+# def _(make_request=make_request, user=test_user):
+# data = dict(
+# name="updated user",
+# about="updated user about",
+# )
- user_id = str(user.id)
- response = make_request(
- method="PUT",
- url=f"/users/{user_id}",
- json=data,
- )
+# user_id = str(user.id)
+# response = make_request(
+# method="PUT",
+# url=f"/users/{user_id}",
+# json=data,
+# )
- assert response.status_code == 200
+# assert response.status_code == 200
- user_id = response.json()["id"]
+# user_id = response.json()["id"]
- response = make_request(
- method="GET",
- url=f"/users/{user_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/users/{user_id}",
+# )
- assert response.status_code == 200
- user = response.json()
+# assert response.status_code == 200
+# user = response.json()
- assert user["name"] == "updated user"
- assert user["about"] == "updated user about"
+# assert user["name"] == "updated user"
+# assert user["about"] == "updated user about"
-@test("model: patch user")
-def _(make_request=make_request, user=test_user):
- user_id = str(user.id)
+# @test("query: patch user")
+# def _(make_request=make_request, user=test_user):
+# user_id = str(user.id)
- data = dict(
- name="patched user",
- about="patched user about",
- )
+# data = dict(
+# name="patched user",
+# about="patched user about",
+# )
- response = make_request(
- method="PATCH",
- url=f"/users/{user_id}",
- json=data,
- )
+# response = make_request(
+# method="PATCH",
+# url=f"/users/{user_id}",
+# json=data,
+# )
- assert response.status_code == 200
+# assert response.status_code == 200
- user_id = response.json()["id"]
+# user_id = response.json()["id"]
- response = make_request(
- method="GET",
- url=f"/users/{user_id}",
- )
+# response = make_request(
+# method="GET",
+# url=f"/users/{user_id}",
+# )
- assert response.status_code == 200
- user = response.json()
+# assert response.status_code == 200
+# user = response.json()
- assert user["name"] == "patched user"
- assert user["about"] == "patched user about"
+# assert user["name"] == "patched user"
+# assert user["about"] == "patched user about"
-@test("model: list users")
-def _(make_request=make_request):
- response = make_request(
- method="GET",
- url="/users",
- )
+# @test("query: list users")
+# def _(make_request=make_request):
+# response = make_request(
+# method="GET",
+# url="/users",
+# )
- assert response.status_code == 200
- response = response.json()
- users = response["items"]
+# assert response.status_code == 200
+# response = response.json()
+# users = response["items"]
- assert isinstance(users, list)
- assert len(users) > 0
+# assert isinstance(users, list)
+# assert len(users) > 0
-@test("model: list users with right metadata filter")
-def _(make_request=make_request, user=test_user):
- response = make_request(
- method="GET",
- url="/users",
- params={
- "metadata_filter": {"test": "test"},
- },
- )
+# @test("query: list users with right metadata filter")
+# def _(make_request=make_request, user=test_user):
+# response = make_request(
+# method="GET",
+# url="/users",
+# params={
+# "metadata_filter": {"test": "test"},
+# },
+# )
- assert response.status_code == 200
- response = response.json()
- users = response["items"]
+# assert response.status_code == 200
+# response = response.json()
+# users = response["items"]
- assert isinstance(users, list)
- assert len(users) > 0
+# assert isinstance(users, list)
+# assert len(users) > 0
diff --git a/agents-api/tests/test_user_sql.py b/agents-api/tests/test_user_sql.py
deleted file mode 100644
index 50b6d096b..000000000
--- a/agents-api/tests/test_user_sql.py
+++ /dev/null
@@ -1,178 +0,0 @@
-"""
-This module contains tests for SQL query generation functions in the users module.
-Tests verify the SQL queries without actually executing them against a database.
-"""
-
-from uuid import UUID
-
-from uuid_extensions import uuid7
-from ward import raises, test
-
-from agents_api.autogen.openapi_model import (
- CreateOrUpdateUserRequest,
- CreateUserRequest,
- PatchUserRequest,
- ResourceUpdatedResponse,
- UpdateUserRequest,
- User,
-)
-from agents_api.queries.users import (
- create_or_update_user,
- create_user,
- delete_user,
- get_user,
- list_users,
- patch_user,
- update_user,
-)
-from tests.fixtures import pg_client, test_developer_id, test_user
-
-# Test UUIDs for consistent testing
-TEST_DEVELOPER_ID = UUID("123e4567-e89b-12d3-a456-426614174000")
-TEST_USER_ID = UUID("987e6543-e21b-12d3-a456-426614174000")
-
-
-@test("model: create user sql")
-def _(client=pg_client, developer_id=test_developer_id):
- """Test that a user can be successfully created."""
-
- create_user(
- developer_id=developer_id,
- data=CreateUserRequest(
- name="test user",
- about="test user about",
- ),
- client=client,
- )
-
-
-@test("model: create or update user sql")
-def _(client=pg_client, developer_id=test_developer_id):
- """Test that a user can be successfully created or updated."""
-
- create_or_update_user(
- developer_id=developer_id,
- user_id=uuid7(),
- data=CreateOrUpdateUserRequest(
- name="test user",
- about="test user about",
- ),
- client=client,
- )
-
-
-@test("model: update user sql")
-def _(client=pg_client, developer_id=test_developer_id, user=test_user):
- """Test that an existing user's information can be successfully updated."""
-
- # Verify that the 'updated_at' timestamp is greater than the 'created_at' timestamp, indicating a successful update.
- update_result = update_user(
- user_id=user.id,
- developer_id=developer_id,
- data=UpdateUserRequest(
- name="updated user",
- about="updated user about",
- ),
- client=client,
- )
-
- assert update_result is not None
- assert isinstance(update_result, ResourceUpdatedResponse)
- assert update_result.updated_at > user.created_at
-
-
-@test("model: get user not exists sql")
-def _(client=pg_client, developer_id=test_developer_id):
- """Test that retrieving a non-existent user returns an empty result."""
-
- user_id = uuid7()
-
- # Ensure that the query for an existing user returns exactly one result.
- try:
- get_user(
- user_id=user_id,
- developer_id=developer_id,
- client=client,
- )
- except Exception:
- pass
- else:
- assert (
- False
- ), "Expected an exception to be raised when retrieving a non-existent user."
-
-
-@test("model: get user exists sql")
-def _(client=pg_client, developer_id=test_developer_id, user=test_user):
- """Test that retrieving an existing user returns the correct user information."""
-
- result = get_user(
- user_id=user.id,
- developer_id=developer_id,
- client=client,
- )
-
- assert result is not None
- assert isinstance(result, User)
-
-
-@test("model: list users sql")
-def _(client=pg_client, developer_id=test_developer_id):
- """Test that listing users returns a collection of user information."""
-
- result = list_users(
- developer_id=developer_id,
- client=client,
- )
-
- assert isinstance(result, list)
- assert len(result) >= 1
- assert all(isinstance(user, User) for user in result)
-
-
-@test("model: patch user sql")
-def _(client=pg_client, developer_id=test_developer_id, user=test_user):
- """Test that a user can be successfully patched."""
-
- patch_result = patch_user(
- developer_id=developer_id,
- user_id=user.id,
- data=PatchUserRequest(
- name="patched user",
- about="patched user about",
- metadata={"test": "metadata"},
- ),
- client=client,
- )
-
- assert patch_result is not None
- assert isinstance(patch_result, ResourceUpdatedResponse)
- assert patch_result.updated_at > user.created_at
-
-
-@test("model: delete user sql")
-def _(client=pg_client, developer_id=test_developer_id, user=test_user):
- """Test that a user can be successfully deleted."""
-
- delete_result = delete_user(
- developer_id=developer_id,
- user_id=user.id,
- client=client,
- )
-
- assert delete_result is not None
- assert isinstance(delete_result, ResourceUpdatedResponse)
-
- # Verify the user no longer exists
- try:
- get_user(
- developer_id=developer_id,
- user_id=user.id,
- client=client,
- )
- except Exception:
- pass
- else:
- assert (
- False
- ), "Expected an exception to be raised when retrieving a deleted user."
diff --git a/agents-api/tests/test_workflow_routes.py b/agents-api/tests/test_workflow_routes.py
index d7bdad027..3487f605e 100644
--- a/agents-api/tests/test_workflow_routes.py
+++ b/agents-api/tests/test_workflow_routes.py
@@ -1,135 +1,135 @@
-# Tests for task queries
-
-from uuid_extensions import uuid7
-from ward import test
-
-from tests.fixtures import cozo_client, test_agent, test_developer_id
-from tests.utils import patch_http_client_with_temporal
-
-
-@test("workflow route: evaluate step single")
-async def _(
- cozo_client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- agent_id = str(agent.id)
- task_id = str(uuid7())
-
- async with patch_http_client_with_temporal(
- cozo_client=cozo_client, developer_id=developer_id
- ) as (
- make_request,
- client,
- ):
- task_data = {
- "name": "test task",
- "description": "test task about",
- "input_schema": {"type": "object", "additionalProperties": True},
- "main": [{"evaluate": {"hello": '"world"'}}],
- }
-
- make_request(
- method="POST",
- url=f"/agents/{agent_id}/tasks/{task_id}",
- json=task_data,
- ).raise_for_status()
-
- execution_data = dict(input={"test": "input"})
-
- make_request(
- method="POST",
- url=f"/tasks/{task_id}/executions",
- json=execution_data,
- ).raise_for_status()
-
-
-@test("workflow route: evaluate step single with yaml")
-async def _(
- cozo_client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- agent_id = str(agent.id)
-
- async with patch_http_client_with_temporal(
- cozo_client=cozo_client, developer_id=developer_id
- ) as (
- make_request,
- client,
- ):
- task_data = """
-name: test task
-description: test task about
-input_schema:
- type: object
- additionalProperties: true
-
-main:
- - evaluate:
- hello: '"world"'
-"""
-
- result = (
- make_request(
- method="POST",
- url=f"/agents/{agent_id}/tasks",
- content=task_data.encode("utf-8"),
- headers={"Content-Type": "text/yaml"},
- )
- .raise_for_status()
- .json()
- )
-
- task_id = result["id"]
-
- execution_data = dict(input={"test": "input"})
-
- make_request(
- method="POST",
- url=f"/tasks/{task_id}/executions",
- json=execution_data,
- ).raise_for_status()
-
-
-@test("workflow route: create or update: evaluate step single with yaml")
-async def _(
- cozo_client=cozo_client,
- developer_id=test_developer_id,
- agent=test_agent,
-):
- agent_id = str(agent.id)
- task_id = str(uuid7())
-
- async with patch_http_client_with_temporal(
- cozo_client=cozo_client, developer_id=developer_id
- ) as (
- make_request,
- client,
- ):
- task_data = """
-name: test task
-description: test task about
-input_schema:
- type: object
- additionalProperties: true
-
-main:
- - evaluate:
- hello: '"world"'
-"""
-
- make_request(
- method="POST",
- url=f"/agents/{agent_id}/tasks/{task_id}",
- content=task_data.encode("utf-8"),
- headers={"Content-Type": "text/yaml"},
- ).raise_for_status()
-
- execution_data = dict(input={"test": "input"})
-
- make_request(
- method="POST",
- url=f"/tasks/{task_id}/executions",
- json=execution_data,
- ).raise_for_status()
+# # Tests for task queries
+
+# from uuid_extensions import uuid7
+# from ward import test
+
+# from tests.fixtures import cozo_client, test_agent, test_developer_id
+# from tests.utils import patch_http_client_with_temporal
+
+
+# @test("workflow route: evaluate step single")
+# async def _(
+# cozo_client=cozo_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# agent_id = str(agent.id)
+# task_id = str(uuid7())
+
+# async with patch_http_client_with_temporal(
+# cozo_client=cozo_client, developer_id=developer_id
+# ) as (
+# make_request,
+# client,
+# ):
+# task_data = {
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [{"evaluate": {"hello": '"world"'}}],
+# }
+
+# make_request(
+# method="POST",
+# url=f"/agents/{agent_id}/tasks/{task_id}",
+# json=task_data,
+# ).raise_for_status()
+
+# execution_data = dict(input={"test": "input"})
+
+# make_request(
+# method="POST",
+# url=f"/tasks/{task_id}/executions",
+# json=execution_data,
+# ).raise_for_status()
+
+
+# @test("workflow route: evaluate step single with yaml")
+# async def _(
+# cozo_client=cozo_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# agent_id = str(agent.id)
+
+# async with patch_http_client_with_temporal(
+# cozo_client=cozo_client, developer_id=developer_id
+# ) as (
+# make_request,
+# client,
+# ):
+# task_data = """
+# name: test task
+# description: test task about
+# input_schema:
+# type: object
+# additionalProperties: true
+
+# main:
+# - evaluate:
+# hello: '"world"'
+# """
+
+# result = (
+# make_request(
+# method="POST",
+# url=f"/agents/{agent_id}/tasks",
+# content=task_data.encode("utf-8"),
+# headers={"Content-Type": "text/yaml"},
+# )
+# .raise_for_status()
+# .json()
+# )
+
+# task_id = result["id"]
+
+# execution_data = dict(input={"test": "input"})
+
+# make_request(
+# method="POST",
+# url=f"/tasks/{task_id}/executions",
+# json=execution_data,
+# ).raise_for_status()
+
+
+# @test("workflow route: create or update: evaluate step single with yaml")
+# async def _(
+# cozo_client=cozo_client,
+# developer_id=test_developer_id,
+# agent=test_agent,
+# ):
+# agent_id = str(agent.id)
+# task_id = str(uuid7())
+
+# async with patch_http_client_with_temporal(
+# cozo_client=cozo_client, developer_id=developer_id
+# ) as (
+# make_request,
+# client,
+# ):
+# task_data = """
+# name: test task
+# description: test task about
+# input_schema:
+# type: object
+# additionalProperties: true
+
+# main:
+# - evaluate:
+# hello: '"world"'
+# """
+
+# make_request(
+# method="POST",
+# url=f"/agents/{agent_id}/tasks/{task_id}",
+# content=task_data.encode("utf-8"),
+# headers={"Content-Type": "text/yaml"},
+# ).raise_for_status()
+
+# execution_data = dict(input={"test": "input"})
+
+# make_request(
+# method="POST",
+# url=f"/tasks/{task_id}/executions",
+# json=execution_data,
+# ).raise_for_status()
diff --git a/agents-api/tests/utils.py b/agents-api/tests/utils.py
index 130518419..330f312b4 100644
--- a/agents-api/tests/utils.py
+++ b/agents-api/tests/utils.py
@@ -1,14 +1,18 @@
import asyncio
+import json
 import logging
+import subprocess
 from contextlib import asynccontextmanager, contextmanager
 from dataclasses import dataclass
 from typing import Any, Dict, Optional
 from unittest.mock import patch
+import asyncpg
from botocore import exceptions
from fastapi.testclient import TestClient
from litellm.types.utils import ModelResponse
from temporalio.testing import WorkflowEnvironment
+from testcontainers.postgres import PostgresContainer
from agents_api.worker.codec import pydantic_data_converter
from agents_api.worker.worker import create_worker
@@ -170,3 +174,32 @@ async def __aexit__(self, *_):
with patch("agents_api.clients.async_s3.get_session") as get_session:
get_session.return_value = mock_session
yield mock_session
+
+@asynccontextmanager
+async def patch_pg_client():
+ # with patch("agents_api.clients.pg.get_pg_client") as get_pg_client:
+
+ with PostgresContainer("timescale/timescaledb-ha:pg17") as postgres:
+ test_psql_url = postgres.get_connection_url()
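+        # get_connection_url() yields a SQLAlchemy-style URL
+        # ("postgresql+psycopg2://user:pass@host:port/db"); slicing off that
+        # 22-char driver prefix leaves a plain DSN usable by asyncpg/migrate.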
+ pg_dsn = f"postgres://{test_psql_url[22:]}?sslmode=disable"
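+        # Apply the SQL migrations with the golang-migrate CLI (`migrate` is
+        # assumed to be installed and on PATH).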
+ command = f"migrate -database '{pg_dsn}' -path ../memory-store/migrations/ up"
+ process = subprocess.Popen(command, shell=True)
+ process.wait()
+
+ client = await asyncpg.connect(pg_dsn)
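+        # asyncpg returns json/jsonb values as raw strings by default;
+        # registering these codecs makes jsonb columns round-trip as Python
+        # objects.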
+ await client.set_type_codec(
+ "jsonb",
+ encoder=json.dumps,
+ decoder=json.loads,
+ schema="pg_catalog",
+ )
+
+ # get_pg_client.return_value = client
+ yield client
diff --git a/agents-api/uv.lock b/agents-api/uv.lock
index 01a1178c4..9fadcd0cb 100644
--- a/agents-api/uv.lock
+++ b/agents-api/uv.lock
@@ -37,8 +37,6 @@ dependencies = [
{ name = "pandas" },
{ name = "prometheus-client" },
{ name = "prometheus-fastapi-instrumentator" },
- { name = "pycozo", extra = ["embedded"] },
- { name = "pycozo-async" },
{ name = "pydantic", extra = ["email"] },
{ name = "pydantic-partial" },
{ name = "python-box" },
@@ -62,7 +60,6 @@ dependencies = [
[package.dev-dependencies]
dev = [
- { name = "cozo-migrate" },
{ name = "datamodel-code-generator" },
{ name = "ipython" },
{ name = "ipywidgets" },
@@ -74,6 +71,7 @@ dev = [
{ name = "pyright" },
{ name = "pytype" },
{ name = "ruff" },
+ { name = "testcontainers" },
{ name = "ward" },
]
@@ -106,8 +104,6 @@ requires-dist = [
{ name = "pandas", specifier = "~=2.2.2" },
{ name = "prometheus-client", specifier = "~=0.21.0" },
{ name = "prometheus-fastapi-instrumentator", specifier = "~=7.0.0" },
- { name = "pycozo", extras = ["embedded"], specifier = "~=0.7.6" },
- { name = "pycozo-async", specifier = "~=0.7.7" },
{ name = "pydantic", extras = ["email"], specifier = "~=2.10.2" },
{ name = "pydantic-partial", specifier = "~=0.5.5" },
{ name = "python-box", specifier = "~=7.2.0" },
@@ -131,7 +127,6 @@ requires-dist = [
[package.metadata.requires-dev]
dev = [
- { name = "cozo-migrate", specifier = ">=0.2.4" },
{ name = "datamodel-code-generator", specifier = ">=0.26.3" },
{ name = "ipython", specifier = ">=8.30.0" },
{ name = "ipywidgets", specifier = ">=8.1.5" },
@@ -143,6 +138,7 @@ dev = [
{ name = "pyright", specifier = ">=1.1.389" },
{ name = "pytype", specifier = ">=2024.10.11" },
{ name = "ruff", specifier = ">=0.8.1" },
+ { name = "testcontainers", extras = ["postgres"], specifier = ">=4.9.0" },
{ name = "ward", specifier = ">=0.68.0b0" },
]
@@ -608,37 +604,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0c/00/3106b1854b45bd0474ced037dfe6b73b90fe68a68968cef47c23de3d43d2/confection-0.1.5-py3-none-any.whl", hash = "sha256:e29d3c3f8eac06b3f77eb9dfb4bf2fc6bcc9622a98ca00a698e3d019c6430b14", size = 35451 },
]
-[[package]]
-name = "cozo-embedded"
-version = "0.7.6"
-source = { registry = "https://pypi.org/simple" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/d3/17/e4a139cad601150303095532c51ab981b7b1ee9f6278188bedfe551c46e2/cozo_embedded-0.7.6-cp37-abi3-macosx_10_14_x86_64.whl", hash = "sha256:d146e76736beb5e14e0cf73dc8babefadfbbc358b325c94c64a51b6d5b0031e9", size = 9542067 },
- { url = "https://files.pythonhosted.org/packages/65/3b/92fe8c7c7b2b83974ae051c92697d92e860625326cfc06cb4c54222c2fc0/cozo_embedded-0.7.6-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:7341fa266369181bbc19ad9e68820b51900b0fe1c947318a3d860b570dca6e09", size = 8325766 },
- { url = "https://files.pythonhosted.org/packages/15/bf/19020af2645d8ea398e719bce8fcf7a91c341467aed9804c6d5f6ac878c2/cozo_embedded-0.7.6-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80de79554138628967d4fd2636fc0a0a8dcca1c0c3bb527e638f1ee6cb763d7d", size = 10515504 },
- { url = "https://files.pythonhosted.org/packages/db/a7/3c96a4077520ee3179b5eaeba350132a854b3aca34d1168f335bfcd0038d/cozo_embedded-0.7.6-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7571f6521041c13b7e9ca8ab8809cf9c8eaad929726ed6190ffc25a5a3ab57a7", size = 11135792 },
- { url = "https://files.pythonhosted.org/packages/58/f7/5c6ec98d3983968df1d6709f1faa88a44b8c0fa7cd80994bc7f7d6b10293/cozo_embedded-0.7.6-cp37-abi3-win_amd64.whl", hash = "sha256:c945ab7b350d0b79d3e643b68ebc8343fc02d223a02ab929eb0fb8e4e0df3542", size = 9532612 },
-]
-
-[[package]]
-name = "cozo-migrate"
-version = "0.2.4"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "colorama" },
- { name = "cozo-embedded" },
- { name = "pandas" },
- { name = "pycozo" },
- { name = "requests" },
- { name = "rich" },
- { name = "shellingham" },
- { name = "typer" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/b1/3a/f66a88c50c5dd7bb7cb98d84f4d3e45bb2cfe1dba524f775f88b065b563b/cozo_migrate-0.2.4.tar.gz", hash = "sha256:ccb852f00bb25ff7c431dc8fa8a81e8f9f10198ad76aa34d1239d67f1613b899", size = 14317 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/26/ce/2dc5dc2be88ab79ed24b1412b7745c690e7f684e1665eb4feeb6300056bd/cozo_migrate-0.2.4-py3-none-any.whl", hash = "sha256:518151d65c81968e42402470418f42c8580e972f0b949df6c5c499cc2b098c1b", size = 21466 },
-]
-
[[package]]
name = "cucumber-tag-expressions"
version = "4.1.0"
@@ -739,6 +704,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 },
]
+[[package]]
+name = "docker"
+version = "7.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pywin32", marker = "sys_platform == 'win32'" },
+ { name = "requests" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 },
+]
+
[[package]]
name = "email-validator"
version = "2.2.0"
@@ -2219,35 +2198,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/20/af/7ba371f966657f6e7b1c9876cae7e9f1c5d3635c3df1329636b99e615494/pycnite-2024.7.31-py3-none-any.whl", hash = "sha256:9ff9c09d35056435b867e14ebf79626ca94b6017923a0bf9935377fa90d4cbb3", size = 22939 },
]
-[[package]]
-name = "pycozo"
-version = "0.7.6"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/92/18/dc0dd2db0f1661e2cf17a653da59b6812f30ddc976a66b7972fd5d2809bc/pycozo-0.7.6.tar.gz", hash = "sha256:e4be9a091ba71e9d4465179bbf7557d47af84c8114d4889bd5fa13c731d57a95", size = 19091 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/a1/e9/47ccff69e94bc80388c67e12b3c25244198fcfb1d3fad96489ed436a8e3f/pycozo-0.7.6-py3-none-any.whl", hash = "sha256:8930de5f82277d6481998a585c79aa898991cfb0692e168bde8b0a4558d579cf", size = 18977 },
-]
-
-[package.optional-dependencies]
-embedded = [
- { name = "cozo-embedded" },
-]
-
-[[package]]
-name = "pycozo-async"
-version = "0.7.7"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "cozo-embedded" },
- { name = "httpx" },
- { name = "ipython" },
- { name = "pandas" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/01/17/2fc41dd8311f366625fc6fb70fe2dc27c345da8db0a4de78f39ccf759977/pycozo_async-0.7.7.tar.gz", hash = "sha256:fae95d8e9e11448263a752983b12a5a05b7656fa1dda0eeeb6f213d6fc592e1d", size = 21559 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/22/64/63330e6bd9bc30abfc863bd392c20c81f8ad1d6b5d1b6511d477496a6fbe/pycozo_async-0.7.7-py3-none-any.whl", hash = "sha256:2c23b184f6295d4dc6178350425110467e512638b3f4def937ed0609df321dd1", size = 22714 },
-]
-
[[package]]
name = "pycparser"
version = "2.22"
@@ -3017,6 +2967,22 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6a/9e/2064975477fdc887e47ad42157e214526dcad8f317a948dee17e1659a62f/terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0", size = 14154 },
]
+[[package]]
+name = "testcontainers"
+version = "4.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "docker" },
+ { name = "python-dotenv" },
+ { name = "typing-extensions" },
+ { name = "urllib3" },
+ { name = "wrapt" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2e/9a/e1ac5231231192b39302fcad7de2c0dbfc718c0636d7e28917c30ec57c41/testcontainers-4.9.0.tar.gz", hash = "sha256:2cd6af070109ff68c1ab5389dc89c86c2dc3ab30a21ca734b2cb8f0f80ad479e", size = 64612 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl", hash = "sha256:c6fee929990972c40bf6b91b7072c94064ff3649b405a14fde0274c8b2479d32", size = 105324 },
+]
+
[[package]]
name = "thefuzz"
version = "0.22.1"
diff --git a/memory-store/docker-compose.yml b/memory-store/docker-compose.yml
index 775a97b82..cb687142a 100644
--- a/memory-store/docker-compose.yml
+++ b/memory-store/docker-compose.yml
@@ -1,20 +1,30 @@
name: pgai
services:
- db:
- image: timescale/timescaledb-ha:pg17
- environment:
- - POSTGRES_PASSWORD=${MEMORY_STORE_PASSWORD:-postgres}
- - VOYAGE_API_KEY=${VOYAGE_API_KEY}
- ports:
- - "5432:5432"
- volumes:
- - memory_store_data:/home/postgres/pgdata/data
- vectorizer-worker:
- image: timescale/pgai-vectorizer-worker:v0.3.0
- environment:
- - PGAI_VECTORIZER_WORKER_DB_URL=postgres://postgres:${MEMORY_STORE_PASSWORD:-postgres}@db:5432/postgres
- - VOYAGE_API_KEY=${VOYAGE_API_KEY}
- command: [ "--poll-interval", "5s" ]
+ db:
+ image: timescale/timescaledb-ha:pg17
+
+ # For timescaledb specific options,
+ # See: https://github.com/timescale/timescaledb-docker?tab=readme-ov-file#notes-on-timescaledb-tune
+ environment:
+ - POSTGRES_PASSWORD=${MEMORY_STORE_PASSWORD:-postgres}
+ - VOYAGE_API_KEY=${VOYAGE_API_KEY}
+ ports:
+ - "5432:5432"
+ volumes:
+ - memory_store_data:/home/postgres/pgdata/data
+
+ # TODO: Fix this to install pgaudit
+ # entrypoint: []
+ # command: >-
+ # sed -r -i "s/[#]*\s*(shared_preload_libraries)\s*=\s*'(.*)'/\1 = 'pgaudit,\2'/;s/,'/'/" /home/postgres/pgdata/data/postgresql.conf
+ # && exec /docker-entrypoint.sh
+
+ vectorizer-worker:
+ image: timescale/pgai-vectorizer-worker:v0.3.0
+ environment:
+ - PGAI_VECTORIZER_WORKER_DB_URL=postgres://postgres:${MEMORY_STORE_PASSWORD:-postgres}@db:5432/postgres
+ - VOYAGE_API_KEY=${VOYAGE_API_KEY}
+ command: [ "--poll-interval", "5s" ]
volumes:
memory_store_data:
From da26a5ed21d06a58a5b09a54d149dd5ed245b02e Mon Sep 17 00:00:00 2001
From: creatorrr
Date: Mon, 16 Dec 2024 19:08:11 +0000
Subject: [PATCH 030/274] refactor: Lint agents-api (CI)
---
agents-api/tests/fixtures.py | 8 ++++++--
agents-api/tests/test_developer_queries.py | 5 ++++-
agents-api/tests/utils.py | 3 ++-
3 files changed, 12 insertions(+), 4 deletions(-)
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index fdf04822c..520fbf922 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -40,22 +40,25 @@
# from agents_api.queries.tools.delete_tool import delete_tool
from agents_api.queries.users.create_user import create_user
from agents_api.queries.users.delete_user import delete_user
+
# from agents_api.web import app
from .utils import (
patch_embed_acompletion as patch_embed_acompletion_ctx,
- patch_pg_client,
)
from .utils import (
+ patch_pg_client,
patch_s3_client,
)
EMBEDDING_SIZE: int = 1024
+
@fixture(scope="global")
async def pg_client():
async with patch_pg_client() as pg_client:
yield pg_client
+
@fixture(scope="global")
def test_developer_id():
if not multi_tenant_mode:
@@ -66,6 +69,7 @@ def test_developer_id():
yield developer_id
+
# @fixture(scope="global")
# def test_file(client=pg_client, developer_id=test_developer_id):
# file = create_file(
@@ -316,7 +320,7 @@ def test_user(pg_client=pg_client, developer_id=test_developer_id):
# data=[CreateToolRequest(**tool)],
# client=client,
# )
-#
+#
# yield tool
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index adba5ddd1..9ac65dda9 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -4,7 +4,10 @@
from ward import raises, test
from agents_api.common.protocol.developers import Developer
-from agents_api.queries.developers.get_developer import get_developer # , verify_developer
+from agents_api.queries.developers.get_developer import (
+ get_developer,
+) # , verify_developer
+
from .fixtures import pg_client, test_developer_id
diff --git a/agents-api/tests/utils.py b/agents-api/tests/utils.py
index 330f312b4..a6a591823 100644
--- a/agents-api/tests/utils.py
+++ b/agents-api/tests/utils.py
@@ -1,9 +1,9 @@
import asyncio
import json
import logging
+import subprocess
from contextlib import asynccontextmanager, contextmanager
from dataclasses import dataclass
-import subprocess
from typing import Any, Dict, Optional
from unittest.mock import patch
@@ -175,6 +175,7 @@ async def __aexit__(self, *_):
get_session.return_value = mock_session
yield mock_session
+
@asynccontextmanager
async def patch_pg_client():
# with patch("agents_api.clients.pg.get_pg_client") as get_pg_client:
From 3a627b185d7ed30cf81cf33af1a3f76f7e67d2c1 Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Mon, 16 Dec 2024 18:31:44 -0500
Subject: [PATCH 031/274] feat(agents-api): Add entry queries
---
.../queries/{ => entry}/__init__.py | 0
.../queries/entry/create_entries.py | 101 ++++++++++++++++++
.../queries/entry/delete_entries.py | 43 ++++++++
.../agents_api/queries/entry/get_history.py | 71 ++++++++++++
.../agents_api/queries/entry/list_entries.py | 74 +++++++++++++
5 files changed, 289 insertions(+)
rename agents-api/agents_api/queries/{ => entry}/__init__.py (100%)
create mode 100644 agents-api/agents_api/queries/entry/create_entries.py
create mode 100644 agents-api/agents_api/queries/entry/delete_entries.py
create mode 100644 agents-api/agents_api/queries/entry/get_history.py
create mode 100644 agents-api/agents_api/queries/entry/list_entries.py
diff --git a/agents-api/agents_api/queries/__init__.py b/agents-api/agents_api/queries/entry/__init__.py
similarity index 100%
rename from agents-api/agents_api/queries/__init__.py
rename to agents-api/agents_api/queries/entry/__init__.py
diff --git a/agents-api/agents_api/queries/entry/create_entries.py b/agents-api/agents_api/queries/entry/create_entries.py
new file mode 100644
index 000000000..feeebde89
--- /dev/null
+++ b/agents-api/agents_api/queries/entry/create_entries.py
@@ -0,0 +1,101 @@
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+
+from ...autogen.openapi_model import CreateEntryRequest, Entry
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ...common.utils.datetime import utcnow
+from ...common.utils.messages import content_to_json
+from uuid_extensions import uuid7
+
+# Define the raw SQL query for creating entries
+raw_query = """
+INSERT INTO entries (
+ session_id,
+ entry_id,
+ source,
+ role,
+ event_type,
+ name,
+ content,
+ tool_call_id,
+ tool_calls,
+ model,
+ token_count,
+ created_at,
+ timestamp
+)
+VALUES (
+ $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13
+)
+RETURNING *;
+"""
+
+# Parse and optimize the query
+query = optimize(
+ parse_one(raw_query),
+ schema={
+ "entries": {
+ "session_id": "UUID",
+ "entry_id": "UUID",
+ "source": "TEXT",
+ "role": "chat_role",
+ "event_type": "TEXT",
+ "name": "TEXT",
+ "content": "JSONB[]",
+ "tool_call_id": "TEXT",
+ "tool_calls": "JSONB[]",
+ "model": "TEXT",
+ "token_count": "INTEGER",
+ "created_at": "TIMESTAMP",
+ "timestamp": "TIMESTAMP",
+ }
+ },
+).sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(HTTPException, status_code=400),
+ asyncpg.UniqueViolationError: partialclass(HTTPException, status_code=409),
+ }
+)
+@wrap_in_class(Entry)
+@increase_counter("create_entries")
+@pg_query
+@beartype
+def create_entries(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ data: list[CreateEntryRequest],
+ mark_session_as_updated: bool = True,
+) -> tuple[str, list]:
+
+ data_dicts = [item.model_dump(mode="json") for item in data]
+
+ params = [
+ (
+ session_id,
+ item.pop("id", None) or str(uuid7()),
+ item.get("source"),
+ item.get("role"),
+ item.get("event_type") or 'message.create',
+ item.get("name"),
+ content_to_json(item.get("content") or []),
+ item.get("tool_call_id"),
+ item.get("tool_calls") or [],
+ item.get("model"),
+ item.get("token_count"),
+ (item.get("created_at") or utcnow()).timestamp(),
+ utcnow().timestamp(),
+ )
+ for item in data_dicts
+ ]
+
+ return query, params
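+
+
+# Illustrative call (sketch only; the field values are hypothetical and the
+# real CreateEntryRequest schema lives in autogen.openapi_model):
+#
+#     await create_entries(
+#         developer_id=developer_id,
+#         session_id=session_id,
+#         data=[CreateEntryRequest(role="user", content=[], source="api_request")],
+#     )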
diff --git a/agents-api/agents_api/queries/entry/delete_entries.py b/agents-api/agents_api/queries/entry/delete_entries.py
new file mode 100644
index 000000000..0150be3ee
--- /dev/null
+++ b/agents-api/agents_api/queries/entry/delete_entries.py
@@ -0,0 +1,43 @@
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query for deleting entries
+raw_query = """
+DELETE FROM entries
+WHERE session_id = $1
+RETURNING session_id as id;
+"""
+
+# Parse and optimize the query
+query = optimize(
+ parse_one(raw_query),
+ schema={
+ "entries": {
+ "session_id": "UUID",
+ }
+ },
+).sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(HTTPException, status_code=400),
+ }
+)
+@wrap_in_class(ResourceDeletedResponse, one=True)
+@increase_counter("delete_entries_for_session")
+@pg_query
+@beartype
+def delete_entries_for_session(
+ *, developer_id: UUID, session_id: UUID, mark_session_as_updated: bool = True
+) -> tuple[str, dict]:
+ return query, [session_id]
diff --git a/agents-api/agents_api/queries/entry/get_history.py b/agents-api/agents_api/queries/entry/get_history.py
new file mode 100644
index 000000000..eae4f4e6c
--- /dev/null
+++ b/agents-api/agents_api/queries/entry/get_history.py
@@ -0,0 +1,71 @@
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+
+from ...autogen.openapi_model import History
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query for getting history
+raw_query = """
+SELECT
+ e.entry_id as id,
+ e.session_id,
+ e.role,
+ e.name,
+ e.content,
+ e.source,
+ e.token_count,
+ e.tokenizer,
+ e.created_at,
+ e.timestamp,
+ e.tool_calls,
+ e.tool_call_id
+FROM entries e
+WHERE e.session_id = $1
+AND e.source = ANY($2)
+ORDER BY e.created_at;
+"""
+
+# Parse and optimize the query
+query = optimize(
+ parse_one(raw_query),
+ schema={
+ "entries": {
+ "entry_id": "UUID",
+ "session_id": "UUID",
+ "role": "STRING",
+ "name": "STRING",
+ "content": "JSONB",
+ "source": "STRING",
+ "token_count": "INTEGER",
+ "tokenizer": "STRING",
+ "created_at": "TIMESTAMP",
+ "timestamp": "TIMESTAMP",
+ "tool_calls": "JSONB",
+ "tool_call_id": "UUID",
+ }
+ },
+).sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(HTTPException, status_code=400),
+ }
+)
+@wrap_in_class(History, one=True)
+@increase_counter("get_history")
+@pg_query
+@beartype
+def get_history(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ allowed_sources: list[str] = ["api_request", "api_response"],
+) -> tuple[str, list]:
+ return query, [session_id, allowed_sources]
diff --git a/agents-api/agents_api/queries/entry/list_entries.py b/agents-api/agents_api/queries/entry/list_entries.py
new file mode 100644
index 000000000..e5884b1b3
--- /dev/null
+++ b/agents-api/agents_api/queries/entry/list_entries.py
@@ -0,0 +1,74 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+
+from ...autogen.openapi_model import Entry
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query for listing entries
+raw_query = """
+SELECT
+ e.entry_id as id,
+ e.session_id,
+ e.role,
+ e.name,
+ e.content,
+ e.source,
+ e.token_count,
+ e.tokenizer,
+ e.created_at,
+ e.timestamp
+FROM entries e
+WHERE e.session_id = $1
+AND e.source = ANY($2)
+ORDER BY e.$3 $4
+LIMIT $5 OFFSET $6;
+"""
+
+# Parse and optimize the query
+query = optimize(
+ parse_one(raw_query),
+ schema={
+ "entries": {
+ "entry_id": "UUID",
+ "session_id": "UUID",
+ "role": "STRING",
+ "name": "STRING",
+ "content": "JSONB",
+ "source": "STRING",
+ "token_count": "INTEGER",
+ "tokenizer": "STRING",
+ "created_at": "TIMESTAMP",
+ "timestamp": "TIMESTAMP",
+ }
+ },
+).sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(HTTPException, status_code=400),
+ }
+)
+@wrap_in_class(Entry)
+@increase_counter("list_entries")
+@pg_query
+@beartype
+def list_entries(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ allowed_sources: list[str] = ["api_request", "api_response"],
+ limit: int = -1,
+ offset: int = 0,
+ sort_by: Literal["created_at", "timestamp"] = "timestamp",
+ direction: Literal["asc", "desc"] = "asc",
+ exclude_relations: list[str] = [],
+) -> tuple[str, dict]:
+ return query, [session_id, allowed_sources, sort_by, direction, limit, offset]
From 6aa48071eaa6dc7847915e9d1b0b8e3ba08f7ec2 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Mon, 16 Dec 2024 23:32:45 +0000
Subject: [PATCH 032/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/entry/create_entries.py | 11 +++++------
1 file changed, 5 insertions(+), 6 deletions(-)
diff --git a/agents-api/agents_api/queries/entry/create_entries.py b/agents-api/agents_api/queries/entry/create_entries.py
index feeebde89..98bac13c6 100644
--- a/agents-api/agents_api/queries/entry/create_entries.py
+++ b/agents-api/agents_api/queries/entry/create_entries.py
@@ -5,13 +5,13 @@
from fastapi import HTTPException
from sqlglot import parse_one
from sqlglot.optimizer import optimize
+from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateEntryRequest, Entry
-from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
from ...common.utils.datetime import utcnow
from ...common.utils.messages import content_to_json
-from uuid_extensions import uuid7
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for creating entries
raw_query = """
@@ -76,16 +76,15 @@ def create_entries(
data: list[CreateEntryRequest],
mark_session_as_updated: bool = True,
) -> tuple[str, list]:
-
data_dicts = [item.model_dump(mode="json") for item in data]
-
+
params = [
(
session_id,
item.pop("id", None) or str(uuid7()),
item.get("source"),
item.get("role"),
- item.get("event_type") or 'message.create',
+ item.get("event_type") or "message.create",
item.get("name"),
content_to_json(item.get("content") or []),
item.get("tool_call_id"),
From a8d20686d83be37ac52e8718e7d175499a8f8e39 Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Mon, 16 Dec 2024 23:08:02 -0500
Subject: [PATCH 033/274] chore: update the entry queries
---
.../agents_api/queries/entry/__init__.py | 21 +++++++++++++++++++
.../queries/entry/create_entries.py | 5 ++++-
.../queries/entry/delete_entries.py | 5 ++++-
.../agents_api/queries/entry/get_history.py | 9 ++++----
.../agents_api/queries/entry/list_entries.py | 8 ++++---
5 files changed, 39 insertions(+), 9 deletions(-)
diff --git a/agents-api/agents_api/queries/entry/__init__.py b/agents-api/agents_api/queries/entry/__init__.py
index e69de29bb..2ad83f115 100644
--- a/agents-api/agents_api/queries/entry/__init__.py
+++ b/agents-api/agents_api/queries/entry/__init__.py
@@ -0,0 +1,21 @@
+"""
+The `entry` module provides SQL query functions for managing entries
+in the TimescaleDB database. This includes operations for:
+
+- Creating new entries
+- Deleting entries
+- Retrieving entry history
+- Listing entries with filtering and pagination
+"""
+
+from .create_entries import create_entries
+from .delete_entries import delete_entries_for_session
+from .get_history import get_history
+from .list_entries import list_entries
+
+__all__ = [
+ "create_entries",
+ "delete_entries_for_session",
+ "get_history",
+ "list_entries",
+]
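+
+# Each of these takes keyword-only `developer_id` and `session_id` UUIDs and
+# returns a (query, params) tuple for the `pg_query` decorator to execute.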
diff --git a/agents-api/agents_api/queries/entry/create_entries.py b/agents-api/agents_api/queries/entry/create_entries.py
index 98bac13c6..3edad7b42 100644
--- a/agents-api/agents_api/queries/entry/create_entries.py
+++ b/agents-api/agents_api/queries/entry/create_entries.py
@@ -97,4 +97,7 @@ def create_entries(
for item in data_dicts
]
- return query, params
+ return (
+ query,
+ params,
+ )
diff --git a/agents-api/agents_api/queries/entry/delete_entries.py b/agents-api/agents_api/queries/entry/delete_entries.py
index 0150be3ee..d19dfa632 100644
--- a/agents-api/agents_api/queries/entry/delete_entries.py
+++ b/agents-api/agents_api/queries/entry/delete_entries.py
@@ -40,4 +40,7 @@
def delete_entries_for_session(
*, developer_id: UUID, session_id: UUID, mark_session_as_updated: bool = True
) -> tuple[str, dict]:
- return query, [session_id]
+ return (
+ query,
+ [session_id],
+ )
diff --git a/agents-api/agents_api/queries/entry/get_history.py b/agents-api/agents_api/queries/entry/get_history.py
index eae4f4e6c..8b98ed25c 100644
--- a/agents-api/agents_api/queries/entry/get_history.py
+++ b/agents-api/agents_api/queries/entry/get_history.py
@@ -20,7 +20,6 @@
e.content,
e.source,
e.token_count,
- e.tokenizer,
e.created_at,
e.timestamp,
e.tool_calls,
@@ -43,7 +42,6 @@
"content": "JSONB",
"source": "STRING",
"token_count": "INTEGER",
- "tokenizer": "STRING",
"created_at": "TIMESTAMP",
"timestamp": "TIMESTAMP",
"tool_calls": "JSONB",
@@ -67,5 +65,8 @@ def get_history(
developer_id: UUID,
session_id: UUID,
allowed_sources: list[str] = ["api_request", "api_response"],
-) -> tuple[str, list]:
- return query, [session_id, allowed_sources]
+) -> tuple[str, dict]:
+ return (
+ query,
+ [session_id, allowed_sources],
+ )
diff --git a/agents-api/agents_api/queries/entry/list_entries.py b/agents-api/agents_api/queries/entry/list_entries.py
index e5884b1b3..d2b664866 100644
--- a/agents-api/agents_api/queries/entry/list_entries.py
+++ b/agents-api/agents_api/queries/entry/list_entries.py
@@ -21,7 +21,6 @@
e.content,
e.source,
e.token_count,
- e.tokenizer,
e.created_at,
e.timestamp
FROM entries e
@@ -43,7 +42,6 @@
"content": "JSONB",
"source": "STRING",
"token_count": "INTEGER",
- "tokenizer": "STRING",
"created_at": "TIMESTAMP",
"timestamp": "TIMESTAMP",
}
@@ -71,4 +69,8 @@ def list_entries(
direction: Literal["asc", "desc"] = "asc",
exclude_relations: list[str] = [],
) -> tuple[str, dict]:
- return query, [session_id, allowed_sources, sort_by, direction, limit, offset]
+
+ return (
+ query,
+ [session_id, allowed_sources, sort_by, direction, limit, offset],
+ )
From dc2002f199564153aa4688a0aca43ead110115c0 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Tue, 17 Dec 2024 04:09:08 +0000
Subject: [PATCH 034/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/entry/list_entries.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/agents-api/agents_api/queries/entry/list_entries.py b/agents-api/agents_api/queries/entry/list_entries.py
index d2b664866..6d8d88de5 100644
--- a/agents-api/agents_api/queries/entry/list_entries.py
+++ b/agents-api/agents_api/queries/entry/list_entries.py
@@ -69,7 +69,6 @@ def list_entries(
direction: Literal["asc", "desc"] = "asc",
exclude_relations: list[str] = [],
) -> tuple[str, dict]:
-
return (
query,
[session_id, allowed_sources, sort_by, direction, limit, offset],
From 70b759848b48b6f27ff99a7dbf696e33be073eeb Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Mon, 16 Dec 2024 23:29:49 -0500
Subject: [PATCH 035/274] chore: inner join developers table in entry queries
---
.../agents_api/queries/entry/create_entries.py | 10 +++++++---
.../agents_api/queries/entry/delete_entries.py | 12 +++++++-----
agents-api/agents_api/queries/entry/get_history.py | 7 ++++---
agents-api/agents_api/queries/entry/list_entries.py | 7 ++++---
4 files changed, 22 insertions(+), 14 deletions(-)
diff --git a/agents-api/agents_api/queries/entry/create_entries.py b/agents-api/agents_api/queries/entry/create_entries.py
index 3edad7b42..c131b0362 100644
--- a/agents-api/agents_api/queries/entry/create_entries.py
+++ b/agents-api/agents_api/queries/entry/create_entries.py
@@ -13,7 +13,7 @@
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-# Define the raw SQL query for creating entries
+# Define the raw SQL query for creating entries with a developer check
raw_query = """
INSERT INTO entries (
session_id,
@@ -30,9 +30,12 @@
created_at,
timestamp
)
-VALUES (
+SELECT
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13
-)
+FROM
+ developers
+WHERE
+ developer_id = $14
RETURNING *;
"""
@@ -93,6 +96,7 @@ def create_entries(
item.get("token_count"),
(item.get("created_at") or utcnow()).timestamp(),
utcnow().timestamp(),
+ developer_id
)
for item in data_dicts
]
diff --git a/agents-api/agents_api/queries/entry/delete_entries.py b/agents-api/agents_api/queries/entry/delete_entries.py
index d19dfa632..1fa34176f 100644
--- a/agents-api/agents_api/queries/entry/delete_entries.py
+++ b/agents-api/agents_api/queries/entry/delete_entries.py
@@ -10,11 +10,13 @@
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-# Define the raw SQL query for deleting entries
+# Define the raw SQL query for deleting entries with a developer check
raw_query = """
DELETE FROM entries
-WHERE session_id = $1
-RETURNING session_id as id;
+USING developers
+WHERE entries.session_id = $1
+AND developers.developer_id = $2
+RETURNING entries.session_id as id;
"""
# Parse and optimize the query
@@ -39,8 +41,8 @@
@beartype
def delete_entries_for_session(
*, developer_id: UUID, session_id: UUID, mark_session_as_updated: bool = True
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
return (
query,
- [session_id],
+ [session_id, developer_id],
)
diff --git a/agents-api/agents_api/queries/entry/get_history.py b/agents-api/agents_api/queries/entry/get_history.py
index 8b98ed25c..dd06734b0 100644
--- a/agents-api/agents_api/queries/entry/get_history.py
+++ b/agents-api/agents_api/queries/entry/get_history.py
@@ -10,7 +10,7 @@
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-# Define the raw SQL query for getting history
+# Define the raw SQL query for getting history with a developer check
raw_query = """
SELECT
e.entry_id as id,
@@ -25,6 +25,7 @@
e.tool_calls,
e.tool_call_id
FROM entries e
+JOIN developers d ON d.developer_id = $3
WHERE e.session_id = $1
AND e.source = ANY($2)
ORDER BY e.created_at;
@@ -65,8 +66,8 @@ def get_history(
developer_id: UUID,
session_id: UUID,
allowed_sources: list[str] = ["api_request", "api_response"],
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
return (
query,
- [session_id, allowed_sources],
+ [session_id, allowed_sources, developer_id],
)
diff --git a/agents-api/agents_api/queries/entry/list_entries.py b/agents-api/agents_api/queries/entry/list_entries.py
index 6d8d88de5..42add6899 100644
--- a/agents-api/agents_api/queries/entry/list_entries.py
+++ b/agents-api/agents_api/queries/entry/list_entries.py
@@ -11,7 +11,7 @@
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-# Define the raw SQL query for listing entries
+# Define the raw SQL query for listing entries with a developer check
raw_query = """
SELECT
e.entry_id as id,
@@ -24,6 +24,7 @@
e.created_at,
e.timestamp
FROM entries e
+JOIN developers d ON d.developer_id = $7
WHERE e.session_id = $1
AND e.source = ANY($2)
ORDER BY e.$3 $4
@@ -68,8 +69,8 @@ def list_entries(
sort_by: Literal["created_at", "timestamp"] = "timestamp",
direction: Literal["asc", "desc"] = "asc",
exclude_relations: list[str] = [],
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
return (
query,
- [session_id, allowed_sources, sort_by, direction, limit, offset],
+ [session_id, allowed_sources, sort_by, direction, limit, offset, developer_id],
)
From 5cf876757d3a8b583775aec2482c6928b647d314 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Tue, 17 Dec 2024 04:30:39 +0000
Subject: [PATCH 036/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/entry/create_entries.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/agents-api/agents_api/queries/entry/create_entries.py b/agents-api/agents_api/queries/entry/create_entries.py
index c131b0362..d3b3b4982 100644
--- a/agents-api/agents_api/queries/entry/create_entries.py
+++ b/agents-api/agents_api/queries/entry/create_entries.py
@@ -96,7 +96,7 @@ def create_entries(
item.get("token_count"),
(item.get("created_at") or utcnow()).timestamp(),
utcnow().timestamp(),
- developer_id
+ developer_id,
)
for item in data_dicts
]
From 9782fbfabc58159a17292ed9eccd14a59ea94f24 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Tue, 17 Dec 2024 12:32:29 +0530
Subject: [PATCH 037/274] wip: Make poe test work
Signed-off-by: Diwank Singh Tomer
---
agents-api/Dockerfile.migration | 22 --
agents-api/agents_api/clients/pg.py | 22 +-
.../queries/users/create_or_update_user.py | 4 +-
.../agents_api/queries/users/create_user.py | 21 +-
.../agents_api/queries/users/delete_user.py | 4 +-
.../agents_api/queries/users/get_user.py | 4 +-
.../agents_api/queries/users/list_users.py | 4 +-
.../agents_api/queries/users/patch_user.py | 4 +-
.../agents_api/queries/users/update_user.py | 4 +-
agents-api/agents_api/queries/utils.py | 14 +-
agents-api/agents_api/web.py | 36 +-
.../migrations/migrate_1704699172_init.py | 130 -------
.../migrate_1704699595_developers.py | 151 -------
.../migrate_1704728076_additional_info.py | 107 -----
.../migrations/migrate_1704892503_tools.py | 106 -----
.../migrate_1706090164_entries_timestamp.py | 102 -----
.../migrate_1706092435_entry_relations.py | 38 --
...grate_1707537826_rename_additional_info.py | 217 -----------
...09200345_extend_agents_default_settings.py | 83 ----
.../migrations/migrate_1709292828_presets.py | 82 ----
.../migrations/migrate_1709631202_metadata.py | 232 -----------
...1709806979_entry_relations_to_relations.py | 30 --
.../migrations/migrate_1709810233_memories.py | 92 -----
.../migrate_1712309841_simplify_memories.py | 144 -------
...igrate_1712405369_simplify_instructions.py | 109 ------
...ate_1714119679_session_render_templates.py | 67 ----
...1714566760_change_embeddings_dimensions.py | 149 -------
.../migrate_1716013793_session_cache.py | 33 --
...te_1716847597_support_multimodal_chatml.py | 93 -----
.../migrate_1716939839_task_relations.py | 87 -----
.../migrate_1717239610_token_budget.py | 67 ----
...rate_1721576813_extended_tool_relations.py | 90 -----
...igrate_1721609661_task_tool_ref_by_name.py | 105 -----
...21609675_multi_agent_multi_user_session.py | 79 ----
.../migrate_1721666295_developers_relation.py | 32 --
..._1721678846_rename_information_snippets.py | 33 --
...2107354_rename_executions_arguments_col.py | 83 ----
...rate_1722115427_rename_transitions_from.py | 103 -----
...te_1722710530_unify_owner_doc_relations.py | 204 ----------
...migrate_1722875101_add_temporal_mapping.py | 40 --
...igrate_1723307805_add_lsh_index_to_docs.py | 44 ---
...e_1723400730_add_settings_to_developers.py | 68 ----
...ate_1725153437_add_output_to_executions.py | 104 -----
...5323734_make_transition_output_optional.py | 109 ------
...727235852_add_forward_tool_calls_option.py | 87 -----
...ate_1727922523_add_description_to_tools.py | 64 ---
...rate_1729114011_tweak_proximity_indices.py | 133 -------
...migrate_1731143165_support_tool_call_id.py | 100 -----
...igrate_1731953383_create_files_relation.py | 29 --
...33493650_add_recall_options_to_sessions.py | 91 -----
.../migrate_1733755642_transition_indices.py | 42 --
agents-api/tests/fixtures.py | 360 ++++++++---------
agents-api/tests/test_developer_queries.py | 15 +-
agents-api/tests/test_user_queries.py | 368 +++++++++---------
agents-api/tests/utils.py | 17 +-
.../migrations/000017_compression.down.sql | 17 +
.../migrations/000017_compression.up.sql | 25 ++
.../migrations/000018_doc_search.down.sql | 0
.../migrations/000018_doc_search.up.sql | 23 ++
.../000019_system_developer.down.sql | 7 +
.../migrations/000019_system_developer.up.sql | 18 +
61 files changed, 532 insertions(+), 4216 deletions(-)
delete mode 100644 agents-api/Dockerfile.migration
delete mode 100644 agents-api/migrations/migrate_1704699172_init.py
delete mode 100644 agents-api/migrations/migrate_1704699595_developers.py
delete mode 100644 agents-api/migrations/migrate_1704728076_additional_info.py
delete mode 100644 agents-api/migrations/migrate_1704892503_tools.py
delete mode 100644 agents-api/migrations/migrate_1706090164_entries_timestamp.py
delete mode 100644 agents-api/migrations/migrate_1706092435_entry_relations.py
delete mode 100644 agents-api/migrations/migrate_1707537826_rename_additional_info.py
delete mode 100644 agents-api/migrations/migrate_1709200345_extend_agents_default_settings.py
delete mode 100644 agents-api/migrations/migrate_1709292828_presets.py
delete mode 100644 agents-api/migrations/migrate_1709631202_metadata.py
delete mode 100644 agents-api/migrations/migrate_1709806979_entry_relations_to_relations.py
delete mode 100644 agents-api/migrations/migrate_1709810233_memories.py
delete mode 100644 agents-api/migrations/migrate_1712309841_simplify_memories.py
delete mode 100644 agents-api/migrations/migrate_1712405369_simplify_instructions.py
delete mode 100644 agents-api/migrations/migrate_1714119679_session_render_templates.py
delete mode 100644 agents-api/migrations/migrate_1714566760_change_embeddings_dimensions.py
delete mode 100644 agents-api/migrations/migrate_1716013793_session_cache.py
delete mode 100644 agents-api/migrations/migrate_1716847597_support_multimodal_chatml.py
delete mode 100644 agents-api/migrations/migrate_1716939839_task_relations.py
delete mode 100644 agents-api/migrations/migrate_1717239610_token_budget.py
delete mode 100644 agents-api/migrations/migrate_1721576813_extended_tool_relations.py
delete mode 100644 agents-api/migrations/migrate_1721609661_task_tool_ref_by_name.py
delete mode 100644 agents-api/migrations/migrate_1721609675_multi_agent_multi_user_session.py
delete mode 100644 agents-api/migrations/migrate_1721666295_developers_relation.py
delete mode 100644 agents-api/migrations/migrate_1721678846_rename_information_snippets.py
delete mode 100644 agents-api/migrations/migrate_1722107354_rename_executions_arguments_col.py
delete mode 100644 agents-api/migrations/migrate_1722115427_rename_transitions_from.py
delete mode 100644 agents-api/migrations/migrate_1722710530_unify_owner_doc_relations.py
delete mode 100644 agents-api/migrations/migrate_1722875101_add_temporal_mapping.py
delete mode 100644 agents-api/migrations/migrate_1723307805_add_lsh_index_to_docs.py
delete mode 100644 agents-api/migrations/migrate_1723400730_add_settings_to_developers.py
delete mode 100644 agents-api/migrations/migrate_1725153437_add_output_to_executions.py
delete mode 100644 agents-api/migrations/migrate_1725323734_make_transition_output_optional.py
delete mode 100644 agents-api/migrations/migrate_1727235852_add_forward_tool_calls_option.py
delete mode 100644 agents-api/migrations/migrate_1727922523_add_description_to_tools.py
delete mode 100644 agents-api/migrations/migrate_1729114011_tweak_proximity_indices.py
delete mode 100644 agents-api/migrations/migrate_1731143165_support_tool_call_id.py
delete mode 100644 agents-api/migrations/migrate_1731953383_create_files_relation.py
delete mode 100644 agents-api/migrations/migrate_1733493650_add_recall_options_to_sessions.py
delete mode 100644 agents-api/migrations/migrate_1733755642_transition_indices.py
create mode 100644 memory-store/migrations/000017_compression.down.sql
create mode 100644 memory-store/migrations/000017_compression.up.sql
create mode 100644 memory-store/migrations/000018_doc_search.down.sql
create mode 100644 memory-store/migrations/000018_doc_search.up.sql
create mode 100644 memory-store/migrations/000019_system_developer.down.sql
create mode 100644 memory-store/migrations/000019_system_developer.up.sql
diff --git a/agents-api/Dockerfile.migration b/agents-api/Dockerfile.migration
deleted file mode 100644
index 78f60c16b..000000000
--- a/agents-api/Dockerfile.migration
+++ /dev/null
@@ -1,22 +0,0 @@
-# syntax=docker/dockerfile:1
-# check=error=true
-
-FROM python:3.13-slim
-
-ENV PYTHONUNBUFFERED=1
-ENV POETRY_CACHE_DIR=/tmp/poetry_cache
-
-WORKDIR /app
-
-RUN pip install --no-cache-dir --upgrade cozo-migrate
-
-COPY . ./
-ENV COZO_HOST="http://cozo:9070"
-
-# Expected environment variables:
-# COZO_AUTH_TOKEN="myauthkey"
-
-SHELL ["/bin/bash", "-c"]
-ENTRYPOINT \
- cozo-migrate -e http -h $COZO_HOST --auth $COZO_AUTH_TOKEN init \
- ; cozo-migrate -e http -h $COZO_HOST --auth $COZO_AUTH_TOKEN -d ./migrations apply -ay
diff --git a/agents-api/agents_api/clients/pg.py b/agents-api/agents_api/clients/pg.py
index ddef570f9..852152769 100644
--- a/agents-api/agents_api/clients/pg.py
+++ b/agents-api/agents_api/clients/pg.py
@@ -1,3 +1,4 @@
+from contextlib import asynccontextmanager
import json
import asyncpg
@@ -6,16 +7,23 @@
from ..web import app
-async def get_pg_client(dsn: str = db_dsn):
- # TODO: Create a postgres connection pool
- client = getattr(app.state, "pg_client", await asyncpg.connect(dsn))
- if not hasattr(app.state, "pg_client"):
+async def get_pg_pool(dsn: str = db_dsn, **kwargs):
+ pool = getattr(app.state, "pg_pool", None)
+
+ if pool is None:
+ pool = await asyncpg.create_pool(dsn, **kwargs)
+ app.state.pg_pool = pool
+
+ return pool
+
+
+@asynccontextmanager
+async def get_pg_client(pool: asyncpg.Pool):
+ async with pool.acquire() as client:
await client.set_type_codec(
"jsonb",
encoder=json.dumps,
decoder=json.loads,
schema="pg_catalog",
)
- app.state.pg_client = client
-
- return client
+ yield client
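+
+
+# Illustrative usage (sketch, mirroring the call site in queries/utils.py):
+#
+#     pool = await get_pg_pool()
+#     async with get_pg_client(pool=pool) as client:
+#         rows = await client.fetch(query, *variables)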
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
index 1a7eddd26..b9939b620 100644
--- a/agents-api/agents_api/queries/users/create_or_update_user.py
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -68,7 +68,7 @@
@beartype
def create_or_update_user(
*, developer_id: UUID, user_id: UUID, data: CreateUserRequest
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs an SQL query to create or update a user.
@@ -78,7 +78,7 @@ def create_or_update_user(
data (CreateUserRequest): The user data to insert or update.
Returns:
- tuple[str, dict]: SQL query and parameters.
+ tuple[str, list]: SQL query and parameters.
Raises:
HTTPException: If developer doesn't exist (404) or on unique constraint violation (409)
diff --git a/agents-api/agents_api/queries/users/create_user.py b/agents-api/agents_api/queries/users/create_user.py
index edd9720f6..66e8bcc27 100644
--- a/agents-api/agents_api/queries/users/create_user.py
+++ b/agents-api/agents_api/queries/users/create_user.py
@@ -31,18 +31,7 @@
"""
# Parse and optimize the query
-query = optimize(
- parse_one(raw_query),
- schema={
- "users": {
- "developer_id": "UUID",
- "user_id": "UUID",
- "name": "STRING",
- "about": "STRING",
- "metadata": "JSONB",
- }
- },
-).sql(pretty=True)
+query = parse_one(raw_query).sql(pretty=True)
@rewrap_exceptions(
@@ -59,16 +48,16 @@
),
}
)
-@wrap_in_class(User)
+@wrap_in_class(User, one=True, transform=lambda d: {**d, "id": d["user_id"]})
@increase_counter("create_user")
@pg_query
@beartype
-def create_user(
+async def create_user(
*,
developer_id: UUID,
user_id: UUID | None = None,
data: CreateUserRequest,
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs the SQL query to create a new user.
@@ -78,7 +67,7 @@ def create_user(
data (CreateUserRequest): The user data to insert.
Returns:
- tuple[str, dict]: A tuple containing the SQL query and its parameters.
+ tuple[str, list]: A tuple containing the SQL query and its parameters.
"""
user_id = user_id or uuid7()
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
index 8ca2202f0..2a57ccc7c 100644
--- a/agents-api/agents_api/queries/users/delete_user.py
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -49,7 +49,7 @@
@increase_counter("delete_user")
@pg_query
@beartype
-def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
+def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
"""
Constructs optimized SQL query to delete a user and related data.
Uses primary key for efficient deletion.
@@ -59,7 +59,7 @@ def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
user_id (UUID): The user's UUID
Returns:
- tuple[str, dict]: SQL query and parameters
+ tuple[str, list]: SQL query and parameters
"""
return (
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index 946b92f6c..6e7c26d75 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -42,7 +42,7 @@
@increase_counter("get_user")
@pg_query
@beartype
-def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
+def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
"""
Constructs an optimized SQL query to retrieve a user's details.
Uses the primary key index (developer_id, user_id) for efficient lookup.
@@ -52,7 +52,7 @@ def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, dict]:
user_id (UUID): The UUID of the user to retrieve.
Returns:
- tuple[str, dict]: SQL query and parameters.
+ tuple[str, list]: SQL query and parameters.
"""
return (
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
index d4930b3f8..c2259444a 100644
--- a/agents-api/agents_api/queries/users/list_users.py
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -63,7 +63,7 @@ def list_users(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
metadata_filter: dict | None = None,
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs an optimized SQL query for listing users with pagination and filtering.
Uses indexes on developer_id and metadata for efficient querying.
@@ -77,7 +77,7 @@ def list_users(
metadata_filter (dict, optional): Metadata-based filters
Returns:
- tuple[str, dict]: SQL query and parameters
+ tuple[str, list]: SQL query and parameters
"""
if limit < 1 or limit > 1000:
raise HTTPException(status_code=400, detail="Limit must be between 1 and 1000")
diff --git a/agents-api/agents_api/queries/users/patch_user.py b/agents-api/agents_api/queries/users/patch_user.py
index 1a1e91f60..913b476c5 100644
--- a/agents-api/agents_api/queries/users/patch_user.py
+++ b/agents-api/agents_api/queries/users/patch_user.py
@@ -70,7 +70,7 @@
@beartype
def patch_user(
*, developer_id: UUID, user_id: UUID, data: PatchUserRequest
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs an optimized SQL query for partial user updates.
Uses primary key for efficient update and jsonb_merge for metadata.
@@ -81,7 +81,7 @@ def patch_user(
data (PatchUserRequest): Partial update data
Returns:
- tuple[str, dict]: SQL query and parameters
+ tuple[str, list]: SQL query and parameters
"""
params = [
developer_id,
diff --git a/agents-api/agents_api/queries/users/update_user.py b/agents-api/agents_api/queries/users/update_user.py
index 082784775..71599182d 100644
--- a/agents-api/agents_api/queries/users/update_user.py
+++ b/agents-api/agents_api/queries/users/update_user.py
@@ -61,7 +61,7 @@
@beartype
def update_user(
*, developer_id: UUID, user_id: UUID, data: UpdateUserRequest
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs an optimized SQL query to update a user's details.
Uses primary key for efficient update.
@@ -72,7 +72,7 @@ def update_user(
data (UpdateUserRequest): Updated user data
Returns:
- tuple[str, dict]: SQL query and parameters
+ tuple[str, list]: SQL query and parameters
"""
params = [
developer_id,
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index a68ab2fe8..99f6f901a 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -31,7 +31,6 @@ def pg_query(
func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
debug: bool | None = None,
only_on_error: bool = False,
- timeit: bool = False,
):
def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
"""
@@ -74,13 +73,12 @@ async def wrapper(
from ..clients import pg
try:
- client = client or await pg.get_pg_client()
-
- start = timeit and time.perf_counter()
- results: list[Record] = await client.fetch(query, *variables)
- end = timeit and time.perf_counter()
-
- timeit and print(f"PostgreSQL query time: {end - start:.2f} seconds")
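+            # No client passed in: borrow one from the shared pool just for
+            # the duration of this query.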
+ if client is None:
+ pool = await pg.get_pg_pool()
+ async with pg.get_pg_client(pool=pool) as client:
+ results: list[Record] = await client.fetch(query, *variables)
+ else:
+ results: list[Record] = await client.fetch(query, *variables)
except Exception as e:
if only_on_error and debug:
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index 737a63426..d3a672fd8 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -23,16 +23,16 @@
from .dependencies.auth import get_api_key
from .env import api_prefix, hostname, protocol, public_port, sentry_dsn
from .exceptions import PromptTooBigError
-from .routers import (
- agents,
- docs,
- files,
- internal,
- jobs,
- sessions,
- tasks,
- users,
-)
+# from .routers import (
+# agents,
+# docs,
+# files,
+# internal,
+# jobs,
+# sessions,
+# tasks,
+# users,
+# )
if not sentry_dsn:
print("Sentry DSN not found. Sentry will not be enabled.")
@@ -179,14 +179,14 @@ async def scalar_html():
app.include_router(scalar_router)
# Add other routers with the get_api_key dependency
-app.include_router(agents.router, dependencies=[Depends(get_api_key)])
-app.include_router(sessions.router, dependencies=[Depends(get_api_key)])
-app.include_router(users.router, dependencies=[Depends(get_api_key)])
-app.include_router(jobs.router, dependencies=[Depends(get_api_key)])
-app.include_router(files.router, dependencies=[Depends(get_api_key)])
-app.include_router(docs.router, dependencies=[Depends(get_api_key)])
-app.include_router(tasks.router, dependencies=[Depends(get_api_key)])
-app.include_router(internal.router)
+# app.include_router(agents.router, dependencies=[Depends(get_api_key)])
+# app.include_router(sessions.router, dependencies=[Depends(get_api_key)])
+# app.include_router(users.router, dependencies=[Depends(get_api_key)])
+# app.include_router(jobs.router, dependencies=[Depends(get_api_key)])
+# app.include_router(files.router, dependencies=[Depends(get_api_key)])
+# app.include_router(docs.router, dependencies=[Depends(get_api_key)])
+# app.include_router(tasks.router, dependencies=[Depends(get_api_key)])
+# app.include_router(internal.router)
# TODO: CORS should be enabled only for JWT auth
#
diff --git a/agents-api/migrations/migrate_1704699172_init.py b/agents-api/migrations/migrate_1704699172_init.py
deleted file mode 100644
index 3a427ad48..000000000
--- a/agents-api/migrations/migrate_1704699172_init.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "init"
-CREATED_AT = 1704699172.673636
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-def up(client):
- create_agents_relation_query = """
- :create agents {
- agent_id: Uuid,
- =>
- name: String,
- about: String,
- model: String default 'gpt-4o',
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """
-
- create_model_settings_relation_query = """
- :create agent_default_settings {
- agent_id: Uuid,
- =>
- frequency_penalty: Float default 0.0,
- presence_penalty: Float default 0.0,
- length_penalty: Float default 1.0,
- repetition_penalty: Float default 1.0,
- top_p: Float default 0.95,
- temperature: Float default 0.7,
- }
- """
-
- create_entries_relation_query = """
- :create entries {
- session_id: Uuid,
- entry_id: Uuid default random_uuid_v4(),
- source: String,
- role: String,
- name: String? default null,
- =>
- content: String,
- token_count: Int,
- tokenizer: String,
- created_at: Float default now(),
- }
- """
-
- create_sessions_relation_query = """
- :create sessions {
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- }
- """
-
- create_session_lookup_relation_query = """
- :create session_lookup {
- agent_id: Uuid,
- user_id: Uuid? default null,
- session_id: Uuid,
- }
- """
-
- create_users_relation_query = """
- :create users {
- user_id: Uuid,
- =>
- name: String,
- about: String,
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """
-
- run(
- client,
- create_agents_relation_query,
- create_model_settings_relation_query,
- create_entries_relation_query,
- create_sessions_relation_query,
- create_session_lookup_relation_query,
- create_users_relation_query,
- )
-
-
-def down(client):
- remove_agents_relation_query = """
- ::remove agents
- """
-
- remove_model_settings_relation_query = """
- ::remove agent_default_settings
- """
-
- remove_entries_relation_query = """
- ::remove entries
- """
-
- remove_sessions_relation_query = """
- ::remove sessions
- """
-
- remove_session_lookup_relation_query = """
- ::remove session_lookup
- """
-
- remove_users_relation_query = """
- ::remove users
- """
-
- run(
- client,
- remove_users_relation_query,
- remove_session_lookup_relation_query,
- remove_sessions_relation_query,
- remove_entries_relation_query,
- remove_model_settings_relation_query,
- remove_agents_relation_query,
- )
diff --git a/agents-api/migrations/migrate_1704699595_developers.py b/agents-api/migrations/migrate_1704699595_developers.py
deleted file mode 100644
index d22edb393..000000000
--- a/agents-api/migrations/migrate_1704699595_developers.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "developers"
-CREATED_AT = 1704699595.546072
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-def up(client):
- update_agents_relation_query = """
- ?[agent_id, name, about, model, created_at, updated_at, developer_id] := *agents{
- agent_id,
- name,
- about,
- model,
- created_at,
- updated_at,
- }, developer_id = rand_uuid_v4()
-
- :replace agents {
- developer_id: Uuid,
- agent_id: Uuid,
- =>
- name: String,
- about: String,
- model: String default 'gpt-4o',
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """
-
- update_sessions_relation_query = """
- ?[developer_id, session_id, updated_at, situation, summary, created_at] := *sessions{
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- }, developer_id = rand_uuid_v4()
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- }
- """
-
- update_users_relation_query = """
- ?[user_id, name, about, created_at, updated_at, developer_id] := *users{
- user_id,
- name,
- about,
- created_at,
- updated_at,
- }, developer_id = rand_uuid_v4()
-
- :replace users {
- developer_id: Uuid,
- user_id: Uuid,
- =>
- name: String,
- about: String,
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """
-
- run(
- client,
- update_agents_relation_query,
- update_sessions_relation_query,
- update_users_relation_query,
- )
-
-
-def down(client):
- update_agents_relation_query = """
- ?[agent_id, name, about, model, created_at, updated_at] := *agents{
- agent_id,
- name,
- about,
- model,
- created_at,
- updated_at,
- }
-
- :replace agents {
- agent_id: Uuid,
- =>
- name: String,
- about: String,
- model: String default 'gpt-4o',
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """
-
- update_sessions_relation_query = """
- ?[session_id, updated_at, situation, summary, created_at] := *sessions{
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- }
-
- :replace sessions {
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- }
- """
-
- update_users_relation_query = """
- ?[user_id, name, about, created_at, updated_at] := *users{
- user_id,
- name,
- about,
- created_at,
- updated_at,
- }
-
- :replace users {
- user_id: Uuid,
- =>
- name: String,
- about: String,
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """
-
- run(
- client,
- update_users_relation_query,
- update_sessions_relation_query,
- update_agents_relation_query,
- )
diff --git a/agents-api/migrations/migrate_1704728076_additional_info.py b/agents-api/migrations/migrate_1704728076_additional_info.py
deleted file mode 100644
index c20f021f4..000000000
--- a/agents-api/migrations/migrate_1704728076_additional_info.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "additional_info"
-CREATED_AT = 1704728076.129496
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-agent_additional_info_table = dict(
- up="""
- :create agent_additional_info {
- agent_id: Uuid,
- additional_info_id: Uuid
- =>
- created_at: Float default now(),
- }
- """,
- down="""
- ::remove agent_additional_info
- """,
-)
-
-user_additional_info_table = dict(
- up="""
- :create user_additional_info {
- user_id: Uuid,
- additional_info_id: Uuid
- =>
- created_at: Float default now(),
- }
- """,
- down="""
- ::remove user_additional_info
- """,
-)
-
-information_snippets_table = dict(
- up="""
- :create information_snippets {
- additional_info_id: Uuid,
- snippet_idx: Int,
- =>
- title: String,
- snippet: String,
- embed_instruction: String default 'Encode this passage for retrieval: ',
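- # Untyped, nullable column; stays null until an embedding is computed.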
- embedding: ? default null,
- }
- """,
- down="""
- ::remove information_snippets
- """,
-)
-
-# See: https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md
-information_snippets_hnsw_index = dict(
- up="""
- ::hnsw create information_snippets:embedding_space {
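- # m and ef_construction trade index size and build time for recall (see link above).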
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 768,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: false,
- keep_pruned_connections: false,
- }
- """,
- down="""
- ::hnsw drop information_snippets:embedding_space
- """,
-)
-
-# See: https://docs.cozodb.org/en/latest/vector.html#full-text-search-fts
-information_snippets_fts_index = dict(
- up="""
- ::fts create information_snippets:fts {
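- # Tokenize title + snippet, lowercase, stem (English), and drop stopwords.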
- extractor: concat(title, ' ', snippet),
- tokenizer: Simple,
- filters: [Lowercase, Stemmer('english'), Stopwords('en')],
- }
- """,
- down="""
- ::fts drop information_snippets:fts
- """,
-)
-
-queries_to_run = [
- agent_additional_info_table,
- user_additional_info_table,
- information_snippets_table,
- information_snippets_hnsw_index,
- information_snippets_fts_index,
-]
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in reversed(queries_to_run)])
diff --git a/agents-api/migrations/migrate_1704892503_tools.py b/agents-api/migrations/migrate_1704892503_tools.py
deleted file mode 100644
index 38fefaa08..000000000
--- a/agents-api/migrations/migrate_1704892503_tools.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "tools"
-CREATED_AT = 1704892503.302678
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-agent_instructions_table = dict(
- up="""
- :create agent_instructions {
- agent_id: Uuid,
- instruction_idx: Int,
- =>
- content: String,
- important: Bool default false,
- embed_instruction: String default 'Embed this historical text chunk for retrieval: ',
- embedding: ? default null,
- created_at: Float default now(),
- }
- """,
- down="""
- ::remove agent_instructions
- """,
-)
-
-# See: https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md
-agent_instructions_hnsw_index = dict(
- up="""
- ::hnsw create agent_instructions:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 768,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: false,
- keep_pruned_connections: false,
- }
- """,
- down="""
- ::hnsw drop agent_instructions:embedding_space
- """,
-)
-
-agent_functions_table = dict(
- up="""
- :create agent_functions {
- agent_id: Uuid,
- tool_id: Uuid,
- =>
- name: String,
- description: String,
- parameters: Json,
- embed_instruction: String default 'Transform this tool description for retrieval: ',
- embedding: ? default null,
- updated_at: Float default now(),
- created_at: Float default now(),
- }
- """,
- down="""
- ::remove agent_functions
- """,
-)
-
-
-# See: https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md
-agent_functions_hnsw_index = dict(
- up="""
- ::hnsw create agent_functions:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 768,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: false,
- keep_pruned_connections: false,
- }
- """,
- down="""
- ::hnsw drop agent_functions:embedding_space
- """,
-)
-
-
-queries_to_run = [
- agent_instructions_table,
- agent_instructions_hnsw_index,
- agent_functions_table,
- agent_functions_hnsw_index,
-]
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in reversed(queries_to_run)])
diff --git a/agents-api/migrations/migrate_1706090164_entries_timestamp.py b/agents-api/migrations/migrate_1706090164_entries_timestamp.py
deleted file mode 100644
index d85a7170e..000000000
--- a/agents-api/migrations/migrate_1706090164_entries_timestamp.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "entries_timestamp"
-CREATED_AT = 1706090164.80913
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-update_entries = {
- "up": """
- ?[
- session_id,
- entry_id,
- source,
- role,
- name,
- content,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- ] := *entries{
- session_id,
- entry_id,
- source,
- role,
- name,
- content,
- token_count,
- tokenizer,
- created_at,
- }, timestamp = created_at
-
- :replace entries {
- session_id: Uuid,
- entry_id: Uuid default random_uuid_v4(),
- source: String,
- role: String,
- name: String? default null,
- =>
- content: String,
- token_count: Int,
- tokenizer: String,
- created_at: Float default now(),
- timestamp: Float default now(),
- }
- """,
- "down": """
- ?[
- session_id,
- entry_id,
- source,
- role,
- name,
- content,
- token_count,
- tokenizer,
- created_at,
- ] := *entries{
- session_id,
- entry_id,
- source,
- role,
- name,
- content,
- token_count,
- tokenizer,
- created_at,
- }
-
- :replace entries {
- session_id: Uuid,
- entry_id: Uuid default random_uuid_v4(),
- source: String,
- role: String,
- name: String? default null,
- =>
- content: String,
- token_count: Int,
- tokenizer: String,
- created_at: Float default now(),
- }
- """,
-}
-
-queries_to_run = [
- update_entries,
-]
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in queries_to_run])
diff --git a/agents-api/migrations/migrate_1706092435_entry_relations.py b/agents-api/migrations/migrate_1706092435_entry_relations.py
deleted file mode 100644
index e031b27d1..000000000
--- a/agents-api/migrations/migrate_1706092435_entry_relations.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "entry_relations"
-CREATED_AT = 1706092435.462968
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-entry_relations = {
- "up": """
- :create entry_relations {
- head: Uuid,
- relation: String,
- tail: Uuid,
- }
- """,
- "down": """
- ::remove entry_relations
- """,
-}
-
-queries_to_run = [
- entry_relations,
-]
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in queries_to_run])
diff --git a/agents-api/migrations/migrate_1707537826_rename_additional_info.py b/agents-api/migrations/migrate_1707537826_rename_additional_info.py
deleted file mode 100644
index d71576f05..000000000
--- a/agents-api/migrations/migrate_1707537826_rename_additional_info.py
+++ /dev/null
@@ -1,217 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "rename_additional_info"
-CREATED_AT = 1707537826.539182
-
-rename_agent_doc_id = dict(
- up="""
- ?[agent_id, doc_id, created_at] :=
- *agent_additional_info{
- agent_id,
- additional_info_id: doc_id,
- created_at,
- }
-
- :replace agent_additional_info {
- agent_id: Uuid,
- doc_id: Uuid
- =>
- created_at: Float default now(),
- }
- """,
- down="""
- ?[agent_id, additional_info_id, created_at] :=
- *agent_additional_info{
- agent_id,
- doc_id: additional_info_id,
- created_at,
- }
-
- :replace agent_additional_info {
- agent_id: Uuid,
- additional_info_id: Uuid
- =>
- created_at: Float default now(),
- }
- """,
-)
-
-
-rename_user_doc_id = dict(
- up="""
- ?[user_id, doc_id, created_at] :=
- *user_additional_info{
- user_id,
- additional_info_id: doc_id,
- created_at,
- }
-
- :replace user_additional_info {
- user_id: Uuid,
- doc_id: Uuid
- =>
- created_at: Float default now(),
- }
- """,
- down="""
- ?[user_id, additional_info_id, created_at] :=
- *user_additional_info{
- user_id,
- doc_id: additional_info_id,
- created_at,
- }
-
- :replace user_additional_info {
- user_id: Uuid,
- additional_info_id: Uuid
- =>
- created_at: Float default now(),
- }
- """,
-)
-
-# See: https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md
-information_snippets_hnsw_index = dict(
- up="""
- ::hnsw create information_snippets:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 768,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: false,
- keep_pruned_connections: false,
- }
- """,
- down="""
- ::hnsw drop information_snippets:embedding_space
- """,
-)
-
-# See: https://docs.cozodb.org/en/latest/vector.html#full-text-search-fts
-information_snippets_fts_index = dict(
- up="""
- ::fts create information_snippets:fts {
- extractor: concat(title, ' ', snippet),
- tokenizer: Simple,
- filters: [Lowercase, Stemmer('english'), Stopwords('en')],
- }
- """,
- down="""
- ::fts drop information_snippets:fts
- """,
-)
-
-drop_information_snippets_hnsw_index = {
- "up": information_snippets_hnsw_index["down"],
- "down": information_snippets_hnsw_index["up"],
-}
-
-
-drop_information_snippets_fts_index = {
- "up": information_snippets_fts_index["down"],
- "down": information_snippets_fts_index["up"],
-}
-
-
-rename_information_snippets_doc_id = dict(
- up="""
- ?[
- doc_id,
- snippet_idx,
- title,
- snippet,
- embed_instruction,
- embedding,
- ] :=
- *information_snippets{
- snippet_idx,
- title,
- snippet,
- embed_instruction,
- embedding,
- additional_info_id: doc_id,
- }
-
- :replace information_snippets {
- doc_id: Uuid,
- snippet_idx: Int,
- =>
- title: String,
- snippet: String,
- embed_instruction: String default 'Encode this passage for retrieval: ',
- embedding: ? default null,
- }
- """,
- down="""
- ?[
- additional_info_id,
- snippet_idx,
- title,
- snippet,
- embed_instruction,
- embedding,
- ] :=
- *information_snippets{
- snippet_idx,
- title,
- snippet,
- embed_instruction,
- embedding,
- doc_id: additional_info_id,
- }
-
- :replace information_snippets {
- additional_info_id: Uuid,
- snippet_idx: Int,
- =>
- title: String,
- snippet: String,
- embed_instruction: String default 'Encode this passage for retrieval: ',
- embedding: ? default null,
- }
- """,
-)
-
-rename_relations = dict(
- up="""
- ::rename
- agent_additional_info -> agent_docs,
- user_additional_info -> user_docs
- """,
- down="""
- ::rename
- agent_docs -> agent_additional_info,
- user_docs -> user_additional_info
- """,
-)
-
-
-queries_to_run = [
- rename_agent_doc_id,
- rename_user_doc_id,
- drop_information_snippets_hnsw_index,
- drop_information_snippets_fts_index,
- rename_information_snippets_doc_id,
- information_snippets_hnsw_index,
- information_snippets_fts_index,
- rename_relations,
-]
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
-
- client.run(query)
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in reversed(queries_to_run)])
diff --git a/agents-api/migrations/migrate_1709200345_extend_agents_default_settings.py b/agents-api/migrations/migrate_1709200345_extend_agents_default_settings.py
deleted file mode 100644
index 4a2be5921..000000000
--- a/agents-api/migrations/migrate_1709200345_extend_agents_default_settings.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "extend_agents_default_settings"
-CREATED_AT = 1709200345.052425
-
-
-extend_agents_default_settings = {
- "up": """
- ?[
- agent_id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- min_p,
- ] := *agent_default_settings{
- agent_id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- }, min_p = 0.01
-
- :replace agent_default_settings {
- agent_id: Uuid,
- =>
- frequency_penalty: Float default 0.0,
- presence_penalty: Float default 0.0,
- length_penalty: Float default 1.0,
- repetition_penalty: Float default 1.0,
- top_p: Float default 0.95,
- temperature: Float default 0.7,
- min_p: Float default 0.01,
- }
- """,
- "down": """
- ?[
- agent_id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- ] := *agent_default_settings{
- agent_id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- }
-
- :replace agent_default_settings {
- agent_id: Uuid,
- =>
- frequency_penalty: Float default 0.0,
- presence_penalty: Float default 0.0,
- length_penalty: Float default 1.0,
- repetition_penalty: Float default 1.0,
- top_p: Float default 0.95,
- temperature: Float default 0.7,
- }
- """,
-}
-
-
-queries_to_run = [
- extend_agents_default_settings,
-]
-
-
-def up(client):
- client.run(extend_agents_default_settings["up"])
-
-
-def down(client):
- client.run(extend_agents_default_settings["down"])
diff --git a/agents-api/migrations/migrate_1709292828_presets.py b/agents-api/migrations/migrate_1709292828_presets.py
deleted file mode 100644
index ee2c3885a..000000000
--- a/agents-api/migrations/migrate_1709292828_presets.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "presets"
-CREATED_AT = 1709292828.203209
-
-extend_agents_default_settings = {
- "up": """
- ?[
- agent_id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- min_p,
- ] := *agent_default_settings{
- agent_id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- min_p,
- }
-
- :replace agent_default_settings {
- agent_id: Uuid,
- =>
- frequency_penalty: Float default 0.0,
- presence_penalty: Float default 0.0,
- length_penalty: Float default 1.0,
- repetition_penalty: Float default 1.0,
- top_p: Float default 0.95,
- temperature: Float default 0.7,
- min_p: Float default 0.01,
- preset: String? default null,
- }
- """,
- "down": """
- ?[
- agent_id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- min_p,
- ] := *agent_default_settings{
- agent_id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- min_p,
- }
-
- :replace agent_default_settings {
- agent_id: Uuid,
- =>
- frequency_penalty: Float default 0.0,
- presence_penalty: Float default 0.0,
- length_penalty: Float default 1.0,
- repetition_penalty: Float default 1.0,
- top_p: Float default 0.95,
- temperature: Float default 0.7,
- min_p: Float default 0.01,
- }
- """,
-}
-
-
-def up(client):
- client.run(extend_agents_default_settings["up"])
-
-
-def down(client):
- client.run(extend_agents_default_settings["down"])
diff --git a/agents-api/migrations/migrate_1709631202_metadata.py b/agents-api/migrations/migrate_1709631202_metadata.py
deleted file mode 100644
index 36c1c8ec4..000000000
--- a/agents-api/migrations/migrate_1709631202_metadata.py
+++ /dev/null
@@ -1,232 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "metadata"
-CREATED_AT = 1709631202.917773
-
-
-extend_agents = {
- "up": """
- ?[agent_id, name, about, model, created_at, updated_at, developer_id, metadata] := *agents{
- agent_id,
- name,
- about,
- model,
- created_at,
- updated_at,
- developer_id,
- }, metadata = {}
-
- :replace agents {
- developer_id: Uuid,
- agent_id: Uuid,
- =>
- name: String,
- about: String,
- model: String default 'gpt-4o',
- created_at: Float default now(),
- updated_at: Float default now(),
- metadata: Json default {},
- }
- """,
- "down": """
- ?[agent_id, name, about, model, created_at, updated_at, developer_id] := *agents{
- agent_id,
- name,
- about,
- model,
- created_at,
- updated_at,
- developer_id,
- }
-
- :replace agents {
- developer_id: Uuid,
- agent_id: Uuid,
- =>
- name: String,
- about: String,
- model: String default 'gpt-4o',
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
-}
-
-
-extend_users = {
- "up": """
- ?[user_id, name, about, created_at, updated_at, developer_id, metadata] := *users{
- user_id,
- name,
- about,
- created_at,
- updated_at,
- developer_id,
- }, metadata = {}
-
- :replace users {
- developer_id: Uuid,
- user_id: Uuid,
- =>
- name: String,
- about: String,
- created_at: Float default now(),
- updated_at: Float default now(),
- metadata: Json default {},
- }
- """,
- "down": """
- ?[user_id, name, about, created_at, updated_at, developer_id] := *users{
- user_id,
- name,
- about,
- created_at,
- updated_at,
- developer_id,
- }
-
- :replace users {
- developer_id: Uuid,
- user_id: Uuid,
- =>
- name: String,
- about: String,
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
-}
-
-
-extend_sessions = {
- "up": """
- ?[developer_id, session_id, updated_at, situation, summary, created_at, metadata] := *sessions{
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- developer_id
- }, metadata = {}
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- metadata: Json default {},
- }
- """,
- "down": """
- ?[developer_id, session_id, updated_at, situation, summary, created_at] := *sessions{
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- developer_id
- }
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- }
- """,
-}
-
-
-extend_agent_docs = {
- "up": """
- ?[agent_id, doc_id, created_at, metadata] :=
- *agent_docs{
- agent_id,
- doc_id,
- created_at,
- }, metadata = {}
-
- :replace agent_docs {
- agent_id: Uuid,
- doc_id: Uuid
- =>
- created_at: Float default now(),
- metadata: Json default {},
- }
- """,
- "down": """
- ?[agent_id, doc_id, created_at] :=
- *agent_docs{
- agent_id,
- doc_id,
- created_at,
- }
-
- :replace agent_docs {
- agent_id: Uuid,
- doc_id: Uuid
- =>
- created_at: Float default now(),
- }
- """,
-}
-
-
-extend_user_docs = {
- "up": """
- ?[user_id, doc_id, created_at, metadata] :=
- *user_docs{
- user_id,
- doc_id,
- created_at,
- }, metadata = {}
-
- :replace user_docs {
- user_id: Uuid,
- doc_id: Uuid
- =>
- created_at: Float default now(),
- metadata: Json default {},
- }
- """,
- "down": """
- ?[user_id, doc_id, created_at] :=
- *user_docs{
- user_id,
- doc_id,
- created_at,
- }
-
- :replace user_docs {
- user_id: Uuid,
- doc_id: Uuid
- =>
- created_at: Float default now(),
- }
- """,
-}
-
-
-queries_to_run = [
- extend_agents,
- extend_users,
- extend_sessions,
- extend_agent_docs,
- extend_user_docs,
-]
-
-
-def up(client):
- for q in queries_to_run:
- client.run(q["up"])
-
-
-def down(client):
- for q in reversed(queries_to_run):
- client.run(q["down"])
diff --git a/agents-api/migrations/migrate_1709806979_entry_relations_to_relations.py b/agents-api/migrations/migrate_1709806979_entry_relations_to_relations.py
deleted file mode 100644
index e8c05be8f..000000000
--- a/agents-api/migrations/migrate_1709806979_entry_relations_to_relations.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "entry_relations_to_relations"
-CREATED_AT = 1709806979.250619
-
-
-entry_relations_to_relations = {
- "up": """
- ::rename
- entry_relations -> relations
- """,
- "down": """
- ::rename
- relations -> entry_relations
- """,
-}
-
-queries_to_run = [
- entry_relations_to_relations,
-]
-
-
-def up(client):
- for q in queries_to_run:
- client.run(q["up"])
-
-
-def down(client):
- for q in reversed(queries_to_run):
- client.run(q["down"])
diff --git a/agents-api/migrations/migrate_1709810233_memories.py b/agents-api/migrations/migrate_1709810233_memories.py
deleted file mode 100644
index 5036c1826..000000000
--- a/agents-api/migrations/migrate_1709810233_memories.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "memories"
-CREATED_AT = 1709810233.271039
-
-
-memories = {
- "up": """
- :create memories {
- memory_id: Uuid,
- type: String, # enum: belief | episode
- =>
- content: String,
- weight: Int, # range: 0-100
- last_accessed_at: Float? default null,
- timestamp: Float default now(),
- sentiment: Int,
- emotions: [String],
- duration: Float? default null,
- created_at: Float default now(),
- embedding: ? default null,
- }
- """,
- "down": """
- ::remove memories
- """,
-}
-
-
-memory_lookup = {
- "up": """
- :create memory_lookup {
- agent_id: Uuid,
- user_id: Uuid? default null,
- memory_id: Uuid,
- }
- """,
- "down": """
- ::remove memory_lookup
- """,
-}
-
-
-memories_hnsw_index = {
- "up": """
- ::hnsw create memories:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 768,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: false,
- keep_pruned_connections: false,
- }
- """,
- "down": """
- ::hnsw drop memories:embedding_space
- """,
-}
-
-
-memories_fts_index = {
- "up": """
- ::fts create memories:fts {
- extractor: content,
- tokenizer: Simple,
- filters: [Lowercase, Stemmer('english'), Stopwords('en')],
- }
- """,
- "down": """
- ::fts drop memories:fts
- """,
-}
-
-
-queries_to_run = [
- memories,
- memory_lookup,
- memories_hnsw_index,
- memories_fts_index,
-]
-
-
-def up(client):
- for q in queries_to_run:
- client.run(q["up"])
-
-
-def down(client):
- for q in reversed(queries_to_run):
- client.run(q["down"])
diff --git a/agents-api/migrations/migrate_1712309841_simplify_memories.py b/agents-api/migrations/migrate_1712309841_simplify_memories.py
deleted file mode 100644
index 5a2656d83..000000000
--- a/agents-api/migrations/migrate_1712309841_simplify_memories.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "simplify_memories"
-CREATED_AT = 1712309841.289588
-
-simplify_memories = {
- "up": """
- ?[
- memory_id,
- content,
- last_accessed_at,
- timestamp,
- sentiment,
- entities,
- created_at,
- embedding,
- ] :=
- *memories {
- memory_id,
- content,
- last_accessed_at,
- timestamp,
- sentiment,
- created_at,
- embedding,
- },
- entities = []
-
- :replace memories {
- memory_id: Uuid,
- =>
- content: String,
- last_accessed_at: Float? default null,
- timestamp: Float default now(),
- sentiment: Int default 0,
- entities: [Json] default [],
- created_at: Float default now(),
- embedding: ? default null,
- }
- """,
- "down": """
- ?[
- memory_id,
- type,
- weight,
- duration,
- emotions,
- content,
- last_accessed_at,
- timestamp,
- sentiment,
- created_at,
- embedding,
- ] :=
- *memories {
- memory_id,
- content,
- last_accessed_at,
- timestamp,
- sentiment,
- created_at,
- embedding,
- },
- type = 'episode',
- weight = 1,
- duration = null,
- emotions = []
-
- :replace memories {
- memory_id: Uuid,
- type: String, # enum: belief | episode
- =>
- content: String,
- weight: Int, # range: 0-100
- last_accessed_at: Float? default null,
- timestamp: Float default now(),
- sentiment: Int,
- emotions: [String],
- duration: Float? default null,
- created_at: Float default now(),
- embedding: ? default null,
- }
- """,
-}
-
-memories_hnsw_index = {
- "up": """
- ::hnsw create memories:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 768,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: false,
- keep_pruned_connections: false,
- }
- """,
- "down": """
- ::hnsw drop memories:embedding_space
- """,
-}
-
-
-memories_fts_index = {
- "up": """
- ::fts create memories:fts {
- extractor: content,
- tokenizer: Simple,
- filters: [Lowercase, Stemmer('english'), Stopwords('en')],
- }
- """,
- "down": """
- ::fts drop memories:fts
- """,
-}
-
-drop_memories_hnsw_index = {
- "up": memories_hnsw_index["down"],
- "down": memories_hnsw_index["up"],
-}
-
-drop_memories_fts_index = {
- "up": memories_fts_index["down"],
- "down": memories_fts_index["up"],
-}
-
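-# Order matters: drop both indices first, rewrite the relation, then recreate them.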
-queries_to_run = [
- drop_memories_hnsw_index,
- drop_memories_fts_index,
- simplify_memories,
- memories_hnsw_index,
- memories_fts_index,
-]
-
-
-def up(client):
- for query in queries_to_run:
- client.run(query["up"])
-
-
-def down(client):
- for query in reversed(queries_to_run):
- client.run(query["down"])
diff --git a/agents-api/migrations/migrate_1712405369_simplify_instructions.py b/agents-api/migrations/migrate_1712405369_simplify_instructions.py
deleted file mode 100644
index b3f8a289a..000000000
--- a/agents-api/migrations/migrate_1712405369_simplify_instructions.py
+++ /dev/null
@@ -1,109 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "simplify_instructions"
-CREATED_AT = 1712405369.263776
-
-update_agents_relation_query = dict(
- up="""
- ?[agent_id, name, about, model, created_at, updated_at, developer_id, instructions, metadata] := *agents{
- agent_id,
- name,
- about,
- model,
- created_at,
- updated_at,
- metadata,
- developer_id,
- },
- instructions = []
-
- :replace agents {
- developer_id: Uuid,
- agent_id: Uuid,
- =>
- name: String,
- about: String,
- instructions: [String] default [],
- model: String default 'gpt-4o',
- created_at: Float default now(),
- updated_at: Float default now(),
- metadata: Json default {},
- }
- """,
- down="""
- ?[agent_id, name, about, model, created_at, updated_at, developer_id, metadata] := *agents{
- agent_id,
- name,
- about,
- model,
- created_at,
- updated_at,
- metadata,
- developer_id,
- }
-
- :replace agents {
- developer_id: Uuid,
- agent_id: Uuid,
- =>
- name: String,
- about: String,
- model: String default 'gpt-4o',
- created_at: Float default now(),
- updated_at: Float default now(),
- metadata: Json default {},
- }
- """,
-)
-
-drop_instructions_table = dict(
- down="""
- :create agent_instructions {
- agent_id: Uuid,
- instruction_idx: Int,
- =>
- content: String,
- important: Bool default false,
- embed_instruction: String default 'Embed this historical text chunk for retrieval: ',
- embedding: ? default null,
- created_at: Float default now(),
- }
- """,
- up="""
- ::remove agent_instructions
- """,
-)
-
-# See: https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md
-drop_agent_instructions_hnsw_index = dict(
- down="""
- ::hnsw create agent_instructions:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 768,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: false,
- keep_pruned_connections: false,
- }
- """,
- up="""
- ::hnsw drop agent_instructions:embedding_space
- """,
-)
-
-queries_to_run = [
- drop_agent_instructions_hnsw_index,
- drop_instructions_table,
- update_agents_relation_query,
-]
-
-
-def up(client):
- for query in queries_to_run:
- client.run(query["up"])
-
-
-def down(client):
- for query in reversed(queries_to_run):
- client.run(query["down"])
diff --git a/agents-api/migrations/migrate_1714119679_session_render_templates.py b/agents-api/migrations/migrate_1714119679_session_render_templates.py
deleted file mode 100644
index 93d7dba14..000000000
--- a/agents-api/migrations/migrate_1714119679_session_render_templates.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "session_render_templates"
-CREATED_AT = 1714119679.493182
-
-extend_sessions = {
- "up": """
- ?[render_templates, developer_id, session_id, updated_at, situation, summary, created_at, metadata] := *sessions{
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- developer_id
- },
- metadata = {},
- render_templates = false
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- metadata: Json default {},
- render_templates: Bool default false,
- }
- """,
- "down": """
- ?[developer_id, session_id, updated_at, situation, summary, created_at, metadata] := *sessions{
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- developer_id
- }, metadata = {}
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- metadata: Json default {},
- }
- """,
-}
-
-
-queries_to_run = [
- extend_sessions,
-]
-
-
-def up(client):
- for q in queries_to_run:
- client.run(q["up"])
-
-
-def down(client):
- for q in reversed(queries_to_run):
- client.run(q["down"])
diff --git a/agents-api/migrations/migrate_1714566760_change_embeddings_dimensions.py b/agents-api/migrations/migrate_1714566760_change_embeddings_dimensions.py
deleted file mode 100644
index dba657345..000000000
--- a/agents-api/migrations/migrate_1714566760_change_embeddings_dimensions.py
+++ /dev/null
@@ -1,149 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "change_embeddings_dimensions"
-CREATED_AT = 1714566760.731964
-
-
-change_dimensions = {
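- # The untyped embedding column needs no schema change; "up" and "down" are
- # identical pass-throughs. Only the HNSW index dimension (768 -> 1024) changes.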
- "up": """
- ?[
- doc_id,
- snippet_idx,
- title,
- snippet,
- embed_instruction,
- embedding,
- ] :=
- *information_snippets{
- snippet_idx,
- title,
- snippet,
- embed_instruction,
- embedding,
- doc_id,
- }
-
- :replace information_snippets {
- doc_id: Uuid,
- snippet_idx: Int,
- =>
- title: String,
- snippet: String,
- embed_instruction: String default 'Encode this passage for retrieval: ',
- embedding: ? default null,
- }
- """,
- "down": """
- ?[
- doc_id,
- snippet_idx,
- title,
- snippet,
- embed_instruction,
- embedding,
- ] :=
- *information_snippets{
- snippet_idx,
- title,
- snippet,
- embed_instruction,
- embedding,
- doc_id,
- }
-
- :replace information_snippets {
- doc_id: Uuid,
- snippet_idx: Int,
- =>
- title: String,
- snippet: String,
- embed_instruction: String default 'Encode this passage for retrieval: ',
- embedding: ? default null,
- }
- """,
-}
-
-snippets_hnsw_768_index = dict(
- up="""
- ::hnsw create information_snippets:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 768,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: true,
- keep_pruned_connections: false,
- }
- """,
- down="""
- ::hnsw drop information_snippets:embedding_space
- """,
-)
-
-drop_snippets_hnsw_768_index = {
- "up": snippets_hnsw_768_index["down"],
- "down": snippets_hnsw_768_index["up"],
-}
-
-snippets_hnsw_1024_index = dict(
- up="""
- ::hnsw create information_snippets:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 1024,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: true,
- keep_pruned_connections: false,
- }
- """,
- down="""
- ::hnsw drop information_snippets:embedding_space
- """,
-)
-
-drop_snippets_hnsw_1024_index = {
- "up": snippets_hnsw_1024_index["down"],
- "down": snippets_hnsw_1024_index["up"],
-}
-
-
-# See: https://docs.cozodb.org/en/latest/vector.html#full-text-search-fts
-information_snippets_fts_index = dict(
- up="""
- ::fts create information_snippets:fts {
- extractor: concat(title, ' ', snippet),
- tokenizer: Simple,
- filters: [Lowercase, Stemmer('english'), Stopwords('en')],
- }
- """,
- down="""
- ::fts drop information_snippets:fts
- """,
-)
-
-drop_information_snippets_fts_index = {
- "up": information_snippets_fts_index["down"],
- "down": information_snippets_fts_index["up"],
-}
-
-
-queries_to_run = [
- drop_information_snippets_fts_index,
- drop_snippets_hnsw_768_index,
- change_dimensions,
- snippets_hnsw_1024_index,
- information_snippets_fts_index,
-]
-
-
-def up(client):
- for q in queries_to_run:
- client.run(q["up"])
-
-
-def down(client):
- for q in reversed(queries_to_run):
- client.run(q["down"])
diff --git a/agents-api/migrations/migrate_1716013793_session_cache.py b/agents-api/migrations/migrate_1716013793_session_cache.py
deleted file mode 100644
index c29f670b3..000000000
--- a/agents-api/migrations/migrate_1716013793_session_cache.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "session_cache"
-CREATED_AT = 1716013793.746602
-
-
-session_cache = dict(
- up="""
- :create session_cache {
- key: String,
- =>
- value: Json,
- }
- """,
- down="""
- ::remove session_cache
- """,
-)
-
-
-queries_to_run = [
- session_cache,
-]
-
-
-def up(client):
- for q in queries_to_run:
- client.run(q["up"])
-
-
-def down(client):
- for q in reversed(queries_to_run):
- client.run(q["down"])
diff --git a/agents-api/migrations/migrate_1716847597_support_multimodal_chatml.py b/agents-api/migrations/migrate_1716847597_support_multimodal_chatml.py
deleted file mode 100644
index 8b54b6b06..000000000
--- a/agents-api/migrations/migrate_1716847597_support_multimodal_chatml.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "support_multimodal_chatml"
-CREATED_AT = 1716847597.155657
-
-update_entries = {
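- # up: wraps each plain-string content into a one-element list of text parts;
- # down: unwraps by taking the first part back to a scalar string.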
- "up": """
- ?[
- session_id,
- entry_id,
- source,
- role,
- name,
- content,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- ] := *entries{
- session_id,
- entry_id,
- source,
- role,
- name,
- content: content_string,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- }, content = [{"type": "text", "content": content_string}]
-
- :replace entries {
- session_id: Uuid,
- entry_id: Uuid default random_uuid_v4(),
- source: String,
- role: String,
- name: String? default null,
- =>
- content: [Json],
- token_count: Int,
- tokenizer: String,
- created_at: Float default now(),
- timestamp: Float default now(),
- }
- """,
- "down": """
- ?[
- session_id,
- entry_id,
- source,
- role,
- name,
- content,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- ] := *entries{
- session_id,
- entry_id,
- source,
- role,
- name,
- content: content_array,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- }, content = json_to_scalar(get(content_array, 0, ""))
-
- :replace entries {
- session_id: Uuid,
- entry_id: Uuid default random_uuid_v4(),
- source: String,
- role: String,
- name: String? default null,
- =>
- content: String,
- token_count: Int,
- tokenizer: String,
- created_at: Float default now(),
- timestamp: Float default now(),
- }
- """,
-}
-
-
-def up(client):
- client.run(update_entries["up"])
-
-
-def down(client):
- client.run(update_entries["down"])
diff --git a/agents-api/migrations/migrate_1716939839_task_relations.py b/agents-api/migrations/migrate_1716939839_task_relations.py
deleted file mode 100644
index 14a6037a1..000000000
--- a/agents-api/migrations/migrate_1716939839_task_relations.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "task_relations"
-CREATED_AT = 1716939839.690704
-
-
-def run(client, queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-create_task_relation_query = dict(
- up="""
- :create tasks {
- agent_id: Uuid,
- task_id: Uuid,
- updated_at_ms: Validity default [floor(now() * 1000), true],
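- # Validity key: [timestamp, asserted] pairs enable time-travel queries over task versions.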
- =>
- name: String,
- description: String? default null,
- input_schema: Json,
- tools_available: [Uuid] default [],
- workflows: [Json],
- created_at: Float default now(),
- }
- """,
- down="::remove tasks",
-)
-
-create_execution_relation_query = dict(
- up="""
- :create executions {
- task_id: Uuid,
- execution_id: Uuid,
- =>
- status: String default 'queued',
- # one of: "queued", "starting", "running", "awaiting_input", "succeeded", "failed"
-
- arguments: Json,
- session_id: Uuid? default null,
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
- down="::remove executions",
-)
-
-create_transition_relation_query = dict(
- up="""
- :create transitions {
- execution_id: Uuid,
- transition_id: Uuid,
- =>
- type: String,
- # one of: "finish", "wait", "error", "step"
-
- from: (String, Int),
- to: (String, Int)?,
- output: Json,
-
- task_token: String? default null,
-
- # should store: an Activity Id, a Workflow Id, and optionally a Run Id.
- metadata: Json default {},
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
- down="::remove transitions",
-)
-
-queries = [
- create_task_relation_query,
- create_execution_relation_query,
- create_transition_relation_query,
-]
-
-
-def up(client):
- run(client, [q["up"] for q in queries])
-
-
-def down(client):
- run(client, [q["down"] for q in reversed(queries)])
diff --git a/agents-api/migrations/migrate_1717239610_token_budget.py b/agents-api/migrations/migrate_1717239610_token_budget.py
deleted file mode 100644
index c042c56e5..000000000
--- a/agents-api/migrations/migrate_1717239610_token_budget.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "token_budget"
-CREATED_AT = 1717239610.622555
-
-update_sessions = {
- "up": """
- ?[developer_id, session_id, updated_at, situation, summary, created_at, metadata, render_templates, token_budget, context_overflow] := *sessions{
- developer_id,
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- },
- token_budget = null,
- context_overflow = null
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- metadata: Json default {},
- render_templates: Bool default false,
- token_budget: Int? default null,
- context_overflow: String? default null,
- }
- """,
- "down": """
- ?[developer_id, session_id, updated_at, situation, summary, created_at, metadata, render_templates] := *sessions{
- developer_id,
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- }
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- metadata: Json default {},
- render_templates: Bool default false,
- }
- """,
-}
-
-
-def up(client):
- client.run(update_sessions["up"])
-
-
-def down(client):
- client.run(update_sessions["down"])
diff --git a/agents-api/migrations/migrate_1721576813_extended_tool_relations.py b/agents-api/migrations/migrate_1721576813_extended_tool_relations.py
deleted file mode 100644
index 2e4583a18..000000000
--- a/agents-api/migrations/migrate_1721576813_extended_tool_relations.py
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "extended_tool_relations"
-CREATED_AT = 1721576813.383905
-
-
-drop_agent_functions_hnsw_index = dict(
- up="""
- ::hnsw drop agent_functions:embedding_space
- """,
- down="""
- ::hnsw create agent_functions:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 768,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: false,
- keep_pruned_connections: false,
- }
- """,
-)
-
-create_tools_relation = dict(
- up="""
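- # Fold each agent_functions row into the generic tools shape; spec bundles description and parameters.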
- ?[agent_id, tool_id, type, name, spec, updated_at, created_at] := *agent_functions{
- agent_id, tool_id, name, description, parameters, updated_at, created_at
- }, type = "function",
- spec = {"description": description, "parameters": parameters}
-
- :create tools {
- agent_id: Uuid,
- tool_id: Uuid,
- =>
- type: String,
- name: String,
- spec: Json,
-
- updated_at: Float default now(),
- created_at: Float default now(),
- }
- """,
- down="""
- ::remove tools
- """,
-)
-
-drop_agent_functions_table = dict(
- up="""
- ::remove agent_functions
- """,
- down="""
- :create agent_functions {
- agent_id: Uuid,
- tool_id: Uuid,
- =>
- name: String,
- description: String,
- parameters: Json,
- embed_instruction: String default 'Transform this tool description for retrieval: ',
- embedding: ? default null,
- updated_at: Float default now(),
- created_at: Float default now(),
- }
- """,
-)
-
-
-queries_to_run = [
- drop_agent_functions_hnsw_index,
- create_tools_relation,
- drop_agent_functions_table,
-]
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in reversed(queries_to_run)])
diff --git a/agents-api/migrations/migrate_1721609661_task_tool_ref_by_name.py b/agents-api/migrations/migrate_1721609661_task_tool_ref_by_name.py
deleted file mode 100644
index 902ec396d..000000000
--- a/agents-api/migrations/migrate_1721609661_task_tool_ref_by_name.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "task_tool_ref_by_name"
-CREATED_AT = 1721609661.768934
-
-
-# - add metadata
-# - add inherit_tools bool
-# - rename tools_available to tools
-update_tasks_relation = dict(
- up="""
- ?[
- agent_id,
- task_id,
- updated_at_ms,
- name,
- description,
- input_schema,
- inherit_tools,
- workflows,
- created_at,
- metadata,
- ] := *tasks {
- agent_id,
- task_id,
- updated_at_ms,
- name,
- description,
- input_schema,
- workflows,
- created_at,
- },
- metadata = {},
- inherit_tools = true
-
- :replace tasks {
- agent_id: Uuid,
- task_id: Uuid,
- updated_at_ms: Validity default [floor(now() * 1000), true],
- =>
- name: String,
- description: String? default null,
- input_schema: Json,
- tools: [Json] default [],
- inherit_tools: Bool default true,
- workflows: [Json],
- created_at: Float default now(),
- metadata: Json default {},
- }
- """,
- down="""
- ?[
- agent_id,
- task_id,
- updated_at_ms,
- name,
- description,
- input_schema,
- workflows,
- created_at,
- ] := *tasks {
- agent_id,
- task_id,
- updated_at_ms,
- name,
- description,
- input_schema,
- workflows,
- created_at,
- }
-
- :replace tasks {
- agent_id: Uuid,
- task_id: Uuid,
- updated_at_ms: Validity default [floor(now() * 1000), true],
- =>
- name: String,
- description: String? default null,
- input_schema: Json,
- tools_available: [Uuid] default [],
- workflows: [Json],
- created_at: Float default now(),
- }
- """,
-)
-
-queries_to_run = [
- update_tasks_relation,
-]
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in reversed(queries_to_run)])
diff --git a/agents-api/migrations/migrate_1721609675_multi_agent_multi_user_session.py b/agents-api/migrations/migrate_1721609675_multi_agent_multi_user_session.py
deleted file mode 100644
index 6b144fca3..000000000
--- a/agents-api/migrations/migrate_1721609675_multi_agent_multi_user_session.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "multi_agent_multi_user_session"
-CREATED_AT = 1721609675.213755
-
-add_multiple_participants_in_session = dict(
- up="""
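- # Emit one participant row per agent and one per non-null user of each session.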
- ?[session_id, participant_id, participant_type] :=
- *session_lookup {
- agent_id: participant_id,
- session_id,
- }, participant_type = 'agent'
-
- ?[session_id, participant_id, participant_type] :=
- *session_lookup {
- agent_id,
- user_id: participant_id,
- session_id,
- }, participant_type = 'user',
- participant_id != null
-
- :replace session_lookup {
- session_id: Uuid,
- participant_type: String,
- participant_id: Uuid,
- }
- """,
- down="""
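- # The two ?[...] rules union sessions that have a user with agent-only ones (user_id = null).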
- users[user_id, session_id] :=
- *session_lookup {
- session_id,
- participant_type: "user",
- participant_id: user_id,
- }
-
- agents[agent_id, session_id] :=
- *session_lookup {
- session_id,
- participant_type: "agent",
- participant_id: agent_id,
- }
-
- ?[agent_id, user_id, session_id] :=
- agents[agent_id, session_id],
- users[user_id, session_id]
-
- ?[agent_id, user_id, session_id] :=
- agents[agent_id, session_id],
- not users[_, session_id],
- user_id = null
-
- :replace session_lookup {
- agent_id: Uuid,
- user_id: Uuid? default null,
- session_id: Uuid,
- }
- """,
-)
-
-queries_to_run = [
- add_multiple_participants_in_session,
-]
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in reversed(queries_to_run)])
diff --git a/agents-api/migrations/migrate_1721666295_developers_relation.py b/agents-api/migrations/migrate_1721666295_developers_relation.py
deleted file mode 100644
index 560b056da..000000000
--- a/agents-api/migrations/migrate_1721666295_developers_relation.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "developers_relation"
-CREATED_AT = 1721666295.486804
-
-
-def up(client):
- client.run(
- """
- # Create developers table and insert default developer
- ?[developer_id, email] <- [
- ["00000000-0000-0000-0000-000000000000", "developers@example.com"]
- ]
-
- :create developers {
- developer_id: Uuid,
- =>
- email: String,
- active: Bool default true,
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """
- )
-
-
-def down(client):
- client.run(
- """
- ::remove developers
- """
- )
diff --git a/agents-api/migrations/migrate_1721678846_rename_information_snippets.py b/agents-api/migrations/migrate_1721678846_rename_information_snippets.py
deleted file mode 100644
index a3fdd4f94..000000000
--- a/agents-api/migrations/migrate_1721678846_rename_information_snippets.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "rename_information_snippets"
-CREATED_AT = 1721678846.468865
-
-rename_information_snippets = dict(
- up="""
- ::rename information_snippets -> snippets
- """,
- down="""
- ::rename snippets -> information_snippets
- """,
-)
-
-queries_to_run = [
- rename_information_snippets,
-]
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in reversed(queries_to_run)])
diff --git a/agents-api/migrations/migrate_1722107354_rename_executions_arguments_col.py b/agents-api/migrations/migrate_1722107354_rename_executions_arguments_col.py
deleted file mode 100644
index 9fcb3dac9..000000000
--- a/agents-api/migrations/migrate_1722107354_rename_executions_arguments_col.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "rename_executions_arguments_col"
-CREATED_AT = 1722107354.988836
-
-rename_arguments_add_metadata_query = dict(
- up="""
- ?[
- task_id,
- execution_id,
- status,
- input,
- session_id,
- created_at,
- updated_at,
- metadata,
- ] :=
- *executions{
- task_id,
- execution_id,
- arguments: input,
- status,
- session_id,
- created_at,
- updated_at,
- }, metadata = {}
-
- :replace executions {
- task_id: Uuid,
- execution_id: Uuid,
- =>
- status: String default 'queued',
- # one of: "queued", "starting", "running", "awaiting_input", "succeeded", "failed"
-
- input: Json,
- session_id: Uuid? default null,
- metadata: Json default {},
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
- down="""
- ?[
- task_id,
- execution_id,
- status,
- arguments,
- session_id,
- created_at,
- updated_at,
- ] :=
- *executions{
- task_id,
- execution_id,
- input: arguments,
- status,
- session_id,
- created_at,
- updated_at,
- }
-
- :replace executions {
- task_id: Uuid,
- execution_id: Uuid,
- =>
- status: String default 'queued',
- # one of: "queued", "starting", "running", "awaiting_input", "succeeded", "failed"
-
- arguments: Json,
- session_id: Uuid? default null,
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
-)
-
-
-def up(client):
- client.run(rename_arguments_add_metadata_query["up"])
-
-
-def down(client):
- client.run(rename_arguments_add_metadata_query["down"])
diff --git a/agents-api/migrations/migrate_1722115427_rename_transitions_from.py b/agents-api/migrations/migrate_1722115427_rename_transitions_from.py
deleted file mode 100644
index 63f2660e8..000000000
--- a/agents-api/migrations/migrate_1722115427_rename_transitions_from.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "rename_transitions_from"
-CREATED_AT = 1722115427.685346
-
-rename_transitions_from_to_query = dict(
- up="""
- ?[
- execution_id,
- transition_id,
- type,
- current,
- next,
- output,
- task_token,
- metadata,
- created_at,
- updated_at,
- ] := *transitions {
- execution_id,
- transition_id,
- type,
- from: current,
- to: next,
- output,
- task_token,
- metadata,
- created_at,
- updated_at,
- }
-
- :replace transitions {
- execution_id: Uuid,
- transition_id: Uuid,
- =>
- type: String,
- # one of: "finish", "wait", "error", "step"
-
- current: (String, Int),
- next: (String, Int)?,
- output: Json,
-
- task_token: String? default null,
-
- # should store: an Activity Id, a Workflow Id, and optionally a Run Id.
- metadata: Json default {},
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
- down="""
- ?[
- execution_id,
- transition_id,
- type,
- from,
- to,
- output,
- task_token,
- metadata,
- created_at,
- updated_at,
- ] := *transitions {
- execution_id,
- transition_id,
- type,
- current: from,
- next: to,
- output,
- task_token,
- metadata,
- created_at,
- updated_at,
- }
-
- :replace transitions {
- execution_id: Uuid,
- transition_id: Uuid,
- =>
- type: String,
- # one of: "finish", "wait", "error", "step"
-
- from: (String, Int),
- to: (String, Int)?,
- output: Json,
-
- task_token: String? default null,
-
- # should store: an Activity Id, a Workflow Id, and optionally a Run Id.
- metadata: Json default {},
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
-)
-
-
-def up(client):
- client.run(rename_transitions_from_to_query["up"])
-
-
-def down(client):
- client.run(rename_transitions_from_to_query["down"])
diff --git a/agents-api/migrations/migrate_1722710530_unify_owner_doc_relations.py b/agents-api/migrations/migrate_1722710530_unify_owner_doc_relations.py
deleted file mode 100644
index a56bce674..000000000
--- a/agents-api/migrations/migrate_1722710530_unify_owner_doc_relations.py
+++ /dev/null
@@ -1,204 +0,0 @@
-#!/usr/bin/env python3
-
-MIGRATION_ID = "unify_owner_doc_relations"
-CREATED_AT = 1722710530.126563
-
-create_docs_relations_query = dict(
- up="""
- :create docs {
- owner_type: String,
- owner_id: Uuid,
- doc_id: Uuid,
- =>
- title: String,
- created_at: Float default now(),
- metadata: Json default {},
- }
- """,
- down="::remove docs",
-)
-
-remove_user_docs_table = dict(
- up="""
- doc_title[doc_id, unique(title)] :=
- *snippets {
- doc_id,
- title,
- }
-
- ?[owner_type, owner_id, doc_id, title, created_at, metadata] :=
- owner_type = "user",
- *user_docs {
- user_id: owner_id,
- doc_id,
- created_at,
- metadata,
- },
- doc_title[doc_id, title]
-
- :insert docs {
- owner_type,
- owner_id,
- doc_id,
- title,
- created_at,
- metadata,
- }
-
- } { # <-- this is just a separator between the two queries
- ::remove user_docs
- """,
- down="""
- :create user_docs {
- user_id: Uuid,
- doc_id: Uuid
- =>
- created_at: Float default now(),
- metadata: Json default {},
- }
- """,
-)
-
-remove_agent_docs_table = dict(
- up=remove_user_docs_table["up"].replace("user", "agent"),
- down=remove_user_docs_table["down"].replace("user", "agent"),
-)
-
-# See: https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md
-snippets_hnsw_index = dict(
- up="""
- ::hnsw create snippets:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 1024,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: true,
- keep_pruned_connections: false,
- }
- """,
- down="""
- ::hnsw drop snippets:embedding_space
- """,
-)
-
-# See: https://docs.cozodb.org/en/latest/vector.html#full-text-search-fts
-snippets_fts_index = dict(
- up="""
- ::fts create snippets:fts {
- extractor: content,
- tokenizer: Simple,
- filters: [Lowercase, Stemmer('english'), Stopwords('en')],
- }
- """,
- down="""
- ::fts drop snippets:fts
- """,
-)
-
-temp_rename_snippets_table = dict(
- up="""
- ::rename snippets -> information_snippets
- """,
- down="""
- ::rename information_snippets -> snippets
- """,
-)
-
-temp_rename_snippets_table_back = dict(
- up=temp_rename_snippets_table["down"],
- down=temp_rename_snippets_table["up"],
-)
-
-drop_snippets_hnsw_index = {
- "up": snippets_hnsw_index["down"].replace("snippets:", "information_snippets:"),
- "down": snippets_hnsw_index["up"].replace("snippets:", "information_snippets:"),
-}
-
-drop_snippets_fts_index = dict(
- up="""
- ::fts drop information_snippets:fts
- """,
- down="""
- ::fts create information_snippets:fts {
- extractor: concat(title, ' ', snippet),
- tokenizer: Simple,
- filters: [Lowercase, Stemmer('english'), Stopwords('en')],
- }
- """,
-)
-
-
-remove_title_from_snippets_table = dict(
- up="""
- ?[doc_id, index, content, embedding] :=
- *snippets {
- doc_id,
- snippet_idx: index,
- snippet: content,
- embedding,
- }
-
- :replace snippets {
- doc_id: Uuid,
- index: Int,
- =>
- content: String,
- embedding: ? default null,
- }
- """,
- down="""
- ?[doc_id, snippet_idx, title, snippet, embedding] :=
- *snippets {
- doc_id,
- index: snippet_idx,
- content: snippet,
- embedding,
- },
- *docs {
- doc_id,
- title,
- }
-
- :replace snippets {
- doc_id: Uuid,
- snippet_idx: Int,
- =>
- title: String,
- snippet: String,
- embed_instruction: String default 'Encode this passage for retrieval: ',
- embedding: ? default null,
- }
- """,
-)
-
-queries = [
- create_docs_relations_query,
- remove_user_docs_table,
- remove_agent_docs_table,
- temp_rename_snippets_table, # Because of a bug in Cozo
- drop_snippets_hnsw_index,
- drop_snippets_fts_index,
- temp_rename_snippets_table_back, # Because of a bug in Cozo
- remove_title_from_snippets_table,
- snippets_fts_index,
- snippets_hnsw_index,
-]
-
-
-def run(client, queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
-
- client.run(query)
-
-
-def up(client):
- run(client, [q["up"] for q in queries])
-
-
-def down(client):
- run(client, [q["down"] for q in reversed(queries)])
diff --git a/agents-api/migrations/migrate_1722875101_add_temporal_mapping.py b/agents-api/migrations/migrate_1722875101_add_temporal_mapping.py
deleted file mode 100644
index b38a3717c..000000000
--- a/agents-api/migrations/migrate_1722875101_add_temporal_mapping.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "add_temporal_mapping"
-CREATED_AT = 1722875101.262791
-
-
-def run(client, queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-create_temporal_executions_lookup = dict(
- up="""
- :create temporal_executions_lookup {
- execution_id: Uuid,
- id: String,
- =>
- run_id: String?,
- first_execution_run_id: String?,
- result_run_id: String?,
- created_at: Float default now(),
- }
- """,
- down="::remove temporal_executions_lookup",
-)
-
-queries = [
- create_temporal_executions_lookup,
-]
-
-
-def up(client):
- run(client, [q["up"] for q in queries])
-
-
-def down(client):
- run(client, [q["down"] for q in reversed(queries)])
diff --git a/agents-api/migrations/migrate_1723307805_add_lsh_index_to_docs.py b/agents-api/migrations/migrate_1723307805_add_lsh_index_to_docs.py
deleted file mode 100644
index 01eaa8a60..000000000
--- a/agents-api/migrations/migrate_1723307805_add_lsh_index_to_docs.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "add_lsh_index_to_docs"
-CREATED_AT = 1723307805.007054
-
-# See: https://docs.cozodb.org/en/latest/vector.html#full-text-search-fts
-snippets_lsh_index = dict(
- up="""
- ::lsh create snippets:lsh {
- extractor: content,
- tokenizer: Simple,
- filters: [Stopwords('en')],
- n_perm: 200,
- target_threshold: 0.9,
- n_gram: 3,
- false_positive_weight: 1.0,
- false_negative_weight: 1.0,
- }
- """,
- down="""
- ::lsh drop snippets:lsh
- """,
-)
-
-queries = [
- snippets_lsh_index,
-]
-
-
-def run(client, queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
-
- client.run(query)
-
-
-def up(client):
- run(client, [q["up"] for q in queries])
-
-
-def down(client):
- run(client, [q["down"] for q in reversed(queries)])
diff --git a/agents-api/migrations/migrate_1723400730_add_settings_to_developers.py b/agents-api/migrations/migrate_1723400730_add_settings_to_developers.py
deleted file mode 100644
index e10e71510..000000000
--- a/agents-api/migrations/migrate_1723400730_add_settings_to_developers.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "add_settings_to_developers"
-CREATED_AT = 1723400730.539554
-
-
-def up(client):
- client.run(
- """
- ?[
- developer_id,
- email,
- active,
- tags,
- settings,
- created_at,
- updated_at,
- ] := *developers {
- developer_id,
- email,
- active,
- created_at,
- updated_at,
- },
- tags = [],
- settings = {}
-
- :replace developers {
- developer_id: Uuid,
- =>
- email: String,
- active: Bool default true,
- tags: [String] default [],
- settings: Json,
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """
- )
-
-
-def down(client):
- client.run(
- """
- ?[
- developer_id,
- email,
- active,
- created_at,
- updated_at,
- ] := *developers {
- developer_id,
- email,
- active,
- created_at,
- updated_at,
- }
-
- :replace developers {
- developer_id: Uuid,
- =>
- email: String,
- active: Bool default true,
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """
- )
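add_settings_to_developers shows the standard Cozo column-add idiom: project every existing row, bind a constant for each new column, then :replace the relation with the widened schema, with down doing the mirror-image projection. A hypothetical helper capturing that shape; the names below are placeholders for illustration, not project API:

    # Hypothetical template for the Cozo column-add idiom above.
    def add_column_script(rel: str, old_cols: list[str],
                          new_col: str, default: str, new_schema: str) -> str:
        cols = ", ".join(old_cols)
        return (
            f"?[{cols}, {new_col}] := *{rel} {{ {cols} }}, {new_col} = {default}\n\n"
            f":replace {rel} {{ {new_schema} }}"
        )

    # Roughly what the migration above expands to (settings column only):
    script = add_column_script(
        rel="developers",
        old_cols=["developer_id", "email", "active", "created_at", "updated_at"],
        new_col="settings",
        default="{}",
        new_schema="developer_id: Uuid => email: String, active: Bool default true, "
                   "settings: Json, created_at: Float default now(), "
                   "updated_at: Float default now()",
    )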
diff --git a/agents-api/migrations/migrate_1725153437_add_output_to_executions.py b/agents-api/migrations/migrate_1725153437_add_output_to_executions.py
deleted file mode 100644
index 8118e4f89..000000000
--- a/agents-api/migrations/migrate_1725153437_add_output_to_executions.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "add_output_to_executions"
-CREATED_AT = 1725153437.489542
-
-
-def run(client, queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-add_output_to_executions_query = dict(
- up="""
- ?[
- task_id,
- execution_id,
- status,
- input,
- session_id,
- created_at,
- updated_at,
- output,
- error,
- metadata,
- ] :=
- *executions {
- task_id,
- execution_id,
- status,
- input,
- session_id,
- created_at,
- updated_at,
- },
- output = null,
- error = null,
- metadata = {}
-
- :replace executions {
- task_id: Uuid,
- execution_id: Uuid,
- =>
- status: String default 'queued',
- # one of: "queued", "starting", "running", "awaiting_input", "succeeded", "failed"
-
- input: Json,
- output: Json? default null,
- error: String? default null,
- session_id: Uuid? default null,
- metadata: Json default {},
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
- down="""
- ?[
- task_id,
- execution_id,
- status,
- input,
- session_id,
- created_at,
- updated_at,
- ] :=
- *executions {
- task_id,
- execution_id,
- status,
- input,
- session_id,
- created_at,
- updated_at,
- }
-
- :replace executions {
- task_id: Uuid,
- execution_id: Uuid,
- =>
- status: String default 'queued',
- # one of: "queued", "starting", "running", "awaiting_input", "succeeded", "failed"
-
- input: Json,
- session_id: Uuid? default null,
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
-)
-
-
-queries = [
- add_output_to_executions_query,
-]
-
-
-def up(client):
- run(client, [q["up"] for q in queries])
-
-
-def down(client):
- run(client, [q["down"] for q in reversed(queries)])
diff --git a/agents-api/migrations/migrate_1725323734_make_transition_output_optional.py b/agents-api/migrations/migrate_1725323734_make_transition_output_optional.py
deleted file mode 100644
index dd13c3132..000000000
--- a/agents-api/migrations/migrate_1725323734_make_transition_output_optional.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "make_transition_output_optional"
-CREATED_AT = 1725323734.591567
-
-
-def run(client, queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-make_transition_output_optional_query = dict(
- up="""
- ?[
- execution_id,
- transition_id,
- output,
- type,
- current,
- next,
- task_token,
- metadata,
- created_at,
- updated_at,
- ] :=
- *transitions {
- execution_id,
- transition_id,
- output,
- type,
- current,
- next,
- task_token,
- metadata,
- created_at,
- updated_at,
- }
-
- :replace transitions {
- execution_id: Uuid,
- transition_id: Uuid,
- =>
- type: String,
- current: (String, Int),
- next: (String, Int)?,
- output: Json?, # <--- this is the only change; output is now optional
- task_token: String? default null,
- metadata: Json default {},
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
- down="""
- ?[
- execution_id,
- transition_id,
- output,
- type,
- current,
- next,
- task_token,
- metadata,
- created_at,
- updated_at,
- ] :=
- *transitions {
- execution_id,
- transition_id,
- output,
- type,
- current,
- next,
- task_token,
- metadata,
- created_at,
- updated_at,
- }
-
- :replace transitions {
- execution_id: Uuid,
- transition_id: Uuid,
- =>
- type: String,
- current: (String, Int),
- next: (String, Int)?,
- output: Json,
- task_token: String? default null,
- metadata: Json default {},
- created_at: Float default now(),
- updated_at: Float default now(),
- }
- """,
-)
-
-
-queries = [
- make_transition_output_optional_query,
-]
-
-
-def up(client):
- run(client, [q["up"] for q in queries])
-
-
-def down(client):
- run(client, [q["down"] for q in reversed(queries)])
diff --git a/agents-api/migrations/migrate_1727235852_add_forward_tool_calls_option.py b/agents-api/migrations/migrate_1727235852_add_forward_tool_calls_option.py
deleted file mode 100644
index aa1b8441a..000000000
--- a/agents-api/migrations/migrate_1727235852_add_forward_tool_calls_option.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "add_forward_tool_calls_option"
-CREATED_AT = 1727235852.744035
-
-
-def run(client, queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-add_forward_tool_calls_option_to_session_query = dict(
- up="""
- ?[forward_tool_calls, token_budget, context_overflow, developer_id, session_id, updated_at, situation, summary, created_at, metadata, render_templates, token_budget, context_overflow] := *sessions{
- developer_id,
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- },
- forward_tool_calls = null
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- metadata: Json default {},
- render_templates: Bool default false,
- token_budget: Int? default null,
- context_overflow: String? default null,
- forward_tool_calls: Bool? default null,
- }
- """,
- down="""
- ?[token_budget, context_overflow, developer_id, session_id, updated_at, situation, summary, created_at, metadata, render_templates, token_budget, context_overflow] := *sessions{
- developer_id,
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- }
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- metadata: Json default {},
- render_templates: Bool default false,
- token_budget: Int? default null,
- context_overflow: String? default null,
- }
- """,
-)
-
-
-queries = [
- add_forward_tool_calls_option_to_session_query,
-]
-
-
-def up(client):
- run(client, [q["up"] for q in queries])
-
-
-def down(client):
- run(client, [q["down"] for q in reversed(queries)])
diff --git a/agents-api/migrations/migrate_1727922523_add_description_to_tools.py b/agents-api/migrations/migrate_1727922523_add_description_to_tools.py
deleted file mode 100644
index 1d6724090..000000000
--- a/agents-api/migrations/migrate_1727922523_add_description_to_tools.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "add_description_to_tools"
-CREATED_AT = 1727922523.283493
-
-
-add_description_to_tools = dict(
- up="""
- ?[agent_id, tool_id, type, name, description, spec, updated_at, created_at] := *tools {
- agent_id, tool_id, type, name, spec, updated_at, created_at
- }, description = null
-
- :replace tools {
- agent_id: Uuid,
- tool_id: Uuid,
- =>
- type: String,
- name: String,
- description: String?,
- spec: Json,
-
- updated_at: Float default now(),
- created_at: Float default now(),
- }
- """,
- down="""
- ?[agent_id, tool_id, type, name, spec, updated_at, created_at] := *tools {
- agent_id, tool_id, type, name, spec, updated_at, created_at
- }
-
- :replace tools {
- agent_id: Uuid,
- tool_id: Uuid,
- =>
- type: String,
- name: String,
- spec: Json,
-
- updated_at: Float default now(),
- created_at: Float default now(),
- }
- """,
-)
-
-
-queries_to_run = [
- add_description_to_tools,
-]
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in reversed(queries_to_run)])
diff --git a/agents-api/migrations/migrate_1729114011_tweak_proximity_indices.py b/agents-api/migrations/migrate_1729114011_tweak_proximity_indices.py
deleted file mode 100644
index 4852f3603..000000000
--- a/agents-api/migrations/migrate_1729114011_tweak_proximity_indices.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "tweak_proximity_indices"
-CREATED_AT = 1729114011.022733
-
-
-def run(client, *queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-# See: https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md
-drop_snippets_hnsw_index = dict(
- down="""
- ::hnsw create snippets:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 1024,
- distance: Cosine,
- m: 64,
- ef_construction: 256,
- extend_candidates: true,
- keep_pruned_connections: false,
- }
- """,
- up="""
- ::hnsw drop snippets:embedding_space
- """,
-)
-
-
-# See: https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md
-snippets_hnsw_index = dict(
- up="""
- ::hnsw create snippets:embedding_space {
- fields: [embedding],
- filter: !is_null(embedding),
- dim: 1024,
- distance: Cosine,
- m: 64,
- ef_construction: 800,
- extend_candidates: false,
- keep_pruned_connections: false,
- }
- """,
- down="""
- ::hnsw drop snippets:embedding_space
- """,
-)
-
-drop_snippets_lsh_index = dict(
- up="""
- ::lsh drop snippets:lsh
- """,
- down="""
- ::lsh create snippets:lsh {
- extractor: content,
- tokenizer: Simple,
- filters: [Stopwords('en')],
- n_perm: 200,
- target_threshold: 0.9,
- n_gram: 3,
- false_positive_weight: 1.0,
- false_negative_weight: 1.0,
- }
- """,
-)
-
-snippets_lsh_index = dict(
- up="""
- ::lsh create snippets:lsh {
- extractor: content,
- tokenizer: Simple,
- filters: [Lowercase, AsciiFolding, Stemmer('english'), Stopwords('en')],
- n_perm: 200,
- target_threshold: 0.5,
- n_gram: 2,
- false_positive_weight: 1.0,
- false_negative_weight: 1.0,
- }
- """,
- down="""
- ::lsh drop snippets:lsh
- """,
-)
-
-# See: https://docs.cozodb.org/en/latest/vector.html#full-text-search-fts
-drop_snippets_fts_index = dict(
- down="""
- ::fts create snippets:fts {
- extractor: content,
- tokenizer: Simple,
- filters: [Lowercase, Stemmer('english'), Stopwords('en')],
- }
- """,
- up="""
- ::fts drop snippets:fts
- """,
-)
-
-# See: https://docs.cozodb.org/en/latest/vector.html#full-text-search-fts
-snippets_fts_index = dict(
- up="""
- ::fts create snippets:fts {
- extractor: content,
- tokenizer: Simple,
- filters: [Lowercase, AsciiFolding, Stemmer('english'), Stopwords('en')],
- }
- """,
- down="""
- ::fts drop snippets:fts
- """,
-)
-
-queries_to_run = [
- drop_snippets_hnsw_index,
- drop_snippets_lsh_index,
- drop_snippets_fts_index,
- snippets_hnsw_index,
- snippets_lsh_index,
- snippets_fts_index,
-]
-
-
-def up(client):
- run(client, *[q["up"] for q in queries_to_run])
-
-
-def down(client):
- run(client, *[q["down"] for q in reversed(queries_to_run)])
diff --git a/agents-api/migrations/migrate_1731143165_support_tool_call_id.py b/agents-api/migrations/migrate_1731143165_support_tool_call_id.py
deleted file mode 100644
index 9faf4d577..000000000
--- a/agents-api/migrations/migrate_1731143165_support_tool_call_id.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "support_tool_call_id"
-CREATED_AT = 1731143165.95882
-
-update_entries = {
- "down": """
- ?[
- session_id,
- entry_id,
- source,
- role,
- name,
- content,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- ] := *entries{
- session_id,
- entry_id,
- source,
- role,
- name,
- content: content_string,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- }, content = [{"type": "text", "content": content_string}]
-
- :replace entries {
- session_id: Uuid,
- entry_id: Uuid default random_uuid_v4(),
- source: String,
- role: String,
- name: String? default null,
- =>
- content: [Json],
- token_count: Int,
- tokenizer: String,
- created_at: Float default now(),
- timestamp: Float default now(),
- }
- """,
- "up": """
- ?[
- session_id,
- entry_id,
- source,
- role,
- name,
- content,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- tool_call_id,
- tool_calls,
- ] := *entries{
- session_id,
- entry_id,
- source,
- role,
- name,
- content: content_string,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- },
- content = [{"type": "text", "content": content_string}],
- tool_call_id = null,
- tool_calls = null
-
- :replace entries {
- session_id: Uuid,
- entry_id: Uuid default random_uuid_v4(),
- source: String,
- role: String,
- name: String? default null,
- =>
- content: [Json],
- tool_call_id: String? default null,
- tool_calls: [Json]? default null,
- token_count: Int,
- tokenizer: String,
- created_at: Float default now(),
- timestamp: Float default now(),
- }
- """,
-}
-
-
-def up(client):
- client.run(update_entries["up"])
-
-
-def down(client):
- client.run(update_entries["down"])
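The up script above migrates entries from a single content string to a list of typed content parts, adding two nullable tool-call columns. The per-row transform it performs, written out in plain Python for clarity (a sketch, not project code):

    # Sketch of the row-level transform performed by the `up` script above.
    def upgrade_entry(row: dict) -> dict:
        return {
            **row,
            # plain string -> list of typed content parts
            "content": [{"type": "text", "content": row["content"]}],
            "tool_call_id": None,
            "tool_calls": None,
        }

    def downgrade_entry(row: dict) -> dict:
        # The down script keeps the list-of-parts content and simply drops
        # the two tool-call columns again.
        return {k: v for k, v in row.items()
                if k not in ("tool_call_id", "tool_calls")}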
diff --git a/agents-api/migrations/migrate_1731953383_create_files_relation.py b/agents-api/migrations/migrate_1731953383_create_files_relation.py
deleted file mode 100644
index 9cdc4f8fe..000000000
--- a/agents-api/migrations/migrate_1731953383_create_files_relation.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "create_files_relation"
-CREATED_AT = 1731953383.258172
-
-create_files_query = dict(
- up="""
- :create files {
- developer_id: Uuid,
- file_id: Uuid,
- =>
- name: String,
- description: String default "",
- mime_type: String? default null,
- size: Int,
- hash: String,
- created_at: Float default now(),
- }
- """,
- down="::remove files",
-)
-
-
-def up(client):
- client.run(create_files_query["up"])
-
-
-def down(client):
- client.run(create_files_query["down"])
diff --git a/agents-api/migrations/migrate_1733493650_add_recall_options_to_sessions.py b/agents-api/migrations/migrate_1733493650_add_recall_options_to_sessions.py
deleted file mode 100644
index ba0be5d2b..000000000
--- a/agents-api/migrations/migrate_1733493650_add_recall_options_to_sessions.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "add_recall_options_to_sessions"
-CREATED_AT = 1733493650.922383
-
-
-def run(client, queries):
- joiner = "}\n\n{"
-
- query = joiner.join(queries)
- query = f"{{\n{query}\n}}"
- client.run(query)
-
-
-add_recall_options_to_sessions_query = dict(
- up="""
- ?[recall_options, forward_tool_calls, token_budget, context_overflow, developer_id, session_id, updated_at, situation, summary, created_at, metadata, render_templates, token_budget, context_overflow] := *sessions{
- developer_id,
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- forward_tool_calls,
- },
- recall_options = {},
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- metadata: Json default {},
- render_templates: Bool default false,
- token_budget: Int? default null,
- context_overflow: String? default null,
- forward_tool_calls: Bool? default null,
- recall_options: Json default {},
- }
- """,
- down="""
- ?[forward_tool_calls, token_budget, context_overflow, developer_id, session_id, updated_at, situation, summary, created_at, metadata, render_templates, token_budget, context_overflow] := *sessions{
- developer_id,
- session_id,
- updated_at,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- },
- forward_tool_calls = null
-
- :replace sessions {
- developer_id: Uuid,
- session_id: Uuid,
- updated_at: Validity default [floor(now()), true],
- =>
- situation: String,
- summary: String? default null,
- created_at: Float default now(),
- metadata: Json default {},
- render_templates: Bool default false,
- token_budget: Int? default null,
- context_overflow: String? default null,
- forward_tool_calls: Bool? default null,
- }
- """,
-)
-
-
-queries = [
- add_recall_options_to_sessions_query,
-]
-
-
-def up(client):
- run(client, [q["up"] for q in queries])
-
-
-def down(client):
- run(client, [q["down"] for q in reversed(queries)])
diff --git a/agents-api/migrations/migrate_1733755642_transition_indices.py b/agents-api/migrations/migrate_1733755642_transition_indices.py
deleted file mode 100644
index 1b33f4646..000000000
--- a/agents-api/migrations/migrate_1733755642_transition_indices.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# /usr/bin/env python3
-
-MIGRATION_ID = "transition_indices"
-CREATED_AT = 1733755642.881131
-
-
-create_transition_indices = dict(
- up=[
- "::index create executions:execution_id_status_idx { execution_id, status }",
- "::index create executions:execution_id_task_id_idx { execution_id, task_id }",
- "::index create executions:task_id_execution_id_idx { task_id, execution_id }",
- "::index create tasks:task_id_agent_id_idx { task_id, agent_id }",
- "::index create agents:agent_id_developer_id_idx { agent_id, developer_id }",
- "::index create sessions:session_id_developer_id_idx { session_id, developer_id }",
- "::index create docs:owner_id_metadata_doc_id_idx { owner_id, metadata, doc_id }",
- "::index create agents:developer_id_metadata_agent_id_idx { developer_id, metadata, agent_id }",
- "::index create users:developer_id_metadata_user_id_idx { developer_id, metadata, user_id }",
- "::index create transitions:execution_id_type_created_at_idx { execution_id, type, created_at }",
- ],
- down=[
- "::index drop executions:execution_id_status_idx",
- "::index drop executions:execution_id_task_id_idx",
- "::index drop executions:task_id_execution_id_idx",
- "::index drop tasks:task_id_agent_id_idx",
- "::index drop agents:agent_id_developer_id_idx",
- "::index drop sessions:session_id_developer_id_idx",
- "::index drop docs:owner_id_metadata_doc_id_idx",
- "::index drop agents:developer_id_metadata_agent_id_idx",
- "::index drop users:developer_id_metadata_user_id_idx",
- "::index drop transitions:execution_id_type_created_at_idx",
- ],
-)
-
-
-def up(client):
- for q in create_transition_indices["up"]:
- client.run(q)
-
-
-def down(client):
- for q in create_transition_indices["down"]:
- client.run(q)
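Unlike the brace-joined migrations earlier in the series, this one issues each ::index statement in its own client.run call. A sketch of that stepwise pattern with minimal failure reporting, on the assumption that index operations are independent and a failure should name the statement that broke:

    # Stepwise runner sketch: one statement per call, failing statement
    # surfaced in the error (assumption: statements are independent).
    def run_stepwise(client, statements: list[str]) -> None:
        for stmt in statements:
            try:
                client.run(stmt)
            except Exception as exc:
                raise RuntimeError(f"index migration failed at: {stmt!r}") from exc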
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 520fbf922..01a6991ee 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,6 +1,8 @@
+import json
import time
from uuid import UUID
+import asyncpg
from fastapi.testclient import TestClient
from temporalio.client import WorkflowHandle
from uuid_extensions import uuid7
@@ -17,6 +19,7 @@
CreateTransitionRequest,
CreateUserRequest,
)
+from agents_api.clients.pg import get_pg_client
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
# from agents_api.queries.agents.create_agent import create_agent
@@ -26,9 +29,7 @@
# from agents_api.queries.docs.create_doc import create_doc
# from agents_api.queries.docs.delete_doc import delete_doc
# from agents_api.queries.execution.create_execution import create_execution
-# from agents_api.queries.execution.create_execution_transition import (
-# create_execution_transition,
-# )
+# from agents_api.queries.execution.create_execution_transition import create_execution_transition
# from agents_api.queries.execution.create_temporal_lookup import create_temporal_lookup
# from agents_api.queries.files.create_file import create_file
# from agents_api.queries.files.delete_file import delete_file
@@ -39,14 +40,14 @@
# from agents_api.queries.tools.create_tools import create_tools
# from agents_api.queries.tools.delete_tool import delete_tool
from agents_api.queries.users.create_user import create_user
-from agents_api.queries.users.delete_user import delete_user
-# from agents_api.web import app
+# from agents_api.queries.users.delete_user import delete_user
+from agents_api.web import app
from .utils import (
+ get_pg_dsn,
patch_embed_acompletion as patch_embed_acompletion_ctx,
)
from .utils import (
- patch_pg_client,
patch_s3_client,
)
@@ -54,9 +55,9 @@
@fixture(scope="global")
-async def pg_client():
- async with patch_pg_client() as pg_client:
- yield pg_client
+def pg_dsn():
+ with get_pg_dsn() as pg_dsn:
+ yield pg_dsn
@fixture(scope="global")
@@ -66,150 +67,157 @@ def test_developer_id():
return
developer_id = uuid7()
-
yield developer_id
# @fixture(scope="global")
-# def test_file(client=pg_client, developer_id=test_developer_id):
-# file = create_file(
-# developer_id=developer_id,
-# data=CreateFileRequest(
-# name="Hello",
-# description="World",
-# mime_type="text/plain",
-# content="eyJzYW1wbGUiOiAidGVzdCJ9",
-# ),
-# client=client,
-# )
-
-# yield file
+# async def test_file(dsn=pg_dsn, developer_id=test_developer_id):
+# async with get_pg_client(dsn=dsn) as client:
+# file = await create_file(
+# developer_id=developer_id,
+# data=CreateFileRequest(
+# name="Hello",
+# description="World",
+# mime_type="text/plain",
+# content="eyJzYW1wbGUiOiAidGVzdCJ9",
+# ),
+# client=client,
+# )
+# yield file
@fixture(scope="global")
-async def test_developer(pg_client=pg_client, developer_id=test_developer_id):
- return await get_developer(
- developer_id=developer_id,
- client=pg_client,
- )
+async def test_developer(dsn=pg_dsn, developer_id=test_developer_id):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ developer = await get_developer(
+ developer_id=developer_id,
+ client=client,
+ )
+
+ yield developer
+ await pool.close()
@fixture(scope="test")
def patch_embed_acompletion():
output = {"role": "assistant", "content": "Hello, world!"}
-
with patch_embed_acompletion_ctx(output) as (embed, acompletion):
yield embed, acompletion
# @fixture(scope="global")
-# def test_agent(pg_client=pg_client, developer_id=test_developer_id):
-# agent = create_agent(
-# developer_id=developer_id,
-# data=CreateAgentRequest(
-# model="gpt-4o-mini",
-# name="test agent",
-# about="test agent about",
-# metadata={"test": "test"},
-# ),
-# client=pg_client,
-# )
-
-# yield agent
+# async def test_agent(dsn=pg_dsn, developer_id=test_developer_id):
+# async with get_pg_client(dsn=dsn) as client:
+# agent = await create_agent(
+# developer_id=developer_id,
+# data=CreateAgentRequest(
+# model="gpt-4o-mini",
+# name="test agent",
+# about="test agent about",
+# metadata={"test": "test"},
+# ),
+# client=client,
+# )
+# yield agent
@fixture(scope="global")
-def test_user(pg_client=pg_client, developer_id=test_developer_id):
- user = create_user(
- developer_id=developer_id,
- data=CreateUserRequest(
- name="test user",
- about="test user about",
- ),
- client=pg_client,
- )
+async def test_user(dsn=pg_dsn, developer=test_developer):
+ pool = await asyncpg.create_pool(dsn=dsn)
+
+ async with get_pg_client(pool=pool) as client:
+ user = await create_user(
+ developer_id=developer.id,
+ data=CreateUserRequest(
+ name="test user",
+ about="test user about",
+ ),
+ client=client,
+ )
yield user
+ await pool.close()
# @fixture(scope="global")
-# def test_session(
-# pg_client=pg_client,
+# async def test_session(
+# dsn=pg_dsn,
# developer_id=test_developer_id,
# test_user=test_user,
# test_agent=test_agent,
# ):
-# session = create_session(
-# developer_id=developer_id,
-# data=CreateSessionRequest(
-# agent=test_agent.id, user=test_user.id, metadata={"test": "test"}
-# ),
-# client=pg_client,
-# )
-
-# yield session
+# async with get_pg_client(dsn=dsn) as client:
+# session = await create_session(
+# developer_id=developer_id,
+# data=CreateSessionRequest(
+# agent=test_agent.id, user=test_user.id, metadata={"test": "test"}
+# ),
+# client=client,
+# )
+# yield session
# @fixture(scope="global")
-# def test_doc(
-# client=pg_client,
+# async def test_doc(
+# dsn=pg_dsn,
# developer_id=test_developer_id,
# agent=test_agent,
# ):
-# doc = create_doc(
-# developer_id=developer_id,
-# owner_type="agent",
-# owner_id=agent.id,
-# data=CreateDocRequest(title="Hello", content=["World"]),
-# client=client,
-# )
-
-# yield doc
+# async with get_pg_client(dsn=dsn) as client:
+# doc = await create_doc(
+# developer_id=developer_id,
+# owner_type="agent",
+# owner_id=agent.id,
+# data=CreateDocRequest(title="Hello", content=["World"]),
+# client=client,
+# )
+# yield doc
# @fixture(scope="global")
-# def test_user_doc(
-# client=pg_client,
+# async def test_user_doc(
+# dsn=pg_dsn,
# developer_id=test_developer_id,
# user=test_user,
# ):
-# doc = create_doc(
-# developer_id=developer_id,
-# owner_type="user",
-# owner_id=user.id,
-# data=CreateDocRequest(title="Hello", content=["World"]),
-# client=client,
-# )
-
-# yield doc
+# async with get_pg_client(dsn=dsn) as client:
+# doc = await create_doc(
+# developer_id=developer_id,
+# owner_type="user",
+# owner_id=user.id,
+# data=CreateDocRequest(title="Hello", content=["World"]),
+# client=client,
+# )
+# yield doc
# @fixture(scope="global")
-# def test_task(
-# client=pg_client,
+# async def test_task(
+# dsn=pg_dsn,
# developer_id=test_developer_id,
# agent=test_agent,
# ):
-# task = create_task(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# data=CreateTaskRequest(
-# **{
-# "name": "test task",
-# "description": "test task about",
-# "input_schema": {"type": "object", "additionalProperties": True},
-# "main": [{"evaluate": {"hello": '"world"'}}],
-# }
-# ),
-# client=client,
-# )
-
-# yield task
+# async with get_pg_client(dsn=dsn) as client:
+# task = await create_task(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=CreateTaskRequest(
+# **{
+# "name": "test task",
+# "description": "test task about",
+# "input_schema": {"type": "object", "additionalProperties": True},
+# "main": [{"evaluate": {"hello": '"world"'}}],
+# }
+# ),
+# client=client,
+# )
+# yield task
# @fixture(scope="global")
-# def test_execution(
-# client=pg_client,
+# async def test_execution(
+# dsn=pg_dsn,
# developer_id=test_developer_id,
# task=test_task,
# ):
@@ -218,25 +226,25 @@ def test_user(pg_client=pg_client, developer_id=test_developer_id):
# id="blah",
# )
-# execution = create_execution(
-# developer_id=developer_id,
-# task_id=task.id,
-# data=CreateExecutionRequest(input={"test": "test"}),
-# client=client,
-# )
-# create_temporal_lookup(
-# developer_id=developer_id,
-# execution_id=execution.id,
-# workflow_handle=workflow_handle,
-# client=client,
-# )
-
-# yield execution
+# async with get_pg_client(dsn=dsn) as client:
+# execution = await create_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=CreateExecutionRequest(input={"test": "test"}),
+# client=client,
+# )
+# await create_temporal_lookup(
+# developer_id=developer_id,
+# execution_id=execution.id,
+# workflow_handle=workflow_handle,
+# client=client,
+# )
+# yield execution
# @fixture(scope="test")
-# def test_execution_started(
-# client=pg_client,
+# async def test_execution_started(
+# dsn=pg_dsn,
# developer_id=test_developer_id,
# task=test_task,
# ):
@@ -245,61 +253,61 @@ def test_user(pg_client=pg_client, developer_id=test_developer_id):
# id="blah",
# )
-# execution = create_execution(
-# developer_id=developer_id,
-# task_id=task.id,
-# data=CreateExecutionRequest(input={"test": "test"}),
-# client=client,
-# )
-# create_temporal_lookup(
-# developer_id=developer_id,
-# execution_id=execution.id,
-# workflow_handle=workflow_handle,
-# client=client,
-# )
-
-# # Start the execution
-# create_execution_transition(
-# developer_id=developer_id,
-# task_id=task.id,
-# execution_id=execution.id,
-# data=CreateTransitionRequest(
-# type="init",
-# output={},
-# current={"workflow": "main", "step": 0},
-# next={"workflow": "main", "step": 0},
-# ),
-# update_execution_status=True,
-# client=client,
-# )
-
-# yield execution
+# async with get_pg_client(dsn=dsn) as client:
+# execution = await create_execution(
+# developer_id=developer_id,
+# task_id=task.id,
+# data=CreateExecutionRequest(input={"test": "test"}),
+# client=client,
+# )
+# await create_temporal_lookup(
+# developer_id=developer_id,
+# execution_id=execution.id,
+# workflow_handle=workflow_handle,
+# client=client,
+# )
+
+# # Start the execution
+# await create_execution_transition(
+# developer_id=developer_id,
+# task_id=task.id,
+# execution_id=execution.id,
+# data=CreateTransitionRequest(
+# type="init",
+# output={},
+# current={"workflow": "main", "step": 0},
+# next={"workflow": "main", "step": 0},
+# ),
+# update_execution_status=True,
+# client=client,
+# )
+# yield execution
# @fixture(scope="global")
-# def test_transition(
-# client=pg_client,
+# async def test_transition(
+# dsn=pg_dsn,
# developer_id=test_developer_id,
# execution=test_execution,
# ):
-# transition = create_execution_transition(
-# developer_id=developer_id,
-# execution_id=execution.id,
-# data=CreateTransitionRequest(
-# type="step",
-# output={},
-# current={"workflow": "main", "step": 0},
-# next={"workflow": "wf1", "step": 1},
-# ),
-# client=client,
-# )
-
-# yield transition
+# async with get_pg_client(dsn=dsn) as client:
+# transition = await create_execution_transition(
+# developer_id=developer_id,
+# execution_id=execution.id,
+# data=CreateTransitionRequest(
+# type="step",
+# output={},
+# current={"workflow": "main", "step": 0},
+# next={"workflow": "wf1", "step": 1},
+# ),
+# client=client,
+# )
+# yield transition
# @fixture(scope="global")
-# def test_tool(
-# client=pg_client,
+# async def test_tool(
+# dsn=pg_dsn,
# developer_id=test_developer_id,
# agent=test_agent,
# ):
@@ -314,23 +322,23 @@ def test_user(pg_client=pg_client, developer_id=test_developer_id):
# "type": "function",
# }
-# [tool, *_] = create_tools(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# data=[CreateToolRequest(**tool)],
-# client=client,
-# )
-#
-# yield tool
+# async with get_pg_client(dsn=dsn) as client:
+# [tool, *_] = await create_tools(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=[CreateToolRequest(**tool)],
+# client=client,
+# )
+# yield tool
# @fixture(scope="global")
-# def client(pg_client=pg_client):
+# def client(dsn=pg_dsn):
# client = TestClient(app=app)
-# client.state.pg_client = pg_client
-
+# client.state.pg_client = get_pg_client(dsn=dsn)
# return client
+
# @fixture(scope="global")
# def make_request(client=client, developer_id=test_developer_id):
# def _make_request(method, url, **kwargs):
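The fixture refactor above replaces one shared global pg_client with a pg_dsn fixture; every fixture or test that needs the database now creates its own asyncpg pool from the DSN and closes it after its yield. The resulting shape, distilled; fixture is ward's decorator, get_pg_client is the project helper imported at the top of the file, and create_resource is a hypothetical setup call:

    # Distilled shape of the new fixture pattern (ward + asyncpg).
    import asyncpg
    from ward import fixture

    @fixture(scope="global")
    async def some_resource(dsn=pg_dsn):  # ward-style dependency injection
        pool = await asyncpg.create_pool(dsn=dsn)
        async with get_pg_client(pool=pool) as client:
            resource = await create_resource(client=client)  # hypothetical
        yield resource
        await pool.close()  # teardown after the dependent tests finish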
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index 9ac65dda9..6a14d9575 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -3,21 +3,24 @@
from uuid_extensions import uuid7
from ward import raises, test
+from agents_api.clients.pg import get_pg_client, get_pg_pool
from agents_api.common.protocol.developers import Developer
from agents_api.queries.developers.get_developer import (
get_developer,
) # , verify_developer
-from .fixtures import pg_client, test_developer_id
+from .fixtures import pg_dsn, test_developer_id
@test("query: get developer not exists")
-def _(client=pg_client):
+async def _(dsn=pg_dsn):
+ pool = await get_pg_pool(dsn=dsn)
with raises(Exception):
- get_developer(
- developer_id=uuid7(),
- client=client,
- )
+ async with get_pg_client(pool=pool) as client:
+ await get_developer(
+ developer_id=uuid7(),
+ client=client,
+ )
# @test("query: get developer")
diff --git a/agents-api/tests/test_user_queries.py b/agents-api/tests/test_user_queries.py
index 7ba25b358..d21b39594 100644
--- a/agents-api/tests/test_user_queries.py
+++ b/agents-api/tests/test_user_queries.py
@@ -1,178 +1,190 @@
-# """
-# This module contains tests for SQL query generation functions in the users module.
-# Tests verify the SQL queries without actually executing them against a database.
-# """
-
-# from uuid import UUID
-
-# from uuid_extensions import uuid7
-# from ward import raises, test
-
-# from agents_api.autogen.openapi_model import (
-# CreateOrUpdateUserRequest,
-# CreateUserRequest,
-# PatchUserRequest,
-# ResourceUpdatedResponse,
-# UpdateUserRequest,
-# User,
-# )
-# from agents_api.queries.users import (
-# create_or_update_user,
-# create_user,
-# delete_user,
-# get_user,
-# list_users,
-# patch_user,
-# update_user,
-# )
-# from tests.fixtures import pg_client, test_developer_id, test_user
-
-# # Test UUIDs for consistent testing
-# TEST_DEVELOPER_ID = UUID("123e4567-e89b-12d3-a456-426614174000")
-# TEST_USER_ID = UUID("987e6543-e21b-12d3-a456-426614174000")
-
-
-# @test("query: create user sql")
-# def _(client=pg_client, developer_id=test_developer_id):
-# """Test that a user can be successfully created."""
-
-# create_user(
-# developer_id=developer_id,
-# data=CreateUserRequest(
-# name="test user",
-# about="test user about",
-# ),
-# client=client,
-# )
-
-
-# @test("query: create or update user sql")
-# def _(client=pg_client, developer_id=test_developer_id):
-# """Test that a user can be successfully created or updated."""
-
-# create_or_update_user(
-# developer_id=developer_id,
-# user_id=uuid7(),
-# data=CreateOrUpdateUserRequest(
-# name="test user",
-# about="test user about",
-# ),
-# client=client,
-# )
-
-
-# @test("query: update user sql")
-# def _(client=pg_client, developer_id=test_developer_id, user=test_user):
-# """Test that an existing user's information can be successfully updated."""
-
-# # Verify that the 'updated_at' timestamp is greater than the 'created_at' timestamp, indicating a successful update.
-# update_result = update_user(
-# user_id=user.id,
-# developer_id=developer_id,
-# data=UpdateUserRequest(
-# name="updated user",
-# about="updated user about",
-# ),
-# client=client,
-# )
-
-# assert update_result is not None
-# assert isinstance(update_result, ResourceUpdatedResponse)
-# assert update_result.updated_at > user.created_at
-
-
-# @test("query: get user not exists sql")
-# def _(client=pg_client, developer_id=test_developer_id):
-# """Test that retrieving a non-existent user returns an empty result."""
-
-# user_id = uuid7()
-
-# # Ensure that the query for an existing user returns exactly one result.
-# try:
-# get_user(
-# user_id=user_id,
-# developer_id=developer_id,
-# client=client,
-# )
-# except Exception:
-# pass
-# else:
-# assert (
-# False
-# ), "Expected an exception to be raised when retrieving a non-existent user."
-
-
-# @test("query: get user exists sql")
-# def _(client=pg_client, developer_id=test_developer_id, user=test_user):
-# """Test that retrieving an existing user returns the correct user information."""
-
-# result = get_user(
-# user_id=user.id,
-# developer_id=developer_id,
-# client=client,
-# )
-
-# assert result is not None
-# assert isinstance(result, User)
-
-
-# @test("query: list users sql")
-# def _(client=pg_client, developer_id=test_developer_id):
-# """Test that listing users returns a collection of user information."""
-
-# result = list_users(
-# developer_id=developer_id,
-# client=client,
-# )
-
-# assert isinstance(result, list)
-# assert len(result) >= 1
-# assert all(isinstance(user, User) for user in result)
-
-
-# @test("query: patch user sql")
-# def _(client=pg_client, developer_id=test_developer_id, user=test_user):
-# """Test that a user can be successfully patched."""
-
-# patch_result = patch_user(
-# developer_id=developer_id,
-# user_id=user.id,
-# data=PatchUserRequest(
-# name="patched user",
-# about="patched user about",
-# metadata={"test": "metadata"},
-# ),
-# client=client,
-# )
-
-# assert patch_result is not None
-# assert isinstance(patch_result, ResourceUpdatedResponse)
-# assert patch_result.updated_at > user.created_at
-
-
-# @test("query: delete user sql")
-# def _(client=pg_client, developer_id=test_developer_id, user=test_user):
-# """Test that a user can be successfully deleted."""
-
-# delete_result = delete_user(
-# developer_id=developer_id,
-# user_id=user.id,
-# client=client,
-# )
-
-# assert delete_result is not None
-# assert isinstance(delete_result, ResourceUpdatedResponse)
-
-# # Verify the user no longer exists
-# try:
-# get_user(
-# developer_id=developer_id,
-# user_id=user.id,
-# client=client,
-# )
-# except Exception:
-# pass
-# else:
-# assert (
-# False
-# ), "Expected an exception to be raised when retrieving a deleted user."
+"""
+This module contains tests for SQL query generation functions in the users module.
+Tests verify the SQL queries without actually executing them against a database.
+"""
+
+from uuid import UUID
+
+import asyncpg
+from uuid_extensions import uuid7
+from ward import raises, test
+
+from agents_api.autogen.openapi_model import (
+ CreateOrUpdateUserRequest,
+ CreateUserRequest,
+ PatchUserRequest,
+ ResourceUpdatedResponse,
+ UpdateUserRequest,
+ User,
+)
+from agents_api.clients.pg import get_pg_client
+from agents_api.queries.users import (
+ create_or_update_user,
+ create_user,
+ delete_user,
+ get_user,
+ list_users,
+ patch_user,
+ update_user,
+)
+from tests.fixtures import pg_dsn, test_developer_id, test_user
+
+# Test UUIDs for consistent testing
+TEST_DEVELOPER_ID = UUID("123e4567-e89b-12d3-a456-426614174000")
+TEST_USER_ID = UUID("987e6543-e21b-12d3-a456-426614174000")
+
+
+@test("query: create user sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that a user can be successfully created."""
+
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ await create_user(
+ developer_id=developer_id,
+ data=CreateUserRequest(
+ name="test user",
+ about="test user about",
+ ),
+ client=client,
+ )
+
+
+@test("query: create or update user sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that a user can be successfully created or updated."""
+
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ await create_or_update_user(
+ developer_id=developer_id,
+ user_id=uuid7(),
+ data=CreateOrUpdateUserRequest(
+ name="test user",
+ about="test user about",
+ ),
+ client=client,
+ )
+
+
+@test("query: update user sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
+ """Test that an existing user's information can be successfully updated."""
+
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ update_result = await update_user(
+ user_id=user.id,
+ developer_id=developer_id,
+ data=UpdateUserRequest(
+ name="updated user",
+ about="updated user about",
+ ),
+ client=client,
+ )
+
+ assert update_result is not None
+ assert isinstance(update_result, ResourceUpdatedResponse)
+ assert update_result.updated_at > user.created_at
+
+
+@test("query: get user not exists sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that retrieving a non-existent user returns an empty result."""
+
+ user_id = uuid7()
+
+ pool = await asyncpg.create_pool(dsn=dsn)
+
+ with raises(Exception):
+ async with get_pg_client(pool=pool) as client:
+ await get_user(
+ user_id=user_id,
+ developer_id=developer_id,
+ client=client,
+ )
+
+
+@test("query: get user exists sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
+ """Test that retrieving an existing user returns the correct user information."""
+
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ result = await get_user(
+ user_id=user.id,
+ developer_id=developer_id,
+ client=client,
+ )
+
+ assert result is not None
+ assert isinstance(result, User)
+
+
+@test("query: list users sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that listing users returns a collection of user information."""
+
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ result = await list_users(
+ developer_id=developer_id,
+ client=client,
+ )
+
+ assert isinstance(result, list)
+ assert len(result) >= 1
+ assert all(isinstance(user, User) for user in result)
+
+
+@test("query: patch user sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
+ """Test that a user can be successfully patched."""
+
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ patch_result = await patch_user(
+ developer_id=developer_id,
+ user_id=user.id,
+ data=PatchUserRequest(
+ name="patched user",
+ about="patched user about",
+ metadata={"test": "metadata"},
+ ),
+ client=client,
+ )
+
+ assert patch_result is not None
+ assert isinstance(patch_result, ResourceUpdatedResponse)
+ assert patch_result.updated_at > user.created_at
+
+
+@test("query: delete user sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
+ """Test that a user can be successfully deleted."""
+
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ delete_result = await delete_user(
+ developer_id=developer_id,
+ user_id=user.id,
+ client=client,
+ )
+
+ assert delete_result is not None
+ assert isinstance(delete_result, ResourceUpdatedResponse)
+
+ # Verify the user no longer exists
+ try:
+ async with get_pg_client(pool=pool) as client:
+ await get_user(
+ developer_id=developer_id,
+ user_id=user.id,
+ client=client,
+ )
+ except Exception:
+ pass
+ else:
+ assert (
+ False
+ ), "Expected an exception to be raised when retrieving a deleted user."
diff --git a/agents-api/tests/utils.py b/agents-api/tests/utils.py
index a6a591823..990a1015e 100644
--- a/agents-api/tests/utils.py
+++ b/agents-api/tests/utils.py
@@ -176,10 +176,8 @@ async def __aexit__(self, *_):
yield mock_session
-@asynccontextmanager
-async def patch_pg_client():
- # with patch("agents_api.clients.pg.get_pg_client") as get_pg_client:
-
+@contextmanager
+def get_pg_dsn():
with PostgresContainer("timescale/timescaledb-ha:pg17") as postgres:
test_psql_url = postgres.get_connection_url()
pg_dsn = f"postgres://{test_psql_url[22:]}?sslmode=disable"
@@ -187,13 +185,4 @@ async def patch_pg_client():
process = subprocess.Popen(command, shell=True)
process.wait()
- client = await asyncpg.connect(pg_dsn)
- await client.set_type_codec(
- "jsonb",
- encoder=json.dumps,
- decoder=json.loads,
- schema="pg_catalog",
- )
-
- # get_pg_client.return_value = client
- yield client
+ yield pg_dsn
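get_pg_dsn is now a plain synchronous context manager that owns the throwaway Postgres container for the duration of the with block and hands back only a DSN; all connection setup, including the jsonb codec that used to live here, is now the caller's job. A usage sketch:

    # Usage sketch: the container lives as long as the `with` block.
    import asyncio
    import asyncpg

    async def ping(dsn: str) -> str:
        conn = await asyncpg.connect(dsn)
        try:
            return await conn.fetchval("SELECT version()")
        finally:
            await conn.close()

    with get_pg_dsn() as dsn:
        print(asyncio.run(ping(dsn)))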
diff --git a/memory-store/migrations/000017_compression.down.sql b/memory-store/migrations/000017_compression.down.sql
new file mode 100644
index 000000000..8befeb465
--- /dev/null
+++ b/memory-store/migrations/000017_compression.down.sql
@@ -0,0 +1,17 @@
+BEGIN;
+
+SELECT
+ remove_compression_policy ('entries');
+
+SELECT
+ remove_compression_policy ('transitions');
+
+ALTER TABLE entries
+SET
+ (timescaledb.compress = FALSE);
+
+ALTER TABLE transitions
+SET
+ (timescaledb.compress = FALSE);
+
+COMMIT;
diff --git a/memory-store/migrations/000017_compression.up.sql b/memory-store/migrations/000017_compression.up.sql
new file mode 100644
index 000000000..5cb57d518
--- /dev/null
+++ b/memory-store/migrations/000017_compression.up.sql
@@ -0,0 +1,25 @@
+BEGIN;
+
+ALTER TABLE entries
+SET
+ (
+ timescaledb.compress = TRUE,
+ timescaledb.compress_segmentby = 'session_id',
+ timescaledb.compress_orderby = 'created_at DESC, entry_id DESC'
+ );
+
+SELECT
+ add_compression_policy ('entries', INTERVAL '7 days');
+
+ALTER TABLE transitions
+SET
+ (
+ timescaledb.compress = TRUE,
+ timescaledb.compress_segmentby = 'execution_id',
+ timescaledb.compress_orderby = 'created_at DESC, transition_id DESC'
+ );
+
+SELECT
+ add_compression_policy ('transitions', INTERVAL '7 days');
+
+COMMIT;
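The up migration enables TimescaleDB native compression on both hypertables, segmenting by the id each table is filtered on, and schedules compression of chunks older than seven days. A hedged way to watch the policy take effect, assuming TimescaleDB's standard hypertable_compression_stats() helper and its byte-count columns:

    # Hedged sanity check (assumes TimescaleDB's hypertable_compression_stats).
    import asyncio
    import asyncpg

    async def compression_ratio(dsn: str, table: str) -> float | None:
        conn = await asyncpg.connect(dsn)
        try:
            row = await conn.fetchrow(
                "SELECT before_compression_total_bytes AS before, "
                "       after_compression_total_bytes  AS after "
                "FROM hypertable_compression_stats($1::text::regclass)",
                table,
            )
        finally:
            await conn.close()
        if row is None or not row["after"]:
            return None  # no chunks compressed yet
        return row["before"] / row["after"]

    # asyncio.run(compression_ratio(dsn, "entries"))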
diff --git a/memory-store/migrations/000018_doc_search.down.sql b/memory-store/migrations/000018_doc_search.down.sql
new file mode 100644
index 000000000..e69de29bb
diff --git a/memory-store/migrations/000018_doc_search.up.sql b/memory-store/migrations/000018_doc_search.up.sql
new file mode 100644
index 000000000..737415348
--- /dev/null
+++ b/memory-store/migrations/000018_doc_search.up.sql
@@ -0,0 +1,23 @@
+-- docs_embeddings schema (docs_embeddings is an extended view of docs)
+-- +----------------------+--------------------------+-----------+----------+-------------+
+-- | Column | Type | Modifiers | Storage | Description |
+-- |----------------------+--------------------------+-----------+----------+-------------|
+-- | embedding_uuid | uuid | | plain | |
+-- | chunk_seq | integer | | plain | |
+-- | chunk | text | | extended | |
+-- | embedding | vector(1024) | | external | |
+-- | developer_id | uuid | | plain | |
+-- | doc_id | uuid | | plain | |
+-- | title | text | | extended | |
+-- | content | text | | extended | |
+-- | index | integer | | plain | |
+-- | modality | text | | extended | |
+-- | embedding_model | text | | extended | |
+-- | embedding_dimensions | integer | | plain | |
+-- | language | text | | extended | |
+-- | created_at | timestamp with time zone | | plain | |
+-- | updated_at | timestamp with time zone | | plain | |
+-- | metadata | jsonb | | extended | |
+-- | search_tsv | tsvector | | extended | |
+-- +----------------------+--------------------------+-----------+----------+-------------+
+
diff --git a/memory-store/migrations/000019_system_developer.down.sql b/memory-store/migrations/000019_system_developer.down.sql
new file mode 100644
index 000000000..92d8d65d5
--- /dev/null
+++ b/memory-store/migrations/000019_system_developer.down.sql
@@ -0,0 +1,7 @@
+BEGIN;
+
+-- Remove the system developer
+DELETE FROM developers
+WHERE developer_id = '00000000-0000-0000-0000-000000000000';
+
+COMMIT;
diff --git a/memory-store/migrations/000019_system_developer.up.sql b/memory-store/migrations/000019_system_developer.up.sql
new file mode 100644
index 000000000..34635b7ad
--- /dev/null
+++ b/memory-store/migrations/000019_system_developer.up.sql
@@ -0,0 +1,18 @@
+BEGIN;
+
+-- Insert system developer with all zeros UUID
+INSERT INTO developers (
+ developer_id,
+ email,
+ active,
+ tags,
+ settings
+) VALUES (
+ '00000000-0000-0000-0000-000000000000',
+ 'system@internal.julep.ai',
+ true,
+ ARRAY['system', 'paid'],
+ '{}'::jsonb
+) ON CONFLICT (developer_id) DO NOTHING;
+
+COMMIT;
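Seeding the all-zeros system developer with ON CONFLICT (developer_id) DO NOTHING makes the migration safe to re-run. A small sketch asserting that idempotency from Python:

    # Sketch: the seed is idempotent, so exactly one system row must exist
    # no matter how many times the migration ran.
    import asyncio
    import uuid
    import asyncpg

    SYSTEM_DEVELOPER_ID = uuid.UUID("00000000-0000-0000-0000-000000000000")

    async def assert_single_system_developer(dsn: str) -> None:
        conn = await asyncpg.connect(dsn)
        try:
            n = await conn.fetchval(
                "SELECT count(*) FROM developers WHERE developer_id = $1",
                SYSTEM_DEVELOPER_ID,
            )
        finally:
            await conn.close()
        assert n == 1, f"expected one system developer, found {n}"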
From 57fdec65c7e149dfdab0f1f63a6e4c61b2dc9cff Mon Sep 17 00:00:00 2001
From: creatorrr
Date: Tue, 17 Dec 2024 07:03:41 +0000
Subject: [PATCH 038/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/clients/pg.py | 2 +-
agents-api/agents_api/web.py | 1 +
agents-api/tests/fixtures.py | 5 +++--
3 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/agents-api/agents_api/clients/pg.py b/agents-api/agents_api/clients/pg.py
index 852152769..f8c637023 100644
--- a/agents-api/agents_api/clients/pg.py
+++ b/agents-api/agents_api/clients/pg.py
@@ -1,5 +1,5 @@
-from contextlib import asynccontextmanager
import json
+from contextlib import asynccontextmanager
import asyncpg
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index d3a672fd8..ff801d81c 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -23,6 +23,7 @@
from .dependencies.auth import get_api_key
from .env import api_prefix, hostname, protocol, public_port, sentry_dsn
from .exceptions import PromptTooBigError
+
# from .routers import (
# agents,
# docs,
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 01a6991ee..d0fa7daf8 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -43,12 +43,13 @@
# from agents_api.queries.users.delete_user import delete_user
from agents_api.web import app
+
from .utils import (
get_pg_dsn,
- patch_embed_acompletion as patch_embed_acompletion_ctx,
+ patch_s3_client,
)
from .utils import (
- patch_s3_client,
+ patch_embed_acompletion as patch_embed_acompletion_ctx,
)
EMBEDDING_SIZE: int = 1024
From 09b2053ee3367b390b8582ee9d8e854c52eacc5b Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Tue, 17 Dec 2024 11:12:21 +0300
Subject: [PATCH 039/274] fix(agents-api): fix user queries and tests
---
.../agents_api/queries/agents/delete_agent.py | 5 +--
.../queries/users/create_or_update_user.py | 28 +++++-----------
.../agents_api/queries/users/delete_user.py | 21 ++++++------
.../agents_api/queries/users/get_user.py | 2 +-
.../agents_api/queries/users/list_users.py | 3 +-
.../agents_api/queries/users/patch_user.py | 18 ++---------
.../agents_api/queries/users/update_user.py | 32 +++++--------------
agents-api/pyproject.toml | 1 +
agents-api/tests/test_user_queries.py | 3 +-
agents-api/uv.lock | 2 ++
10 files changed, 35 insertions(+), 80 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index cad3d774f..9d6869a94 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -38,11 +38,8 @@
ResourceDeletedResponse,
one=True,
transform=lambda d: {
- "id": UUID(d.pop("agent_id")),
- "deleted_at": utcnow(),
- "jobs": [],
+ "id": d["agent_id"],
},
- _kind="deleted",
)
@pg_query
# @increase_counter("delete_agent1")
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
index b9939b620..667199097 100644
--- a/agents-api/agents_api/queries/users/create_or_update_user.py
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -6,7 +6,7 @@
from sqlglot import parse_one
from sqlglot.optimizer import optimize
-from ...autogen.openapi_model import CreateUserRequest, User
+from ...autogen.openapi_model import CreateOrUpdateUserRequest, User
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
@@ -24,7 +24,7 @@
$2,
$3,
$4,
- COALESCE($5, '{}'::jsonb)
+ $5
)
ON CONFLICT (developer_id, user_id) DO UPDATE SET
name = EXCLUDED.name,
@@ -34,19 +34,7 @@
"""
# Add index hint for better performance
-query = optimize(
- parse_one(raw_query),
- schema={
- "users": {
- "developer_id": "UUID",
- "user_id": "UUID",
- "name": "STRING",
- "about": "STRING",
- "metadata": "JSONB",
- }
- },
-).sql(pretty=True)
-
+query = parse_one(raw_query).sql(pretty=True)
@rewrap_exceptions(
{
@@ -62,12 +50,12 @@
),
}
)
-@wrap_in_class(User)
+@wrap_in_class(User, one=True, transform=lambda d: {**d, "id": d["user_id"]})
@increase_counter("create_or_update_user")
@pg_query
@beartype
-def create_or_update_user(
- *, developer_id: UUID, user_id: UUID, data: CreateUserRequest
+async def create_or_update_user(
+ *, developer_id: UUID, user_id: UUID, data: CreateOrUpdateUserRequest
) -> tuple[str, list]:
"""
Constructs an SQL query to create or update a user.
@@ -75,7 +63,7 @@ def create_or_update_user(
Args:
developer_id (UUID): The UUID of the developer.
user_id (UUID): The UUID of the user.
- data (CreateUserRequest): The user data to insert or update.
+ data (CreateOrUpdateUserRequest): The user data to insert or update.
Returns:
tuple[str, list]: SQL query and parameters.
@@ -88,7 +76,7 @@ def create_or_update_user(
user_id,
data.name,
data.about,
- data.metadata, # Let COALESCE handle None case in SQL
+ data.metadata or {},
]
return (
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
index 2a57ccc7c..63119f226 100644
--- a/agents-api/agents_api/queries/users/delete_user.py
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -10,6 +10,7 @@
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ...common.utils.datetime import utcnow
# Define the raw SQL query outside the function
raw_query = """
WITH deleted_data AS (
@@ -22,19 +23,11 @@
)
DELETE FROM users
WHERE developer_id = $1 AND user_id = $2
-RETURNING user_id as id, developer_id;
+RETURNING user_id, developer_id;
"""
# Parse and optimize the query
-query = optimize(
- parse_one(raw_query),
- schema={
- "user_files": {"developer_id": "UUID", "user_id": "UUID"},
- "user_docs": {"developer_id": "UUID", "user_id": "UUID"},
- "users": {"developer_id": "UUID", "user_id": "UUID"},
- },
-).sql(pretty=True)
-
+query = parse_one(raw_query).sql(pretty=True)
@rewrap_exceptions(
{
@@ -45,11 +38,15 @@
)
}
)
-@wrap_in_class(ResourceDeletedResponse, one=True)
+@wrap_in_class(
+ ResourceDeletedResponse,
+ one=True,
+ transform=lambda d: {**d, "id": d["user_id"], "deleted_at": utcnow()},
+)
@increase_counter("delete_user")
@pg_query
@beartype
-def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
+async def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
"""
Constructs optimized SQL query to delete a user and related data.
Uses primary key for efficient deletion.
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index 6e7c26d75..6989c8edb 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -42,7 +42,7 @@
@increase_counter("get_user")
@pg_query
@beartype
-def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
+async def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
"""
Constructs an optimized SQL query to retrieve a user's details.
Uses the primary key index (developer_id, user_id) for efficient lookup.
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
index c2259444a..7f3677eab 100644
--- a/agents-api/agents_api/queries/users/list_users.py
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -24,7 +24,6 @@
updated_at
FROM users
WHERE developer_id = $1
- AND deleted_at IS NULL
AND ($4::jsonb IS NULL OR metadata @> $4)
)
SELECT *
@@ -55,7 +54,7 @@
@increase_counter("list_users")
@pg_query
@beartype
-def list_users(
+async def list_users(
*,
developer_id: UUID,
limit: int = 100,
diff --git a/agents-api/agents_api/queries/users/patch_user.py b/agents-api/agents_api/queries/users/patch_user.py
index 913b476c5..fac1e443a 100644
--- a/agents-api/agents_api/queries/users/patch_user.py
+++ b/agents-api/agents_api/queries/users/patch_user.py
@@ -39,21 +39,7 @@
"""
# Parse and optimize the query
-query = optimize(
- parse_one(raw_query),
- schema={
- "users": {
- "developer_id": "UUID",
- "user_id": "UUID",
- "name": "STRING",
- "about": "STRING",
- "metadata": "JSONB",
- "created_at": "TIMESTAMP",
- "updated_at": "TIMESTAMP",
- }
- },
-).sql(pretty=True)
-
+query = parse_one(raw_query).sql(pretty=True)
@rewrap_exceptions(
{
@@ -68,7 +54,7 @@
@increase_counter("patch_user")
@pg_query
@beartype
-def patch_user(
+async def patch_user(
*, developer_id: UUID, user_id: UUID, data: PatchUserRequest
) -> tuple[str, list]:
"""
diff --git a/agents-api/agents_api/queries/users/update_user.py b/agents-api/agents_api/queries/users/update_user.py
index 71599182d..1fffdebe7 100644
--- a/agents-api/agents_api/queries/users/update_user.py
+++ b/agents-api/agents_api/queries/users/update_user.py
@@ -19,31 +19,11 @@
metadata = $5
WHERE developer_id = $1
AND user_id = $2
-RETURNING
- user_id as id,
- developer_id,
- name,
- about,
- metadata,
- created_at,
- updated_at;
+RETURNING *
"""
# Parse and optimize the query
-query = optimize(
- parse_one(raw_query),
- schema={
- "users": {
- "developer_id": "UUID",
- "user_id": "UUID",
- "name": "STRING",
- "about": "STRING",
- "metadata": "JSONB",
- "created_at": "TIMESTAMP",
- "updated_at": "TIMESTAMP",
- }
- },
-).sql(pretty=True)
+query = parse_one(raw_query).sql(pretty=True)
@rewrap_exceptions(
@@ -55,11 +35,15 @@
)
}
)
-@wrap_in_class(ResourceUpdatedResponse, one=True)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {**d, "id": d["user_id"]},
+)
@increase_counter("update_user")
@pg_query
@beartype
-def update_user(
+async def update_user(
*, developer_id: UUID, user_id: UUID, data: UpdateUserRequest
) -> tuple[str, list]:
"""
diff --git a/agents-api/pyproject.toml b/agents-api/pyproject.toml
index f02876443..f0d57a70b 100644
--- a/agents-api/pyproject.toml
+++ b/agents-api/pyproject.toml
@@ -51,6 +51,7 @@ dependencies = [
"uuid7>=0.1.0",
"asyncpg>=0.30.0",
"sqlglot>=26.0.0",
+ "testcontainers>=4.9.0",
]
[dependency-groups]
diff --git a/agents-api/tests/test_user_queries.py b/agents-api/tests/test_user_queries.py
index d21b39594..2554a1f46 100644
--- a/agents-api/tests/test_user_queries.py
+++ b/agents-api/tests/test_user_queries.py
@@ -13,6 +13,7 @@
CreateOrUpdateUserRequest,
CreateUserRequest,
PatchUserRequest,
+ ResourceDeletedResponse,
ResourceUpdatedResponse,
UpdateUserRequest,
User,
@@ -172,7 +173,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
)
assert delete_result is not None
- assert isinstance(delete_result, ResourceUpdatedResponse)
+ assert isinstance(delete_result, ResourceDeletedResponse)
# Verify the user no longer exists
try:
diff --git a/agents-api/uv.lock b/agents-api/uv.lock
index 9fadcd0cb..07ec7cb4f 100644
--- a/agents-api/uv.lock
+++ b/agents-api/uv.lock
@@ -50,6 +50,7 @@ dependencies = [
{ name = "sse-starlette" },
{ name = "temporalio", extra = ["opentelemetry"] },
{ name = "tenacity" },
+ { name = "testcontainers" },
{ name = "thefuzz" },
{ name = "tiktoken" },
{ name = "uuid7" },
@@ -117,6 +118,7 @@ requires-dist = [
{ name = "sse-starlette", specifier = "~=2.1.3" },
{ name = "temporalio", extras = ["opentelemetry"], specifier = "~=1.8" },
{ name = "tenacity", specifier = "~=9.0.0" },
+ { name = "testcontainers", specifier = ">=4.9.0" },
{ name = "thefuzz", specifier = "~=0.22.1" },
{ name = "tiktoken", specifier = "~=0.7.0" },
{ name = "uuid7", specifier = ">=0.1.0" },
From 6f54492647ab4e0483677fef5d42907831aa11cc Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Tue, 17 Dec 2024 08:13:48 +0000
Subject: [PATCH 040/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/users/create_or_update_user.py | 1 +
agents-api/agents_api/queries/users/delete_user.py | 3 ++-
agents-api/agents_api/queries/users/patch_user.py | 1 +
3 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
index 667199097..d2be71bb4 100644
--- a/agents-api/agents_api/queries/users/create_or_update_user.py
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -36,6 +36,7 @@
# Add index hint for better performance
query = parse_one(raw_query).sql(pretty=True)
+
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
index 63119f226..520c8d695 100644
--- a/agents-api/agents_api/queries/users/delete_user.py
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -7,10 +7,10 @@
from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-from ...common.utils.datetime import utcnow
# Define the raw SQL query outside the function
raw_query = """
WITH deleted_data AS (
@@ -29,6 +29,7 @@
# Parse and optimize the query
query = parse_one(raw_query).sql(pretty=True)
+
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
diff --git a/agents-api/agents_api/queries/users/patch_user.py b/agents-api/agents_api/queries/users/patch_user.py
index fac1e443a..971e96b81 100644
--- a/agents-api/agents_api/queries/users/patch_user.py
+++ b/agents-api/agents_api/queries/users/patch_user.py
@@ -41,6 +41,7 @@
# Parse and optimize the query
query = parse_one(raw_query).sql(pretty=True)
+
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
From c6285aa77e97cfdd1814cc03e892df6145609405 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Tue, 17 Dec 2024 14:05:04 +0530
Subject: [PATCH 041/274] feat(memory-store): Normalize workflows table
Signed-off-by: Diwank Singh Tomer
---
memory-store/migrations/000007_ann.up.sql | 7 ++
.../migrations/000009_sessions.up.sql | 17 +++-
memory-store/migrations/000010_tasks.up.sql | 50 ++++++++++--
.../migrations/000012_transitions.up.sql | 7 ++
.../000013_executions_continuous_view.up.sql | 7 ++
.../migrations/000017_compression.up.sql | 7 ++
.../migrations/000018_doc_search.down.sql | 9 +++
.../migrations/000018_doc_search.up.sql | 80 +++++++++++++++++++
8 files changed, 178 insertions(+), 6 deletions(-)
diff --git a/memory-store/migrations/000007_ann.up.sql b/memory-store/migrations/000007_ann.up.sql
index 3cc606fde..c98b9a2be 100644
--- a/memory-store/migrations/000007_ann.up.sql
+++ b/memory-store/migrations/000007_ann.up.sql
@@ -1,3 +1,10 @@
+/*
+ * VECTOR SIMILARITY SEARCH WITH DISKANN (Complexity: 8/10)
+ * Uses TimescaleDB's vectorizer to convert text into high-dimensional vectors for semantic search.
+ * Implements DiskANN (Disk-based Approximate Nearest Neighbor) for efficient similarity search at scale.
+ * Includes smart text chunking to handle large documents while preserving context and semantic meaning.
+ */
+
-- Create vector similarity search index using diskann and timescale vectorizer
SELECT
ai.create_vectorizer (
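For context, outside the ai.create_vectorizer helper a DiskANN index is declared directly with pgvectorscale's index access method. A minimal sketch, assuming a hypothetical table with a vector(1024) column — none of these names come from this migration:

    -- Hypothetical standalone DiskANN index (pgvectorscale syntax; assumes the
    -- vector and vectorscale extensions are installed)
    CREATE TABLE IF NOT EXISTS demo_embeddings (
        id BIGINT PRIMARY KEY,
        embedding vector(1024)
    );

    CREATE INDEX IF NOT EXISTS idx_demo_embeddings_diskann
        ON demo_embeddings USING diskann (embedding vector_cosine_ops);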
diff --git a/memory-store/migrations/000009_sessions.up.sql b/memory-store/migrations/000009_sessions.up.sql
index 71e83b7ec..082f3823c 100644
--- a/memory-store/migrations/000009_sessions.up.sql
+++ b/memory-store/migrations/000009_sessions.up.sql
@@ -16,7 +16,22 @@ CREATE TABLE IF NOT EXISTS sessions (
forward_tool_calls BOOLEAN,
recall_options JSONB NOT NULL DEFAULT '{}'::JSONB,
CONSTRAINT pk_sessions PRIMARY KEY (developer_id, session_id),
- CONSTRAINT uq_sessions_session_id UNIQUE (session_id)
+ CONSTRAINT uq_sessions_session_id UNIQUE (session_id),
+ CONSTRAINT chk_sessions_token_budget_positive CHECK (
+ token_budget IS NULL
+ OR token_budget > 0
+ ),
+ CONSTRAINT chk_sessions_context_overflow_valid CHECK (
+ context_overflow IS NULL
+ OR context_overflow IN ('truncate', 'adaptive')
+ ),
+ CONSTRAINT chk_sessions_system_template_not_empty CHECK (length(trim(system_template)) > 0),
+ CONSTRAINT chk_sessions_situation_not_empty CHECK (
+ situation IS NULL
+ OR length(trim(situation)) > 0
+ ),
+ CONSTRAINT chk_sessions_metadata_valid CHECK (jsonb_typeof(metadata) = 'object'),
+ CONSTRAINT chk_sessions_recall_options_valid CHECK (jsonb_typeof(recall_options) = 'object')
);
-- Create indexes if they don't exist
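The net effect of these CHECK constraints is that malformed sessions are rejected at write time instead of surfacing later in application code. A minimal sketch on a toy table (not the real sessions schema) showing the two value-range checks in isolation:

    -- Toy table mirroring two of the constraints above
    CREATE TEMP TABLE sessions_demo (
        token_budget INTEGER,
        context_overflow TEXT,
        CONSTRAINT demo_token_budget_positive CHECK (token_budget IS NULL OR token_budget > 0),
        CONSTRAINT demo_context_overflow_valid CHECK (
            context_overflow IS NULL OR context_overflow IN ('truncate', 'adaptive')
        )
    );

    INSERT INTO sessions_demo VALUES (100, 'truncate');   -- accepted
    -- INSERT INTO sessions_demo VALUES (0, 'drop');      -- rejected by both CHECKs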
diff --git a/memory-store/migrations/000010_tasks.up.sql b/memory-store/migrations/000010_tasks.up.sql
index 2ba6b7910..3ca740788 100644
--- a/memory-store/migrations/000010_tasks.up.sql
+++ b/memory-store/migrations/000010_tasks.up.sql
@@ -1,5 +1,12 @@
BEGIN;
+/*
+ * DEFERRED FOREIGN KEY CONSTRAINTS (Complexity: 6/10)
+ * Uses PostgreSQL's deferred constraints to handle complex relationships between tasks and tools tables.
+ * Constraints are checked at transaction commit rather than immediately, allowing circular references.
+ * This enables more flexible data loading patterns while maintaining referential integrity.
+ */
+
-- Create tasks table if it doesn't exist
CREATE TABLE IF NOT EXISTS tasks (
developer_id UUID NOT NULL,
@@ -9,8 +16,7 @@ CREATE TABLE IF NOT EXISTS tasks (
),
agent_id UUID NOT NULL,
task_id UUID NOT NULL,
- VERSION INTEGER NOT NULL DEFAULT 1,
- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ version INTEGER NOT NULL DEFAULT 1,
name TEXT NOT NULL CONSTRAINT ct_tasks_name_length CHECK (
length(name) >= 1
AND length(name) <= 255
@@ -21,14 +27,17 @@ CREATE TABLE IF NOT EXISTS tasks (
),
input_schema JSON NOT NULL,
inherit_tools BOOLEAN DEFAULT FALSE,
- workflows JSON[] DEFAULT ARRAY[]::JSON[],
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
metadata JSONB DEFAULT '{}'::JSONB,
CONSTRAINT pk_tasks PRIMARY KEY (developer_id, task_id),
CONSTRAINT uq_tasks_canonical_name_unique UNIQUE (developer_id, canonical_name),
- CONSTRAINT uq_tasks_version_unique UNIQUE (task_id, VERSION),
+ CONSTRAINT uq_tasks_version_unique UNIQUE (task_id, version),
CONSTRAINT fk_tasks_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id),
- CONSTRAINT ct_tasks_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$')
+ CONSTRAINT ct_tasks_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$'),
+ CONSTRAINT chk_tasks_metadata_valid CHECK (jsonb_typeof(metadata) = 'object'),
+ CONSTRAINT chk_tasks_input_schema_valid CHECK (jsonb_typeof(input_schema) = 'object'),
+ CONSTRAINT chk_tasks_version_positive CHECK (version > 0)
);
-- Create sorted index on task_id if it doesn't exist
@@ -87,4 +96,35 @@ END $$;
-- Add comment to table (comments are idempotent by default)
COMMENT ON TABLE tasks IS 'Stores tasks associated with AI agents for developers';
+-- Create 'workflows' table
+CREATE TABLE IF NOT EXISTS workflows (
+ developer_id UUID NOT NULL,
+ task_id UUID NOT NULL,
+ version INTEGER NOT NULL,
+ name TEXT NOT NULL CONSTRAINT chk_workflows_name_length CHECK (
+ length(name) >= 1 AND length(name) <= 255
+ ),
+ step_idx INTEGER NOT NULL CONSTRAINT chk_workflows_step_idx_positive CHECK (step_idx >= 0),
+ step_type TEXT NOT NULL CONSTRAINT chk_workflows_step_type_length CHECK (
+ length(step_type) >= 1 AND length(step_type) <= 255
+ ),
+ step_definition JSONB NOT NULL CONSTRAINT chk_workflows_step_definition_valid CHECK (
+ jsonb_typeof(step_definition) = 'object'
+ ),
+ CONSTRAINT pk_workflows PRIMARY KEY (developer_id, task_id, version, step_idx),
+ CONSTRAINT fk_workflows_tasks FOREIGN KEY (developer_id, task_id, version)
+ REFERENCES tasks (developer_id, task_id, version) ON DELETE CASCADE
+);
+
+-- Create index on 'developer_id' for 'workflows' table if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_workflows_developer') THEN
+ CREATE INDEX idx_workflows_developer ON workflows (developer_id);
+ END IF;
+END $$;
+
+-- Add comment to 'workflows' table
+COMMENT ON TABLE workflows IS 'Stores normalized workflows for tasks';
+
COMMIT;
\ No newline at end of file
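The deferred-constraint behavior described in the header comment is easiest to see on toy tables. A minimal sketch, assuming nothing from this schema: the dangling reference at insert time is tolerated because the FK check only runs at COMMIT.

    CREATE TEMP TABLE demo_parent (id INT PRIMARY KEY);
    CREATE TEMP TABLE demo_child (
        id INT PRIMARY KEY,
        parent_id INT REFERENCES demo_parent (id) DEFERRABLE INITIALLY DEFERRED
    );

    BEGIN;
    INSERT INTO demo_child VALUES (1, 42);  -- no parent row yet; check is deferred
    INSERT INTO demo_parent VALUES (42);    -- reference satisfied before commit
    COMMIT;                                 -- FK is validated here and passes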
diff --git a/memory-store/migrations/000012_transitions.up.sql b/memory-store/migrations/000012_transitions.up.sql
index 6fd7dbcd1..7bbcf2ad5 100644
--- a/memory-store/migrations/000012_transitions.up.sql
+++ b/memory-store/migrations/000012_transitions.up.sql
@@ -1,5 +1,12 @@
BEGIN;
+/*
+ * CUSTOM TYPES AND ENUMS WITH COMPLEX CONSTRAINTS (Complexity: 7/10)
+ * Creates a custom composite type, transition_cursor, to track workflow state, and an enum type for transition states.
+ * Uses a compound primary key combining timestamps and UUIDs for efficient time-series operations.
+ * Implements a complex indexing strategy optimized for various query patterns (current state, next state, labels).
+ */
+
-- Create transition type enum if it doesn't exist
DO $$
BEGIN
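Postgres has no CREATE TYPE IF NOT EXISTS, which is why the migration wraps type creation in a DO block like the one opening here. A minimal sketch of the guard pattern; the enum name and values below are placeholders, not the real transition states:

    DO $$
    BEGIN
        -- Only create the type if it does not already exist
        IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'demo_transition_type') THEN
            CREATE TYPE demo_transition_type AS ENUM ('queued', 'running', 'done');
        END IF;
    END $$;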
diff --git a/memory-store/migrations/000013_executions_continuous_view.up.sql b/memory-store/migrations/000013_executions_continuous_view.up.sql
index 43285efbc..ec9d42ee7 100644
--- a/memory-store/migrations/000013_executions_continuous_view.up.sql
+++ b/memory-store/migrations/000013_executions_continuous_view.up.sql
@@ -1,5 +1,12 @@
BEGIN;
+/*
+ * CONTINUOUS AGGREGATES WITH STATE AGGREGATION (Complexity: 9/10)
+ * This is a TimescaleDB feature that automatically maintains a real-time summary of the transitions table.
+ * It uses special aggregation functions like state_agg() to track state changes and last() to fetch the most recent values.
+ * The view updates every 10 minutes and can serve both historical and real-time data (materialized_only = FALSE).
+ */
+
-- create a function to convert transition_type to text (needed because ::text is stable, not immutable)
CREATE
OR REPLACE function to_text (transition_type) RETURNS text AS $$
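As a rough illustration of what such a view looks like — a sketch, not this migration's actual definition: the source table and column names are assumptions, while the 10-minute cadence and materialized_only = FALSE come from the comment above. The to_text() helper being defined in this hunk is used because continuous aggregates require immutable expressions:

    CREATE MATERIALIZED VIEW demo_transitions_summary
    WITH (timescaledb.continuous, timescaledb.materialized_only = FALSE) AS
    SELECT
        time_bucket(INTERVAL '10 minutes', created_at) AS bucket,
        execution_id,
        last(to_text(type), created_at) AS latest_type,  -- to_text() works around ::text being STABLE
        count(*) AS transition_count
    FROM transitions
    GROUP BY bucket, execution_id;

    -- Refresh the materialized portion every 10 minutes
    SELECT add_continuous_aggregate_policy('demo_transitions_summary',
        start_offset      => INTERVAL '1 hour',
        end_offset        => INTERVAL '10 minutes',
        schedule_interval => INTERVAL '10 minutes');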
diff --git a/memory-store/migrations/000017_compression.up.sql b/memory-store/migrations/000017_compression.up.sql
index 5cb57d518..06c7e6c77 100644
--- a/memory-store/migrations/000017_compression.up.sql
+++ b/memory-store/migrations/000017_compression.up.sql
@@ -1,3 +1,10 @@
+/*
+ * MULTI-DIMENSIONAL HYPERTABLES WITH COMPRESSION (Complexity: 8/10)
+ * TimescaleDB's advanced feature that partitions data by both time (created_at) and space (session_id/execution_id).
+ * Automatically compresses data older than 7 days to save storage while maintaining query performance.
+ * Uses segment_by to group related rows and order_by to optimize decompression speed.
+ */
+
BEGIN;
ALTER TABLE entries
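Concretely, the description above maps onto statements of this shape — a sketch; only the entries table name appears in the hunk here, and the segment/order column choices are assumptions consistent with the comment:

    ALTER TABLE entries SET (
        timescaledb.compress,
        timescaledb.compress_segmentby = 'session_id',
        timescaledb.compress_orderby   = 'created_at DESC'
    );

    -- Compress chunks once they age past 7 days
    SELECT add_compression_policy('entries', INTERVAL '7 days');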
diff --git a/memory-store/migrations/000018_doc_search.down.sql b/memory-store/migrations/000018_doc_search.down.sql
index e69de29bb..86079b0d1 100644
--- a/memory-store/migrations/000018_doc_search.down.sql
+++ b/memory-store/migrations/000018_doc_search.down.sql
@@ -0,0 +1,9 @@
+BEGIN;
+
+-- Drop the embed_with_cache function
+DROP FUNCTION IF EXISTS embed_with_cache;
+
+-- Drop the embeddings cache table
+DROP TABLE IF EXISTS embeddings_cache CASCADE;
+
+COMMIT;
diff --git a/memory-store/migrations/000018_doc_search.up.sql b/memory-store/migrations/000018_doc_search.up.sql
index 737415348..4f0ef5521 100644
--- a/memory-store/migrations/000018_doc_search.up.sql
+++ b/memory-store/migrations/000018_doc_search.up.sql
@@ -20,4 +20,84 @@
-- | metadata | jsonb | | extended | |
-- | search_tsv | tsvector | | extended | |
-- +----------------------+--------------------------+-----------+----------+-------------+
+BEGIN;
+-- Create unlogged table for caching embeddings
+CREATE UNLOGGED TABLE IF NOT EXISTS embeddings_cache (
+ provider TEXT NOT NULL,
+ model TEXT NOT NULL,
+ input_text TEXT NOT NULL,
+ input_type TEXT DEFAULT NULL,
+ api_key TEXT DEFAULT NULL,
+ api_key_name TEXT DEFAULT NULL,
+ embedding vector (1024) NOT NULL,
+ CONSTRAINT pk_embeddings_cache PRIMARY KEY (provider, model, input_text)
+);
+
+-- Add index on provider, model, input_text for faster lookups
+CREATE INDEX IF NOT EXISTS idx_embeddings_cache_provider_model_input_text ON embeddings_cache (provider, model, input_text ASC);
+
+-- Add comment explaining table purpose
+COMMENT ON TABLE embeddings_cache IS 'Unlogged table that caches embedding requests to avoid duplicate API calls';
+
+CREATE
+OR REPLACE function embed_with_cache (
+ _provider text,
+ _model text,
+ _input_text text,
+ _input_type text DEFAULT NULL,
+ _api_key text DEFAULT NULL,
+ _api_key_name text DEFAULT NULL
+) returns vector (1024) language plpgsql AS $$
+
+-- Try to get cached embedding first
+declare
+ cached_embedding vector(1024);
+begin
+ if _provider != 'voyageai' then
+ raise exception 'Only voyageai provider is supported';
+ end if;
+
+ select embedding into cached_embedding
+ from embeddings_cache c
+ where c.provider = _provider
+ and c.model = _model
+ and c.input_text = _input_text;
+
+ if found then
+ return cached_embedding;
+ end if;
+
+ -- Not found in cache, call AI embedding function
+ cached_embedding := ai.voyageai_embed(
+ _model,
+ _input_text,
+ _input_type,
+ _api_key,
+ _api_key_name
+ );
+
+ -- Cache the result
+ insert into embeddings_cache (
+ provider,
+ model,
+ input_text,
+ input_type,
+ api_key,
+ api_key_name,
+ embedding
+ ) values (
+ _provider,
+ _model,
+ _input_text,
+ _input_type,
+ _api_key,
+ _api_key_name,
+ cached_embedding
+ );
+
+ return cached_embedding;
+end;
+$$;
+
+COMMIT;
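Once this migration is applied, the cache-aware helper can be exercised straight from psql. A usage sketch — the model name mirrors the voyage-01 default used later in this series, and the input text is arbitrary:

    -- First call goes to the provider; an identical repeat call is served from embeddings_cache
    SELECT embed_with_cache('voyageai', 'voyage-01', 'What is a hypertable?');

    -- Inspect what has been cached so far
    SELECT provider, model, left(input_text, 40) FROM embeddings_cache;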
From efebc0654948af465853c87ac35bb2d716c233d0 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Tue, 17 Dec 2024 14:40:45 +0530
Subject: [PATCH 042/274] fix(memory-store): Fix workflows table
Signed-off-by: Diwank Singh Tomer
---
memory-store/migrations/000010_tasks.down.sql | 3 +++
memory-store/migrations/000010_tasks.up.sql | 16 +++++++---------
memory-store/migrations/000011_executions.up.sql | 2 +-
3 files changed, 11 insertions(+), 10 deletions(-)
diff --git a/memory-store/migrations/000010_tasks.down.sql b/memory-store/migrations/000010_tasks.down.sql
index 84608ea71..3b9b05b8b 100644
--- a/memory-store/migrations/000010_tasks.down.sql
+++ b/memory-store/migrations/000010_tasks.down.sql
@@ -17,6 +17,9 @@ BEGIN
END IF;
END $$;
+-- Drop the workflows table first since it depends on tasks
+DROP TABLE IF EXISTS workflows CASCADE;
+
-- Drop the tasks table and all its dependent objects (CASCADE will handle indexes, triggers, and constraints)
DROP TABLE IF EXISTS tasks CASCADE;
diff --git a/memory-store/migrations/000010_tasks.up.sql b/memory-store/migrations/000010_tasks.up.sql
index 3ca740788..ad27d5bdc 100644
--- a/memory-store/migrations/000010_tasks.up.sql
+++ b/memory-store/migrations/000010_tasks.up.sql
@@ -6,7 +6,6 @@ BEGIN;
* Constraints are checked at transaction commit rather than immediately, allowing circular references.
* This enables more flexible data loading patterns while maintaining referential integrity.
*/
-
-- Create tasks table if it doesn't exist
CREATE TABLE IF NOT EXISTS tasks (
developer_id UUID NOT NULL,
@@ -16,7 +15,7 @@ CREATE TABLE IF NOT EXISTS tasks (
),
agent_id UUID NOT NULL,
task_id UUID NOT NULL,
- version INTEGER NOT NULL DEFAULT 1,
+ "version" INTEGER NOT NULL DEFAULT 1,
name TEXT NOT NULL CONSTRAINT ct_tasks_name_length CHECK (
length(name) >= 1
AND length(name) <= 255
@@ -25,19 +24,18 @@ CREATE TABLE IF NOT EXISTS tasks (
description IS NULL
OR length(description) <= 1000
),
- input_schema JSON NOT NULL,
+ input_schema JSONB NOT NULL,
inherit_tools BOOLEAN DEFAULT FALSE,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
metadata JSONB DEFAULT '{}'::JSONB,
- CONSTRAINT pk_tasks PRIMARY KEY (developer_id, task_id),
+ CONSTRAINT pk_tasks PRIMARY KEY (developer_id, task_id, "version"),
CONSTRAINT uq_tasks_canonical_name_unique UNIQUE (developer_id, canonical_name),
- CONSTRAINT uq_tasks_version_unique UNIQUE (task_id, version),
CONSTRAINT fk_tasks_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id),
CONSTRAINT ct_tasks_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$'),
CONSTRAINT chk_tasks_metadata_valid CHECK (jsonb_typeof(metadata) = 'object'),
CONSTRAINT chk_tasks_input_schema_valid CHECK (jsonb_typeof(input_schema) = 'object'),
- CONSTRAINT chk_tasks_version_positive CHECK (version > 0)
+ CONSTRAINT chk_tasks_version_positive CHECK ("version" > 0)
);
-- Create sorted index on task_id if it doesn't exist
@@ -73,7 +71,7 @@ BEGIN
WHERE constraint_name = 'fk_tools_task_id'
) THEN
ALTER TABLE tools ADD CONSTRAINT fk_tools_task_id
- FOREIGN KEY (task_id, task_version) REFERENCES tasks(task_id, version)
+ FOREIGN KEY (developer_id, task_id, task_version) REFERENCES tasks(developer_id, task_id, version)
DEFERRABLE INITIALLY DEFERRED;
END IF;
END $$;
@@ -116,11 +114,11 @@ CREATE TABLE IF NOT EXISTS workflows (
REFERENCES tasks (developer_id, task_id, version) ON DELETE CASCADE
);
--- Create index on 'developer_id' for 'workflows' table if it doesn't exist
+-- Create index for 'workflows' table if it doesn't exist
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_workflows_developer') THEN
- CREATE INDEX idx_workflows_developer ON workflows (developer_id);
+ CREATE INDEX idx_workflows_developer ON workflows (developer_id, task_id, version);
END IF;
END $$;
diff --git a/memory-store/migrations/000011_executions.up.sql b/memory-store/migrations/000011_executions.up.sql
index cf0666136..976ead369 100644
--- a/memory-store/migrations/000011_executions.up.sql
+++ b/memory-store/migrations/000011_executions.up.sql
@@ -16,7 +16,7 @@ CREATE TABLE IF NOT EXISTS executions (
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_executions PRIMARY KEY (execution_id),
CONSTRAINT fk_executions_developer FOREIGN KEY (developer_id) REFERENCES developers (developer_id),
- CONSTRAINT fk_executions_task FOREIGN KEY (developer_id, task_id) REFERENCES tasks (developer_id, task_id)
+ CONSTRAINT fk_executions_task FOREIGN KEY (developer_id, task_id, task_version) REFERENCES tasks (developer_id, task_id, "version")
);
-- Create sorted index on execution_id (optimized for UUID v7)
From a4aac2ca662e0c5fea00a27dfafa7dc18563243d Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Tue, 17 Dec 2024 13:39:21 +0300
Subject: [PATCH 043/274] feat(agents-api): add agent queries tests
---
.../agents_api/queries/agents/__init__.py | 12 +-
.../agents_api/queries/agents/create_agent.py | 61 ++-
.../queries/agents/create_or_update_agent.py | 21 +-
.../agents_api/queries/agents/delete_agent.py | 23 +-
.../agents_api/queries/agents/get_agent.py | 24 +-
.../agents_api/queries/agents/list_agents.py | 23 +-
.../agents_api/queries/agents/patch_agent.py | 23 +-
.../agents_api/queries/agents/update_agent.py | 23 +-
agents-api/tests/fixtures.py | 34 +-
agents-api/tests/test_agent_queries.py | 350 ++++++++++--------
10 files changed, 307 insertions(+), 287 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/__init__.py b/agents-api/agents_api/queries/agents/__init__.py
index 709b051ea..ebd169040 100644
--- a/agents-api/agents_api/queries/agents/__init__.py
+++ b/agents-api/agents_api/queries/agents/__init__.py
@@ -13,9 +13,9 @@
# ruff: noqa: F401, F403, F405
from .create_agent import create_agent
-from .create_or_update_agent import create_or_update_agent_query
-from .delete_agent import delete_agent_query
-from .get_agent import get_agent_query
-from .list_agents import list_agents_query
-from .patch_agent import patch_agent_query
-from .update_agent import update_agent_query
+from .create_or_update_agent import create_or_update_agent
+from .delete_agent import delete_agent
+from .get_agent import get_agent
+from .list_agents import list_agents
+from .patch_agent import patch_agent
+from .update_agent import update_agent
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 46dc453f9..7e95dc3ab 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from pydantic import ValidationError
from uuid_extensions import uuid7
@@ -26,35 +25,35 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- ),
- psycopg_errors.UniqueViolation: partialclass(
- HTTPException,
- status_code=409,
- detail="An agent with this canonical name already exists for this developer.",
- ),
- psycopg_errors.CheckViolation: partialclass(
- HTTPException,
- status_code=400,
- detail="The provided data violates one or more constraints. Please check the input values.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. Please review the input.",
- ),
- }
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# ),
+# psycopg_errors.UniqueViolation: partialclass(
+# HTTPException,
+# status_code=409,
+# detail="An agent with this canonical name already exists for this developer.",
+# ),
+# psycopg_errors.CheckViolation: partialclass(
+# HTTPException,
+# status_code=400,
+# detail="The provided data violates one or more constraints. Please check the input values.",
+# ),
+# ValidationError: partialclass(
+# HTTPException,
+# status_code=400,
+# detail="Input validation failed. Please check the provided data.",
+# ),
+# TypeError: partialclass(
+# HTTPException,
+# status_code=400,
+# detail="A type mismatch occurred. Please review the input.",
+# ),
+# }
+# )
@wrap_in_class(
Agent,
one=True,
@@ -64,7 +63,7 @@
@pg_query
# @increase_counter("create_agent")
@beartype
-def create_agent(
+async def create_agent(
*,
developer_id: UUID,
agent_id: UUID | None = None,
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 261508237..50c96a94a 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
from ...metrics.counters import increase_counter
@@ -24,15 +23,15 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# )
@wrap_in_class(
Agent,
one=True,
@@ -42,7 +41,7 @@
@pg_query
# @increase_counter("create_or_update_agent1")
@beartype
-def create_or_update_agent_query(
+async def create_or_update_agent(
*, agent_id: UUID, developer_id: UUID, data: CreateOrUpdateAgentRequest
) -> tuple[list[str], dict]:
"""
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index 9d6869a94..282022ad3 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
@@ -24,16 +23,16 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
- # TODO: Add more exceptions
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
+# )
@wrap_in_class(
ResourceDeletedResponse,
one=True,
@@ -44,7 +43,7 @@
@pg_query
# @increase_counter("delete_agent1")
@beartype
-def delete_agent_query(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
+async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
"""
Constructs the SQL queries to delete an agent and its related settings.
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index 9061db7cf..a9f6b8368 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -8,8 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
-
from ...autogen.openapi_model import Agent
from ...metrics.counters import increase_counter
from ..utils import (
@@ -23,21 +21,21 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
- # TODO: Add more exceptions
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
+# )
@wrap_in_class(Agent, one=True)
@pg_query
# @increase_counter("get_agent1")
@beartype
-def get_agent_query(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
+async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
"""
Constructs the SQL query to retrieve an agent's details.
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 62aed6536..d2ebf0c07 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import Agent
from ...metrics.counters import increase_counter
@@ -23,21 +22,21 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
- # TODO: Add more exceptions
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
+# )
@wrap_in_class(Agent)
@pg_query
# @increase_counter("list_agents1")
@beartype
-def list_agents_query(
+async def list_agents(
*,
developer_id: UUID,
limit: int = 100,
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index c418f5c26..915aa8c66 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
from ...metrics.counters import increase_counter
@@ -23,16 +22,16 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
- # TODO: Add more exceptions
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
+# )
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
@@ -42,7 +41,7 @@
@pg_query
# @increase_counter("patch_agent1")
@beartype
-def patch_agent_query(
+async def patch_agent(
*, agent_id: UUID, developer_id: UUID, data: PatchAgentRequest
) -> tuple[str, dict]:
"""
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index 4e38adfac..48e00bf5a 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
from ...metrics.counters import increase_counter
@@ -23,16 +22,16 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
- # TODO: Add more exceptions
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
+# )
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
@@ -42,7 +41,7 @@
@pg_query
# @increase_counter("update_agent1")
@beartype
-def update_agent_query(
+async def update_agent(
*, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest
) -> tuple[str, dict]:
"""
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index d0fa7daf8..749d9c273 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -22,7 +22,7 @@
from agents_api.clients.pg import get_pg_client
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
-# from agents_api.queries.agents.create_agent import create_agent
+from agents_api.queries.agents.create_agent import create_agent
# from agents_api.queries.agents.delete_agent import delete_agent
from agents_api.queries.developers.get_developer import get_developer
@@ -107,20 +107,24 @@ def patch_embed_acompletion():
yield embed, acompletion
-# @fixture(scope="global")
-# async def test_agent(dsn=pg_dsn, developer_id=test_developer_id):
-# async with get_pg_client(dsn=dsn) as client:
-# agent = await create_agent(
-# developer_id=developer_id,
-# data=CreateAgentRequest(
-# model="gpt-4o-mini",
-# name="test agent",
-# about="test agent about",
-# metadata={"test": "test"},
-# ),
-# client=client,
-# )
-# yield agent
+@fixture(scope="global")
+async def test_agent(dsn=pg_dsn, developer=test_developer):
+ pool = await asyncpg.create_pool(dsn=dsn)
+
+ async with get_pg_client(pool=pool) as client:
+ agent = await create_agent(
+ developer_id=developer.id,
+ data=CreateAgentRequest(
+ model="gpt-4o-mini",
+ name="test agent",
+ about="test agent about",
+ metadata={"test": "test"},
+ ),
+ client=client,
+ )
+
+ yield agent
+ await pool.close()
@fixture(scope="global")
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index f079642b3..f8f75fd0b 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,163 +1,187 @@
-# # Tests for agent queries
-
-# from uuid_extensions import uuid7
-# from ward import raises, test
-
-# from agents_api.autogen.openapi_model import (
-# Agent,
-# CreateAgentRequest,
-# CreateOrUpdateAgentRequest,
-# PatchAgentRequest,
-# ResourceUpdatedResponse,
-# UpdateAgentRequest,
-# )
-# from agents_api.queries.agent.create_agent import create_agent
-# from agents_api.queries.agent.create_or_update_agent import create_or_update_agent
-# from agents_api.queries.agent.delete_agent import delete_agent
-# from agents_api.queries.agent.get_agent import get_agent
-# from agents_api.queries.agent.list_agents import list_agents
-# from agents_api.queries.agent.patch_agent import patch_agent
-# from agents_api.queries.agent.update_agent import update_agent
-# from tests.fixtures import cozo_client, test_agent, test_developer_id
-
-
-# @test("query: create agent")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# create_agent(
-# developer_id=developer_id,
-# data=CreateAgentRequest(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# ),
-# client=client,
-# )
-
-
-# @test("query: create agent with instructions")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# create_agent(
-# developer_id=developer_id,
-# data=CreateAgentRequest(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# instructions=["test instruction"],
-# ),
-# client=client,
-# )
-
-
-# @test("query: create or update agent")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# create_or_update_agent(
-# developer_id=developer_id,
-# agent_id=uuid7(),
-# data=CreateOrUpdateAgentRequest(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# instructions=["test instruction"],
-# ),
-# client=client,
-# )
-
-
-# @test("query: get agent not exists")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# agent_id = uuid7()
-
-# with raises(Exception):
-# get_agent(agent_id=agent_id, developer_id=developer_id, client=client)
-
-
-# @test("query: get agent exists")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# result = get_agent(agent_id=agent.id, developer_id=developer_id, client=client)
-
-# assert result is not None
-# assert isinstance(result, Agent)
-
-
-# @test("query: delete agent")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# temp_agent = create_agent(
-# developer_id=developer_id,
-# data=CreateAgentRequest(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# instructions=["test instruction"],
-# ),
-# client=client,
-# )
-
-# # Delete the agent
-# delete_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
-
-# # Check that the agent is deleted
-# with raises(Exception):
-# get_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
-
-
-# @test("query: update agent")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# result = update_agent(
-# agent_id=agent.id,
-# developer_id=developer_id,
-# data=UpdateAgentRequest(
-# name="updated agent",
-# about="updated agent about",
-# model="gpt-4o-mini",
-# default_settings={"temperature": 1.0},
-# metadata={"hello": "world"},
-# ),
-# client=client,
-# )
-
-# assert result is not None
-# assert isinstance(result, ResourceUpdatedResponse)
-
-# agent = get_agent(
-# agent_id=agent.id,
-# developer_id=developer_id,
-# client=client,
-# )
-
-# assert "test" not in agent.metadata
-
-
-# @test("query: patch agent")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# result = patch_agent(
-# agent_id=agent.id,
-# developer_id=developer_id,
-# data=PatchAgentRequest(
-# name="patched agent",
-# about="patched agent about",
-# default_settings={"temperature": 1.0},
-# metadata={"something": "else"},
-# ),
-# client=client,
-# )
-
-# assert result is not None
-# assert isinstance(result, ResourceUpdatedResponse)
-
-# agent = get_agent(
-# agent_id=agent.id,
-# developer_id=developer_id,
-# client=client,
-# )
-
-# assert "hello" in agent.metadata
-
-
-# @test("query: list agents")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# """Tests listing all agents associated with a developer in the database. Verifies that the correct list of agents is retrieved."""
-
-# result = list_agents(developer_id=developer_id, client=client)
-
-# assert isinstance(result, list)
-# assert all(isinstance(agent, Agent) for agent in result)
+# Tests for agent queries
+from uuid import uuid4
+
+import asyncpg
+from ward import raises, test
+
+from agents_api.autogen.openapi_model import (
+ Agent,
+ CreateAgentRequest,
+ CreateOrUpdateAgentRequest,
+ PatchAgentRequest,
+ ResourceUpdatedResponse,
+ UpdateAgentRequest,
+)
+from agents_api.clients.pg import get_pg_client
+from agents_api.queries.agents import (
+ create_agent,
+ create_or_update_agent,
+ delete_agent,
+ get_agent,
+ list_agents,
+ patch_agent,
+ update_agent,
+)
+from tests.fixtures import pg_dsn, test_agent, test_developer_id
+
+
+@test("model: create agent")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ await create_agent(
+ developer_id=developer_id,
+ data=CreateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ ),
+ client=client,
+ )
+
+
+@test("model: create agent with instructions")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ await create_agent(
+ developer_id=developer_id,
+ data=CreateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ ),
+ client=client,
+ )
+
+
+@test("model: create or update agent")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ await create_or_update_agent(
+ developer_id=developer_id,
+ agent_id=uuid4(),
+ data=CreateOrUpdateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ ),
+ client=client,
+ )
+
+
+@test("model: get agent not exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ agent_id = uuid4()
+ pool = await asyncpg.create_pool(dsn=dsn)
+
+ with raises(Exception):
+ async with get_pg_client(pool=pool) as client:
+ await get_agent(agent_id=agent_id, developer_id=developer_id, client=client)
+
+
+@test("model: get agent exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ result = await get_agent(agent_id=agent.id, developer_id=developer_id, client=client)
+
+ assert result is not None
+ assert isinstance(result, Agent)
+
+
+@test("model: delete agent")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ temp_agent = await create_agent(
+ developer_id=developer_id,
+ data=CreateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ ),
+ client=client,
+ )
+
+ # Delete the agent
+ await delete_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
+
+ # Check that the agent is deleted
+ with raises(Exception):
+ await get_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
+
+
+@test("model: update agent")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ result = await update_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ data=UpdateAgentRequest(
+ name="updated agent",
+ about="updated agent about",
+ model="gpt-4o-mini",
+ default_settings={"temperature": 1.0},
+ metadata={"hello": "world"},
+ ),
+ client=client,
+ )
+
+ assert result is not None
+ assert isinstance(result, ResourceUpdatedResponse)
+
+ async with get_pg_client(pool=pool) as client:
+ agent = await get_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ client=client,
+ )
+
+ assert "test" not in agent.metadata
+
+
+@test("model: patch agent")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ result = await patch_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ data=PatchAgentRequest(
+ name="patched agent",
+ about="patched agent about",
+ default_settings={"temperature": 1.0},
+ metadata={"something": "else"},
+ ),
+ client=client,
+ )
+
+ assert result is not None
+ assert isinstance(result, ResourceUpdatedResponse)
+
+ async with get_pg_client(pool=pool) as client:
+ agent = await get_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ client=client,
+ )
+
+ assert "hello" in agent.metadata
+
+
+@test("model: list agents")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Tests listing all agents associated with a developer in the database. Verifies that the correct list of agents is retrieved."""
+
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ result = await list_agents(developer_id=developer_id, client=client)
+
+ assert isinstance(result, list)
+ assert all(isinstance(agent, Agent) for agent in result)
From b1390148eb4d4bedcf818935e61bf25a1123068f Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Tue, 17 Dec 2024 16:51:59 +0530
Subject: [PATCH 044/274] feat(memory-store): Add search plsql functions
Signed-off-by: Diwank Singh Tomer
---
.../migrations/000018_doc_search.down.sql | 21 +
.../migrations/000018_doc_search.up.sql | 477 +++++++++++++++++-
.../000019_system_developer.down.sql | 2 +-
3 files changed, 477 insertions(+), 23 deletions(-)
diff --git a/memory-store/migrations/000018_doc_search.down.sql b/memory-store/migrations/000018_doc_search.down.sql
index 86079b0d1..d32c51a0a 100644
--- a/memory-store/migrations/000018_doc_search.down.sql
+++ b/memory-store/migrations/000018_doc_search.down.sql
@@ -1,8 +1,29 @@
BEGIN;
+-- Drop the embed and search hybrid function
+DROP FUNCTION IF EXISTS embed_and_search_hybrid;
+
+-- Drop the hybrid search function
+DROP FUNCTION IF EXISTS search_hybrid;
+
+-- Drop the text search function
+DROP FUNCTION IF EXISTS search_by_text;
+
+-- Drop the combined embed and search function
+DROP FUNCTION IF EXISTS embed_and_search_by_vector;
+
+-- Drop the search function
+DROP FUNCTION IF EXISTS search_by_vector;
+
+-- Drop the doc_search_result type
+DROP TYPE IF EXISTS doc_search_result;
+
-- Drop the embed_with_cache function
DROP FUNCTION IF EXISTS embed_with_cache;
+-- Drop the index on embeddings_cache
+DROP INDEX IF EXISTS idx_embeddings_cache_provider_model_input_text;
+
-- Drop the embeddings cache table
DROP TABLE IF EXISTS embeddings_cache CASCADE;
diff --git a/memory-store/migrations/000018_doc_search.up.sql b/memory-store/migrations/000018_doc_search.up.sql
index 4f0ef5521..b58ff1eaf 100644
--- a/memory-store/migrations/000018_doc_search.up.sql
+++ b/memory-store/migrations/000018_doc_search.up.sql
@@ -1,25 +1,3 @@
--- docs_embeddings schema (docs_embeddings is an extended view of docs)
--- +----------------------+--------------------------+-----------+----------+-------------+
--- | Column | Type | Modifiers | Storage | Description |
--- |----------------------+--------------------------+-----------+----------+-------------|
--- | embedding_uuid | uuid | | plain | |
--- | chunk_seq | integer | | plain | |
--- | chunk | text | | extended | |
--- | embedding | vector(1024) | | external | |
--- | developer_id | uuid | | plain | |
--- | doc_id | uuid | | plain | |
--- | title | text | | extended | |
--- | content | text | | extended | |
--- | index | integer | | plain | |
--- | modality | text | | extended | |
--- | embedding_model | text | | extended | |
--- | embedding_dimensions | integer | | plain | |
--- | language | text | | extended | |
--- | created_at | timestamp with time zone | | plain | |
--- | updated_at | timestamp with time zone | | plain | |
--- | metadata | jsonb | | extended | |
--- | search_tsv | tsvector | | extended | |
--- +----------------------+--------------------------+-----------+----------+-------------+
BEGIN;
-- Create unlogged table for caching embeddings
@@ -100,4 +78,459 @@ begin
end;
$$;
+-- Create a type for the search results if it doesn't exist
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_type WHERE typname = 'doc_search_result'
+ ) THEN
+ CREATE TYPE doc_search_result AS (
+ doc_id uuid,
+ index integer,
+ title text,
+ content text,
+ distance float,
+ embedding vector(1024),
+ metadata jsonb,
+ owner_type text,
+ owner_id uuid
+ );
+ END IF;
+END $$;
+
+-- Create the search function
+CREATE
+OR REPLACE FUNCTION search_by_vector (
+ query_embedding vector (1024),
+ owner_types TEXT[],
+ owner_ids UUID [],
+ k integer DEFAULT 3,
+ confidence float DEFAULT 0.5,
+ metadata_filter jsonb DEFAULT NULL
+) RETURNS SETOF doc_search_result LANGUAGE plpgsql AS $$
+DECLARE
+ search_threshold float;
+ owner_filter_sql text;
+ metadata_filter_sql text;
+BEGIN
+ -- Input validation
+ IF k <= 0 THEN
+ RAISE EXCEPTION 'k must be greater than 0';
+ END IF;
+
+ IF confidence < 0 OR confidence > 1 THEN
+ RAISE EXCEPTION 'confidence must be between 0 and 1';
+ END IF;
+
+ IF owner_types IS NOT NULL AND owner_ids IS NOT NULL AND
+ array_length(owner_types, 1) != array_length(owner_ids, 1) THEN
+ RAISE EXCEPTION 'owner_types and owner_ids arrays must have the same length';
+ END IF;
+
+ -- Calculate search threshold from confidence
+ search_threshold := 1.0 - confidence;
+
+ -- Build owner filter SQL if provided
+ IF owner_types IS NOT NULL AND owner_ids IS NOT NULL THEN
+ owner_filter_sql := '
+ AND EXISTS (
+ SELECT 1
+ FROM unnest($4::text[], $5::uuid[]) AS owner_data(type, id)
+ WHERE (
+ (owner_data.type = ''user'' AND EXISTS (
+ SELECT 1 FROM user_docs ud
+ WHERE ud.doc_id = d.doc_id
+ AND ud.user_id = owner_data.id
+ ))
+ OR
+ (owner_data.type = ''agent'' AND EXISTS (
+ SELECT 1 FROM agent_docs ad
+ WHERE ad.doc_id = d.doc_id
+ AND ad.agent_id = owner_data.id
+ ))
+ )
+ )';
+ ELSE
+ owner_filter_sql := '';
+ END IF;
+
+ -- Build metadata filter SQL if provided
+ IF metadata_filter IS NOT NULL THEN
+ metadata_filter_sql := 'AND d.metadata @> $6';
+ ELSE
+ metadata_filter_sql := '';
+ END IF;
+
+ -- Return search results
+ RETURN QUERY EXECUTE format(
+ 'WITH ranked_docs AS (
+ SELECT
+ d.doc_id,
+ d.index,
+ d.title,
+ d.content,
+ (1 - (d.embedding <=> $1)) as distance,
+ d.embedding,
+ d.metadata,
+ CASE
+ WHEN ud.user_id IS NOT NULL THEN ''user''
+ WHEN ad.agent_id IS NOT NULL THEN ''agent''
+ END as owner_type,
+ COALESCE(ud.user_id, ad.agent_id) as owner_id
+ FROM docs_embeddings d
+ LEFT JOIN user_docs ud ON d.doc_id = ud.doc_id
+ LEFT JOIN agent_docs ad ON d.doc_id = ad.doc_id
+ WHERE 1 - (d.embedding <=> $1) >= $2
+ %s
+ %s
+ )
+ SELECT DISTINCT ON (doc_id) *
+ FROM ranked_docs
+ ORDER BY doc_id, distance DESC
+ LIMIT $3',
+ owner_filter_sql,
+ metadata_filter_sql
+ )
+ USING
+ query_embedding,
+ search_threshold,
+ k,
+ owner_types,
+ owner_ids,
+ metadata_filter;
+
+END;
+$$;
+
+-- Add helpful comment
+COMMENT ON FUNCTION search_by_vector IS 'Search documents by vector similarity with configurable confidence threshold and filtering options';
+
+-- Create the combined embed and search function
+CREATE
+OR REPLACE FUNCTION embed_and_search_by_vector (
+ query_text text,
+ owner_types TEXT[],
+ owner_ids UUID [],
+ k integer DEFAULT 3,
+ confidence float DEFAULT 0.5,
+ metadata_filter jsonb DEFAULT NULL,
+ embedding_provider text DEFAULT 'voyageai',
+ embedding_model text DEFAULT 'voyage-01',
+ input_type text DEFAULT NULL,
+ api_key text DEFAULT NULL,
+ api_key_name text DEFAULT NULL
+) RETURNS SETOF doc_search_result LANGUAGE plpgsql AS $$
+DECLARE
+ query_embedding vector(1024);
+BEGIN
+ -- First generate embedding for the query text
+ query_embedding := embed_with_cache(
+ embedding_provider,
+ embedding_model,
+ query_text,
+ input_type,
+ api_key,
+ api_key_name
+ );
+
+ -- Then perform the search using the generated embedding
+ RETURN QUERY SELECT * FROM search_by_vector(
+ query_embedding,
+ owner_types,
+ owner_ids,
+ k,
+ confidence,
+ metadata_filter
+ );
+END;
+$$;
+
+COMMENT ON FUNCTION embed_and_search_by_vector IS 'Convenience function that combines text embedding and vector search in one call';
+
+-- Create the text search function
+CREATE OR REPLACE FUNCTION search_by_text(
+ query_text text,
+ owner_types TEXT[],
+ owner_ids UUID[],
+ search_language text DEFAULT 'english',
+ k integer DEFAULT 3,
+ metadata_filter jsonb DEFAULT NULL
+) RETURNS SETOF doc_search_result LANGUAGE plpgsql AS $$
+DECLARE
+ owner_filter_sql text;
+ metadata_filter_sql text;
+ ts_query tsquery;
+BEGIN
+ -- Input validation
+ IF k <= 0 THEN
+ RAISE EXCEPTION 'k must be greater than 0';
+ END IF;
+
+ IF owner_types IS NOT NULL AND owner_ids IS NOT NULL AND
+ array_length(owner_types, 1) != array_length(owner_ids, 1) THEN
+ RAISE EXCEPTION 'owner_types and owner_ids arrays must have the same length';
+ END IF;
+
+ -- Convert search query to tsquery
+ ts_query := websearch_to_tsquery(search_language::regconfig, query_text);
+
+ -- Build owner filter SQL if provided
+ IF owner_types IS NOT NULL AND owner_ids IS NOT NULL THEN
+ owner_filter_sql := '
+ AND EXISTS (
+ SELECT 1
+ FROM unnest($4::text[], $5::uuid[]) AS owner_data(type, id)
+ WHERE (
+ (owner_data.type = ''user'' AND EXISTS (
+ SELECT 1 FROM user_docs ud
+ WHERE ud.doc_id = d.doc_id
+ AND ud.user_id = owner_data.id
+ ))
+ OR
+ (owner_data.type = ''agent'' AND EXISTS (
+ SELECT 1 FROM agent_docs ad
+ WHERE ad.doc_id = d.doc_id
+ AND ad.agent_id = owner_data.id
+ ))
+ )
+ )';
+ ELSE
+ owner_filter_sql := '';
+ END IF;
+
+ -- Build metadata filter SQL if provided
+ IF metadata_filter IS NOT NULL THEN
+ metadata_filter_sql := 'AND d.metadata @> $6';
+ ELSE
+ metadata_filter_sql := '';
+ END IF;
+
+ -- Return search results
+ RETURN QUERY EXECUTE format(
+ 'WITH ranked_docs AS (
+ SELECT
+ d.doc_id,
+ d.index,
+ d.title,
+ d.content,
+ ts_rank_cd(d.search_tsv, $1, 32)::double precision as distance,
+ d.embedding,
+ d.metadata,
+ CASE
+ WHEN ud.user_id IS NOT NULL THEN ''user''
+ WHEN ad.agent_id IS NOT NULL THEN ''agent''
+ END as owner_type,
+ COALESCE(ud.user_id, ad.agent_id) as owner_id
+ FROM docs_embeddings d
+ LEFT JOIN user_docs ud ON d.doc_id = ud.doc_id
+ LEFT JOIN agent_docs ad ON d.doc_id = ad.doc_id
+ WHERE d.search_tsv @@ $1
+ %s
+ %s
+ )
+ SELECT DISTINCT ON (doc_id) *
+ FROM ranked_docs
+ ORDER BY doc_id, distance DESC
+ LIMIT $3',
+ owner_filter_sql,
+ metadata_filter_sql
+ )
+ USING
+ ts_query,
+ search_language,
+ k,
+ owner_types,
+ owner_ids,
+ metadata_filter;
+
+END;
+$$;
+
+COMMENT ON FUNCTION search_by_text IS 'Search documents using full-text search with configurable language and filtering options';
+
+-- Function to calculate mean of an array
+CREATE OR REPLACE FUNCTION array_mean(arr float[])
+RETURNS float AS $$
+ SELECT avg(v) FROM unnest(arr) v;
+$$ LANGUAGE SQL;
+
+-- Function to calculate standard deviation of an array
+CREATE OR REPLACE FUNCTION array_stddev(arr float[])
+RETURNS float AS $$
+ SELECT stddev(v) FROM unnest(arr) v;
+$$ LANGUAGE SQL;
+
+-- DBSF normalization function
+CREATE OR REPLACE FUNCTION dbsf_normalize(scores float[])
+RETURNS float[] AS $$
+DECLARE
+ m float;
+ sd float;
+ m3d float;
+ m_3d float;
+BEGIN
+ -- Handle edge cases
+ IF array_length(scores, 1) < 2 THEN
+ RETURN scores;
+ END IF;
+
+ -- Calculate statistics
+ sd := array_stddev(scores);
+ IF sd = 0 THEN
+ RETURN scores;
+ END IF;
+
+ m := array_mean(scores);
+ m3d := 3 * sd + m;
+ m_3d := m - 3 * sd;
+
+ -- Apply normalization
+ RETURN array(
+ SELECT (s - m_3d) / (m3d - m_3d)
+ FROM unnest(scores) s
+ );
+END;
+$$ LANGUAGE plpgsql;
+
+-- Hybrid search function combining text and vector search
+CREATE OR REPLACE FUNCTION search_hybrid(
+ query_text text,
+ query_embedding vector(1024),
+ owner_types TEXT[],
+ owner_ids UUID[],
+ k integer DEFAULT 3,
+ alpha float DEFAULT 0.7, -- Weight for embedding results
+ confidence float DEFAULT 0.5,
+ metadata_filter jsonb DEFAULT NULL,
+ search_language text DEFAULT 'english'
+) RETURNS SETOF doc_search_result AS $$
+DECLARE
+ text_weight float;
+ embedding_weight float;
+BEGIN
+ -- Input validation
+ IF k <= 0 THEN
+ RAISE EXCEPTION 'k must be greater than 0';
+ END IF;
+
+ text_weight := 1.0 - alpha;
+ embedding_weight := alpha;
+
+ RETURN QUERY
+ WITH text_results AS (
+ SELECT * FROM search_by_text(
+ query_text,
+ owner_types,
+ owner_ids,
+ search_language,
+ k,
+ metadata_filter
+ )
+ ),
+ embedding_results AS (
+ SELECT * FROM search_by_vector(
+ query_embedding,
+ owner_types,
+ owner_ids,
+ k,
+ confidence,
+ metadata_filter
+ )
+ ),
+ all_results AS (
+ SELECT DISTINCT doc_id, title, content, metadata, embedding,
+ index, owner_type, owner_id
+ FROM (
+ SELECT * FROM text_results
+ UNION
+ SELECT * FROM embedding_results
+ ) combined
+ ),
+ scores AS (
+ SELECT
+ r.doc_id,
+ r.title,
+ r.content,
+ r.metadata,
+ r.embedding,
+ r.index,
+ r.owner_type,
+ r.owner_id,
+ COALESCE(t.distance, 0.0) as text_score,
+ COALESCE(e.distance, 0.0) as embedding_score
+ FROM all_results r
+ LEFT JOIN text_results t ON r.doc_id = t.doc_id
+ LEFT JOIN embedding_results e ON r.doc_id = e.doc_id
+ ),
+ normalized_scores AS (
+ SELECT
+ *,
+ unnest(dbsf_normalize(array_agg(text_score) OVER ())) as norm_text_score,
+ unnest(dbsf_normalize(array_agg(embedding_score) OVER ())) as norm_embedding_score
+ FROM scores
+ )
+ SELECT
+ doc_id,
+ index,
+ title,
+ content,
+ 1.0 - (text_weight * norm_text_score + embedding_weight * norm_embedding_score) as distance,
+ embedding,
+ metadata,
+ owner_type,
+ owner_id
+ FROM normalized_scores
+ ORDER BY distance ASC
+ LIMIT k;
+END;
+$$ LANGUAGE plpgsql;
+
+COMMENT ON FUNCTION search_hybrid IS 'Hybrid search combining text and vector search using Distribution-Based Score Fusion (DBSF)';
+
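+-- Usage sketch (embedding literal elided; a real call passes all 1024
+-- dimensions, and the UUID is hypothetical):
+--   SELECT doc_id, title, distance
+--   FROM search_hybrid(
+--       'vector databases',
+--       '[0.1, 0.2, ...]'::vector(1024),
+--       ARRAY['agent'],
+--       ARRAY['00000000-0000-0000-0000-000000000001'::uuid],
+--       5,     -- k
+--       0.7    -- alpha: 70% of the fused score comes from vector search
+--   );
+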
+-- Convenience function that handles embedding generation
+CREATE OR REPLACE FUNCTION embed_and_search_hybrid(
+ query_text text,
+ owner_types TEXT[],
+ owner_ids UUID[],
+ k integer DEFAULT 3,
+ alpha float DEFAULT 0.7,
+ confidence float DEFAULT 0.5,
+ metadata_filter jsonb DEFAULT NULL,
+ search_language text DEFAULT 'english',
+ embedding_provider text DEFAULT 'voyageai',
+ embedding_model text DEFAULT 'voyage-01',
+ input_type text DEFAULT NULL,
+ api_key text DEFAULT NULL,
+ api_key_name text DEFAULT NULL
+) RETURNS SETOF doc_search_result AS $$
+DECLARE
+ query_embedding vector(1024);
+BEGIN
+ -- Generate embedding for query text
+ query_embedding := embed_with_cache(
+ embedding_provider,
+ embedding_model,
+ query_text,
+ input_type,
+ api_key,
+ api_key_name
+ );
+
+ -- Perform hybrid search
+ RETURN QUERY SELECT * FROM search_hybrid(
+ query_text,
+ query_embedding,
+ owner_types,
+ owner_ids,
+ k,
+ alpha,
+ confidence,
+ metadata_filter,
+ search_language
+ );
+END;
+$$ LANGUAGE plpgsql;
+
+COMMENT ON FUNCTION embed_and_search_hybrid IS 'Convenience function that combines text embedding generation and hybrid search in one call';
+
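+-- Usage sketch (assumes embed_with_cache can reach the embedding provider;
+-- the key name below is hypothetical):
+--   SELECT doc_id, title, distance
+--   FROM embed_and_search_hybrid(
+--       'vector databases',
+--       ARRAY['agent'],
+--       ARRAY['00000000-0000-0000-0000-000000000001'::uuid],
+--       api_key_name => 'VOYAGE_API_KEY'
+--   );
+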
COMMIT;
diff --git a/memory-store/migrations/000019_system_developer.down.sql b/memory-store/migrations/000019_system_developer.down.sql
index 92d8d65d5..706db81dd 100644
--- a/memory-store/migrations/000019_system_developer.down.sql
+++ b/memory-store/migrations/000019_system_developer.down.sql
@@ -2,6 +2,6 @@ BEGIN;
-- Remove the system developer
DELETE FROM developers
-WHERE developer_id = '00000000-0000-0000-0000-000000000000';
+WHERE developer_id = '00000000-0000-0000-0000-000000000000'::uuid;
COMMIT;
From ba1168333868b9f30ee5cd8dbe6057296765f89b Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Tue, 17 Dec 2024 17:10:15 +0530
Subject: [PATCH 045/274] fix(memory-store): Improve search plsql functions
Signed-off-by: Diwank Singh Tomer
---
.../migrations/000018_doc_search.up.sql | 75 +++++++------------
1 file changed, 27 insertions(+), 48 deletions(-)
diff --git a/memory-store/migrations/000018_doc_search.up.sql b/memory-store/migrations/000018_doc_search.up.sql
index b58ff1eaf..5293cc81a 100644
--- a/memory-store/migrations/000018_doc_search.up.sql
+++ b/memory-store/migrations/000018_doc_search.up.sql
@@ -72,7 +72,7 @@ begin
_api_key,
_api_key_name,
cached_embedding
- );
+ ) on conflict (provider, model, input_text) do update set embedding = cached_embedding;
return cached_embedding;
end;
@@ -133,22 +133,10 @@ BEGIN
-- Build owner filter SQL if provided
IF owner_types IS NOT NULL AND owner_ids IS NOT NULL THEN
owner_filter_sql := '
- AND EXISTS (
- SELECT 1
- FROM unnest($4::text[], $5::uuid[]) AS owner_data(type, id)
- WHERE (
- (owner_data.type = ''user'' AND EXISTS (
- SELECT 1 FROM user_docs ud
- WHERE ud.doc_id = d.doc_id
- AND ud.user_id = owner_data.id
- ))
- OR
- (owner_data.type = ''agent'' AND EXISTS (
- SELECT 1 FROM agent_docs ad
- WHERE ad.doc_id = d.doc_id
- AND ad.agent_id = owner_data.id
- ))
- )
+ AND (
+ (ud.user_id = ANY($5) AND ''user'' = ANY($4))
+ OR
+ (ad.agent_id = ANY($5) AND ''agent'' = ANY($4))
)';
ELSE
owner_filter_sql := '';
@@ -216,7 +204,7 @@ OR REPLACE FUNCTION embed_and_search_by_vector (
metadata_filter jsonb DEFAULT NULL,
embedding_provider text DEFAULT 'voyageai',
embedding_model text DEFAULT 'voyage-01',
- input_type text DEFAULT NULL,
+ input_type text DEFAULT 'query',
api_key text DEFAULT NULL,
api_key_name text DEFAULT NULL
) RETURNS SETOF doc_search_result LANGUAGE plpgsql AS $$
@@ -248,10 +236,11 @@ $$;
COMMENT ON FUNCTION embed_and_search_by_vector IS 'Convenience function that combines text embedding and vector search in one call';
-- Create the text search function
-CREATE OR REPLACE FUNCTION search_by_text(
+CREATE
+OR REPLACE FUNCTION search_by_text (
query_text text,
owner_types TEXT[],
- owner_ids UUID[],
+ owner_ids UUID [],
search_language text DEFAULT 'english',
k integer DEFAULT 3,
metadata_filter jsonb DEFAULT NULL
@@ -277,22 +266,10 @@ BEGIN
-- Build owner filter SQL if provided
IF owner_types IS NOT NULL AND owner_ids IS NOT NULL THEN
owner_filter_sql := '
- AND EXISTS (
- SELECT 1
- FROM unnest($4::text[], $5::uuid[]) AS owner_data(type, id)
- WHERE (
- (owner_data.type = ''user'' AND EXISTS (
- SELECT 1 FROM user_docs ud
- WHERE ud.doc_id = d.doc_id
- AND ud.user_id = owner_data.id
- ))
- OR
- (owner_data.type = ''agent'' AND EXISTS (
- SELECT 1 FROM agent_docs ad
- WHERE ad.doc_id = d.doc_id
- AND ad.agent_id = owner_data.id
- ))
- )
+ AND (
+ (ud.user_id = ANY($5) AND ''user'' = ANY($4))
+ OR
+ (ad.agent_id = ANY($5) AND ''agent'' = ANY($4))
)';
ELSE
owner_filter_sql := '';
@@ -349,20 +326,20 @@ $$;
COMMENT ON FUNCTION search_by_text IS 'Search documents using full-text search with configurable language and filtering options';
-- Function to calculate mean of an array
-CREATE OR REPLACE FUNCTION array_mean(arr float[])
-RETURNS float AS $$
+CREATE
+OR REPLACE FUNCTION array_mean (arr FLOAT[]) RETURNS float AS $$
SELECT avg(v) FROM unnest(arr) v;
$$ LANGUAGE SQL;
-- Function to calculate standard deviation of an array
-CREATE OR REPLACE FUNCTION array_stddev(arr float[])
-RETURNS float AS $$
+CREATE
+OR REPLACE FUNCTION array_stddev (arr FLOAT[]) RETURNS float AS $$
SELECT stddev(v) FROM unnest(arr) v;
$$ LANGUAGE SQL;
-- DBSF normalization function
-CREATE OR REPLACE FUNCTION dbsf_normalize(scores float[])
-RETURNS float[] AS $$
+CREATE
+OR REPLACE FUNCTION dbsf_normalize (scores FLOAT[]) RETURNS FLOAT[] AS $$
DECLARE
m float;
sd float;
@@ -393,11 +370,12 @@ END;
$$ LANGUAGE plpgsql;
-- Hybrid search function combining text and vector search
-CREATE OR REPLACE FUNCTION search_hybrid(
+CREATE
+OR REPLACE FUNCTION search_hybrid (
query_text text,
- query_embedding vector(1024),
+ query_embedding vector (1024),
owner_types TEXT[],
- owner_ids UUID[],
+ owner_ids UUID [],
k integer DEFAULT 3,
alpha float DEFAULT 0.7, -- Weight for embedding results
confidence float DEFAULT 0.5,
@@ -488,10 +466,11 @@ $$ LANGUAGE plpgsql;
COMMENT ON FUNCTION search_hybrid IS 'Hybrid search combining text and vector search using Distribution-Based Score Fusion (DBSF)';
-- Convenience function that handles embedding generation
-CREATE OR REPLACE FUNCTION embed_and_search_hybrid(
+CREATE
+OR REPLACE FUNCTION embed_and_search_hybrid (
query_text text,
owner_types TEXT[],
- owner_ids UUID[],
+ owner_ids UUID [],
k integer DEFAULT 3,
alpha float DEFAULT 0.7,
confidence float DEFAULT 0.5,
@@ -499,7 +478,7 @@ CREATE OR REPLACE FUNCTION embed_and_search_hybrid(
search_language text DEFAULT 'english',
embedding_provider text DEFAULT 'voyageai',
embedding_model text DEFAULT 'voyage-01',
- input_type text DEFAULT NULL,
+ input_type text DEFAULT 'query',
api_key text DEFAULT NULL,
api_key_name text DEFAULT NULL
) RETURNS SETOF doc_search_result AS $$
From 2d6cad03de1dfc0db10038089b510b88a88e63d5 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Tue, 17 Dec 2024 14:41:54 +0300
Subject: [PATCH 046/274] feat: Add developer queries, create db connection
pool
---
agents-api/agents_api/app.py | 35 ++++++++++++
agents-api/agents_api/clients/pg.py | 32 ++++-------
.../agents_api/dependencies/developer_id.py | 4 +-
.../agents_api/queries/developers/__init__.py | 5 +-
.../queries/developers/create_developer.py | 54 +++++++++++++++++++
.../queries/developers/patch_developer.py | 42 +++++++++++++++
.../queries/developers/update_developer.py | 42 +++++++++++++++
agents-api/agents_api/queries/utils.py | 34 ++++++------
agents-api/agents_api/web.py | 20 +------
9 files changed, 208 insertions(+), 60 deletions(-)
create mode 100644 agents-api/agents_api/app.py
create mode 100644 agents-api/agents_api/queries/developers/create_developer.py
create mode 100644 agents-api/agents_api/queries/developers/patch_developer.py
create mode 100644 agents-api/agents_api/queries/developers/update_developer.py
diff --git a/agents-api/agents_api/app.py b/agents-api/agents_api/app.py
new file mode 100644
index 000000000..8c414ddba
--- /dev/null
+++ b/agents-api/agents_api/app.py
@@ -0,0 +1,35 @@
+import json
+import asyncpg
+from contextlib import asynccontextmanager
+from fastapi import FastAPI
+from prometheus_fastapi_instrumentator import Instrumentator
+from .env import api_prefix, db_dsn
+from .clients.pg import create_db_pool
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+ app.state.postgres_pool = await create_db_pool(db_dsn)
+ yield
+ await app.state.postgres_pool.close()
+
+
+app: FastAPI = FastAPI(
+ docs_url="/swagger",
+ openapi_prefix=api_prefix,
+ redoc_url=None,
+ title="Julep Agents API",
+ description="API for Julep Agents",
+ version="0.4.0",
+ terms_of_service="https://www.julep.ai/terms",
+ contact={
+ "name": "Julep",
+ "url": "https://www.julep.ai",
+ "email": "team@julep.ai",
+ },
+ root_path=api_prefix,
+ lifespan=lifespan,
+)
+
+# Enable metrics
+Instrumentator().instrument(app).expose(app, include_in_schema=False)
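+
+# Usage sketch (hypothetical route, not part of this module): handlers can
+# borrow a connection from the pool created in `lifespan`:
+#
+#     @app.get("/health/db")
+#     async def db_health() -> dict:
+#         async with app.state.postgres_pool.acquire() as conn:
+#             return {"ok": await conn.fetchval("SELECT 1") == 1}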
diff --git a/agents-api/agents_api/clients/pg.py b/agents-api/agents_api/clients/pg.py
index f8c637023..02daeb9e6 100644
--- a/agents-api/agents_api/clients/pg.py
+++ b/agents-api/agents_api/clients/pg.py
@@ -1,29 +1,15 @@
import json
-from contextlib import asynccontextmanager
-
import asyncpg
-from ..env import db_dsn
-from ..web import app
-
-
-async def get_pg_pool(dsn: str = db_dsn, **kwargs):
- pool = getattr(app.state, "pg_pool", None)
-
- if pool is None:
- pool = await asyncpg.create_pool(dsn, **kwargs)
- app.state.pg_pool = pool
- return pool
+async def _init_conn(conn):
+ await conn.set_type_codec(
+ "jsonb",
+ encoder=json.dumps,
+ decoder=json.loads,
+ schema="pg_catalog",
+ )
-@asynccontextmanager
-async def get_pg_client(pool: asyncpg.Pool):
- async with pool.acquire() as client:
- await client.set_type_codec(
- "jsonb",
- encoder=json.dumps,
- decoder=json.loads,
- schema="pg_catalog",
- )
- yield client
+async def create_db_pool(dsn: str):
+ return await asyncpg.create_pool(dsn, init=_init_conn)
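+
+
+# Usage sketch (DSN is illustrative): because _init_conn registers a jsonb
+# codec, Python dicts round-trip transparently:
+#
+#     pool = await create_db_pool("postgresql://localhost/julep")
+#     async with pool.acquire() as conn:
+#         row = await conn.fetchrow("SELECT $1::jsonb AS doc", {"a": 1})
+#         assert row["doc"] == {"a": 1}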
diff --git a/agents-api/agents_api/dependencies/developer_id.py b/agents-api/agents_api/dependencies/developer_id.py
index ffd048dd9..534ed1e00 100644
--- a/agents-api/agents_api/dependencies/developer_id.py
+++ b/agents-api/agents_api/dependencies/developer_id.py
@@ -5,7 +5,7 @@
from ..common.protocol.developers import Developer
from ..env import multi_tenant_mode
-from ..queries.developers.get_developer import get_developer, verify_developer
+from ..queries.developers.get_developer import get_developer
from .exceptions import InvalidHeaderFormat
@@ -24,8 +24,6 @@ async def get_developer_id(
except ValueError as e:
raise InvalidHeaderFormat("X-Developer-Id must be a valid UUID") from e
- verify_developer(developer_id=x_developer_id)
-
return x_developer_id
diff --git a/agents-api/agents_api/queries/developers/__init__.py b/agents-api/agents_api/queries/developers/__init__.py
index a7117c06b..64ff08fe1 100644
--- a/agents-api/agents_api/queries/developers/__init__.py
+++ b/agents-api/agents_api/queries/developers/__init__.py
@@ -16,4 +16,7 @@
# ruff: noqa: F401, F403, F405
-from .get_developer import get_developer, verify_developer
+from .get_developer import get_developer
+from .create_developer import create_developer
+from .update_developer import update_developer
+from .patch_developer import patch_developer
diff --git a/agents-api/agents_api/queries/developers/create_developer.py b/agents-api/agents_api/queries/developers/create_developer.py
new file mode 100644
index 000000000..7ee845fbf
--- /dev/null
+++ b/agents-api/agents_api/queries/developers/create_developer.py
@@ -0,0 +1,54 @@
+from uuid import UUID
+
+from beartype import beartype
+from sqlglot import parse_one
+from uuid_extensions import uuid7
+
+from ...common.protocol.developers import Developer
+from ..utils import (
+ pg_query,
+ wrap_in_class,
+)
+
+query = parse_one("""
+INSERT INTO developers (
+ developer_id,
+ email,
+ active,
+ tags,
+ settings
+)
+VALUES (
+ $1,
+ $2,
+ $3,
+ $4,
+ $5::jsonb
+)
+RETURNING *;
+""").sql(pretty=True)
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=403),
+# ValidationError: partialclass(HTTPException, status_code=500),
+# }
+# )
+@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
+@pg_query
+@beartype
+async def create_developer(
+ *,
+ email: str,
+ active: bool = True,
+ tags: list[str] | None = None,
+ settings: dict | None = None,
+ developer_id: UUID | None = None,
+) -> tuple[str, list]:
+ developer_id = str(developer_id or uuid7())
+
+ return (
+ query,
+ [developer_id, email, active, tags or [], settings or {}],
+ )
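+
+
+# Usage sketch (hypothetical values): the decorator stack runs the query on
+# the shared pool and wraps the returned row as a `Developer`:
+#
+#     developer = await create_developer(
+#         email="dev@example.com", tags=["beta"], connection_pool=pool
+#     )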
diff --git a/agents-api/agents_api/queries/developers/patch_developer.py b/agents-api/agents_api/queries/developers/patch_developer.py
new file mode 100644
index 000000000..49edfe370
--- /dev/null
+++ b/agents-api/agents_api/queries/developers/patch_developer.py
@@ -0,0 +1,42 @@
+from uuid import UUID
+
+from beartype import beartype
+from sqlglot import parse_one
+
+from ...common.protocol.developers import Developer
+from ..utils import (
+ pg_query,
+ wrap_in_class,
+)
+
+query = parse_one("""
+UPDATE developers
+SET email = $1, active = $2, tags = tags || $3, settings = settings || $4
+WHERE developer_id = $5
+RETURNING *;
+""").sql(pretty=True)
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=403),
+# ValidationError: partialclass(HTTPException, status_code=500),
+# }
+# )
+@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
+@pg_query
+@beartype
+async def patch_developer(
+ *,
+ developer_id: UUID,
+ email: str,
+ active: bool = True,
+ tags: list[str] | None = None,
+ settings: dict | None = None,
+) -> tuple[str, list]:
+ developer_id = str(developer_id)
+
+ return (
+ query,
+ [email, active, tags or [], settings or {}, developer_id],
+ )
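+
+
+# Note: unlike update_developer, this query uses `||`, so a patch appends and
+# merges instead of replacing: tags ['a'] patched with ['b'] become
+# ['a', 'b'], and settings {'x': 1} patched with {'y': 2} become
+# {'x': 1, 'y': 2}.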
diff --git a/agents-api/agents_api/queries/developers/update_developer.py b/agents-api/agents_api/queries/developers/update_developer.py
new file mode 100644
index 000000000..8350d45a0
--- /dev/null
+++ b/agents-api/agents_api/queries/developers/update_developer.py
@@ -0,0 +1,42 @@
+from uuid import UUID
+
+from beartype import beartype
+from sqlglot import parse_one
+
+from ...common.protocol.developers import Developer
+from ..utils import (
+ pg_query,
+ wrap_in_class,
+)
+
+query = parse_one("""
+UPDATE developers
+SET email = $1, active = $2, tags = $3, settings = $4
+WHERE developer_id = $5
+RETURNING *;
+""").sql(pretty=True)
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=403),
+# ValidationError: partialclass(HTTPException, status_code=500),
+# }
+# )
+@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
+@pg_query
+@beartype
+async def update_developer(
+ *,
+ developer_id: UUID,
+ email: str,
+ active: bool = True,
+ tags: list[str] | None = None,
+ settings: dict | None = None,
+) -> tuple[str, list]:
+ developer_id = str(developer_id)
+
+ return (
+ query,
+ [email, active, tags or [], settings or {}, developer_id],
+ )
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 99f6f901a..82aaab615 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -1,14 +1,16 @@
import concurrent.futures
import inspect
import socket
+import asyncpg
import time
from functools import partialmethod, wraps
-from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar
+from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar, cast
import pandas as pd
from asyncpg import Record
from fastapi import HTTPException
from pydantic import BaseModel
+from ..app import app
P = ParamSpec("P")
T = TypeVar("T")
@@ -31,6 +33,7 @@ def pg_query(
func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
debug: bool | None = None,
only_on_error: bool = False,
+ timeit: bool = False,
):
def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
"""
@@ -43,12 +46,12 @@ def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
from pprint import pprint
- from tenacity import (
- retry,
- retry_if_exception,
- stop_after_attempt,
- wait_exponential,
- )
+ # from tenacity import (
+ # retry,
+ # retry_if_exception,
+ # stop_after_attempt,
+ # wait_exponential,
+ # )
# TODO: Remove all tenacity decorators
# @retry(
@@ -58,7 +61,7 @@ def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
# )
@wraps(func)
async def wrapper(
- *args: P.args, client=None, **kwargs: P.kwargs
+ *args: P.args, connection_pool: asyncpg.Pool | None =None, **kwargs: P.kwargs
) -> list[Record]:
query, variables = await func(*args, **kwargs)
@@ -70,15 +73,16 @@ async def wrapper(
)
# Run the query
- from ..clients import pg
try:
- if client is None:
- pool = await pg.get_pg_pool()
- async with pg.get_pg_client(pool=pool) as client:
- results: list[Record] = await client.fetch(query, *variables)
- else:
- results: list[Record] = await client.fetch(query, *variables)
+ pool = connection_pool if connection_pool is not None else cast(asyncpg.Pool, app.state.postgres_pool)
+ async with pool.acquire() as conn:
+ async with conn.transaction():
+ start = timeit and time.perf_counter()
+ results: list[Record] = await conn.fetch(query, *variables)
+ end = timeit and time.perf_counter()
+
+ timeit and print(f"PostgreSQL query time: {end - start:.2f} seconds")
except Exception as e:
if only_on_error and debug:
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index ff801d81c..0865e36be 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -14,11 +14,12 @@
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from litellm.exceptions import APIError
-from prometheus_fastapi_instrumentator import Instrumentator
+from pycozo.client import QueryException
from pydantic import ValidationError
from scalar_fastapi import get_scalar_api_reference
from temporalio.service import RPCError
+from .app import app
from .common.exceptions import BaseCommonException
from .dependencies.auth import get_api_key
from .env import api_prefix, hostname, protocol, public_port, sentry_dsn
@@ -144,24 +145,7 @@ def register_exceptions(app: FastAPI) -> None:
# Because some routes don't require auth
# See: https://fastapi.tiangolo.com/tutorial/bigger-applications/
#
-app: FastAPI = FastAPI(
- docs_url="/swagger",
- openapi_prefix=api_prefix,
- redoc_url=None,
- title="Julep Agents API",
- description="API for Julep Agents",
- version="0.4.0",
- terms_of_service="https://www.julep.ai/terms",
- contact={
- "name": "Julep",
- "url": "https://www.julep.ai",
- "email": "team@julep.ai",
- },
- root_path=api_prefix,
-)
-# Enable metrics
-Instrumentator().instrument(app).expose(app, include_in_schema=False)
# Create a new router for the docs
scalar_router = APIRouter()
From f62b3c7bc4a6f2e8531a2cb2de201110fcd6f917 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Tue, 17 Dec 2024 14:58:19 +0300
Subject: [PATCH 047/274] chore: Apply formatting
---
agents-api/agents_api/app.py | 8 +++++---
.../agents_api/queries/developers/__init__.py | 4 ++--
agents-api/agents_api/queries/utils.py | 17 +++++++++++++----
agents-api/agents_api/web.py | 1 -
4 files changed, 20 insertions(+), 10 deletions(-)
diff --git a/agents-api/agents_api/app.py b/agents-api/agents_api/app.py
index 8c414ddba..735dfc8c0 100644
--- a/agents-api/agents_api/app.py
+++ b/agents-api/agents_api/app.py
@@ -1,15 +1,17 @@
import json
-import asyncpg
from contextlib import asynccontextmanager
+
+import asyncpg
from fastapi import FastAPI
from prometheus_fastapi_instrumentator import Instrumentator
-from .env import api_prefix, db_dsn
+
from .clients.pg import create_db_pool
+from .env import api_prefix
@asynccontextmanager
async def lifespan(app: FastAPI):
- app.state.postgres_pool = await create_db_pool(db_dsn)
+ app.state.postgres_pool = await create_db_pool()
yield
await app.state.postgres_pool.close()
diff --git a/agents-api/agents_api/queries/developers/__init__.py b/agents-api/agents_api/queries/developers/__init__.py
index 64ff08fe1..b3964aba4 100644
--- a/agents-api/agents_api/queries/developers/__init__.py
+++ b/agents-api/agents_api/queries/developers/__init__.py
@@ -16,7 +16,7 @@
# ruff: noqa: F401, F403, F405
-from .get_developer import get_developer
from .create_developer import create_developer
-from .update_developer import update_developer
+from .get_developer import get_developer
from .patch_developer import patch_developer
+from .update_developer import update_developer
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 82aaab615..e93135172 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -1,15 +1,16 @@
import concurrent.futures
import inspect
import socket
-import asyncpg
import time
from functools import partialmethod, wraps
from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar, cast
+import asyncpg
import pandas as pd
from asyncpg import Record
from fastapi import HTTPException
from pydantic import BaseModel
+
from ..app import app
P = ParamSpec("P")
@@ -61,7 +62,9 @@ def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
# )
@wraps(func)
async def wrapper(
- *args: P.args, connection_pool: asyncpg.Pool | None =None, **kwargs: P.kwargs
+ *args: P.args,
+ connection_pool: asyncpg.Pool | None = None,
+ **kwargs: P.kwargs,
) -> list[Record]:
query, variables = await func(*args, **kwargs)
@@ -75,14 +78,20 @@ async def wrapper(
# Run the query
try:
- pool = connection_pool if connection_pool is not None else cast(asyncpg.Pool, app.state.postgres_pool)
+ pool = (
+ connection_pool
+ if connection_pool is not None
+ else cast(asyncpg.Pool, app.state.postgres_pool)
+ )
async with pool.acquire() as conn:
async with conn.transaction():
start = timeit and time.perf_counter()
results: list[Record] = await conn.fetch(query, *variables)
end = timeit and time.perf_counter()
- timeit and print(f"PostgreSQL query time: {end - start:.2f} seconds")
+ timeit and print(
+ f"PostgreSQL query time: {end - start:.2f} seconds"
+ )
except Exception as e:
if only_on_error and debug:
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index 0865e36be..b354f97bf 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -14,7 +14,6 @@
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from litellm.exceptions import APIError
-from pycozo.client import QueryException
from pydantic import ValidationError
from scalar_fastapi import get_scalar_api_reference
from temporalio.service import RPCError
From fb5755a511447b758dd56539e559f186205bc473 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Tue, 17 Dec 2024 14:58:35 +0300
Subject: [PATCH 048/274] feat: Make dsn parameter optional
---
agents-api/agents_api/clients/pg.py | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/agents-api/agents_api/clients/pg.py b/agents-api/agents_api/clients/pg.py
index 02daeb9e6..acf7a2b0e 100644
--- a/agents-api/agents_api/clients/pg.py
+++ b/agents-api/agents_api/clients/pg.py
@@ -1,6 +1,9 @@
import json
+
import asyncpg
+from ..env import db_dsn
+
async def _init_conn(conn):
await conn.set_type_codec(
@@ -11,5 +14,7 @@ async def _init_conn(conn):
)
-async def create_db_pool(dsn: str):
- return await asyncpg.create_pool(dsn, init=_init_conn)
+async def create_db_pool(dsn: str | None = None):
+ return await asyncpg.create_pool(
+ dsn if dsn is not None else db_dsn, init=_init_conn
+ )
From 6d8887d6f31e105c384731185d499d53232cf96a Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Tue, 17 Dec 2024 15:15:51 +0300
Subject: [PATCH 049/274] fix: Fix tests
---
agents-api/tests/fixtures.py | 34 +++--
agents-api/tests/test_developer_queries.py | 6 +-
agents-api/tests/test_user_queries.py | 147 ++++++++++-----------
3 files changed, 88 insertions(+), 99 deletions(-)
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index d0fa7daf8..0ec074f42 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -19,7 +19,7 @@
CreateTransitionRequest,
CreateUserRequest,
)
-from agents_api.clients.pg import get_pg_client
+from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
# from agents_api.queries.agents.create_agent import create_agent
@@ -89,12 +89,11 @@ def test_developer_id():
@fixture(scope="global")
async def test_developer(dsn=pg_dsn, developer_id=test_developer_id):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- developer = await get_developer(
- developer_id=developer_id,
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+ developer = await get_developer(
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
yield developer
await pool.close()
@@ -125,17 +124,16 @@ def patch_embed_acompletion():
@fixture(scope="global")
async def test_user(dsn=pg_dsn, developer=test_developer):
- pool = await asyncpg.create_pool(dsn=dsn)
-
- async with get_pg_client(pool=pool) as client:
- user = await create_user(
- developer_id=developer.id,
- data=CreateUserRequest(
- name="test user",
- about="test user about",
- ),
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+
+ user = await create_user(
+ developer_id=developer.id,
+ data=CreateUserRequest(
+ name="test user",
+ about="test user about",
+ ),
+ connection_pool=pool,
+ )
yield user
await pool.close()
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index 6a14d9575..d39850e1e 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -3,7 +3,7 @@
from uuid_extensions import uuid7
from ward import raises, test
-from agents_api.clients.pg import get_pg_client, get_pg_pool
+from agents_api.clients.pg import create_db_pool
from agents_api.common.protocol.developers import Developer
from agents_api.queries.developers.get_developer import (
get_developer,
@@ -14,9 +14,9 @@
@test("query: get developer not exists")
async def _(dsn=pg_dsn):
- pool = await get_pg_pool(dsn=dsn)
+ pool = await create_db_pool(dsn=dsn)
with raises(Exception):
- async with get_pg_client(pool=pool) as client:
+ async with pool.acquire() as client:
await get_developer(
developer_id=uuid7(),
client=client,
diff --git a/agents-api/tests/test_user_queries.py b/agents-api/tests/test_user_queries.py
index 2554a1f46..cbe7e0353 100644
--- a/agents-api/tests/test_user_queries.py
+++ b/agents-api/tests/test_user_queries.py
@@ -18,7 +18,7 @@
UpdateUserRequest,
User,
)
-from agents_api.clients.pg import get_pg_client
+from agents_api.clients.pg import create_db_pool
from agents_api.queries.users import (
create_or_update_user,
create_user,
@@ -39,50 +39,47 @@
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that a user can be successfully created."""
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- await create_user(
- developer_id=developer_id,
- data=CreateUserRequest(
- name="test user",
- about="test user about",
- ),
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+ await create_user(
+ developer_id=developer_id,
+ data=CreateUserRequest(
+ name="test user",
+ about="test user about",
+ ),
+ connection_pool=pool,
+ )
@test("query: create or update user sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that a user can be successfully created or updated."""
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- await create_or_update_user(
- developer_id=developer_id,
- user_id=uuid7(),
- data=CreateOrUpdateUserRequest(
- name="test user",
- about="test user about",
- ),
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+ await create_or_update_user(
+ developer_id=developer_id,
+ user_id=uuid7(),
+ data=CreateOrUpdateUserRequest(
+ name="test user",
+ about="test user about",
+ ),
+ connection_pool=pool,
+ )
@test("query: update user sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
"""Test that an existing user's information can be successfully updated."""
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- update_result = await update_user(
- user_id=user.id,
- developer_id=developer_id,
- data=UpdateUserRequest(
- name="updated user",
- about="updated user about",
- ),
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+ update_result = await update_user(
+ user_id=user.id,
+ developer_id=developer_id,
+ data=UpdateUserRequest(
+ name="updated user",
+ about="updated user about",
+ ),
+ connection_pool=pool,
+ )
assert update_result is not None
assert isinstance(update_result, ResourceUpdatedResponse)
@@ -95,28 +92,26 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
user_id = uuid7()
- pool = await asyncpg.create_pool(dsn=dsn)
+ pool = await create_db_pool(dsn=dsn)
with raises(Exception):
- async with get_pg_client(pool=pool) as client:
- await get_user(
- user_id=user_id,
- developer_id=developer_id,
- client=client,
- )
+ await get_user(
+ user_id=user_id,
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
@test("query: get user exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
"""Test that retrieving an existing user returns the correct user information."""
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- result = await get_user(
- user_id=user.id,
- developer_id=developer_id,
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+ result = await get_user(
+ user_id=user.id,
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
assert result is not None
assert isinstance(result, User)
@@ -126,12 +121,11 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that listing users returns a collection of user information."""
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- result = await list_users(
- developer_id=developer_id,
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+ result = await list_users(
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
assert isinstance(result, list)
assert len(result) >= 1
@@ -142,18 +136,17 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
"""Test that a user can be successfully patched."""
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- patch_result = await patch_user(
- developer_id=developer_id,
- user_id=user.id,
- data=PatchUserRequest(
- name="patched user",
- about="patched user about",
- metadata={"test": "metadata"},
- ),
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+ patch_result = await patch_user(
+ developer_id=developer_id,
+ user_id=user.id,
+ data=PatchUserRequest(
+ name="patched user",
+ about="patched user about",
+ metadata={"test": "metadata"},
+ ),
+ connection_pool=pool,
+ )
assert patch_result is not None
assert isinstance(patch_result, ResourceUpdatedResponse)
@@ -164,25 +157,23 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
async def _(dsn=pg_dsn, developer_id=test_developer_id, user=test_user):
"""Test that a user can be successfully deleted."""
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- delete_result = await delete_user(
- developer_id=developer_id,
- user_id=user.id,
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+ delete_result = await delete_user(
+ developer_id=developer_id,
+ user_id=user.id,
+ connection_pool=pool,
+ )
assert delete_result is not None
assert isinstance(delete_result, ResourceDeletedResponse)
# Verify the user no longer exists
try:
- async with get_pg_client(pool=pool) as client:
- await get_user(
- developer_id=developer_id,
- user_id=user.id,
- client=client,
- )
+ await get_user(
+ developer_id=developer_id,
+ user_id=user.id,
+ connection_pool=pool,
+ )
except Exception:
pass
else:
From 495492d3df371c264bd505d353a7031c9a6b3c19 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Tue, 17 Dec 2024 16:05:41 +0300
Subject: [PATCH 050/274] test: Add more developers tests
---
agents-api/tests/fixtures.py | 25 +++++
agents-api/tests/test_developer_queries.py | 116 ++++++++++++++-------
2 files changed, 106 insertions(+), 35 deletions(-)
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 0ec074f42..bf0f93b45 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,6 +1,9 @@
import json
import time
+import string
+import random
from uuid import UUID
+from uuid_extensions import uuid7
import asyncpg
from fastapi.testclient import TestClient
@@ -25,6 +28,7 @@
# from agents_api.queries.agents.create_agent import create_agent
# from agents_api.queries.agents.delete_agent import delete_agent
from agents_api.queries.developers.get_developer import get_developer
+from agents_api.queries.developers.create_developer import create_developer
# from agents_api.queries.docs.create_doc import create_doc
# from agents_api.queries.docs.delete_doc import delete_doc
@@ -139,6 +143,27 @@ async def test_user(dsn=pg_dsn, developer=test_developer):
await pool.close()
+@fixture(scope="test")
+async def random_email():
+ return f"{"".join([random.choice(string.ascii_lowercase) for _ in range(10)])}@mail.com"
+
+
+@fixture(scope="test")
+async def test_new_developer(dsn=pg_dsn, email=random_email):
+ pool = await create_db_pool(dsn=dsn)
+ dev_id = uuid7()
+ developer = await create_developer(
+ email=email,
+ active=True,
+ tags=["tag1"],
+ settings={"key1": "val1"},
+ developer_id=dev_id,
+ connection_pool=pool,
+ )
+
+ return developer
+
+
# @fixture(scope="global")
# async def test_session(
# dsn=pg_dsn,
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index d39850e1e..c97604e88 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -7,45 +7,91 @@
from agents_api.common.protocol.developers import Developer
from agents_api.queries.developers.get_developer import (
get_developer,
-) # , verify_developer
+)
+from agents_api.queries.developers.create_developer import create_developer
+from agents_api.queries.developers.update_developer import update_developer
+from agents_api.queries.developers.patch_developer import patch_developer
-from .fixtures import pg_dsn, test_developer_id
+from .fixtures import pg_dsn, test_new_developer, random_email
@test("query: get developer not exists")
async def _(dsn=pg_dsn):
pool = await create_db_pool(dsn=dsn)
with raises(Exception):
- async with pool.acquire() as client:
- await get_developer(
- developer_id=uuid7(),
- client=client,
- )
-
-
-# @test("query: get developer")
-# def _(client=pg_client, developer_id=test_developer_id):
-# developer = get_developer(
-# developer_id=developer_id,
-# client=client,
-# )
-
-# assert isinstance(developer, Developer)
-# assert developer.id
-
-
-# @test("query: verify developer exists")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# verify_developer(
-# developer_id=developer_id,
-# client=client,
-# )
-
-
-# @test("query: verify developer not exists")
-# def _(client=cozo_client):
-# with raises(Exception):
-# verify_developer(
-# developer_id=uuid7(),
-# client=client,
-# )
+ await get_developer(
+ developer_id=uuid7(),
+ connection_pool=pool,
+ )
+
+
+@test("query: get developer exists")
+async def _(dsn=pg_dsn, dev=test_new_developer):
+ pool = await create_db_pool(dsn=dsn)
+ developer = await get_developer(
+ developer_id=dev.id,
+ connection_pool=pool,
+ )
+
+ assert developer.id == dev.id
+ assert developer.email == dev.email
+ assert developer.active
+ assert developer.tags == dev.tags
+ assert developer.settings == dev.settings
+
+
+@test("query: create developer")
+async def _(dsn=pg_dsn):
+ pool = await create_db_pool(dsn=dsn)
+ dev_id = uuid7()
+ developer = await create_developer(
+ email="m@mail.com",
+ active=True,
+ tags=["tag1"],
+ settings={"key1": "val1"},
+ developer_id=dev_id,
+ connection_pool=pool,
+ )
+
+ assert developer.id == dev_id
+ assert developer.email == "m@mail.com"
+ assert developer.active
+ assert developer.tags == ["tag1"]
+ assert developer.settings == {"key1": "val1"}
+
+
+@test("query: update developer")
+async def _(dsn=pg_dsn, dev=test_new_developer, email=random_email):
+ pool = await create_db_pool(dsn=dsn)
+ developer = await update_developer(
+ email=email,
+ tags=["tag2"],
+ settings={"key2": "val2"},
+ developer_id=dev.id,
+ connection_pool=pool,
+ )
+
+ assert developer.id == dev.id
+ assert developer.email == email
+ assert developer.active
+ assert developer.tags == ["tag2"]
+ assert developer.settings == {"key2": "val2"}
+
+
+@test("query: patch developer")
+async def _(dsn=pg_dsn, dev=test_new_developer, email=random_email):
+ pool = await create_db_pool(dsn=dsn)
+ developer = await patch_developer(
+ email=email,
+ active=True,
+ tags=["tag2"],
+ settings={"key2": "val2"},
+ developer_id=dev.id,
+ connection_pool=pool,
+ )
+
+ assert developer.id == dev.id
+ assert developer.email == email
+ assert developer.active
+ assert developer.tags == dev.tags + ["tag2"]
+ assert developer.settings == {**dev.settings, "key2": "val2"}
From ce3dbc1565bd69b6d65191e759f5d3027754e539 Mon Sep 17 00:00:00 2001
From: whiterabbit1983
Date: Tue, 17 Dec 2024 13:06:58 +0000
Subject: [PATCH 051/274] refactor: Lint agents-api (CI)
---
agents-api/tests/fixtures.py | 7 +++----
agents-api/tests/test_developer_queries.py | 6 +++---
2 files changed, 6 insertions(+), 7 deletions(-)
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index bf0f93b45..389dafab2 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,9 +1,8 @@
import json
-import time
-import string
import random
+import string
+import time
from uuid import UUID
-from uuid_extensions import uuid7
import asyncpg
from fastapi.testclient import TestClient
@@ -24,11 +23,11 @@
)
from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
+from agents_api.queries.developers.create_developer import create_developer
# from agents_api.queries.agents.create_agent import create_agent
# from agents_api.queries.agents.delete_agent import delete_agent
from agents_api.queries.developers.get_developer import get_developer
-from agents_api.queries.developers.create_developer import create_developer
# from agents_api.queries.docs.create_doc import create_doc
# from agents_api.queries.docs.delete_doc import delete_doc
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index c97604e88..d360a7dc2 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -5,14 +5,14 @@
from agents_api.clients.pg import create_db_pool
from agents_api.common.protocol.developers import Developer
+from agents_api.queries.developers.create_developer import create_developer
from agents_api.queries.developers.get_developer import (
get_developer,
)
-from agents_api.queries.developers.create_developer import create_developer
-from agents_api.queries.developers.update_developer import update_developer
from agents_api.queries.developers.patch_developer import patch_developer
+from agents_api.queries.developers.update_developer import update_developer
-from .fixtures import pg_dsn, test_new_developer, random_email
+from .fixtures import pg_dsn, random_email, test_new_developer
@test("query: get developer not exists")
From bd83d4f7d4d946f1b57c64492ae01dd3acc83596 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Tue, 17 Dec 2024 22:06:56 +0530
Subject: [PATCH 052/274] feat(agents-api): Add sqlvalidator lint check
Signed-off-by: Diwank Singh Tomer
---
agents-api/poe_tasks.toml | 2 ++
agents-api/pyproject.toml | 1 +
agents-api/uv.lock | 11 +++++++++++
3 files changed, 14 insertions(+)
diff --git a/agents-api/poe_tasks.toml b/agents-api/poe_tasks.toml
index 60fa533f7..e08ba7222 100644
--- a/agents-api/poe_tasks.toml
+++ b/agents-api/poe_tasks.toml
@@ -2,9 +2,11 @@
format = "ruff format"
lint = "ruff check --select I --fix --unsafe-fixes agents_api/**/*.py migrations/**/*.py tests/**/*.py"
typecheck = "pytype --config pytype.toml"
+validate-sql = "sqlvalidator --verbose-validate agents_api/"
check = [
"lint",
"format",
+ "validate-sql",
"typecheck",
]
codegen = """
diff --git a/agents-api/pyproject.toml b/agents-api/pyproject.toml
index f0d57a70b..db271a021 100644
--- a/agents-api/pyproject.toml
+++ b/agents-api/pyproject.toml
@@ -67,6 +67,7 @@ dev = [
"pyright>=1.1.389",
"pytype>=2024.10.11",
"ruff>=0.8.1",
+ "sqlvalidator>=0.0.20",
"testcontainers[postgres]>=4.9.0",
"ward>=0.68.0b0",
]
diff --git a/agents-api/uv.lock b/agents-api/uv.lock
index 07ec7cb4f..569aa96dc 100644
--- a/agents-api/uv.lock
+++ b/agents-api/uv.lock
@@ -72,6 +72,7 @@ dev = [
{ name = "pyright" },
{ name = "pytype" },
{ name = "ruff" },
+ { name = "sqlvalidator" },
{ name = "testcontainers" },
{ name = "ward" },
]
@@ -140,6 +141,7 @@ dev = [
{ name = "pyright", specifier = ">=1.1.389" },
{ name = "pytype", specifier = ">=2024.10.11" },
{ name = "ruff", specifier = ">=0.8.1" },
+ { name = "sqlvalidator", specifier = ">=0.0.20" },
{ name = "testcontainers", extras = ["postgres"], specifier = ">=4.9.0" },
{ name = "ward", specifier = ">=0.68.0b0" },
]
@@ -2848,6 +2850,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6b/1e/af60a2188773414a9fa65d0e8a32e81342cfcbedf113a19df724d2968c04/sqlglot-26.0.0-py3-none-any.whl", hash = "sha256:1ee9b285e3138c2642a5670c0dbec9afd01860246837788b0f3d228aa6aff619", size = 435457 },
]
+[[package]]
+name = "sqlvalidator"
+version = "0.0.20"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/21/7f/bd1ba351693e60b4dcddd3a84dad89ea75cbc627f9631da17809761a3eb4/sqlvalidator-0.0.20.tar.gz", hash = "sha256:6f399be1bf0ba54a17ad16f6818836c169d17c16306f4cfa6fc883f13b1705fc", size = 24291 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5f/9d/5434c2b90dac2a8ab12d42027398e2012d1ce347a0bcc9500525d05ac1ee/sqlvalidator-0.0.20-py3-none-any.whl", hash = "sha256:8820752d9ec5ccb9cc977099edf991f0090acf4f1e4beb0f2fb35a6e1cc03c89", size = 24182 },
+]
+
[[package]]
name = "srsly"
version = "2.4.8"
From 8b6b0d90062fc1dc7471c4cd6239ca4cfded5275 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Tue, 17 Dec 2024 23:59:05 +0530
Subject: [PATCH 053/274] wip(agents-api): Add session sql queries
Signed-off-by: Diwank Singh Tomer
---
.../agents_api/queries/sessions/__init__.py | 31 ++
.../queries/sessions/count_sessions.py | 55 ++++
.../sessions/create_or_update_session.py | 151 ++++++++++
.../queries/sessions/create_session.py | 138 +++++++++
.../queries/sessions/delete_session.py | 69 +++++
.../queries/sessions/get_session.py | 85 ++++++
.../queries/sessions/list_sessions.py | 109 +++++++
.../queries/sessions/patch_session.py | 131 +++++++++
.../queries/sessions/update_session.py | 131 +++++++++
.../agents_api/queries/users/list_users.py | 3 -
agents-api/tests/test_session_queries.py | 265 ++++++++++--------
.../migrations/000009_sessions.up.sql | 3 +-
memory-store/migrations/000015_entries.up.sql | 16 ++
13 files changed, 1065 insertions(+), 122 deletions(-)
create mode 100644 agents-api/agents_api/queries/sessions/__init__.py
create mode 100644 agents-api/agents_api/queries/sessions/count_sessions.py
create mode 100644 agents-api/agents_api/queries/sessions/create_or_update_session.py
create mode 100644 agents-api/agents_api/queries/sessions/create_session.py
create mode 100644 agents-api/agents_api/queries/sessions/delete_session.py
create mode 100644 agents-api/agents_api/queries/sessions/get_session.py
create mode 100644 agents-api/agents_api/queries/sessions/list_sessions.py
create mode 100644 agents-api/agents_api/queries/sessions/patch_session.py
create mode 100644 agents-api/agents_api/queries/sessions/update_session.py
diff --git a/agents-api/agents_api/queries/sessions/__init__.py b/agents-api/agents_api/queries/sessions/__init__.py
new file mode 100644
index 000000000..bf192210b
--- /dev/null
+++ b/agents-api/agents_api/queries/sessions/__init__.py
@@ -0,0 +1,31 @@
+"""
+The `sessions` module within the `queries` package provides SQL query functions for managing sessions
+in the PostgreSQL database. This includes operations for:
+
+- Creating new sessions
+- Updating existing sessions
+- Retrieving session details
+- Listing sessions with filtering and pagination
+- Deleting sessions
+"""
+
+from .count_sessions import count_sessions
+from .create_or_update_session import create_or_update_session
+from .create_session import create_session
+from .delete_session import delete_session
+from .get_session import get_session
+from .list_sessions import list_sessions
+from .patch_session import patch_session
+from .update_session import update_session
+
+__all__ = [
+ "count_sessions",
+ "create_or_update_session",
+ "create_session",
+ "delete_session",
+ "get_session",
+ "list_sessions",
+ "patch_session",
+ "update_session",
+]
+
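+# Example import path for callers (sketch):
+#     from agents_api.queries.sessions import create_session, get_session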
diff --git a/agents-api/agents_api/queries/sessions/count_sessions.py b/agents-api/agents_api/queries/sessions/count_sessions.py
new file mode 100644
index 000000000..71c1ec0dc
--- /dev/null
+++ b/agents-api/agents_api/queries/sessions/count_sessions.py
@@ -0,0 +1,55 @@
+"""This module contains functions for querying session data from the PostgreSQL database."""
+
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query outside the function
+raw_query = """
+SELECT COUNT(session_id) as count
+FROM sessions
+WHERE developer_id = $1;
+"""
+
+# Parse and pretty-print the query (sqlglot normalizes the formatting)
+query = parse_one(raw_query).sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
+@wrap_in_class(dict, one=True)
+@increase_counter("count_sessions")
+@pg_query
+@beartype
+async def count_sessions(
+ *,
+ developer_id: UUID,
+) -> tuple[str, list]:
+ """
+ Counts sessions from the PostgreSQL database.
+ Uses the index on developer_id for efficient counting.
+
+ Args:
+ developer_id (UUID): The developer's ID to filter sessions by.
+
+ Returns:
+ tuple[str, list]: SQL query and parameters.
+ """
+
+ return (
+ query,
+ [developer_id],
+ )
\ No newline at end of file
diff --git a/agents-api/agents_api/queries/sessions/create_or_update_session.py b/agents-api/agents_api/queries/sessions/create_or_update_session.py
new file mode 100644
index 000000000..4bbbef091
--- /dev/null
+++ b/agents-api/agents_api/queries/sessions/create_or_update_session.py
@@ -0,0 +1,151 @@
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import CreateOrUpdateSessionRequest, ResourceUpdatedResponse
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL queries
+session_query = parse_one("""
+INSERT INTO sessions (
+ developer_id,
+ session_id,
+ situation,
+ system_template,
+ metadata,
+ render_templates,
+ token_budget,
+ context_overflow,
+ forward_tool_calls,
+ recall_options
+)
+VALUES (
+ $1,
+ $2,
+ $3,
+ $4,
+ $5,
+ $6,
+ $7,
+ $8,
+ $9,
+ $10
+)
+ON CONFLICT (developer_id, session_id) DO UPDATE SET
+ situation = EXCLUDED.situation,
+ system_template = EXCLUDED.system_template,
+ metadata = EXCLUDED.metadata,
+ render_templates = EXCLUDED.render_templates,
+ token_budget = EXCLUDED.token_budget,
+ context_overflow = EXCLUDED.context_overflow,
+ forward_tool_calls = EXCLUDED.forward_tool_calls,
+ recall_options = EXCLUDED.recall_options
+RETURNING *;
+""").sql(pretty=True)
+
+lookup_query = parse_one("""
+WITH deleted_lookups AS (
+ DELETE FROM session_lookup
+ WHERE developer_id = $1 AND session_id = $2
+)
+INSERT INTO session_lookup (
+ developer_id,
+ session_id,
+ participant_type,
+ participant_id
+)
+SELECT
+ $1 as developer_id,
+ $2 as session_id,
+ unnest($3::participant_type[]) as participant_type,
+ unnest($4::uuid[]) as participant_id;
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or participant does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A session with this ID already exists.",
+ ),
+ }
+)
+@wrap_in_class(ResourceUpdatedResponse, one=True)
+@increase_counter("create_or_update_session")
+@pg_query
+@beartype
+async def create_or_update_session(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ data: CreateOrUpdateSessionRequest,
+) -> list[tuple[str, list]]:
+ """
+ Constructs SQL queries to create or update a session and its participant lookups.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ session_id (UUID): The session's UUID
+ data (CreateOrUpdateSessionRequest): Session data to insert or update
+
+ Returns:
+ list[tuple[str, list]]: List of SQL queries and their parameters
+ """
+ # Handle participants
+ users = data.users or ([data.user] if data.user else [])
+ agents = data.agents or ([data.agent] if data.agent else [])
+
+ if not agents:
+ raise HTTPException(
+ status_code=400,
+ detail="At least one agent must be provided",
+ )
+
+ if data.agent and data.agents:
+ raise HTTPException(
+ status_code=400,
+ detail="Only one of 'agent' or 'agents' should be provided",
+ )
+
+ # Prepare participant arrays for lookup query
+ participant_types = (
+ ["user"] * len(users) + ["agent"] * len(agents)
+ )
+ participant_ids = [str(u) for u in users] + [str(a) for a in agents]
+
+ # Prepare session parameters
+ session_params = [
+ developer_id, # $1
+ session_id, # $2
+ data.situation, # $3
+ data.system_template, # $4
+ data.metadata or {}, # $5
+ data.render_templates, # $6
+ data.token_budget, # $7
+ data.context_overflow, # $8
+ data.forward_tool_calls, # $9
+ data.recall_options or {}, # $10
+ ]
+
+ # Prepare lookup parameters
+ lookup_params = [
+ developer_id, # $1
+ session_id, # $2
+ participant_types, # $3
+ participant_ids, # $4
+ ]
+
+ return [
+ (session_query, session_params),
+ (lookup_query, lookup_params),
+ ]
diff --git a/agents-api/agents_api/queries/sessions/create_session.py b/agents-api/agents_api/queries/sessions/create_session.py
new file mode 100644
index 000000000..9f756f25c
--- /dev/null
+++ b/agents-api/agents_api/queries/sessions/create_session.py
@@ -0,0 +1,138 @@
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import CreateSessionRequest, Session
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL queries
+session_query = parse_one("""
+INSERT INTO sessions (
+ developer_id,
+ session_id,
+ situation,
+ system_template,
+ metadata,
+ render_templates,
+ token_budget,
+ context_overflow,
+ forward_tool_calls,
+ recall_options
+)
+VALUES (
+ $1, -- developer_id
+ $2, -- session_id
+ $3, -- situation
+ $4, -- system_template
+ $5, -- metadata
+ $6, -- render_templates
+ $7, -- token_budget
+ $8, -- context_overflow
+ $9, -- forward_tool_calls
+ $10 -- recall_options
+)
+RETURNING *;
+""").sql(pretty=True)
+
+lookup_query = parse_one("""
+INSERT INTO session_lookup (
+ developer_id,
+ session_id,
+ participant_type,
+ participant_id
+)
+SELECT
+ $1 as developer_id,
+ $2 as session_id,
+ unnest($3::participant_type[]) as participant_type,
+ unnest($4::uuid[]) as participant_id;
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or participant does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A session with this ID already exists.",
+ ),
+ }
+)
+@wrap_in_class(Session, one=True, transform=lambda d: {**d, "id": d["session_id"]})
+@increase_counter("create_session")
+@pg_query
+@beartype
+async def create_session(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ data: CreateSessionRequest,
+) -> list[tuple[str, list]]:
+ """
+ Constructs SQL queries to create a new session and its participant lookups.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ session_id (UUID): The session's UUID
+ data (CreateSessionRequest): Session creation data
+
+ Returns:
+ list[tuple[str, list]]: SQL queries and their parameters
+ """
+ # Handle participants
+ users = data.users or ([data.user] if data.user else [])
+ agents = data.agents or ([data.agent] if data.agent else [])
+
+ if not agents:
+ raise HTTPException(
+ status_code=400,
+ detail="At least one agent must be provided",
+ )
+
+ if data.agent and data.agents:
+ raise HTTPException(
+ status_code=400,
+ detail="Only one of 'agent' or 'agents' should be provided",
+ )
+
+ # Prepare participant arrays for lookup query
+ participant_types = (
+ ["user"] * len(users) + ["agent"] * len(agents)
+ )
+ participant_ids = [str(u) for u in users] + [str(a) for a in agents]
+
+ # Prepare session parameters
+ session_params = [
+ developer_id, # $1
+ session_id, # $2
+ data.situation, # $3
+ data.system_template, # $4
+ data.metadata or {}, # $5
+ data.render_templates, # $6
+ data.token_budget, # $7
+ data.context_overflow, # $8
+ data.forward_tool_calls, # $9
+ data.recall_options or {}, # $10
+ ]
+
+ # Prepare lookup parameters
+ lookup_params = [
+ developer_id, # $1
+ session_id, # $2
+ participant_types, # $3
+ participant_ids, # $4
+ ]
+
+ return [
+ (session_query, session_params),
+ (lookup_query, lookup_params),
+ ]
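+
+
+# Note on lookup_query: the two unnest() calls expand the parallel arrays
+# row-wise, so participant_types ['user', 'agent'] with participant_ids
+# [user_uuid, agent_uuid] (hypothetical values) insert the rows
+# ('user', user_uuid) and ('agent', agent_uuid).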
diff --git a/agents-api/agents_api/queries/sessions/delete_session.py b/agents-api/agents_api/queries/sessions/delete_session.py
new file mode 100644
index 000000000..2e3234fe2
--- /dev/null
+++ b/agents-api/agents_api/queries/sessions/delete_session.py
@@ -0,0 +1,69 @@
+"""This module contains the implementation for deleting sessions from the PostgreSQL database."""
+
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL queries
+lookup_query = parse_one("""
+DELETE FROM session_lookup
+WHERE developer_id = $1 AND session_id = $2;
+""").sql(pretty=True)
+
+session_query = parse_one("""
+DELETE FROM sessions
+WHERE developer_id = $1 AND session_id = $2
+RETURNING session_id;
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceDeletedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": d["session_id"],
+ "deleted_at": utcnow(),
+ "jobs": [],
+ },
+)
+@increase_counter("delete_session")
+@pg_query
+@beartype
+async def delete_session(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+) -> list[tuple[str, list]]:
+ """
+ Constructs SQL queries to delete a session and its participant lookups.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ session_id (UUID): The session's UUID to delete
+
+ Returns:
+ list[tuple[str, list]]: List of SQL queries and their parameters
+ """
+ params = [developer_id, session_id]
+
+ return [
+ (lookup_query, params), # Delete from lookup table first due to FK constraint
+ (session_query, params), # Then delete from sessions table
+ ]
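
As the comments above note, the lookup rows are removed before the session row because session_lookup carries a foreign key to sessions. A hedged usage sketch, following the connection_pool calling convention the tests in this patch use (the pool argument is illustrative):

from uuid import UUID

async def remove_session(pool, developer_id: UUID, session_id: UUID):
    # Both DELETE statements run through the decorator stack; the result
    # is wrapped into a ResourceDeletedResponse by wrap_in_class.
    return await delete_session(
        developer_id=developer_id,
        session_id=session_id,
        connection_pool=pool,
    )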
diff --git a/agents-api/agents_api/queries/sessions/get_session.py b/agents-api/agents_api/queries/sessions/get_session.py
new file mode 100644
index 000000000..441a1c5c3
--- /dev/null
+++ b/agents-api/agents_api/queries/sessions/get_session.py
@@ -0,0 +1,85 @@
+"""This module contains functions for retrieving session data from the PostgreSQL database."""
+
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import Session
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query
+raw_query = """
+WITH session_participants AS (
+ SELECT
+ sl.session_id,
+ array_agg(sl.participant_id) FILTER (WHERE sl.participant_type = 'agent') as agents,
+ array_agg(sl.participant_id) FILTER (WHERE sl.participant_type = 'user') as users
+ FROM session_lookup sl
+ WHERE sl.developer_id = $1 AND sl.session_id = $2
+ GROUP BY sl.session_id
+)
+SELECT
+ s.session_id as id,
+ s.developer_id,
+ s.situation,
+ s.system_template,
+ s.metadata,
+ s.render_templates,
+ s.token_budget,
+ s.context_overflow,
+ s.forward_tool_calls,
+ s.recall_options,
+ s.created_at,
+ s.updated_at,
+ sp.agents,
+ sp.users
+FROM sessions s
+LEFT JOIN session_participants sp ON s.session_id = sp.session_id
+WHERE s.developer_id = $1 AND s.session_id = $2;
+"""
+
+# Parse and optimize the query
+query = parse_one(raw_query).sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found"
+ ),
+ }
+)
+@wrap_in_class(Session, one=True)
+@increase_counter("get_session")
+@pg_query
+@beartype
+async def get_session(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+) -> tuple[str, list]:
+ """
+ Constructs SQL query to retrieve a session and its participants.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ session_id (UUID): The session's UUID
+
+ Returns:
+ tuple[str, list]: SQL query and parameters
+ """
+ return (
+ query,
+ [developer_id, session_id],
+ )
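
The CTE above leans on the aggregate FILTER clause: each array_agg only sees rows matching its predicate, so one scan of session_lookup yields both participant arrays (and NULL when a session has no participants of that type). The idiom in isolation, using the table from the migrations in this series:

filter_demo_query = """
SELECT
    array_agg(participant_id) FILTER (WHERE participant_type = 'agent') AS agents,
    array_agg(participant_id) FILTER (WHERE participant_type = 'user') AS users
FROM session_lookup
WHERE developer_id = $1 AND session_id = $2;
"""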
diff --git a/agents-api/agents_api/queries/sessions/list_sessions.py b/agents-api/agents_api/queries/sessions/list_sessions.py
new file mode 100644
index 000000000..80986a867
--- /dev/null
+++ b/agents-api/agents_api/queries/sessions/list_sessions.py
@@ -0,0 +1,109 @@
+"""This module contains functions for querying session data from the PostgreSQL database."""
+
+from typing import Any, Literal, TypeVar
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import Session
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query
+raw_query = """
+WITH session_participants AS (
+ SELECT
+ sl.session_id,
+ array_agg(sl.participant_id) FILTER (WHERE sl.participant_type = 'agent') as agents,
+ array_agg(sl.participant_id) FILTER (WHERE sl.participant_type = 'user') as users
+ FROM session_lookup sl
+ WHERE sl.developer_id = $1
+ GROUP BY sl.session_id
+)
+SELECT
+ s.session_id as id,
+ s.developer_id,
+ s.situation,
+ s.system_template,
+ s.metadata,
+ s.render_templates,
+ s.token_budget,
+ s.context_overflow,
+ s.forward_tool_calls,
+ s.recall_options,
+ s.created_at,
+ s.updated_at,
+ sp.agents,
+ sp.users
+FROM sessions s
+LEFT JOIN session_participants sp ON s.session_id = sp.session_id
+WHERE s.developer_id = $1
+ AND ($5::jsonb IS NULL OR s.metadata @> $5::jsonb)
+ORDER BY
+ CASE WHEN $3 = 'created_at' AND $4 = 'desc' THEN s.created_at END DESC,
+ CASE WHEN $3 = 'created_at' AND $4 = 'asc' THEN s.created_at END ASC,
+ CASE WHEN $3 = 'updated_at' AND $4 = 'desc' THEN s.updated_at END DESC,
+ CASE WHEN $3 = 'updated_at' AND $4 = 'asc' THEN s.updated_at END ASC
+LIMIT $2 OFFSET $6;
+"""
+
+# NOTE: this query is used as-is, without parse_one formatting
+query = raw_query
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="No sessions found"
+ ),
+ }
+)
+@wrap_in_class(Session)
+@increase_counter("list_sessions")
+@pg_query
+@beartype
+async def list_sessions(
+ *,
+ developer_id: UUID,
+ limit: int = 100,
+ offset: int = 0,
+ sort_by: Literal["created_at", "updated_at"] = "created_at",
+ direction: Literal["asc", "desc"] = "desc",
+ metadata_filter: dict[str, Any] = {},
+) -> tuple[str, list]:
+ """
+ Lists sessions from the PostgreSQL database based on the provided filters.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ limit (int): Maximum number of sessions to return
+ offset (int): Number of sessions to skip
+ sort_by (str): Field to sort by ('created_at' or 'updated_at')
+ direction (str): Sort direction ('asc' or 'desc')
+ metadata_filter (dict): Dictionary of metadata fields to filter by
+
+ Returns:
+ tuple[str, list]: SQL query and parameters
+ """
+ return (
+ query,
+ [
+ developer_id, # $1
+ limit, # $2
+ sort_by, # $3
+ direction, # $4
+ metadata_filter or None, # $5
+ offset, # $6
+ ],
+ )
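
The CASE chain in ORDER BY exists because Postgres bind parameters can only stand in for values, never identifiers, so ORDER BY $3 $4 would not work. An alternative sketch (hypothetical helper, not part of this patch) validates against an allow-list and interpolates the identifier directly:

ALLOWED_SORT_COLUMNS = {"created_at", "updated_at"}
ALLOWED_DIRECTIONS = {"asc", "desc"}

def order_clause(sort_by: str, direction: str) -> str:
    # Interpolating is safe only because both values were checked
    # against the closed sets above.
    if sort_by not in ALLOWED_SORT_COLUMNS or direction not in ALLOWED_DIRECTIONS:
        raise ValueError("invalid sort parameters")
    return f"ORDER BY s.{sort_by} {direction.upper()}"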
diff --git a/agents-api/agents_api/queries/sessions/patch_session.py b/agents-api/agents_api/queries/sessions/patch_session.py
new file mode 100644
index 000000000..b14b94a8a
--- /dev/null
+++ b/agents-api/agents_api/queries/sessions/patch_session.py
@@ -0,0 +1,131 @@
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import PatchSessionRequest, ResourceUpdatedResponse
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL queries
+# COALESCE keeps the stored value for every field the patch request omits
+session_query = parse_one("""
+WITH updated_session AS (
+ UPDATE sessions
+ SET
+ situation = COALESCE($3, situation),
+ system_template = COALESCE($4, system_template),
+ metadata = sessions.metadata || $5,
+ render_templates = COALESCE($6, render_templates),
+ token_budget = COALESCE($7, token_budget),
+ context_overflow = COALESCE($8, context_overflow),
+ forward_tool_calls = COALESCE($9, forward_tool_calls),
+ recall_options = sessions.recall_options || $10
+ WHERE
+ developer_id = $1
+ AND session_id = $2
+ RETURNING *
+)
+SELECT * FROM updated_session;
+""").sql(pretty=True)
+
+lookup_query = parse_one("""
+WITH deleted_lookups AS (
+ DELETE FROM session_lookup
+ WHERE developer_id = $1 AND session_id = $2
+)
+INSERT INTO session_lookup (
+ developer_id,
+ session_id,
+ participant_type,
+ participant_id
+)
+SELECT
+ $1 as developer_id,
+ $2 as session_id,
+ unnest($3::participant_type[]) as participant_type,
+ unnest($4::uuid[]) as participant_id;
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or participant does not exist.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ }
+)
+@wrap_in_class(ResourceUpdatedResponse, one=True)
+@increase_counter("patch_session")
+@pg_query
+@beartype
+async def patch_session(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ data: PatchSessionRequest,
+) -> list[tuple[str, list]]:
+ """
+ Constructs SQL queries to patch a session and its participant lookups.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ session_id (UUID): The session's UUID
+ data (PatchSessionRequest): Session patch data
+
+ Returns:
+ list[tuple[str, list]]: List of SQL queries and their parameters
+ """
+ # Handle participants
+ users = data.users or ([data.user] if data.user else [])
+ agents = data.agents or ([data.agent] if data.agent else [])
+
+ if data.agent and data.agents:
+ raise HTTPException(
+ status_code=400,
+ detail="Only one of 'agent' or 'agents' should be provided",
+ )
+
+ # Prepare participant arrays for lookup query if participants are provided
+ participant_types = []
+ participant_ids = []
+ if users or agents:
+ participant_types = ["user"] * len(users) + ["agent"] * len(agents)
+ participant_ids = [str(u) for u in users] + [str(a) for a in agents]
+
+ # Extract fields from data, using None for unset fields
+ session_params = [
+ developer_id, # $1
+ session_id, # $2
+ data.situation, # $3
+ data.system_template, # $4
+ data.metadata or {}, # $5
+ data.render_templates, # $6
+ data.token_budget, # $7
+ data.context_overflow, # $8
+ data.forward_tool_calls, # $9
+ data.recall_options or {}, # $10
+ ]
+
+ queries = [(session_query, session_params)]
+
+ # Only add lookup query if participants are provided
+ if participant_types:
+ lookup_params = [
+ developer_id, # $1
+ session_id, # $2
+ participant_types, # $3
+ participant_ids, # $4
+ ]
+ queries.append((lookup_query, lookup_params))
+
+ return queries
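
Two idioms carry the patch semantics here: COALESCE($n, col) keeps the stored value whenever the client omitted a field (the parameter arrives as NULL), and col || $n shallow-merges JSONB so new keys override existing ones while untouched keys survive. Because the Python side passes data.metadata or {}, an omitted metadata field merges an empty object, which is a no-op. For reference, the two idioms in isolation:

coalesce_merge_demo = """
UPDATE sessions
SET
    situation = COALESCE($3, situation),  -- NULL parameter => keep current value
    metadata = sessions.metadata || $5    -- '{}' parameter => no-op merge
WHERE developer_id = $1 AND session_id = $2;
"""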
diff --git a/agents-api/agents_api/queries/sessions/update_session.py b/agents-api/agents_api/queries/sessions/update_session.py
new file mode 100644
index 000000000..2999e21f6
--- /dev/null
+++ b/agents-api/agents_api/queries/sessions/update_session.py
@@ -0,0 +1,131 @@
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateSessionRequest
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL queries
+session_query = parse_one("""
+UPDATE sessions
+SET
+ situation = $3,
+ system_template = $4,
+ metadata = $5,
+ render_templates = $6,
+ token_budget = $7,
+ context_overflow = $8,
+ forward_tool_calls = $9,
+ recall_options = $10
+WHERE
+ developer_id = $1
+ AND session_id = $2
+RETURNING *;
+""").sql(pretty=True)
+
+lookup_query = parse_one("""
+WITH deleted_lookups AS (
+ DELETE FROM session_lookup
+ WHERE developer_id = $1 AND session_id = $2
+)
+INSERT INTO session_lookup (
+ developer_id,
+ session_id,
+ participant_type,
+ participant_id
+)
+SELECT
+ $1 as developer_id,
+ $2 as session_id,
+ unnest($3::participant_type[]) as participant_type,
+ unnest($4::uuid[]) as participant_id;
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or participant does not exist.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ }
+)
+@wrap_in_class(ResourceUpdatedResponse, one=True)
+@increase_counter("update_session")
+@pg_query
+@beartype
+async def update_session(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ data: UpdateSessionRequest,
+) -> list[tuple[str, list]]:
+ """
+ Constructs SQL queries to update a session and its participant lookups.
+
+ Args:
+ developer_id (UUID): The developer's UUID
+ session_id (UUID): The session's UUID
+ data (UpdateSessionRequest): Session update data
+
+ Returns:
+ list[tuple[str, list]]: List of SQL queries and their parameters
+ """
+ # Handle participants
+ users = data.users or ([data.user] if data.user else [])
+ agents = data.agents or ([data.agent] if data.agent else [])
+
+ if not agents:
+ raise HTTPException(
+ status_code=400,
+ detail="At least one agent must be provided",
+ )
+
+ if data.agent and data.agents:
+ raise HTTPException(
+ status_code=400,
+ detail="Only one of 'agent' or 'agents' should be provided",
+ )
+
+ # Prepare participant arrays for lookup query
+ participant_types = (
+ ["user"] * len(users) + ["agent"] * len(agents)
+ )
+ participant_ids = [str(u) for u in users] + [str(a) for a in agents]
+
+ # Prepare session parameters
+ session_params = [
+ developer_id, # $1
+ session_id, # $2
+ data.situation, # $3
+ data.system_template, # $4
+ data.metadata or {}, # $5
+ data.render_templates, # $6
+ data.token_budget, # $7
+ data.context_overflow, # $8
+ data.forward_tool_calls, # $9
+ data.recall_options or {}, # $10
+ ]
+
+ # Prepare lookup parameters
+ lookup_params = [
+ developer_id, # $1
+ session_id, # $2
+ participant_types, # $3
+ participant_ids, # $4
+ ]
+
+ return [
+ (session_query, session_params),
+ (lookup_query, lookup_params),
+ ]
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
index 7f3677eab..74b40eb7b 100644
--- a/agents-api/agents_api/queries/users/list_users.py
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -37,9 +37,6 @@
OFFSET $3;
"""
-# Parse and optimize the query
-# query = parse_one(raw_query).sql(pretty=True)
-
@rewrap_exceptions(
{
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index e8ec40367..262b5aef8 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -1,160 +1,191 @@
-# # Tests for session queries
-
-# from uuid_extensions import uuid7
-# from ward import test
-
-# from agents_api.autogen.openapi_model import (
-# CreateOrUpdateSessionRequest,
-# CreateSessionRequest,
-# Session,
-# )
-# from agents_api.queries.session.count_sessions import count_sessions
-# from agents_api.queries.session.create_or_update_session import create_or_update_session
-# from agents_api.queries.session.create_session import create_session
-# from agents_api.queries.session.delete_session import delete_session
-# from agents_api.queries.session.get_session import get_session
-# from agents_api.queries.session.list_sessions import list_sessions
-# from tests.fixtures import (
-# cozo_client,
-# test_agent,
-# test_developer_id,
-# test_session,
-# test_user,
-# )
-
-# MODEL = "gpt-4o-mini"
-
-
-# @test("query: create session")
-# def _(
-# client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user
-# ):
-# create_session(
+"""
+This module contains tests for SQL query generation functions in the sessions module.
+Tests execute each query against a test PostgreSQL database.
+"""
+
+
+from uuid import UUID
+
+import asyncpg
+from uuid_extensions import uuid7
+from ward import raises, test
+
+from agents_api.autogen.openapi_model import (
+ CreateOrUpdateSessionRequest,
+ CreateSessionRequest,
+ PatchSessionRequest,
+ ResourceDeletedResponse,
+ ResourceUpdatedResponse,
+ Session,
+ UpdateSessionRequest,
+)
+from agents_api.clients.pg import create_db_pool
+from agents_api.queries.sessions import (
+ count_sessions,
+ create_or_update_session,
+ create_session,
+ delete_session,
+ get_session,
+ list_sessions,
+ patch_session,
+ update_session,
+)
+from tests.fixtures import pg_dsn, test_developer_id # , test_session, test_agent, test_user
+
+
+# @test("query: create session sql")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user):
+# """Test that a session can be successfully created."""
+
+# pool = await create_db_pool(dsn=dsn)
+# await create_session(
# developer_id=developer_id,
+# session_id=uuid7(),
# data=CreateSessionRequest(
# users=[user.id],
# agents=[agent.id],
-# situation="test session about",
+# situation="test session",
# ),
-# client=client,
+# connection_pool=pool,
# )
-# @test("query: create session no user")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# create_session(
+# @test("query: create or update session sql")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user):
+# """Test that a session can be successfully created or updated."""
+
+# pool = await create_db_pool(dsn=dsn)
+# await create_or_update_session(
# developer_id=developer_id,
-# data=CreateSessionRequest(
+# session_id=uuid7(),
+# data=CreateOrUpdateSessionRequest(
+# users=[user.id],
# agents=[agent.id],
-# situation="test session about",
+# situation="test session",
# ),
-# client=client,
+# connection_pool=pool,
# )
-# @test("query: get session not exists")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# session_id = uuid7()
-
-# try:
-# get_session(
-# session_id=session_id,
-# developer_id=developer_id,
-# client=client,
-# )
-# except Exception:
-# pass
-# else:
-# assert False, "Session should not exist"
-
+# @test("query: update session sql")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent):
+# """Test that an existing session's information can be successfully updated."""
-# @test("query: get session exists")
-# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
-# result = get_session(
+# pool = await create_db_pool(dsn=dsn)
+# update_result = await update_session(
# session_id=session.id,
# developer_id=developer_id,
-# client=client,
+# data=UpdateSessionRequest(
+# agents=[agent.id],
+# situation="updated session",
+# ),
+# connection_pool=pool,
# )
-# assert result is not None
-# assert isinstance(result, Session)
+# assert update_result is not None
+# assert isinstance(update_result, ResourceUpdatedResponse)
+# assert update_result.updated_at > session.created_at
-# @test("query: delete session")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# session = create_session(
-# developer_id=developer_id,
-# data=CreateSessionRequest(
-# agent=agent.id,
-# situation="test session about",
-# ),
-# client=client,
-# )
+@test("query: get session not exists sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that retrieving a non-existent session returns an empty result."""
-# delete_session(
-# session_id=session.id,
-# developer_id=developer_id,
-# client=client,
-# )
+ session_id = uuid7()
+ pool = await create_db_pool(dsn=dsn)
-# try:
-# get_session(
-# session_id=session.id,
-# developer_id=developer_id,
-# client=client,
-# )
-# except Exception:
-# pass
+ with raises(Exception):
+ await get_session(
+ session_id=session_id,
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
-# else:
-# assert False, "Session should not exist"
+# @test("query: get session exists sql")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+# """Test that retrieving an existing session returns the correct session information."""
-# @test("query: list sessions")
-# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
-# result = list_sessions(
+# pool = await create_db_pool(dsn=dsn)
+# result = await get_session(
+# session_id=session.id,
# developer_id=developer_id,
-# client=client,
+# connection_pool=pool,
# )
-# assert isinstance(result, list)
-# assert len(result) > 0
+# assert result is not None
+# assert isinstance(result, Session)
-# @test("query: count sessions")
-# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
-# result = count_sessions(
-# developer_id=developer_id,
-# client=client,
-# )
+@test("query: list sessions sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that listing sessions returns a collection of session information."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await list_sessions(
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
-# assert isinstance(result, dict)
-# assert result["count"] > 0
+ assert isinstance(result, list)
+ assert len(result) >= 1
+ assert all(isinstance(session, Session) for session in result)
-# @test("query: create or update session")
-# def _(
-# client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user
-# ):
-# session_id = uuid7()
+# @test("query: patch session sql")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent):
+# """Test that a session can be successfully patched."""
-# create_or_update_session(
-# session_id=session_id,
+# pool = await create_db_pool(dsn=dsn)
+# patch_result = await patch_session(
# developer_id=developer_id,
-# data=CreateOrUpdateSessionRequest(
-# users=[user.id],
+# session_id=session.id,
+# data=PatchSessionRequest(
# agents=[agent.id],
-# situation="test session about",
+# situation="patched session",
+# metadata={"test": "metadata"},
# ),
-# client=client,
+# connection_pool=pool,
# )
-# result = get_session(
-# session_id=session_id,
+# assert patch_result is not None
+# assert isinstance(patch_result, ResourceUpdatedResponse)
+# assert patch_result.updated_at > session.created_at
+
+
+# @test("query: delete session sql")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+# """Test that a session can be successfully deleted."""
+
+# pool = await create_db_pool(dsn=dsn)
+# delete_result = await delete_session(
# developer_id=developer_id,
-# client=client,
+# session_id=session.id,
+# connection_pool=pool,
# )
-# assert result is not None
-# assert isinstance(result, Session)
-# assert result.id == session_id
+# assert delete_result is not None
+# assert isinstance(delete_result, ResourceDeletedResponse)
+
+# # Verify the session no longer exists
+# with raises(Exception):
+# await get_session(
+# developer_id=developer_id,
+# session_id=session.id,
+# connection_pool=pool,
+# )
+
+
+@test("query: count sessions sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that sessions can be counted."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await count_sessions(
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
+
+ assert isinstance(result, dict)
+ assert "count" in result
+ assert isinstance(result["count"], int)
diff --git a/memory-store/migrations/000009_sessions.up.sql b/memory-store/migrations/000009_sessions.up.sql
index 082f3823c..75b5fde9a 100644
--- a/memory-store/migrations/000009_sessions.up.sql
+++ b/memory-store/migrations/000009_sessions.up.sql
@@ -7,8 +7,7 @@ CREATE TABLE IF NOT EXISTS sessions (
situation TEXT,
system_template TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- -- NOTE: Derived from entries
- -- updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
render_templates BOOLEAN NOT NULL DEFAULT TRUE,
token_budget INTEGER,
diff --git a/memory-store/migrations/000015_entries.up.sql b/memory-store/migrations/000015_entries.up.sql
index 9985e4c41..e9d5c6a4f 100644
--- a/memory-store/migrations/000015_entries.up.sql
+++ b/memory-store/migrations/000015_entries.up.sql
@@ -85,4 +85,20 @@ OR
UPDATE ON entries FOR EACH ROW
EXECUTE FUNCTION optimized_update_token_count_after ();
+-- Add trigger to update parent session's updated_at
+CREATE OR REPLACE FUNCTION update_session_updated_at()
+RETURNS TRIGGER AS $$
+BEGIN
+ UPDATE sessions
+ SET updated_at = CURRENT_TIMESTAMP
+ WHERE session_id = NEW.session_id;
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+CREATE TRIGGER trg_update_session_updated_at
+AFTER INSERT OR UPDATE ON entries
+FOR EACH ROW
+EXECUTE FUNCTION update_session_updated_at();
+
COMMIT;
\ No newline at end of file
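
A note on the trigger added above: it fires once per entry row, so a bulk insert of N entries issues N UPDATEs against the same session row. A hedged alternative sketch (not part of this migration) uses a statement-level trigger with a transition table, available since PostgreSQL 10, to touch each session once per statement:

statement_level_variant = """
CREATE OR REPLACE FUNCTION update_session_updated_at_stmt()
RETURNS TRIGGER AS $$
BEGIN
    UPDATE sessions s
    SET updated_at = CURRENT_TIMESTAMP
    FROM (SELECT DISTINCT session_id FROM new_entries) n
    WHERE s.session_id = n.session_id;
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trg_update_session_updated_at_stmt
AFTER INSERT ON entries
REFERENCING NEW TABLE AS new_entries
FOR EACH STATEMENT
EXECUTE FUNCTION update_session_updated_at_stmt();
"""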
From 065c7d2ef68a762eb455a559f48e9108cc0d0d11 Mon Sep 17 00:00:00 2001
From: creatorrr
Date: Tue, 17 Dec 2024 18:30:17 +0000
Subject: [PATCH 054/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/sessions/__init__.py | 1 -
agents-api/agents_api/queries/sessions/count_sessions.py | 2 +-
.../queries/sessions/create_or_update_session.py | 9 +++++----
agents-api/agents_api/queries/sessions/create_session.py | 4 +---
agents-api/agents_api/queries/sessions/get_session.py | 4 +---
agents-api/agents_api/queries/sessions/list_sessions.py | 4 +---
agents-api/agents_api/queries/sessions/update_session.py | 4 +---
agents-api/tests/test_session_queries.py | 9 +++++----
8 files changed, 15 insertions(+), 22 deletions(-)
diff --git a/agents-api/agents_api/queries/sessions/__init__.py b/agents-api/agents_api/queries/sessions/__init__.py
index bf192210b..d0f64ea5e 100644
--- a/agents-api/agents_api/queries/sessions/__init__.py
+++ b/agents-api/agents_api/queries/sessions/__init__.py
@@ -28,4 +28,3 @@
"patch_session",
"update_session",
]
-
diff --git a/agents-api/agents_api/queries/sessions/count_sessions.py b/agents-api/agents_api/queries/sessions/count_sessions.py
index 71c1ec0dc..2abdf22e5 100644
--- a/agents-api/agents_api/queries/sessions/count_sessions.py
+++ b/agents-api/agents_api/queries/sessions/count_sessions.py
@@ -52,4 +52,4 @@ async def count_sessions(
return (
query,
[developer_id],
- )
\ No newline at end of file
+ )
diff --git a/agents-api/agents_api/queries/sessions/create_or_update_session.py b/agents-api/agents_api/queries/sessions/create_or_update_session.py
index 4bbbef091..bc54bf31b 100644
--- a/agents-api/agents_api/queries/sessions/create_or_update_session.py
+++ b/agents-api/agents_api/queries/sessions/create_or_update_session.py
@@ -5,7 +5,10 @@
from fastapi import HTTPException
from sqlglot import parse_one
-from ...autogen.openapi_model import CreateOrUpdateSessionRequest, ResourceUpdatedResponse
+from ...autogen.openapi_model import (
+ CreateOrUpdateSessionRequest,
+ ResourceUpdatedResponse,
+)
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
@@ -118,9 +121,7 @@ async def create_or_update_session(
)
# Prepare participant arrays for lookup query
- participant_types = (
- ["user"] * len(users) + ["agent"] * len(agents)
- )
+ participant_types = ["user"] * len(users) + ["agent"] * len(agents)
participant_ids = [str(u) for u in users] + [str(a) for a in agents]
# Prepare session parameters
diff --git a/agents-api/agents_api/queries/sessions/create_session.py b/agents-api/agents_api/queries/sessions/create_session.py
index 9f756f25c..3074f087b 100644
--- a/agents-api/agents_api/queries/sessions/create_session.py
+++ b/agents-api/agents_api/queries/sessions/create_session.py
@@ -105,9 +105,7 @@ async def create_session(
)
# Prepare participant arrays for lookup query
- participant_types = (
- ["user"] * len(users) + ["agent"] * len(agents)
- )
+ participant_types = ["user"] * len(users) + ["agent"] * len(agents)
participant_ids = [str(u) for u in users] + [str(a) for a in agents]
# Prepare session parameters
diff --git a/agents-api/agents_api/queries/sessions/get_session.py b/agents-api/agents_api/queries/sessions/get_session.py
index 441a1c5c3..1f704539e 100644
--- a/agents-api/agents_api/queries/sessions/get_session.py
+++ b/agents-api/agents_api/queries/sessions/get_session.py
@@ -54,9 +54,7 @@
detail="The specified developer does not exist.",
),
asyncpg.NoDataFoundError: partialclass(
- HTTPException,
- status_code=404,
- detail="Session not found"
+ HTTPException, status_code=404, detail="Session not found"
),
}
)
diff --git a/agents-api/agents_api/queries/sessions/list_sessions.py b/agents-api/agents_api/queries/sessions/list_sessions.py
index 80986a867..5ce31803b 100644
--- a/agents-api/agents_api/queries/sessions/list_sessions.py
+++ b/agents-api/agents_api/queries/sessions/list_sessions.py
@@ -63,9 +63,7 @@
detail="The specified developer does not exist.",
),
asyncpg.NoDataFoundError: partialclass(
- HTTPException,
- status_code=404,
- detail="No sessions found"
+ HTTPException, status_code=404, detail="No sessions found"
),
}
)
diff --git a/agents-api/agents_api/queries/sessions/update_session.py b/agents-api/agents_api/queries/sessions/update_session.py
index 2999e21f6..01e21e732 100644
--- a/agents-api/agents_api/queries/sessions/update_session.py
+++ b/agents-api/agents_api/queries/sessions/update_session.py
@@ -98,9 +98,7 @@ async def update_session(
)
# Prepare participant arrays for lookup query
- participant_types = (
- ["user"] * len(users) + ["agent"] * len(agents)
- )
+ participant_types = ["user"] * len(users) + ["agent"] * len(agents)
participant_ids = [str(u) for u in users] + [str(a) for a in agents]
# Prepare session parameters
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 262b5aef8..90b40a0d8 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -3,7 +3,6 @@
 Tests execute each query against a test PostgreSQL database.
"""
-
from uuid import UUID
import asyncpg
@@ -30,13 +29,15 @@
patch_session,
update_session,
)
-from tests.fixtures import pg_dsn, test_developer_id # , test_session, test_agent, test_user
-
+from tests.fixtures import (
+ pg_dsn,
+ test_developer_id,
+) # , test_session, test_agent, test_user
# @test("query: create session sql")
# async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user):
# """Test that a session can be successfully created."""
-
+
# pool = await create_db_pool(dsn=dsn)
# await create_session(
# developer_id=developer_id,
From 2eb10d3110872e3c8a302a1b7a48c0f1e13580b6 Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Tue, 17 Dec 2024 23:33:09 -0500
Subject: [PATCH 055/274] chore: refactor developer and user queries, add entry
 query tests, and fix bugs
---
agents-api/agents_api/autogen/Entries.py | 1 +
.../agents_api/autogen/openapi_model.py | 1 +
.../agents_api/queries/developers/__init__.py | 7 +
.../queries/developers/create_developer.py | 34 +--
.../queries/developers/get_developer.py | 25 ++-
.../queries/developers/patch_developer.py | 28 ++-
.../queries/developers/update_developer.py | 25 ++-
.../queries/{entry => entries}/__init__.py | 8 +-
.../queries/entries/create_entry.py | 196 +++++++++++++++++
.../queries/entries/delete_entry.py | 96 +++++++++
.../agents_api/queries/entries/get_history.py | 72 +++++++
.../agents_api/queries/entries/list_entry.py | 80 +++++++
.../queries/entry/create_entries.py | 107 ----------
.../queries/entry/delete_entries.py | 48 -----
.../agents_api/queries/entry/get_history.py | 73 -------
.../agents_api/queries/entry/list_entries.py | 76 -------
.../queries/users/create_or_update_user.py | 43 ++--
.../agents_api/queries/users/create_user.py | 41 ++--
.../agents_api/queries/users/delete_user.py | 31 +--
.../agents_api/queries/users/get_user.py | 33 ++-
.../agents_api/queries/users/list_users.py | 42 ++--
.../agents_api/queries/users/patch_user.py | 50 +++--
.../agents_api/queries/users/update_user.py | 27 +--
agents-api/tests/test_developer_queries.py | 1 -
agents-api/tests/test_entry_queries.py | 200 ++++++++----------
agents-api/tests/test_user_queries.py | 1 -
agents-api/tests/utils.py | 2 -
.../integrations/autogen/Entries.py | 1 +
typespec/entries/models.tsp | 1 +
.../@typespec/openapi3/openapi-1.0.0.yaml | 4 +
30 files changed, 758 insertions(+), 596 deletions(-)
rename agents-api/agents_api/queries/{entry => entries}/__init__.py (68%)
create mode 100644 agents-api/agents_api/queries/entries/create_entry.py
create mode 100644 agents-api/agents_api/queries/entries/delete_entry.py
create mode 100644 agents-api/agents_api/queries/entries/get_history.py
create mode 100644 agents-api/agents_api/queries/entries/list_entry.py
delete mode 100644 agents-api/agents_api/queries/entry/create_entries.py
delete mode 100644 agents-api/agents_api/queries/entry/delete_entries.py
delete mode 100644 agents-api/agents_api/queries/entry/get_history.py
delete mode 100644 agents-api/agents_api/queries/entry/list_entries.py
diff --git a/agents-api/agents_api/autogen/Entries.py b/agents-api/agents_api/autogen/Entries.py
index de37e77d8..d195b518f 100644
--- a/agents-api/agents_api/autogen/Entries.py
+++ b/agents-api/agents_api/autogen/Entries.py
@@ -52,6 +52,7 @@ class BaseEntry(BaseModel):
]
tokenizer: str
token_count: int
+    modelname: str = "gpt-4o-mini"
tool_calls: (
list[
ChosenFunctionCall
diff --git a/agents-api/agents_api/autogen/openapi_model.py b/agents-api/agents_api/autogen/openapi_model.py
index d19684cee..01042c58c 100644
--- a/agents-api/agents_api/autogen/openapi_model.py
+++ b/agents-api/agents_api/autogen/openapi_model.py
@@ -400,6 +400,7 @@ def from_model_input(
source=source,
tokenizer=tokenizer["type"],
token_count=token_count,
+ modelname=model,
**kwargs,
)
diff --git a/agents-api/agents_api/queries/developers/__init__.py b/agents-api/agents_api/queries/developers/__init__.py
index b3964aba4..c3d1d4bbb 100644
--- a/agents-api/agents_api/queries/developers/__init__.py
+++ b/agents-api/agents_api/queries/developers/__init__.py
@@ -20,3 +20,10 @@
from .get_developer import get_developer
from .patch_developer import patch_developer
from .update_developer import update_developer
+
+__all__ = [
+ "create_developer",
+ "get_developer",
+ "patch_developer",
+ "update_developer",
+]
diff --git a/agents-api/agents_api/queries/developers/create_developer.py b/agents-api/agents_api/queries/developers/create_developer.py
index 7ee845fbf..793d2f184 100644
--- a/agents-api/agents_api/queries/developers/create_developer.py
+++ b/agents-api/agents_api/queries/developers/create_developer.py
@@ -3,14 +3,19 @@
from beartype import beartype
from sqlglot import parse_one
from uuid_extensions import uuid7
+import asyncpg
+from fastapi import HTTPException
from ...common.protocol.developers import Developer
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass,
)
-query = parse_one("""
+# Define the raw SQL query
+developer_query = parse_one("""
INSERT INTO developers (
developer_id,
email,
@@ -19,22 +24,25 @@
settings
)
VALUES (
- $1,
- $2,
- $3,
- $4,
- $5::jsonb
+ $1, -- developer_id
+ $2, -- email
+ $3, -- active
+ $4, -- tags
+ $5::jsonb -- settings
)
RETURNING *;
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# QueryException: partialclass(HTTPException, status_code=403),
-# ValidationError: partialclass(HTTPException, status_code=500),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+            status_code=409,
+            detail="A developer with this ID already exists.",
+ )
+ }
+)
@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
@pg_query
@beartype
@@ -49,6 +57,6 @@ async def create_developer(
developer_id = str(developer_id or uuid7())
return (
- query,
+ developer_query,
[developer_id, email, active, tags or [], settings or {}],
)
diff --git a/agents-api/agents_api/queries/developers/get_developer.py b/agents-api/agents_api/queries/developers/get_developer.py
index 38302ab3b..54d4cf9d9 100644
--- a/agents-api/agents_api/queries/developers/get_developer.py
+++ b/agents-api/agents_api/queries/developers/get_developer.py
@@ -5,11 +5,12 @@
from beartype import beartype
from fastapi import HTTPException
-from pydantic import ValidationError
from sqlglot import parse_one
+import asyncpg
from ...common.protocol.developers import Developer
from ..utils import (
+ partialclass,
pg_query,
rewrap_exceptions,
wrap_in_class,
@@ -18,18 +19,24 @@
# TODO: Add verify_developer
verify_developer = None
-query = parse_one("SELECT * FROM developers WHERE developer_id = $1").sql(pretty=True)
+# Define the raw SQL query
+developer_query = parse_one("""
+SELECT * FROM developers WHERE developer_id = $1 -- developer_id
+""").sql(pretty=True)
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
-# @rewrap_exceptions(
-# {
-# QueryException: partialclass(HTTPException, status_code=403),
-# ValidationError: partialclass(HTTPException, status_code=500),
-# }
-# )
+@rewrap_exceptions(
+ {
+        asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
@pg_query
@beartype
@@ -40,6 +47,6 @@ async def get_developer(
developer_id = str(developer_id)
return (
- query,
+ developer_query,
[developer_id],
)
diff --git a/agents-api/agents_api/queries/developers/patch_developer.py b/agents-api/agents_api/queries/developers/patch_developer.py
index 49edfe370..b37fc7c5e 100644
--- a/agents-api/agents_api/queries/developers/patch_developer.py
+++ b/agents-api/agents_api/queries/developers/patch_developer.py
@@ -2,27 +2,35 @@
from beartype import beartype
from sqlglot import parse_one
+import asyncpg
+from fastapi import HTTPException
from ...common.protocol.developers import Developer
from ..utils import (
pg_query,
wrap_in_class,
+ partialclass,
+ rewrap_exceptions,
)
-query = parse_one("""
+# Define the raw SQL query
+developer_query = parse_one("""
UPDATE developers
-SET email = $1, active = $2, tags = tags || $3, settings = settings || $4
-WHERE developer_id = $5
+SET email = $1, active = $2, tags = tags || $3, settings = settings || $4 -- settings
+WHERE developer_id = $5 -- developer_id
RETURNING *;
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# QueryException: partialclass(HTTPException, status_code=403),
-# ValidationError: partialclass(HTTPException, status_code=500),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+            status_code=409,
+            detail="A developer with this email already exists.",
+ )
+ }
+)
@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
@pg_query
@beartype
@@ -37,6 +45,6 @@ async def patch_developer(
developer_id = str(developer_id)
return (
- query,
+ developer_query,
[email, active, tags or [], settings or {}, developer_id],
)
diff --git a/agents-api/agents_api/queries/developers/update_developer.py b/agents-api/agents_api/queries/developers/update_developer.py
index 8350d45a0..410d5ca12 100644
--- a/agents-api/agents_api/queries/developers/update_developer.py
+++ b/agents-api/agents_api/queries/developers/update_developer.py
@@ -2,14 +2,18 @@
from beartype import beartype
from sqlglot import parse_one
-
+import asyncpg
+from fastapi import HTTPException
from ...common.protocol.developers import Developer
from ..utils import (
pg_query,
wrap_in_class,
+ partialclass,
+ rewrap_exceptions,
)
-query = parse_one("""
+# Define the raw SQL query
+developer_query = parse_one("""
UPDATE developers
SET email = $1, active = $2, tags = $3, settings = $4
WHERE developer_id = $5
@@ -17,12 +21,15 @@
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# QueryException: partialclass(HTTPException, status_code=403),
-# ValidationError: partialclass(HTTPException, status_code=500),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
@pg_query
@beartype
@@ -37,6 +44,6 @@ async def update_developer(
developer_id = str(developer_id)
return (
- query,
+ developer_query,
[email, active, tags or [], settings or {}, developer_id],
)
diff --git a/agents-api/agents_api/queries/entry/__init__.py b/agents-api/agents_api/queries/entries/__init__.py
similarity index 68%
rename from agents-api/agents_api/queries/entry/__init__.py
rename to agents-api/agents_api/queries/entries/__init__.py
index 2ad83f115..7c196dd62 100644
--- a/agents-api/agents_api/queries/entry/__init__.py
+++ b/agents-api/agents_api/queries/entries/__init__.py
@@ -8,14 +8,14 @@
- Listing entries with filtering and pagination
"""
-from .create_entries import create_entries
-from .delete_entries import delete_entries_for_session
+from .create_entry import create_entries
+from .delete_entry import delete_entries
from .get_history import get_history
-from .list_entries import list_entries
+from .list_entry import list_entries
__all__ = [
"create_entries",
- "delete_entries_for_session",
+ "delete_entries",
"get_history",
"list_entries",
]
diff --git a/agents-api/agents_api/queries/entries/create_entry.py b/agents-api/agents_api/queries/entries/create_entry.py
new file mode 100644
index 000000000..471d02fe6
--- /dev/null
+++ b/agents-api/agents_api/queries/entries/create_entry.py
@@ -0,0 +1,196 @@
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+from uuid_extensions import uuid7
+
+from ...autogen.openapi_model import CreateEntryRequest, Entry, Relation
+from ...common.utils.datetime import utcnow
+from ...common.utils.messages import content_to_json
+from ...metrics.counters import increase_counter
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query for creating entries with a developer check
+entry_query = ("""
+WITH data AS (
+ SELECT
+ unnest($1::uuid[]) AS session_id,
+ unnest($2::uuid[]) AS entry_id,
+ unnest($3::text[]) AS source,
+ unnest($4::text[])::chat_role AS role,
+ unnest($5::text[]) AS event_type,
+ unnest($6::text[]) AS name,
+ array[unnest($7::jsonb[])] AS content,
+ unnest($8::text[]) AS tool_call_id,
+ array[unnest($9::jsonb[])] AS tool_calls,
+ unnest($10::text[]) AS model,
+ unnest($11::int[]) AS token_count,
+ unnest($12::timestamptz[]) AS created_at,
+ unnest($13::timestamptz[]) AS timestamp
+)
+INSERT INTO entries (
+ session_id,
+ entry_id,
+ source,
+ role,
+ event_type,
+ name,
+ content,
+ tool_call_id,
+ tool_calls,
+ model,
+ token_count,
+ created_at,
+ timestamp
+)
+SELECT
+ d.session_id,
+ d.entry_id,
+ d.source,
+ d.role,
+ d.event_type,
+ d.name,
+ d.content,
+ d.tool_call_id,
+ d.tool_calls,
+ d.model,
+ d.token_count,
+ d.created_at,
+ d.timestamp
+FROM
+ data d
+JOIN
+ developers ON developers.developer_id = $14
+RETURNING *;
+""")
+
+# Define the raw SQL query for creating entry relations
+entry_relation_query = ("""
+WITH data AS (
+ SELECT
+ unnest($1::uuid[]) AS session_id,
+ unnest($2::uuid[]) AS head,
+ unnest($3::text[]) AS relation,
+ unnest($4::uuid[]) AS tail,
+ unnest($5::boolean[]) AS is_leaf
+)
+INSERT INTO entry_relations (
+ session_id,
+ head,
+ relation,
+ tail,
+ is_leaf
+)
+SELECT
+ d.session_id,
+ d.head,
+ d.relation,
+ d.tail,
+ d.is_leaf
+FROM
+ data d
+JOIN
+ developers ON developers.developer_id = $6
+RETURNING *;
+""")
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail=str(exc),
+ ),
+ asyncpg.UniqueViolationError: lambda exc: HTTPException(
+ status_code=409,
+ detail=str(exc),
+ ),
+ asyncpg.NotNullViolationError: lambda exc: HTTPException(
+ status_code=400,
+ detail=str(exc),
+ ),
+ }
+)
+@wrap_in_class(
+ Entry,
+ transform=lambda d: {
+ "id": UUID(d.pop("entry_id")),
+ **d,
+ },
+)
+@increase_counter("create_entries")
+@pg_query
+@beartype
+async def create_entries(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ data: list[CreateEntryRequest],
+) -> tuple[str, list]:
+ # Convert the data to a list of dictionaries
+ data_dicts = [item.model_dump(mode="json") for item in data]
+
+ # Prepare the parameters for the query
+ params = [
+ [session_id] * len(data_dicts), # $1
+ [item.pop("id", None) or str(uuid7()) for item in data_dicts], # $2
+ [item.get("source") for item in data_dicts], # $3
+ [item.get("role") for item in data_dicts], # $4
+ [item.get("event_type") or "message.create" for item in data_dicts], # $5
+ [item.get("name") for item in data_dicts], # $6
+ [content_to_json(item.get("content") or {}) for item in data_dicts], # $7
+ [item.get("tool_call_id") for item in data_dicts], # $8
+ [content_to_json(item.get("tool_calls") or {}) for item in data_dicts], # $9
+ [item.get("modelname") for item in data_dicts], # $10
+ [item.get("token_count") for item in data_dicts], # $11
+ [item.get("created_at") or utcnow() for item in data_dicts], # $12
+ [utcnow() for _ in data_dicts], # $13
+ developer_id, # $14
+ ]
+
+ return (
+ entry_query,
+ params,
+ )
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail=str(exc),
+ ),
+ asyncpg.UniqueViolationError: lambda exc: HTTPException(
+ status_code=409,
+ detail=str(exc),
+ ),
+ }
+)
+@wrap_in_class(Relation)
+@increase_counter("add_entry_relations")
+@pg_query
+@beartype
+async def add_entry_relations(
+ *,
+ developer_id: UUID,
+ data: list[Relation],
+) -> tuple[str, list]:
+ # Convert the data to a list of dictionaries
+ data_dicts = [item.model_dump(mode="json") for item in data]
+
+ # Prepare the parameters for the query
+ params = [
+ [item.get("session_id") for item in data_dicts], # $1
+ [item.get("head") for item in data_dicts], # $2
+ [item.get("relation") for item in data_dicts], # $3
+ [item.get("tail") for item in data_dicts], # $4
+ [item.get("is_leaf", False) for item in data_dicts], # $5
+ developer_id, # $6
+ ]
+
+ return (
+ entry_relation_query,
+ params,
+ )
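
Both queries above use the column-per-array bulk pattern: every bind parameter is a parallel array with one element per row, and unnest() zips them back into rows (side-by-side unnests in a SELECT list advance in lockstep when the arrays have equal length). A tiny standalone illustration:

# With $1 = ['a', 'b'] and $2 = [1, 2], this yields the rows
# ('a', 1) and ('b', 2) -- the shape the INSERT ... SELECT consumes.
unnest_demo_query = """
SELECT
    unnest($1::text[]) AS source,
    unnest($2::int[]) AS token_count;
"""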
diff --git a/agents-api/agents_api/queries/entries/delete_entry.py b/agents-api/agents_api/queries/entries/delete_entry.py
new file mode 100644
index 000000000..82615745f
--- /dev/null
+++ b/agents-api/agents_api/queries/entries/delete_entry.py
@@ -0,0 +1,96 @@
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...common.utils.datetime import utcnow
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query for deleting entries with a developer check
+entry_query = parse_one("""
+DELETE FROM entries
+USING developers
+WHERE entries.session_id = $1 -- session_id
+AND developers.developer_id = $2
+RETURNING entries.session_id as session_id;
+""").sql(pretty=True)
+
+# Define the raw SQL query for deleting entries by entry_ids with a developer check
+delete_entry_by_ids_query = parse_one("""
+DELETE FROM entries
+USING developers
+WHERE entries.entry_id = ANY($1) -- entry_ids
+AND developers.developer_id = $2
+AND entries.session_id = $3 -- session_id
+RETURNING entries.entry_id as entry_id;
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
+ status_code=400,
+ detail=str(exc),
+ ),
+ asyncpg.UniqueViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail=str(exc),
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceDeletedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": d["session_id"], # Only return session cleared
+ "deleted_at": utcnow(),
+ "jobs": [],
+ },
+)
+@pg_query
+@beartype
+async def delete_entries_for_session(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+) -> tuple[str, list]:
+ return (
+ entry_query,
+ [session_id, developer_id],
+ )
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="The specified developer does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="One or more specified entries do not exist.",
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceDeletedResponse,
+ transform=lambda d: {
+ "id": d["entry_id"],
+ "deleted_at": utcnow(),
+ "jobs": [],
+ },
+)
+@pg_query
+@beartype
+async def delete_entries(
+ *, developer_id: UUID, session_id: UUID, entry_ids: list[UUID]
+) -> tuple[str, list]:
+ return (
+ delete_entry_by_ids_query,
+ [entry_ids, developer_id, session_id],
+ )
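
In both DELETEs above, USING developers acts purely as an existence gate: when $2 matches no developer row the join is empty and nothing is deleted, so the caller sees an empty result rather than an error. The same guard written with EXISTS, as a sketch:

guarded_delete_sketch = """
DELETE FROM entries e
WHERE e.session_id = $1
AND EXISTS (
    SELECT 1 FROM developers d WHERE d.developer_id = $2
)
RETURNING e.session_id AS session_id;
"""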
diff --git a/agents-api/agents_api/queries/entries/get_history.py b/agents-api/agents_api/queries/entries/get_history.py
new file mode 100644
index 000000000..c6c38d366
--- /dev/null
+++ b/agents-api/agents_api/queries/entries/get_history.py
@@ -0,0 +1,72 @@
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import History
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query for getting history with a developer check
+# Aggregate entries and relations into single JSON arrays so the row
+# matches the History transform below (which expects "entries" and
+# "relations" keys).
+history_query = parse_one("""
+WITH filtered_entries AS (
+    SELECT
+        e.entry_id AS id,
+        e.session_id,
+        e.role,
+        e.name,
+        e.content,
+        e.source,
+        e.token_count,
+        e.created_at,
+        e.timestamp,
+        e.tool_calls,
+        e.tool_call_id
+    FROM entries e
+    JOIN developers d ON d.developer_id = $3
+    WHERE e.session_id = $1
+    AND e.source = ANY($2)
+    ORDER BY e.created_at
+)
+SELECT
+    $1::uuid AS session_id,
+    COALESCE((SELECT json_agg(e) FROM filtered_entries e), '[]'::json) AS entries,
+    COALESCE(
+        (SELECT json_agg(er) FROM entry_relations er WHERE er.session_id = $1),
+        '[]'::json
+    ) AS relations,
+    CURRENT_TIMESTAMP AS created_at;
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail=str(exc),
+ ),
+ asyncpg.UniqueViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail=str(exc),
+ ),
+ }
+)
+@wrap_in_class(
+ History,
+ one=True,
+ transform=lambda d: {
+ **d,
+ "relations": [
+ {
+ "head": r["head"],
+ "relation": r["relation"],
+ "tail": r["tail"],
+ }
+ for r in d.pop("relations")
+ ],
+ "entries": d.pop("entries"),
+ },
+)
+@pg_query
+@beartype
+async def get_history(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ allowed_sources: list[str] = ["api_request", "api_response"],
+) -> tuple[str, list]:
+ return (
+ history_query,
+ [session_id, allowed_sources, developer_id],
+ )
diff --git a/agents-api/agents_api/queries/entries/list_entry.py b/agents-api/agents_api/queries/entries/list_entry.py
new file mode 100644
index 000000000..5a4871a88
--- /dev/null
+++ b/agents-api/agents_api/queries/entries/list_entry.py
@@ -0,0 +1,80 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+
+from ...autogen.openapi_model import Entry
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class
+
+entry_query = """
+SELECT
+ e.entry_id as id, -- entry_id
+ e.session_id, -- session_id
+ e.role, -- role
+ e.name, -- name
+ e.content, -- content
+ e.source, -- source
+ e.token_count, -- token_count
+ e.created_at, -- created_at
+ e.timestamp -- timestamp
+FROM entries e
+JOIN developers d ON d.developer_id = $7
+LEFT JOIN entry_relations er ON er.head = e.entry_id AND er.session_id = e.session_id
+WHERE e.session_id = $1
+AND e.source = ANY($2)
+AND (er.relation IS NULL OR er.relation != ALL($8))
+ORDER BY
+    CASE WHEN $3 = 'created_at' AND $4 = 'desc' THEN e.created_at END DESC,
+    CASE WHEN $3 = 'created_at' AND $4 = 'asc' THEN e.created_at END ASC,
+    CASE WHEN $3 = 'timestamp' AND $4 = 'desc' THEN e.timestamp END DESC,
+    CASE WHEN $3 = 'timestamp' AND $4 = 'asc' THEN e.timestamp END ASC
+LIMIT $5
+OFFSET $6;
+"""
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail=str(exc),
+ ),
+ asyncpg.UniqueViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail=str(exc),
+ ),
+ }
+)
+@wrap_in_class(Entry)
+@pg_query
+@beartype
+async def list_entries(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ allowed_sources: list[str] = ["api_request", "api_response"],
+ limit: int = 1,
+ offset: int = 0,
+ sort_by: Literal["created_at", "timestamp"] = "timestamp",
+ direction: Literal["asc", "desc"] = "asc",
+ exclude_relations: list[str] = [],
+) -> tuple[str, list]:
+ if limit < 1 or limit > 1000:
+ raise HTTPException(status_code=400, detail="Limit must be between 1 and 1000")
+ if offset < 0:
+ raise HTTPException(status_code=400, detail="Offset must be non-negative")
+
+ # making the parameters for the query
+ params = [
+ session_id, # $1
+ allowed_sources, # $2
+ sort_by, # $3
+ direction, # $4
+ limit, # $5
+ offset, # $6
+ developer_id, # $7
+ exclude_relations, # $8
+ ]
+ return (
+ entry_query,
+ params,
+ )
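
One subtlety in the relation filter above: er.relation != ALL($8) holds only when the relation differs from every element of $8, and the er.relation IS NULL arm keeps entries with no relation row at all (where the LEFT JOIN produced NULLs). For reference:

# With $1 = ['summary']: the first column is false (excluded),
# the second is true (kept).
exclusion_demo_query = """
SELECT
    'summary' != ALL($1::text[]) AS summary_kept,
    'other' != ALL($1::text[]) AS other_kept;
"""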
diff --git a/agents-api/agents_api/queries/entry/create_entries.py b/agents-api/agents_api/queries/entry/create_entries.py
deleted file mode 100644
index d3b3b4982..000000000
--- a/agents-api/agents_api/queries/entry/create_entries.py
+++ /dev/null
@@ -1,107 +0,0 @@
-from uuid import UUID
-
-import asyncpg
-from beartype import beartype
-from fastapi import HTTPException
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import CreateEntryRequest, Entry
-from ...common.utils.datetime import utcnow
-from ...common.utils.messages import content_to_json
-from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-
-# Define the raw SQL query for creating entries with a developer check
-raw_query = """
-INSERT INTO entries (
- session_id,
- entry_id,
- source,
- role,
- event_type,
- name,
- content,
- tool_call_id,
- tool_calls,
- model,
- token_count,
- created_at,
- timestamp
-)
-SELECT
- $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13
-FROM
- developers
-WHERE
- developer_id = $14
-RETURNING *;
-"""
-
-# Parse and optimize the query
-query = optimize(
- parse_one(raw_query),
- schema={
- "entries": {
- "session_id": "UUID",
- "entry_id": "UUID",
- "source": "TEXT",
- "role": "chat_role",
- "event_type": "TEXT",
- "name": "TEXT",
- "content": "JSONB[]",
- "tool_call_id": "TEXT",
- "tool_calls": "JSONB[]",
- "model": "TEXT",
- "token_count": "INTEGER",
- "created_at": "TIMESTAMP",
- "timestamp": "TIMESTAMP",
- }
- },
-).sql(pretty=True)
-
-
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: partialclass(HTTPException, status_code=400),
- asyncpg.UniqueViolationError: partialclass(HTTPException, status_code=409),
- }
-)
-@wrap_in_class(Entry)
-@increase_counter("create_entries")
-@pg_query
-@beartype
-def create_entries(
- *,
- developer_id: UUID,
- session_id: UUID,
- data: list[CreateEntryRequest],
- mark_session_as_updated: bool = True,
-) -> tuple[str, list]:
- data_dicts = [item.model_dump(mode="json") for item in data]
-
- params = [
- (
- session_id,
- item.pop("id", None) or str(uuid7()),
- item.get("source"),
- item.get("role"),
- item.get("event_type") or "message.create",
- item.get("name"),
- content_to_json(item.get("content") or []),
- item.get("tool_call_id"),
- item.get("tool_calls") or [],
- item.get("model"),
- item.get("token_count"),
- (item.get("created_at") or utcnow()).timestamp(),
- utcnow().timestamp(),
- developer_id,
- )
- for item in data_dicts
- ]
-
- return (
- query,
- params,
- )
diff --git a/agents-api/agents_api/queries/entry/delete_entries.py b/agents-api/agents_api/queries/entry/delete_entries.py
deleted file mode 100644
index 1fa34176f..000000000
--- a/agents-api/agents_api/queries/entry/delete_entries.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from uuid import UUID
-
-import asyncpg
-from beartype import beartype
-from fastapi import HTTPException
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-
-# Define the raw SQL query for deleting entries with a developer check
-raw_query = """
-DELETE FROM entries
-USING developers
-WHERE entries.session_id = $1
-AND developers.developer_id = $2
-RETURNING entries.session_id as id;
-"""
-
-# Parse and optimize the query
-query = optimize(
- parse_one(raw_query),
- schema={
- "entries": {
- "session_id": "UUID",
- }
- },
-).sql(pretty=True)
-
-
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(ResourceDeletedResponse, one=True)
-@increase_counter("delete_entries_for_session")
-@pg_query
-@beartype
-def delete_entries_for_session(
- *, developer_id: UUID, session_id: UUID, mark_session_as_updated: bool = True
-) -> tuple[str, list]:
- return (
- query,
- [session_id, developer_id],
- )
diff --git a/agents-api/agents_api/queries/entry/get_history.py b/agents-api/agents_api/queries/entry/get_history.py
deleted file mode 100644
index dd06734b0..000000000
--- a/agents-api/agents_api/queries/entry/get_history.py
+++ /dev/null
@@ -1,73 +0,0 @@
-from uuid import UUID
-
-import asyncpg
-from beartype import beartype
-from fastapi import HTTPException
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-
-from ...autogen.openapi_model import History
-from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-
-# Define the raw SQL query for getting history with a developer check
-raw_query = """
-SELECT
- e.entry_id as id,
- e.session_id,
- e.role,
- e.name,
- e.content,
- e.source,
- e.token_count,
- e.created_at,
- e.timestamp,
- e.tool_calls,
- e.tool_call_id
-FROM entries e
-JOIN developers d ON d.developer_id = $3
-WHERE e.session_id = $1
-AND e.source = ANY($2)
-ORDER BY e.created_at;
-"""
-
-# Parse and optimize the query
-query = optimize(
- parse_one(raw_query),
- schema={
- "entries": {
- "entry_id": "UUID",
- "session_id": "UUID",
- "role": "STRING",
- "name": "STRING",
- "content": "JSONB",
- "source": "STRING",
- "token_count": "INTEGER",
- "created_at": "TIMESTAMP",
- "timestamp": "TIMESTAMP",
- "tool_calls": "JSONB",
- "tool_call_id": "UUID",
- }
- },
-).sql(pretty=True)
-
-
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(History, one=True)
-@increase_counter("get_history")
-@pg_query
-@beartype
-def get_history(
- *,
- developer_id: UUID,
- session_id: UUID,
- allowed_sources: list[str] = ["api_request", "api_response"],
-) -> tuple[str, list]:
- return (
- query,
- [session_id, allowed_sources, developer_id],
- )
diff --git a/agents-api/agents_api/queries/entry/list_entries.py b/agents-api/agents_api/queries/entry/list_entries.py
deleted file mode 100644
index 42add6899..000000000
--- a/agents-api/agents_api/queries/entry/list_entries.py
+++ /dev/null
@@ -1,76 +0,0 @@
-from typing import Literal
-from uuid import UUID
-
-import asyncpg
-from beartype import beartype
-from fastapi import HTTPException
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-
-from ...autogen.openapi_model import Entry
-from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-
-# Define the raw SQL query for listing entries with a developer check
-raw_query = """
-SELECT
- e.entry_id as id,
- e.session_id,
- e.role,
- e.name,
- e.content,
- e.source,
- e.token_count,
- e.created_at,
- e.timestamp
-FROM entries e
-JOIN developers d ON d.developer_id = $7
-WHERE e.session_id = $1
-AND e.source = ANY($2)
-ORDER BY e.$3 $4
-LIMIT $5 OFFSET $6;
-"""
-
-# Parse and optimize the query
-query = optimize(
- parse_one(raw_query),
- schema={
- "entries": {
- "entry_id": "UUID",
- "session_id": "UUID",
- "role": "STRING",
- "name": "STRING",
- "content": "JSONB",
- "source": "STRING",
- "token_count": "INTEGER",
- "created_at": "TIMESTAMP",
- "timestamp": "TIMESTAMP",
- }
- },
-).sql(pretty=True)
-
-
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(Entry)
-@increase_counter("list_entries")
-@pg_query
-@beartype
-def list_entries(
- *,
- developer_id: UUID,
- session_id: UUID,
- allowed_sources: list[str] = ["api_request", "api_response"],
- limit: int = -1,
- offset: int = 0,
- sort_by: Literal["created_at", "timestamp"] = "timestamp",
- direction: Literal["asc", "desc"] = "asc",
- exclude_relations: list[str] = [],
-) -> tuple[str, list]:
- return (
- query,
- [session_id, allowed_sources, sort_by, direction, limit, offset, developer_id],
- )
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
index d2be71bb4..6fd97942a 100644
--- a/agents-api/agents_api/queries/users/create_or_update_user.py
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -4,14 +4,13 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-from ...autogen.openapi_model import CreateOrUpdateUserRequest, User
from ...metrics.counters import increase_counter
+from ...autogen.openapi_model import CreateOrUpdateUserRequest, User
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-# Optimize the raw query by using COALESCE for metadata to avoid explicit check
-raw_query = """
+# Define the raw SQL query for creating or updating a user
+user_query = parse_one("""
INSERT INTO users (
developer_id,
user_id,
@@ -20,21 +19,18 @@
metadata
)
VALUES (
- $1,
- $2,
- $3,
- $4,
- $5
+ $1, -- developer_id
+ $2, -- user_id
+ $3, -- name
+ $4, -- about
+ $5::jsonb -- metadata
)
ON CONFLICT (developer_id, user_id) DO UPDATE SET
name = EXCLUDED.name,
about = EXCLUDED.about,
metadata = EXCLUDED.metadata
RETURNING *;
-"""
-
-# Add index hint for better performance
-query = parse_one(raw_query).sql(pretty=True)
+""").sql(pretty=True)
@rewrap_exceptions(
@@ -51,7 +47,14 @@
),
}
)
-@wrap_in_class(User, one=True, transform=lambda d: {**d, "id": d["user_id"]})
+@wrap_in_class(
+ User,
+ one=True,
+ transform=lambda d: {
+ **d,
+ "id": d["user_id"],
+ },
+)
@increase_counter("create_or_update_user")
@pg_query
@beartype
@@ -73,14 +76,14 @@ async def create_or_update_user(
HTTPException: If developer doesn't exist (404) or on unique constraint violation (409)
"""
params = [
- developer_id,
- user_id,
- data.name,
- data.about,
- data.metadata or {},
+ developer_id, # $1
+ user_id, # $2
+ data.name, # $3
+ data.about, # $4
+ data.metadata or {}, # $5
]
return (
- query,
+ user_query,
params,
)
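
A note on the pattern above: ON CONFLICT (developer_id, user_id) DO UPDATE turns the insert into an idempotent upsert scoped to the developer. A minimal standalone sketch of the same pattern with asyncpg — the DSN and the simplified table shape are assumptions for illustration, not the project's real schema:

import asyncio
from uuid import uuid4

import asyncpg

UPSERT_SQL = """
INSERT INTO users (developer_id, user_id, name, about, metadata)
VALUES ($1, $2, $3, $4, $5::jsonb)
ON CONFLICT (developer_id, user_id) DO UPDATE SET
    name = EXCLUDED.name,
    about = EXCLUDED.about,
    metadata = EXCLUDED.metadata
RETURNING *;
"""

async def main(dsn: str) -> None:
    conn = await asyncpg.connect(dsn)
    try:
        dev_id, user_id = uuid4(), uuid4()
        # First call inserts; a second call with the same key pair updates
        # in place instead of raising UniqueViolationError.
        row = await conn.fetchrow(UPSERT_SQL, dev_id, user_id, "Ada", "About Ada", "{}")
        print(dict(row))
    finally:
        await conn.close()

# asyncio.run(main("postgresql://localhost/agents"))  # DSN is an assumption
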
diff --git a/agents-api/agents_api/queries/users/create_user.py b/agents-api/agents_api/queries/users/create_user.py
index 66e8bcc27..d77fbff47 100644
--- a/agents-api/agents_api/queries/users/create_user.py
+++ b/agents-api/agents_api/queries/users/create_user.py
@@ -4,15 +4,14 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from uuid_extensions import uuid7
-from ...autogen.openapi_model import CreateUserRequest, User
from ...metrics.counters import increase_counter
+from ...autogen.openapi_model import CreateUserRequest, User
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query outside the function
-raw_query = """
+user_query = parse_one("""
INSERT INTO users (
developer_id,
user_id,
@@ -21,17 +20,14 @@
metadata
)
VALUES (
- $1,
- $2,
- $3,
- $4,
- $5
+ $1, -- developer_id
+ $2, -- user_id
+ $3, -- name
+ $4, -- about
+ $5::jsonb -- metadata
)
RETURNING *;
-"""
-
-# Parse and optimize the query
-query = parse_one(raw_query).sql(pretty=True)
+""").sql(pretty=True)
@rewrap_exceptions(
@@ -48,7 +44,14 @@
),
}
)
-@wrap_in_class(User, one=True, transform=lambda d: {**d, "id": d["user_id"]})
+@wrap_in_class(
+ User,
+ one=True,
+ transform=lambda d: {
+ **d,
+ "id": d["user_id"],
+ },
+)
@increase_counter("create_user")
@pg_query
@beartype
@@ -72,14 +75,14 @@ async def create_user(
user_id = user_id or uuid7()
params = [
- developer_id,
- user_id,
- data.name,
- data.about,
- data.metadata or {},
+ developer_id, # $1
+ user_id, # $2
+ data.name, # $3
+ data.about, # $4
+ data.metadata or {}, # $5
]
return (
- query,
+ user_query,
params,
)
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
index 520c8d695..86bcc0b26 100644
--- a/agents-api/agents_api/queries/users/delete_user.py
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -4,18 +4,17 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
-from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query outside the function
-raw_query = """
+delete_query = parse_one("""
WITH deleted_data AS (
- DELETE FROM user_files
- WHERE developer_id = $1 AND user_id = $2
+ DELETE FROM user_files -- user_files
+ WHERE developer_id = $1 -- developer_id
+ AND user_id = $2 -- user_id
),
deleted_docs AS (
DELETE FROM user_docs
@@ -24,10 +23,7 @@
DELETE FROM users
WHERE developer_id = $1 AND user_id = $2
RETURNING user_id, developer_id;
-"""
-
-# Parse and optimize the query
-query = parse_one(raw_query).sql(pretty=True)
+""").sql(pretty=True)
@rewrap_exceptions(
@@ -36,15 +32,24 @@
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
- )
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified user does not exist.",
+ ),
}
)
@wrap_in_class(
ResourceDeletedResponse,
one=True,
- transform=lambda d: {**d, "id": d["user_id"], "deleted_at": utcnow()},
+ transform=lambda d: {
+ **d,
+ "id": d["user_id"],
+ "deleted_at": utcnow(),
+ "jobs": [],
+ },
)
-@increase_counter("delete_user")
@pg_query
@beartype
async def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
@@ -61,6 +66,6 @@ async def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
"""
return (
- query,
+ delete_query,
[developer_id, user_id],
)
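
The data-modifying CTE chain above deletes a user's dependent rows and the user row itself in one statement, so the cleanup is atomic without an explicit transaction. A rough sketch of the same pattern, assuming a pool and the table names shown in the patch:

import asyncpg

CASCADE_DELETE_SQL = """
WITH deleted_files AS (
    DELETE FROM user_files
    WHERE developer_id = $1 AND user_id = $2
),
deleted_docs AS (
    DELETE FROM user_docs
    WHERE developer_id = $1 AND user_id = $2
)
DELETE FROM users
WHERE developer_id = $1 AND user_id = $2
RETURNING user_id, developer_id;
"""

async def delete_user_everywhere(pool: asyncpg.Pool, developer_id, user_id):
    # All three DELETEs execute as one statement against one snapshot,
    # so either every table is cleaned up or none is.
    async with pool.acquire() as conn:
        return await conn.fetchrow(CASCADE_DELETE_SQL, developer_id, user_id)
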
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index 6989c8edb..2b71f9192 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -4,29 +4,24 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import User
-from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query outside the function
-raw_query = """
+user_query = parse_one("""
SELECT
- user_id as id,
- developer_id,
- name,
- about,
- metadata,
- created_at,
- updated_at
+ user_id as id, -- user_id
+ developer_id, -- developer_id
+ name, -- name
+ about, -- about
+ metadata, -- metadata
+ created_at, -- created_at
+ updated_at -- updated_at
FROM users
WHERE developer_id = $1
AND user_id = $2;
-"""
-
-# Parse and optimize the query
-query = parse_one(raw_query).sql(pretty=True)
+""").sql(pretty=True)
@rewrap_exceptions(
@@ -35,11 +30,15 @@
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
- )
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified user does not exist.",
+ ),
}
)
@wrap_in_class(User, one=True)
-@increase_counter("get_user")
@pg_query
@beartype
async def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
@@ -56,6 +55,6 @@ async def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
"""
return (
- query,
+ user_query,
[developer_id, user_id],
)
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
index 7f3677eab..0f0818135 100644
--- a/agents-api/agents_api/queries/users/list_users.py
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -4,24 +4,21 @@
import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import User
-from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query outside the function
-raw_query = """
+user_query = """
WITH filtered_users AS (
SELECT
- user_id as id,
- developer_id,
- name,
- about,
- metadata,
- created_at,
- updated_at
+ user_id as id, -- user_id
+ developer_id, -- developer_id
+ name, -- name
+ about, -- about
+ metadata, -- metadata
+ created_at, -- created_at
+ updated_at -- updated_at
FROM users
WHERE developer_id = $1
AND ($4::jsonb IS NULL OR metadata @> $4)
@@ -37,9 +34,6 @@
OFFSET $3;
"""
-# Parse and optimize the query
-# query = parse_one(raw_query).sql(pretty=True)
-
@rewrap_exceptions(
{
@@ -47,11 +41,15 @@
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
- )
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified user does not exist.",
+ ),
}
)
@wrap_in_class(User)
-@increase_counter("list_users")
@pg_query
@beartype
async def list_users(
@@ -84,15 +82,15 @@ async def list_users(
raise HTTPException(status_code=400, detail="Offset must be non-negative")
params = [
- developer_id,
- limit,
- offset,
+ developer_id, # $1
+ limit, # $2
+ offset, # $3
metadata_filter, # Will be NULL if not provided
- sort_by,
- direction,
+        sort_by,  # $5
+        direction,  # $6
]
return (
- raw_query,
+ user_query,
params,
)
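
Two things worth noting in list_users: asyncpg placeholders are purely positional, so the comment numbers must match each value's 1-based index in the params list (metadata_filter is $4, making sort_by and direction $5 and $6), and ORDER BY columns or directions cannot be bound as $n parameters, which is presumably why they are handled separately. A hedged sketch of the optional jsonb containment filter with a hardcoded sort, under those assumptions:

import json

import asyncpg

LIST_SQL = """
SELECT user_id AS id, name, metadata
FROM users
WHERE developer_id = $1
  AND ($4::jsonb IS NULL OR metadata @> $4)
ORDER BY created_at DESC  -- hardcoded: ORDER BY targets cannot be $n params
LIMIT $2 OFFSET $3;
"""

async def list_users_filtered(pool: asyncpg.Pool, developer_id, tag: str | None = None):
    # A NULL $4 disables the containment filter entirely.
    metadata_filter = json.dumps({"tag": tag}) if tag is not None else None
    params = [
        developer_id,     # $1
        100,              # $2 (limit)
        0,                # $3 (offset)
        metadata_filter,  # $4
    ]
    async with pool.acquire() as conn:
        return await conn.fetch(LIST_SQL, *params)
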
diff --git a/agents-api/agents_api/queries/users/patch_user.py b/agents-api/agents_api/queries/users/patch_user.py
index 971e96b81..c55ee31b7 100644
--- a/agents-api/agents_api/queries/users/patch_user.py
+++ b/agents-api/agents_api/queries/users/patch_user.py
@@ -4,42 +4,38 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import PatchUserRequest, ResourceUpdatedResponse
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query outside the function
-raw_query = """
+user_query = parse_one("""
UPDATE users
SET
name = CASE
- WHEN $3::text IS NOT NULL THEN $3
+ WHEN $3::text IS NOT NULL THEN $3 -- name
ELSE name
END,
about = CASE
- WHEN $4::text IS NOT NULL THEN $4
+ WHEN $4::text IS NOT NULL THEN $4 -- about
ELSE about
END,
metadata = CASE
- WHEN $5::jsonb IS NOT NULL THEN metadata || $5
+ WHEN $5::jsonb IS NOT NULL THEN metadata || $5 -- metadata
ELSE metadata
END
WHERE developer_id = $1
AND user_id = $2
RETURNING
- user_id as id,
- developer_id,
- name,
- about,
- metadata,
- created_at,
- updated_at;
-"""
-
-# Parse and optimize the query
-query = parse_one(raw_query).sql(pretty=True)
+ user_id as id, -- user_id
+ developer_id, -- developer_id
+ name, -- name
+ about, -- about
+ metadata, -- metadata
+ created_at, -- created_at
+ updated_at; -- updated_at
+""").sql(pretty=True)
@rewrap_exceptions(
@@ -48,7 +44,12 @@
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
- )
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified user does not exist.",
+ ),
}
)
@wrap_in_class(ResourceUpdatedResponse, one=True)
@@ -71,11 +72,14 @@ async def patch_user(
tuple[str, list]: SQL query and parameters
"""
params = [
- developer_id,
- user_id,
- data.name, # Will be NULL if not provided
- data.about, # Will be NULL if not provided
- data.metadata, # Will be NULL if not provided
+ developer_id, # $1
+ user_id, # $2
+ data.name, # $3. Will be NULL if not provided
+ data.about, # $4. Will be NULL if not provided
+ data.metadata, # $5. Will be NULL if not provided
]
- return query, params
+ return (
+ user_query,
+ params,
+ )
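
The CASE WHEN $n IS NOT NULL pattern above implements PATCH semantics in one statement: NULL parameters leave columns untouched, and metadata || $5 merges keys instead of replacing the whole object. A compact sketch of the same idea (the trimmed RETURNING list is an assumption):

import asyncpg

PATCH_SQL = """
UPDATE users
SET
    name = CASE WHEN $3::text IS NOT NULL THEN $3 ELSE name END,
    about = CASE WHEN $4::text IS NOT NULL THEN $4 ELSE about END,
    metadata = CASE WHEN $5::jsonb IS NOT NULL THEN metadata || $5 ELSE metadata END
WHERE developer_id = $1 AND user_id = $2
RETURNING user_id AS id;
"""

async def patch_user_fields(
    pool: asyncpg.Pool, developer_id, user_id, name=None, about=None, metadata=None
):
    # Omitted fields arrive as NULL and fall through to the ELSE branch,
    # so a partial update never clobbers columns the caller didn't send.
    async with pool.acquire() as conn:
        return await conn.fetchrow(PATCH_SQL, developer_id, user_id, name, about, metadata)
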
diff --git a/agents-api/agents_api/queries/users/update_user.py b/agents-api/agents_api/queries/users/update_user.py
index 1fffdebe7..91572e15d 100644
--- a/agents-api/agents_api/queries/users/update_user.py
+++ b/agents-api/agents_api/queries/users/update_user.py
@@ -4,26 +4,22 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateUserRequest
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query outside the function
-raw_query = """
+user_query = parse_one("""
UPDATE users
SET
- name = $3,
- about = $4,
- metadata = $5
-WHERE developer_id = $1
-AND user_id = $2
+ name = $3, -- name
+ about = $4, -- about
+ metadata = $5 -- metadata
+WHERE developer_id = $1 -- developer_id
+AND user_id = $2 -- user_id
RETURNING *
-"""
-
-# Parse and optimize the query
-query = parse_one(raw_query).sql(pretty=True)
+""").sql(pretty=True)
@rewrap_exceptions(
@@ -32,7 +28,12 @@
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
- )
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified user does not exist.",
+ ),
}
)
@wrap_in_class(
@@ -67,6 +68,6 @@ async def update_user(
]
return (
- query,
+ user_query,
params,
)
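
Across these files the queries are now wrapped in parse_one(...).sql(pretty=True) rather than run through the optimizer. One plausible benefit is that malformed SQL fails at module import instead of at request time; a sketch:

from sqlglot import parse_one

# A typo in the SQL raises sqlglot.errors.ParseError the moment this
# module is imported, rather than surfacing as a database error on the
# first request that runs the query.
user_query = parse_one("""
UPDATE users
SET name = $3, about = $4, metadata = $5
WHERE developer_id = $1 AND user_id = $2
RETURNING *
""").sql(pretty=True)

print(user_query)  # normalized, pretty-printed form of the statement
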
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index d360a7dc2..eedc07dd2 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -4,7 +4,6 @@
from ward import raises, test
from agents_api.clients.pg import create_db_pool
-from agents_api.common.protocol.developers import Developer
from agents_api.queries.developers.create_developer import create_developer
from agents_api.queries.developers.get_developer import (
get_developer,
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 220b8d232..242d0abfb 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -1,89 +1,53 @@
-# """
-# This module contains tests for entry queries against the CozoDB database.
-# It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
-# """
+"""
+This module contains tests for entry queries against the PostgreSQL database.
+It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
+"""
-# # Tests for entry queries
+from uuid import UUID
-# import time
+from ward import test
+from agents_api.clients.pg import create_db_pool
-# from ward import test
+from agents_api.queries.entries.create_entry import create_entries
+from agents_api.queries.entries.list_entry import list_entries
+from agents_api.queries.entries.get_history import get_history
+from agents_api.queries.entries.delete_entry import delete_entries
+from tests.fixtures import pg_dsn, test_developer_id # , test_session
+from agents_api.autogen.openapi_model import CreateEntryRequest, Entry
-# from agents_api.autogen.openapi_model import CreateEntryRequest
-# from agents_api.queries.entry.create_entries import create_entries
-# from agents_api.queries.entry.delete_entries import delete_entries
-# from agents_api.queries.entry.get_history import get_history
-# from agents_api.queries.entry.list_entries import list_entries
-# from agents_api.queries.session.get_session import get_session
-# from tests.fixtures import cozo_client, test_developer_id, test_session
+# Test UUIDs for consistent testing
+MODEL = "gpt-4o-mini"
+SESSION_ID = UUID("123e4567-e89b-12d3-a456-426614174001")
+TEST_DEVELOPER_ID = UUID("123e4567-e89b-12d3-a456-426614174000")
+TEST_USER_ID = UUID("987e6543-e21b-12d3-a456-426614174000")
-# MODEL = "gpt-4o-mini"
+@test("query: create entry")
+async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
+ """Test the addition of a new entry to the database."""
-# @test("query: create entry")
-# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
-# """
-# Tests the addition of a new entry to the database.
-# Verifies that the entry can be successfully added using the create_entries function.
-# """
+ pool = await create_db_pool(dsn=dsn)
+ test_entry = CreateEntryRequest.from_model_input(
+ model=MODEL,
+ role="user",
+ source="internal",
+ content="test entry content",
+ )
-# test_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# source="internal",
-# content="test entry content",
-# )
-
-# create_entries(
-# developer_id=developer_id,
-# session_id=session.id,
-# data=[test_entry],
-# mark_session_as_updated=False,
-# client=client,
-# )
+ await create_entries(
+ developer_id=TEST_DEVELOPER_ID,
+ session_id=SESSION_ID,
+ data=[test_entry],
+ connection_pool=pool,
+ )
-# @test("query: create entry, update session")
-# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
-# """
-# Tests the addition of a new entry to the database.
-# Verifies that the entry can be successfully added using the create_entries function.
-# """
-
-# test_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# source="internal",
-# content="test entry content",
-# )
-
-# # TODO: We should make sessions.updated_at also a updated_at_ms field to avoid this sleep
-# time.sleep(1)
-
-# create_entries(
-# developer_id=developer_id,
-# session_id=session.id,
-# data=[test_entry],
-# mark_session_as_updated=True,
-# client=client,
-# )
-
-# updated_session = get_session(
-# developer_id=developer_id,
-# session_id=session.id,
-# client=client,
-# )
-
-# assert updated_session.updated_at > session.updated_at
-
# @test("query: get entries")
-# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
-# """
-# Tests the retrieval of entries from the database.
-# Verifies that entries matching specific criteria can be successfully retrieved.
-# """
+# async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
+# """Test the retrieval of entries from the database."""
+# pool = await create_db_pool(dsn=dsn)
# test_entry = CreateEntryRequest.from_model_input(
# model=MODEL,
# role="user",
@@ -98,30 +62,32 @@
# source="internal",
# )
-# create_entries(
-# developer_id=developer_id,
-# session_id=session.id,
+# await create_entries(
+# developer_id=TEST_DEVELOPER_ID,
+# session_id=SESSION_ID,
# data=[test_entry, internal_entry],
-# client=client,
+# connection_pool=pool,
# )
-# result = list_entries(
-# developer_id=developer_id,
-# session_id=session.id,
-# client=client,
+# result = await list_entries(
+# developer_id=TEST_DEVELOPER_ID,
+# session_id=SESSION_ID,
+# connection_pool=pool,
# )
-# # Asserts that only one entry is retrieved, matching the session_id.
+
+
+# # Assert that only one entry is retrieved, matching the session_id.
# assert len(result) == 1
+# assert isinstance(result[0], Entry)
+# assert result is not None
# @test("query: get history")
-# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
-# """
-# Tests the retrieval of entries from the database.
-# Verifies that entries matching specific criteria can be successfully retrieved.
-# """
+# async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
+# """Test the retrieval of entry history from the database."""
+# pool = await create_db_pool(dsn=dsn)
# test_entry = CreateEntryRequest.from_model_input(
# model=MODEL,
# role="user",
@@ -136,31 +102,31 @@
# source="internal",
# )
-# create_entries(
+# await create_entries(
# developer_id=developer_id,
-# session_id=session.id,
+# session_id=SESSION_ID,
# data=[test_entry, internal_entry],
-# client=client,
+# connection_pool=pool,
# )
-# result = get_history(
+# result = await get_history(
# developer_id=developer_id,
-# session_id=session.id,
-# client=client,
+# session_id=SESSION_ID,
+# connection_pool=pool,
# )
-# # Asserts that only one entry is retrieved, matching the session_id.
+# # Assert that entries are retrieved and have valid IDs.
+# assert result is not None
+# assert isinstance(result, History)
# assert len(result.entries) > 0
# assert result.entries[0].id
# @test("query: delete entries")
-# def _(client=cozo_client, developer_id=test_developer_id, session=test_session):
-# """
-# Tests the deletion of entries from the database.
-# Verifies that entries can be successfully deleted using the delete_entries function.
-# """
+# async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
+# """Test the deletion of entries from the database."""
+# pool = await create_db_pool(dsn=dsn)
# test_entry = CreateEntryRequest.from_model_input(
# model=MODEL,
# role="user",
@@ -175,27 +141,29 @@
# source="internal",
# )
-# created_entries = create_entries(
+# created_entries = await create_entries(
# developer_id=developer_id,
-# session_id=session.id,
+# session_id=SESSION_ID,
# data=[test_entry, internal_entry],
-# client=client,
+# connection_pool=pool,
# )
-# entry_ids = [entry.id for entry in created_entries]
+ # entry_ids = [entry.id for entry in created_entries]
-# delete_entries(
-# developer_id=developer_id,
-# session_id=session.id,
-# entry_ids=entry_ids,
-# client=client,
-# )
+ # await delete_entries(
+ # developer_id=developer_id,
+ # session_id=SESSION_ID,
+ # entry_ids=[UUID("123e4567-e89b-12d3-a456-426614174002")],
+ # connection_pool=pool,
+ # )
-# result = list_entries(
-# developer_id=developer_id,
-# session_id=session.id,
-# client=client,
-# )
+ # result = await list_entries(
+ # developer_id=developer_id,
+ # session_id=SESSION_ID,
+ # connection_pool=pool,
+ # )
-# # Asserts that no entries are retrieved after deletion.
-# assert all(id not in [entry.id for entry in result] for id in entry_ids)
+ # Assert that no entries are retrieved after deletion.
+ # assert all(id not in [entry.id for entry in result] for id in entry_ids)
+ # assert len(result) == 0
+ # assert result is not None
diff --git a/agents-api/tests/test_user_queries.py b/agents-api/tests/test_user_queries.py
index cbe7e0353..002532816 100644
--- a/agents-api/tests/test_user_queries.py
+++ b/agents-api/tests/test_user_queries.py
@@ -5,7 +5,6 @@
from uuid import UUID
-import asyncpg
from uuid_extensions import uuid7
from ward import raises, test
diff --git a/agents-api/tests/utils.py b/agents-api/tests/utils.py
index 990a1015e..a4f98ac80 100644
--- a/agents-api/tests/utils.py
+++ b/agents-api/tests/utils.py
@@ -1,5 +1,4 @@
import asyncio
-import json
import logging
import subprocess
from contextlib import asynccontextmanager, contextmanager
@@ -7,7 +6,6 @@
from typing import Any, Dict, Optional
from unittest.mock import patch
-import asyncpg
from botocore import exceptions
from fastapi.testclient import TestClient
from litellm.types.utils import ModelResponse
diff --git a/integrations-service/integrations/autogen/Entries.py b/integrations-service/integrations/autogen/Entries.py
index de37e77d8..d195b518f 100644
--- a/integrations-service/integrations/autogen/Entries.py
+++ b/integrations-service/integrations/autogen/Entries.py
@@ -52,6 +52,7 @@ class BaseEntry(BaseModel):
]
tokenizer: str
token_count: int
+    modelname: str = "gpt-4o-mini"
tool_calls: (
list[
ChosenFunctionCall
diff --git a/typespec/entries/models.tsp b/typespec/entries/models.tsp
index 7f8c8b9fa..640e6831d 100644
--- a/typespec/entries/models.tsp
+++ b/typespec/entries/models.tsp
@@ -107,6 +107,7 @@ model BaseEntry {
tokenizer: string;
token_count: uint16;
+    modelname: string = "gpt-4o-mini";
/** Tool calls generated by the model. */
tool_calls?: ChosenToolCall[] | null = null;
diff --git a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
index 0a12aac74..9b36baa2b 100644
--- a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
+++ b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
@@ -3064,6 +3064,7 @@ components:
- source
- tokenizer
- token_count
+ - modelname
- timestamp
properties:
role:
@@ -3307,6 +3308,9 @@ components:
token_count:
type: integer
format: uint16
+ modelname:
+ type: string
+        default: gpt-4o-mini
tool_calls:
type: array
items:
From b064234b2cdd37d33ee9acd547e13df673295eba Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Wed, 18 Dec 2024 04:34:14 +0000
Subject: [PATCH 056/274] refactor: Lint agents-api (CI)
---
.../queries/developers/create_developer.py | 8 ++--
.../queries/developers/get_developer.py | 2 +-
.../queries/developers/patch_developer.py | 8 ++--
.../queries/developers/update_developer.py | 9 ++--
.../queries/entries/create_entry.py | 8 ++--
.../queries/entries/delete_entry.py | 2 +-
.../agents_api/queries/entries/list_entry.py | 3 +-
.../queries/users/create_or_update_user.py | 2 +-
.../agents_api/queries/users/create_user.py | 2 +-
agents-api/tests/test_entry_queries.py | 48 +++++++++----------
10 files changed, 45 insertions(+), 47 deletions(-)
diff --git a/agents-api/agents_api/queries/developers/create_developer.py b/agents-api/agents_api/queries/developers/create_developer.py
index 793d2f184..bed6371c4 100644
--- a/agents-api/agents_api/queries/developers/create_developer.py
+++ b/agents-api/agents_api/queries/developers/create_developer.py
@@ -1,17 +1,17 @@
from uuid import UUID
+import asyncpg
from beartype import beartype
+from fastapi import HTTPException
from sqlglot import parse_one
from uuid_extensions import uuid7
-import asyncpg
-from fastapi import HTTPException
from ...common.protocol.developers import Developer
from ..utils import (
+ partialclass,
pg_query,
- wrap_in_class,
rewrap_exceptions,
- partialclass,
+ wrap_in_class,
)
# Define the raw SQL query
diff --git a/agents-api/agents_api/queries/developers/get_developer.py b/agents-api/agents_api/queries/developers/get_developer.py
index 54d4cf9d9..373a2fb36 100644
--- a/agents-api/agents_api/queries/developers/get_developer.py
+++ b/agents-api/agents_api/queries/developers/get_developer.py
@@ -3,10 +3,10 @@
from typing import Any, TypeVar
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-import asyncpg
from ...common.protocol.developers import Developer
from ..utils import (
diff --git a/agents-api/agents_api/queries/developers/patch_developer.py b/agents-api/agents_api/queries/developers/patch_developer.py
index b37fc7c5e..af2ddb1f8 100644
--- a/agents-api/agents_api/queries/developers/patch_developer.py
+++ b/agents-api/agents_api/queries/developers/patch_developer.py
@@ -1,16 +1,16 @@
from uuid import UUID
-from beartype import beartype
-from sqlglot import parse_one
import asyncpg
+from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
from ...common.protocol.developers import Developer
from ..utils import (
- pg_query,
- wrap_in_class,
partialclass,
+ pg_query,
rewrap_exceptions,
+ wrap_in_class,
)
# Define the raw SQL query
diff --git a/agents-api/agents_api/queries/developers/update_developer.py b/agents-api/agents_api/queries/developers/update_developer.py
index 410d5ca12..d41b333d5 100644
--- a/agents-api/agents_api/queries/developers/update_developer.py
+++ b/agents-api/agents_api/queries/developers/update_developer.py
@@ -1,15 +1,16 @@
from uuid import UUID
-from beartype import beartype
-from sqlglot import parse_one
import asyncpg
+from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
+
from ...common.protocol.developers import Developer
from ..utils import (
- pg_query,
- wrap_in_class,
partialclass,
+ pg_query,
rewrap_exceptions,
+ wrap_in_class,
)
# Define the raw SQL query
diff --git a/agents-api/agents_api/queries/entries/create_entry.py b/agents-api/agents_api/queries/entries/create_entry.py
index 471d02fe6..ea0e7e97d 100644
--- a/agents-api/agents_api/queries/entries/create_entry.py
+++ b/agents-api/agents_api/queries/entries/create_entry.py
@@ -13,7 +13,7 @@
from ..utils import pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for creating entries with a developer check
-entry_query = ("""
+entry_query = """
WITH data AS (
SELECT
unnest($1::uuid[]) AS session_id,
@@ -64,10 +64,10 @@
JOIN
developers ON developers.developer_id = $14
RETURNING *;
-""")
+"""
# Define the raw SQL query for creating entry relations
-entry_relation_query = ("""
+entry_relation_query = """
WITH data AS (
SELECT
unnest($1::uuid[]) AS session_id,
@@ -94,7 +94,7 @@
JOIN
developers ON developers.developer_id = $6
RETURNING *;
-""")
+"""
@rewrap_exceptions(
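
The unnest($1::uuid[]) construction above turns N rows into a single INSERT: each parameter is an array that becomes a column, and parallel unnest() calls zip the equal-length arrays back into rows positionally. A reduced sketch with a two-column table (the trimmed schema is an assumption):

import asyncpg

BULK_INSERT_SQL = """
WITH data AS (
    SELECT
        unnest($1::uuid[]) AS session_id,
        unnest($2::text[]) AS content
)
INSERT INTO entries (session_id, content)
SELECT session_id, content
FROM data
RETURNING *;
"""

async def bulk_insert(pool: asyncpg.Pool, session_ids: list, contents: list):
    # One round trip inserts len(session_ids) rows; the arrays must be
    # the same length, since unnest() zips them element by element.
    async with pool.acquire() as conn:
        return await conn.fetch(BULK_INSERT_SQL, session_ids, contents)
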
diff --git a/agents-api/agents_api/queries/entries/delete_entry.py b/agents-api/agents_api/queries/entries/delete_entry.py
index 82615745f..d6cdc6e87 100644
--- a/agents-api/agents_api/queries/entries/delete_entry.py
+++ b/agents-api/agents_api/queries/entries/delete_entry.py
@@ -5,8 +5,8 @@
from fastapi import HTTPException
from sqlglot import parse_one
-from ...common.utils.datetime import utcnow
from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for deleting entries with a developer check
diff --git a/agents-api/agents_api/queries/entries/list_entry.py b/agents-api/agents_api/queries/entries/list_entry.py
index 5a4871a88..1fa6479d1 100644
--- a/agents-api/agents_api/queries/entries/list_entry.py
+++ b/agents-api/agents_api/queries/entries/list_entry.py
@@ -57,12 +57,11 @@ async def list_entries(
direction: Literal["asc", "desc"] = "asc",
exclude_relations: list[str] = [],
) -> tuple[str, list]:
-
if limit < 1 or limit > 1000:
raise HTTPException(status_code=400, detail="Limit must be between 1 and 1000")
if offset < 0:
raise HTTPException(status_code=400, detail="Offset must be non-negative")
-
+
# making the parameters for the query
params = [
session_id, # $1
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
index 6fd97942a..965ae4ce4 100644
--- a/agents-api/agents_api/queries/users/create_or_update_user.py
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -5,8 +5,8 @@
from fastapi import HTTPException
from sqlglot import parse_one
-from ...metrics.counters import increase_counter
from ...autogen.openapi_model import CreateOrUpdateUserRequest, User
+from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for creating or updating a user
diff --git a/agents-api/agents_api/queries/users/create_user.py b/agents-api/agents_api/queries/users/create_user.py
index d77fbff47..8f35a646c 100644
--- a/agents-api/agents_api/queries/users/create_user.py
+++ b/agents-api/agents_api/queries/users/create_user.py
@@ -6,8 +6,8 @@
from sqlglot import parse_one
from uuid_extensions import uuid7
-from ...metrics.counters import increase_counter
from ...autogen.openapi_model import CreateUserRequest, User
+from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query outside the function
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 242d0abfb..c07891305 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -6,14 +6,14 @@
from uuid import UUID
from ward import test
-from agents_api.clients.pg import create_db_pool
+from agents_api.autogen.openapi_model import CreateEntryRequest, Entry
+from agents_api.clients.pg import create_db_pool
from agents_api.queries.entries.create_entry import create_entries
-from agents_api.queries.entries.list_entry import list_entries
-from agents_api.queries.entries.get_history import get_history
from agents_api.queries.entries.delete_entry import delete_entries
+from agents_api.queries.entries.get_history import get_history
+from agents_api.queries.entries.list_entry import list_entries
from tests.fixtures import pg_dsn, test_developer_id # , test_session
-from agents_api.autogen.openapi_model import CreateEntryRequest, Entry
# Test UUIDs for consistent testing
MODEL = "gpt-4o-mini"
@@ -42,7 +42,6 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_sessi
)
-
# @test("query: get entries")
# async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
# """Test the retrieval of entries from the database."""
@@ -76,7 +75,6 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_sessi
# )
-
# # Assert that only one entry is retrieved, matching the session_id.
# assert len(result) == 1
# assert isinstance(result[0], Entry)
@@ -148,22 +146,22 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_sessi
# connection_pool=pool,
# )
- # entry_ids = [entry.id for entry in created_entries]
-
- # await delete_entries(
- # developer_id=developer_id,
- # session_id=SESSION_ID,
- # entry_ids=[UUID("123e4567-e89b-12d3-a456-426614174002")],
- # connection_pool=pool,
- # )
-
- # result = await list_entries(
- # developer_id=developer_id,
- # session_id=SESSION_ID,
- # connection_pool=pool,
- # )
-
- # Assert that no entries are retrieved after deletion.
- # assert all(id not in [entry.id for entry in result] for id in entry_ids)
- # assert len(result) == 0
- # assert result is not None
+# entry_ids = [entry.id for entry in created_entries]
+
+# await delete_entries(
+# developer_id=developer_id,
+# session_id=SESSION_ID,
+# entry_ids=[UUID("123e4567-e89b-12d3-a456-426614174002")],
+# connection_pool=pool,
+# )
+
+# result = await list_entries(
+# developer_id=developer_id,
+# session_id=SESSION_ID,
+# connection_pool=pool,
+# )
+
+# Assert that no entries are retrieved after deletion.
+# assert all(id not in [entry.id for entry in result] for id in entry_ids)
+# assert len(result) == 0
+# assert result is not None
From a72812946d4bed45d68041962f4f6d1c7487c7d5 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Wed, 18 Dec 2024 13:21:02 +0530
Subject: [PATCH 057/274] feat(agents-api): Fix tests for sessions
Signed-off-by: Diwank Singh Tomer
---
agents-api/agents_api/app.py | 10 +--
.../queries/sessions/list_sessions.py | 3 +-
.../queries/users/create_or_update_user.py | 1 -
.../agents_api/queries/users/create_user.py | 1 -
.../agents_api/queries/users/delete_user.py | 1 -
.../agents_api/queries/users/get_user.py | 1 -
.../agents_api/queries/users/list_users.py | 2 -
.../agents_api/queries/users/patch_user.py | 1 -
.../agents_api/queries/users/update_user.py | 1 -
agents-api/agents_api/queries/utils.py | 54 +++++++-------
agents-api/agents_api/web.py | 2 +-
agents-api/tests/fixtures.py | 70 +++++++++++--------
agents-api/tests/test_session_queries.py | 3 +-
agents-api/tests/test_user_queries.py | 1 -
agents-api/tests/utils.py | 2 -
memory-store/migrations/000015_entries.up.sql | 4 +-
16 files changed, 79 insertions(+), 78 deletions(-)
diff --git a/agents-api/agents_api/app.py b/agents-api/agents_api/app.py
index 735dfc8c0..ced41decb 100644
--- a/agents-api/agents_api/app.py
+++ b/agents-api/agents_api/app.py
@@ -1,7 +1,5 @@
-import json
from contextlib import asynccontextmanager
-import asyncpg
from fastapi import FastAPI
from prometheus_fastapi_instrumentator import Instrumentator
@@ -11,9 +9,13 @@
@asynccontextmanager
async def lifespan(app: FastAPI):
- app.state.postgres_pool = await create_db_pool()
+ if not app.state.postgres_pool:
+ app.state.postgres_pool = await create_db_pool()
+
yield
- await app.state.postgres_pool.close()
+
+ if app.state.postgres_pool:
+ await app.state.postgres_pool.close()
app: FastAPI = FastAPI(
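
Guarding pool creation lets tests inject app.state.postgres_pool before startup and have the lifespan leave it alone. A sketch of the same shape; the getattr guard is an assumption added here because Starlette's State raises AttributeError for attributes that were never set:

from contextlib import asynccontextmanager

from fastapi import FastAPI

from agents_api.clients.pg import create_db_pool  # as imported in the patch

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Only create a pool when the tests haven't injected one already.
    if getattr(app.state, "postgres_pool", None) is None:
        app.state.postgres_pool = await create_db_pool()

    yield

    if app.state.postgres_pool:
        await app.state.postgres_pool.close()

app = FastAPI(lifespan=lifespan)
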
diff --git a/agents-api/agents_api/queries/sessions/list_sessions.py b/agents-api/agents_api/queries/sessions/list_sessions.py
index 5ce31803b..3aabaf32d 100644
--- a/agents-api/agents_api/queries/sessions/list_sessions.py
+++ b/agents-api/agents_api/queries/sessions/list_sessions.py
@@ -1,12 +1,11 @@
"""This module contains functions for querying session data from the PostgreSQL database."""
-from typing import Any, Literal, TypeVar
+from typing import Any, Literal
from uuid import UUID
import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from sqlglot import parse_one
from ...autogen.openapi_model import Session
from ...metrics.counters import increase_counter
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
index d2be71bb4..cff9ed09b 100644
--- a/agents-api/agents_api/queries/users/create_or_update_user.py
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -4,7 +4,6 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import CreateOrUpdateUserRequest, User
from ...metrics.counters import increase_counter
diff --git a/agents-api/agents_api/queries/users/create_user.py b/agents-api/agents_api/queries/users/create_user.py
index 66e8bcc27..bdab2541f 100644
--- a/agents-api/agents_api/queries/users/create_user.py
+++ b/agents-api/agents_api/queries/users/create_user.py
@@ -4,7 +4,6 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateUserRequest, User
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
index 520c8d695..6ea5e9664 100644
--- a/agents-api/agents_api/queries/users/delete_user.py
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -4,7 +4,6 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index 6989c8edb..ee75157e0 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -4,7 +4,6 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import User
from ...metrics.counters import increase_counter
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
index 74b40eb7b..4c30cd100 100644
--- a/agents-api/agents_api/queries/users/list_users.py
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -4,8 +4,6 @@
import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import User
from ...metrics.counters import increase_counter
diff --git a/agents-api/agents_api/queries/users/patch_user.py b/agents-api/agents_api/queries/users/patch_user.py
index 971e96b81..3a2189014 100644
--- a/agents-api/agents_api/queries/users/patch_user.py
+++ b/agents-api/agents_api/queries/users/patch_user.py
@@ -4,7 +4,6 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import PatchUserRequest, ResourceUpdatedResponse
from ...metrics.counters import increase_counter
diff --git a/agents-api/agents_api/queries/users/update_user.py b/agents-api/agents_api/queries/users/update_user.py
index 1fffdebe7..c3f436b5c 100644
--- a/agents-api/agents_api/queries/users/update_user.py
+++ b/agents-api/agents_api/queries/users/update_user.py
@@ -4,7 +4,6 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateUserRequest
from ...metrics.counters import increase_counter
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index e93135172..e7be9f981 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -6,6 +6,7 @@
from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar, cast
import asyncpg
+from beartype import beartype
import pandas as pd
from asyncpg import Record
from fastapi import HTTPException
@@ -30,13 +31,16 @@ class NewCls(cls):
return NewCls
+@beartype
def pg_query(
func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
debug: bool | None = None,
only_on_error: bool = False,
timeit: bool = False,
-):
- def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
+) -> Callable[..., Callable[P, list[Record]]] | Callable[P, list[Record]]:
+ def pg_query_dec(
+ func: Callable[P, tuple[str, list[Any]] | list[tuple[str, list[Any]]]]
+ ) -> Callable[..., Callable[P, list[Record]]]:
"""
Decorator that wraps a function that takes arbitrary arguments, and
returns a (query string, variables) tuple.
@@ -47,19 +51,6 @@ def pg_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
from pprint import pprint
- # from tenacity import (
- # retry,
- # retry_if_exception,
- # stop_after_attempt,
- # wait_exponential,
- # )
-
- # TODO: Remove all tenacity decorators
- # @retry(
- # stop=stop_after_attempt(4),
- # wait=wait_exponential(multiplier=1, min=4, max=10),
- # # retry=retry_if_exception(is_resource_busy),
- # )
@wraps(func)
async def wrapper(
*args: P.args,
@@ -76,17 +67,25 @@ async def wrapper(
)
# Run the query
+ pool = (
+ connection_pool
+ if connection_pool is not None
+ else cast(asyncpg.Pool, app.state.postgres_pool)
+ )
+
+ assert isinstance(variables, list) and len(variables) > 0
+
+ queries = query if isinstance(query, list) else [query]
+ variables_list = variables if isinstance(variables[0], list) else [variables]
+ zipped = zip(queries, variables_list)
try:
- pool = (
- connection_pool
- if connection_pool is not None
- else cast(asyncpg.Pool, app.state.postgres_pool)
- )
async with pool.acquire() as conn:
async with conn.transaction():
start = timeit and time.perf_counter()
- results: list[Record] = await conn.fetch(query, *variables)
+ for query, variables in zipped:
+ results: list[Record] = await conn.fetch(query, *variables)
+
end = timeit and time.perf_counter()
timeit and print(
@@ -136,8 +135,7 @@ def wrap_in_class(
cls: Type[ModelT] | Callable[..., ModelT],
one: bool = False,
transform: Callable[[dict], dict] | None = None,
- _kind: str | None = None,
-):
+) -> Callable[..., Callable[..., ModelT | list[ModelT]]]:
def _return_data(rec: list[Record]):
data = [dict(r.items()) for r in rec]
@@ -152,7 +150,9 @@ def _return_data(rec: list[Record]):
objs: list[ModelT] = [cls(**item) for item in map(transform, data)]
return objs
- def decorator(func: Callable[P, pd.DataFrame | Awaitable[pd.DataFrame]]):
+ def decorator(
+ func: Callable[P, list[Record] | Awaitable[list[Record]]]
+ ) -> Callable[P, ModelT | list[ModelT]]:
@wraps(func)
def wrapper(*args: P.args, **kwargs: P.kwargs) -> ModelT | list[ModelT]:
return _return_data(func(*args, **kwargs))
@@ -179,7 +179,7 @@ def rewrap_exceptions(
Type[BaseException] | Callable[[BaseException], BaseException],
],
/,
-):
+) -> Callable[..., Callable[P, T | Awaitable[T]]]:
def _check_error(error):
nonlocal mapping
@@ -199,7 +199,9 @@ def _check_error(error):
raise new_error from error
- def decorator(func: Callable[P, T | Awaitable[T]]):
+ def decorator(
+ func: Callable[P, T | Awaitable[T]]
+ ) -> Callable[..., Callable[P, T | Awaitable[T]]]:
@wraps(func)
async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
try:
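
The new pg_query body accepts either a single (query, params) pair or parallel lists of queries and param lists, and runs every pair in one transaction. A stripped-down sketch of that batching logic, omitting the debug, timing, and exception plumbing of the real decorator:

from functools import wraps

import asyncpg

def pg_query_sketch(func):
    # The wrapped function returns (query, params) — or parallel lists —
    # and each pair executes inside the same transaction.
    @wraps(func)
    async def wrapper(*args, connection_pool: asyncpg.Pool, **kwargs):
        query, variables = await func(*args, **kwargs)
        assert isinstance(variables, list) and len(variables) > 0

        queries = query if isinstance(query, list) else [query]
        variables_list = variables if isinstance(variables[0], list) else [variables]

        async with connection_pool.acquire() as conn:
            async with conn.transaction():
                for q, v in zip(queries, variables_list):
                    results = await conn.fetch(q, *v)

        return results  # like the patch, only the last result set survives

    return wrapper
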
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index b354f97bf..379526e0f 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -9,7 +9,7 @@
import sentry_sdk
import uvicorn
import uvloop
-from fastapi import APIRouter, Depends, FastAPI, Request, status
+from fastapi import APIRouter, FastAPI, Request, status
from fastapi.exceptions import HTTPException, RequestValidationError
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 389dafab2..1b86224a6 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -43,8 +43,8 @@
# from agents_api.queries.tools.create_tools import create_tools
# from agents_api.queries.tools.delete_tool import delete_tool
from agents_api.queries.users.create_user import create_user
+from agents_api.queries.users.delete_user import delete_user
-# from agents_api.queries.users.delete_user import delete_user
from agents_api.web import app
from .utils import (
@@ -67,11 +67,10 @@ def pg_dsn():
@fixture(scope="global")
def test_developer_id():
if not multi_tenant_mode:
- yield UUID(int=0)
- return
+ return UUID(int=0)
developer_id = uuid7()
- yield developer_id
+ return developer_id
# @fixture(scope="global")
@@ -98,8 +97,7 @@ async def test_developer(dsn=pg_dsn, developer_id=test_developer_id):
connection_pool=pool,
)
- yield developer
- await pool.close()
+ return developer
@fixture(scope="test")
@@ -138,8 +136,7 @@ async def test_user(dsn=pg_dsn, developer=test_developer):
connection_pool=pool,
)
- yield user
- await pool.close()
+ return user
@fixture(scope="test")
@@ -345,38 +342,49 @@ async def test_new_developer(dsn=pg_dsn, email=random_email):
# "type": "function",
# }
-# async with get_pg_client(dsn=dsn) as client:
-# [tool, *_] = await create_tools(
+# [tool, *_] = await create_tools(
+# developer_id=developer_id,
+# agent_id=agent.id,
+# data=[CreateToolRequest(**tool)],
+# connection_pool=pool,
+# )
+# yield tool
+
+# # Cleanup
+# try:
+# await delete_tool(
# developer_id=developer_id,
-# agent_id=agent.id,
-# data=[CreateToolRequest(**tool)],
-# client=client,
+# tool_id=tool.id,
+# connection_pool=pool,
# )
-# yield tool
+# finally:
+# await pool.close()
-# @fixture(scope="global")
-# def client(dsn=pg_dsn):
-# client = TestClient(app=app)
-# client.state.pg_client = get_pg_client(dsn=dsn)
-# return client
+@fixture(scope="global")
+async def client(dsn=pg_dsn):
+ pool = await create_db_pool(dsn=dsn)
+ client = TestClient(app=app)
+ client.state.postgres_pool = pool
+ return client
-# @fixture(scope="global")
-# def make_request(client=client, developer_id=test_developer_id):
-# def _make_request(method, url, **kwargs):
-# headers = kwargs.pop("headers", {})
-# headers = {
-# **headers,
-# api_key_header_name: api_key,
-# }
-# if multi_tenant_mode:
-# headers["X-Developer-Id"] = str(developer_id)
+@fixture(scope="global")
+async def make_request(client=client, developer_id=test_developer_id):
+ def _make_request(method, url, **kwargs):
+ headers = kwargs.pop("headers", {})
+ headers = {
+ **headers,
+ api_key_header_name: api_key,
+ }
+
+ if multi_tenant_mode:
+ headers["X-Developer-Id"] = str(developer_id)
-# return client.request(method, url, headers=headers, **kwargs)
+ return client.request(method, url, headers=headers, **kwargs)
-# return _make_request
+ return _make_request
@fixture(scope="global")
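
With the client and make_request fixtures re-enabled, route tests can call endpoints without repeating auth headers. A hypothetical usage sketch — the route, test name, and expected status are assumptions:

from ward import test

from tests.fixtures import make_request

@test("route: list users responds")
async def _(make_request=make_request):
    # The fixture injects the API key header (plus X-Developer-Id in
    # multi-tenant mode), so the test only names the method and path.
    response = make_request("GET", "/users")
    assert response.status_code == 200
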
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 90b40a0d8..d182586dc 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -32,6 +32,7 @@
from tests.fixtures import (
pg_dsn,
test_developer_id,
+ test_user,
) # , test_session, test_agent, test_user
# @test("query: create session sql")
@@ -118,7 +119,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
# assert isinstance(result, Session)
-@test("query: list sessions sql")
+@test("query: list sessions when none exist sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that listing sessions returns a collection of session information."""
diff --git a/agents-api/tests/test_user_queries.py b/agents-api/tests/test_user_queries.py
index cbe7e0353..002532816 100644
--- a/agents-api/tests/test_user_queries.py
+++ b/agents-api/tests/test_user_queries.py
@@ -5,7 +5,6 @@
from uuid import UUID
-import asyncpg
from uuid_extensions import uuid7
from ward import raises, test
diff --git a/agents-api/tests/utils.py b/agents-api/tests/utils.py
index 990a1015e..a4f98ac80 100644
--- a/agents-api/tests/utils.py
+++ b/agents-api/tests/utils.py
@@ -1,5 +1,4 @@
import asyncio
-import json
import logging
import subprocess
from contextlib import asynccontextmanager, contextmanager
@@ -7,7 +6,6 @@
from typing import Any, Dict, Optional
from unittest.mock import patch
-import asyncpg
from botocore import exceptions
from fastapi.testclient import TestClient
from litellm.types.utils import ModelResponse
diff --git a/memory-store/migrations/000015_entries.up.sql b/memory-store/migrations/000015_entries.up.sql
index e9d5c6a4f..c104091a2 100644
--- a/memory-store/migrations/000015_entries.up.sql
+++ b/memory-store/migrations/000015_entries.up.sql
@@ -1,7 +1,7 @@
BEGIN;
-- Create chat_role enum
-CREATE TYPE chat_role AS ENUM('user', 'assistant', 'tool', 'system');
+CREATE TYPE chat_role AS ENUM('user', 'assistant', 'tool', 'system', 'developer');
-- Create entries table
CREATE TABLE IF NOT EXISTS entries (
@@ -101,4 +101,4 @@ AFTER INSERT OR UPDATE ON entries
FOR EACH ROW
EXECUTE FUNCTION update_session_updated_at();
-COMMIT;
\ No newline at end of file
+COMMIT;
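
Editing CREATE TYPE in the migration only takes effect on databases that replay it from scratch. On an already-migrated database the usual approach (a general Postgres note, not from this patch) is ALTER TYPE ... ADD VALUE, sketched here with asyncpg:

import asyncpg

async def add_developer_chat_role(conn: asyncpg.Connection) -> None:
    # IF NOT EXISTS makes the statement re-runnable; note that on older
    # Postgres versions ALTER TYPE ... ADD VALUE cannot run inside an
    # open transaction block.
    await conn.execute("ALTER TYPE chat_role ADD VALUE IF NOT EXISTS 'developer';")
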
From 372f3203f390839716428d678ad78be60142f4d9 Mon Sep 17 00:00:00 2001
From: creatorrr
Date: Wed, 18 Dec 2024 07:52:14 +0000
Subject: [PATCH 058/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/utils.py | 12 +++++++-----
agents-api/tests/fixtures.py | 1 -
2 files changed, 7 insertions(+), 6 deletions(-)
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index e7be9f981..3b5dc0bb0 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -6,9 +6,9 @@
from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar, cast
import asyncpg
-from beartype import beartype
import pandas as pd
from asyncpg import Record
+from beartype import beartype
from fastapi import HTTPException
from pydantic import BaseModel
@@ -39,7 +39,7 @@ def pg_query(
timeit: bool = False,
) -> Callable[..., Callable[P, list[Record]]] | Callable[P, list[Record]]:
def pg_query_dec(
- func: Callable[P, tuple[str, list[Any]] | list[tuple[str, list[Any]]]]
+ func: Callable[P, tuple[str, list[Any]] | list[tuple[str, list[Any]]]],
) -> Callable[..., Callable[P, list[Record]]]:
"""
Decorator that wraps a function that takes arbitrary arguments, and
@@ -76,7 +76,9 @@ async def wrapper(
assert isinstance(variables, list) and len(variables) > 0
queries = query if isinstance(query, list) else [query]
- variables_list = variables if isinstance(variables[0], list) else [variables]
+ variables_list = (
+ variables if isinstance(variables[0], list) else [variables]
+ )
zipped = zip(queries, variables_list)
try:
@@ -151,7 +153,7 @@ def _return_data(rec: list[Record]):
return objs
def decorator(
- func: Callable[P, list[Record] | Awaitable[list[Record]]]
+ func: Callable[P, list[Record] | Awaitable[list[Record]]],
) -> Callable[P, ModelT | list[ModelT]]:
@wraps(func)
def wrapper(*args: P.args, **kwargs: P.kwargs) -> ModelT | list[ModelT]:
@@ -200,7 +202,7 @@ def _check_error(error):
raise new_error from error
def decorator(
- func: Callable[P, T | Awaitable[T]]
+ func: Callable[P, T | Awaitable[T]],
) -> Callable[..., Callable[P, T | Awaitable[T]]]:
@wraps(func)
async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 1b86224a6..c2aa350a8 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -44,7 +44,6 @@
# from agents_api.queries.tools.delete_tool import delete_tool
from agents_api.queries.users.create_user import create_user
from agents_api.queries.users.delete_user import delete_user
-
from agents_api.web import app
from .utils import (
From 919c03ab8b266d440669afd435bd95d0e70aa240 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Wed, 18 Dec 2024 11:48:48 +0300
Subject: [PATCH 059/274] feat(agents-api): implement agent queries and tests
---
.../agents_api/queries/agents/create_agent.py | 85 ++++---
.../queries/agents/create_or_update_agent.py | 88 ++++---
.../agents_api/queries/agents/delete_agent.py | 82 +++---
.../agents_api/queries/agents/get_agent.py | 52 ++--
.../agents_api/queries/agents/list_agents.py | 82 +++---
.../agents_api/queries/agents/patch_agent.py | 72 ++++--
.../agents_api/queries/agents/update_agent.py | 56 ++--
agents-api/agents_api/queries/utils.py | 14 +
agents-api/tests/fixtures.py | 26 +-
agents-api/tests/test_agent_queries.py | 239 ++++++++----------
10 files changed, 427 insertions(+), 369 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 7e95dc3ab..cc6e1ea6d 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -6,6 +6,7 @@
from typing import Any, TypeVar
from uuid import UUID
+from sqlglot import parse_one
from beartype import beartype
from fastapi import HTTPException
from pydantic import ValidationError
@@ -14,7 +15,7 @@
from ...autogen.openapi_model import Agent, CreateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
- # generate_canonical_name,
+ generate_canonical_name,
partialclass,
pg_query,
rewrap_exceptions,
@@ -24,6 +25,33 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+raw_query = """
+INSERT INTO agents (
+ developer_id,
+ agent_id,
+ canonical_name,
+ name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings
+)
+VALUES (
+ $1,
+ $2,
+ $3,
+ $4,
+ $5,
+ $6,
+ $7,
+ $8,
+ $9
+)
+RETURNING *;
+"""
+
+query = parse_one(raw_query).sql(pretty=True)
# @rewrap_exceptions(
# {
@@ -58,17 +86,16 @@
Agent,
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
- _kind="inserted",
)
-@pg_query
# @increase_counter("create_agent")
+@pg_query
@beartype
async def create_agent(
*,
developer_id: UUID,
agent_id: UUID | None = None,
data: CreateAgentRequest,
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs and executes a SQL query to create a new agent in the database.
@@ -91,49 +118,23 @@ async def create_agent(
# Convert default_settings to dict if it exists
default_settings = (
- data.default_settings.model_dump() if data.default_settings else None
+ data.default_settings.model_dump() if data.default_settings else {}
)
# Set default values
- data.metadata = data.metadata or None
- # data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
+ data.metadata = data.metadata or {}
+ data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
- query = """
- INSERT INTO agents (
+ params = [
developer_id,
agent_id,
- canonical_name,
- name,
- about,
- instructions,
- model,
- metadata,
- default_settings
- )
- VALUES (
- %(developer_id)s,
- %(agent_id)s,
- %(canonical_name)s,
- %(name)s,
- %(about)s,
- %(instructions)s,
- %(model)s,
- %(metadata)s,
- %(default_settings)s
- )
- RETURNING *;
- """
-
- params = {
- "developer_id": developer_id,
- "agent_id": agent_id,
- "canonical_name": data.canonical_name,
- "name": data.name,
- "about": data.about,
- "instructions": data.instructions,
- "model": data.model,
- "metadata": data.metadata,
- "default_settings": default_settings,
- }
+ data.canonical_name,
+ data.name,
+ data.about,
+ data.instructions,
+ data.model,
+ data.metadata,
+ default_settings,
+ ]
return query, params
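
Editor's note: running `parse_one(raw_query).sql(pretty=True)` at module import both validates the SQL and normalizes its formatting. A minimal sketch of the same pattern (assumes the `sqlglot` package):

from sqlglot import parse_one

raw_query = """
SELECT agent_id, name FROM agents WHERE developer_id = $1;
"""

# parse_one raises sqlglot.errors.ParseError on malformed SQL, so a typo
# in the statement fails fast at import time rather than at request time.
query = parse_one(raw_query).sql(pretty=True)
print(query)
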
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 50c96a94a..5dfe94431 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -6,13 +6,16 @@
from typing import Any, TypeVar
from uuid import UUID
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+
from beartype import beartype
from fastapi import HTTPException
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
- # generate_canonical_name,
+ generate_canonical_name,
partialclass,
pg_query,
rewrap_exceptions,
@@ -22,6 +25,34 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+raw_query = """
+INSERT INTO agents (
+ developer_id,
+ agent_id,
+ canonical_name,
+ name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings
+)
+VALUES (
+ $1,
+ $2,
+ $3,
+ $4,
+ $5,
+ $6,
+ $7,
+ $8,
+ $9
+)
+RETURNING *;
+"""
+
+query = parse_one(raw_query).sql(pretty=True)
+
# @rewrap_exceptions(
# {
@@ -36,14 +67,13 @@
Agent,
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
- _kind="inserted",
)
+# @increase_counter("create_or_update_agent")
@pg_query
-# @increase_counter("create_or_update_agent1")
@beartype
async def create_or_update_agent(
*, agent_id: UUID, developer_id: UUID, data: CreateOrUpdateAgentRequest
-) -> tuple[list[str], dict]:
+) -> tuple[str, list]:
"""
Constructs the SQL query to create a new agent or update an existing agent's details.
@@ -65,49 +95,23 @@ async def create_or_update_agent(
# Convert default_settings to dict if it exists
default_settings = (
- data.default_settings.model_dump() if data.default_settings else None
+ data.default_settings.model_dump() if data.default_settings else {}
)
# Set default values
- data.metadata = data.metadata or None
- # data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
+ data.metadata = data.metadata or {}
+ data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
- query = """
- INSERT INTO agents (
+ params = [
developer_id,
agent_id,
- canonical_name,
- name,
- about,
- instructions,
- model,
- metadata,
- default_settings
- )
- VALUES (
- %(developer_id)s,
- %(agent_id)s,
- %(canonical_name)s,
- %(name)s,
- %(about)s,
- %(instructions)s,
- %(model)s,
- %(metadata)s,
- %(default_settings)s
- )
- RETURNING *;
- """
-
- params = {
- "developer_id": developer_id,
- "agent_id": agent_id,
- "canonical_name": data.canonical_name,
- "name": data.name,
- "about": data.about,
- "instructions": data.instructions,
- "model": data.model,
- "metadata": data.metadata,
- "default_settings": default_settings,
- }
+ data.canonical_name,
+ data.name,
+ data.about,
+ data.instructions,
+ data.model,
+ data.metadata,
+ default_settings,
+ ]
return (query, params)
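
Editor's note: the move from psycopg-style `%(name)s` placeholders to `$1..$9` reflects asyncpg's positional binding, which is why `params` becomes an ordered list instead of a dict. A hedged, standalone sketch of how asyncpg binds such parameters (the DSN is illustrative):

import asyncio
from uuid import UUID

import asyncpg

async def main() -> None:
    conn = await asyncpg.connect(
        "postgres://postgres:postgres@localhost:5432/postgres"
    )
    try:
        # asyncpg binds $1, $2, ... from positional arguments, in order;
        # there is no named-placeholder form, so argument order matters.
        row = await conn.fetchrow(
            "SELECT $1::uuid AS developer_id, $2::text AS name;",
            UUID(int=0),
            "test agent",
        )
        print(dict(row))
    finally:
        await conn.close()

asyncio.run(main())
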
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index 282022ad3..c376a9d6a 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -18,10 +18,40 @@
rewrap_exceptions,
wrap_in_class,
)
+from beartype import beartype
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+raw_query = """
+WITH deleted_docs AS (
+ DELETE FROM docs
+ WHERE developer_id = $1
+ AND doc_id IN (
+ SELECT ad.doc_id
+ FROM agent_docs ad
+ WHERE ad.agent_id = $2
+ AND ad.developer_id = $1
+ )
+), deleted_agent_docs AS (
+ DELETE FROM agent_docs
+ WHERE agent_id = $2 AND developer_id = $1
+), deleted_tools AS (
+ DELETE FROM tools
+ WHERE agent_id = $2 AND developer_id = $1
+)
+DELETE FROM agents
+WHERE agent_id = $2 AND developer_id = $1
+RETURNING developer_id, agent_id;
+"""
+
+
+# Validate the raw query and pretty-print it with sqlglot
+query = parse_one(raw_query).sql(pretty=True)
# @rewrap_exceptions(
# {
@@ -36,57 +66,23 @@
@wrap_in_class(
ResourceDeletedResponse,
one=True,
- transform=lambda d: {
- "id": d["agent_id"],
- },
+ transform=lambda d: {**d, "id": d["agent_id"], "deleted_at": utcnow()},
)
+# @increase_counter("delete_agent")
@pg_query
-# @increase_counter("delete_agent1")
@beartype
-async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
+async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
"""
- Constructs the SQL queries to delete an agent and its related settings.
+ Constructs the SQL query to delete an agent and its related settings.
Args:
agent_id (UUID): The UUID of the agent to be deleted.
developer_id (UUID): The UUID of the developer owning the agent.
Returns:
- tuple[list[str], dict]: A tuple containing the list of SQL queries and their parameters.
+ tuple[str, list]: A tuple containing the SQL query and its parameters.
"""
-
- queries = [
- """
- -- Delete docs that were only associated with this agent
- DELETE FROM docs
- WHERE developer_id = %(developer_id)s
- AND doc_id IN (
- SELECT ad.doc_id
- FROM agent_docs ad
- WHERE ad.agent_id = %(agent_id)s
- AND ad.developer_id = %(developer_id)s
- );
- """,
- """
- -- Delete agent_docs entries
- DELETE FROM agent_docs
- WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
- """,
- """
- -- Delete tools related to the agent
- DELETE FROM tools
- WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
- """,
- """
- -- Delete the agent
- DELETE FROM agents
- WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
- """,
- ]
-
- params = {
- "agent_id": agent_id,
- "developer_id": developer_id,
- }
-
- return (queries, params)
+ # Note: We swap the parameter order because the queries use $1 for developer_id and $2 for agent_id
+ params = [developer_id, agent_id]
+
+ return (query, params)
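
Editor's note: because `DELETE ... RETURNING` yields only the row's stored columns, the `transform` lambda above back-fills the response's `id` alias and a `deleted_at` timestamp. A small sketch of that shaping step (assuming a `utcnow` helper like the one imported from `common.utils.datetime`):

from datetime import datetime, timezone

def utcnow() -> datetime:
    return datetime.now(timezone.utc)

# Alias agent_id to the API's "id" field and stamp the deletion time,
# since the DELETE's RETURNING clause cannot supply either.
transform = lambda d: {**d, "id": d["agent_id"], "deleted_at": utcnow()}

row = {"developer_id": "dev-123", "agent_id": "agent-456"}
print(transform(row))
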
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index a9f6b8368..061d0b165 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -10,12 +10,38 @@
from fastapi import HTTPException
from ...autogen.openapi_model import Agent
from ...metrics.counters import increase_counter
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ..utils import (
partialclass,
pg_query,
rewrap_exceptions,
wrap_in_class,
)
+from beartype import beartype
+
+from ...autogen.openapi_model import Agent
+
+raw_query = """
+SELECT
+ agent_id,
+ developer_id,
+ name,
+ canonical_name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings,
+ created_at,
+ updated_at
+FROM
+ agents
+WHERE
+ agent_id = $2 AND developer_id = $1;
+"""
+
+query = parse_one(raw_query).sql(pretty=True)
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
@@ -31,11 +57,11 @@
# }
# # TODO: Add more exceptions
# )
-@wrap_in_class(Agent, one=True)
+@wrap_in_class(Agent, one=True, transform=lambda d: {"id": d["agent_id"], **d})
+# @increase_counter("get_agent")
@pg_query
-# @increase_counter("get_agent1")
@beartype
-async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
+async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
"""
Constructs the SQL query to retrieve an agent's details.
@@ -46,23 +72,5 @@ async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], d
Returns:
tuple[str, list]: A tuple containing the SQL query and its parameters.
"""
- query = """
- SELECT
- agent_id,
- developer_id,
- name,
- canonical_name,
- about,
- instructions,
- model,
- metadata,
- default_settings,
- created_at,
- updated_at
- FROM
- agents
- WHERE
- agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
- """
- return (query, {"agent_id": agent_id, "developer_id": developer_id})
+ return (query, [developer_id, agent_id])
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index d2ebf0c07..6a8c3e986 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -17,12 +17,42 @@
rewrap_exceptions,
wrap_in_class,
)
+from beartype import beartype
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+
+from ...autogen.openapi_model import Agent
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+raw_query = """
+SELECT
+ agent_id,
+ developer_id,
+ name,
+ canonical_name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings,
+ created_at,
+ updated_at
+FROM agents
+WHERE developer_id = $1 $7
+ORDER BY
+ CASE WHEN $4 = 'created_at' AND $5 = 'asc' THEN created_at END ASC NULLS LAST,
+ CASE WHEN $4 = 'created_at' AND $5 = 'desc' THEN created_at END DESC NULLS LAST,
+ CASE WHEN $4 = 'updated_at' AND $5 = 'asc' THEN updated_at END ASC NULLS LAST,
+ CASE WHEN $4 = 'updated_at' AND $5 = 'desc' THEN updated_at END DESC NULLS LAST
+LIMIT $2 OFFSET $3;
+"""
+
+query = raw_query
+
-# @rewrap_exceptions(
+# @rewrap_exceptions(
# {
# psycopg_errors.ForeignKeyViolation: partialclass(
# HTTPException,
@@ -32,9 +62,9 @@
# }
# # TODO: Add more exceptions
# )
-@wrap_in_class(Agent)
+@wrap_in_class(Agent, transform=lambda d: {"id": d["agent_id"], **d})
+# @increase_counter("list_agents")
@pg_query
-# @increase_counter("list_agents1")
@beartype
async def list_agents(
*,
@@ -44,7 +74,7 @@ async def list_agents(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
metadata_filter: dict[str, Any] = {},
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs query to list agents for a developer with pagination.
@@ -64,33 +94,25 @@ async def list_agents(
raise HTTPException(status_code=400, detail="Invalid sort direction")
# Build metadata filter clause if needed
- metadata_clause = ""
- if metadata_filter:
- metadata_clause = "AND metadata @> %(metadata_filter)s::jsonb"
- query = f"""
- SELECT
- agent_id,
+ final_query = query
+ if metadata_filter:
+ final_query = query.replace("$7", "AND metadata @> $6::jsonb")
+ else:
+ final_query = query.replace("$7", "")
+
+ params = [
developer_id,
- name,
- canonical_name,
- about,
- instructions,
- model,
- metadata,
- default_settings,
- created_at,
- updated_at
- FROM agents
- WHERE developer_id = %(developer_id)s
- {metadata_clause}
- ORDER BY {sort_by} {direction}
- LIMIT %(limit)s OFFSET %(offset)s;
- """
-
- params = {"developer_id": developer_id, "limit": limit, "offset": offset}
-
+ limit,
+ offset
+ ]
+
+ params.append(sort_by)
+ params.append(direction)
if metadata_filter:
- params["metadata_filter"] = metadata_filter
+ params.append(metadata_filter)
+
+ print(final_query)
+ print(params)
- return query, params
+ return final_query, params
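
Editor's note: ORDER BY targets are identifiers, and identifiers cannot be bound as `$n` parameters, so the query above emulates a parameterized sort with a CASE ladder. A condensed sketch of the trick:

sortable_query = """
SELECT agent_id, created_at, updated_at
FROM agents
WHERE developer_id = $1
ORDER BY
    CASE WHEN $2 = 'created_at' AND $3 = 'asc'  THEN created_at END ASC  NULLS LAST,
    CASE WHEN $2 = 'created_at' AND $3 = 'desc' THEN created_at END DESC NULLS LAST,
    CASE WHEN $2 = 'updated_at' AND $3 = 'asc'  THEN updated_at END ASC  NULLS LAST,
    CASE WHEN $2 = 'updated_at' AND $3 = 'desc' THEN updated_at END DESC NULLS LAST;
"""
# For any ($2, $3) pair exactly one CASE arm is non-NULL, so only that
# column and direction influence the sort; the inert arms fall to the
# end via NULLS LAST.
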
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 915aa8c66..647ea3e52 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -10,6 +10,9 @@
from fastapi import HTTPException
from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
+from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
@@ -20,6 +23,35 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+
+raw_query = """
+UPDATE agents
+SET
+ name = CASE
+ WHEN $3::text IS NOT NULL THEN $3
+ ELSE name
+ END,
+ about = CASE
+ WHEN $4::text IS NOT NULL THEN $4
+ ELSE about
+ END,
+ metadata = CASE
+ WHEN $5::jsonb IS NOT NULL THEN metadata || $5
+ ELSE metadata
+ END,
+ model = CASE
+ WHEN $6::text IS NOT NULL THEN $6
+ ELSE model
+ END,
+ default_settings = CASE
+ WHEN $7::jsonb IS NOT NULL THEN $7
+ ELSE default_settings
+ END
+WHERE agent_id = $2 AND developer_id = $1
+RETURNING *;
+"""
+
+query = parse_one(raw_query).sql(pretty=True)
# @rewrap_exceptions(
@@ -36,14 +68,13 @@
ResourceUpdatedResponse,
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
- _kind="inserted",
)
+# @increase_counter("patch_agent")
@pg_query
-# @increase_counter("patch_agent1")
@beartype
async def patch_agent(
*, agent_id: UUID, developer_id: UUID, data: PatchAgentRequest
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs the SQL query to partially update an agent's details.
@@ -53,27 +84,16 @@ async def patch_agent(
data (PatchAgentRequest): A dictionary of fields to update.
Returns:
- tuple[str, dict]: A tuple containing the SQL query and its parameters.
+ tuple[str, list]: A tuple containing the SQL query and its parameters.
"""
- patch_fields = data.model_dump(exclude_unset=True)
- set_clauses = []
- params = {}
-
- for key, value in patch_fields.items():
- if value is not None: # Only update non-null values
- set_clauses.append(f"{key} = %({key})s")
- params[key] = value
-
- set_clause = ", ".join(set_clauses)
-
- query = f"""
- UPDATE agents
- SET {set_clause}
- WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s
- RETURNING *;
- """
-
- params["agent_id"] = agent_id
- params["developer_id"] = developer_id
-
- return (query, params)
+ params = [
+ developer_id,
+ agent_id,
+ data.name,
+ data.about,
+ data.metadata,
+ data.model,
+ data.default_settings.model_dump() if data.default_settings else None,
+ ]
+
+ return query, params
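
Editor's note: the CASE expressions above implement a partial update: a NULL parameter leaves the column untouched, and `metadata || $5` merges rather than replaces. A trimmed sketch of the same idea:

patch_query = """
UPDATE agents
SET
    name     = CASE WHEN $3::text  IS NOT NULL THEN $3 ELSE name END,
    about    = CASE WHEN $4::text  IS NOT NULL THEN $4 ELSE about END,
    metadata = CASE WHEN $5::jsonb IS NOT NULL THEN metadata || $5
                    ELSE metadata END
WHERE agent_id = $2 AND developer_id = $1
RETURNING *;
"""
# For plain columns the scalar shorthand is equivalent:
#   name = COALESCE($3, name)
# but COALESCE cannot express the jsonb merge, hence the CASE form.
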
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index 48e00bf5a..d65354fa1 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -11,6 +11,9 @@
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
from ...metrics.counters import increase_counter
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+
from ..utils import (
partialclass,
pg_query,
@@ -21,6 +24,20 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+raw_query = """
+UPDATE agents
+SET
+ metadata = $3,
+ name = $4,
+ about = $5,
+ model = $6,
+ default_settings = $7::jsonb
+WHERE agent_id = $2 AND developer_id = $1
+RETURNING *;
+"""
+
+query = parse_one(raw_query).sql(pretty=True)
+
# @rewrap_exceptions(
# {
@@ -35,15 +52,12 @@
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
- transform=lambda d: {"id": d["agent_id"], "jobs": [], **d},
- _kind="inserted",
+ transform=lambda d: {"id": d["agent_id"], **d},
)
+# @increase_counter("update_agent")
@pg_query
-# @increase_counter("update_agent1")
@beartype
-async def update_agent(
- *, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest
-) -> tuple[str, dict]:
+async def update_agent(*, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest) -> tuple[str, list]:
"""
Constructs the SQL query to fully update an agent's details.
@@ -53,21 +67,19 @@ async def update_agent(
data (UpdateAgentRequest): A dictionary containing all agent fields to update.
Returns:
- tuple[str, dict]: A tuple containing the SQL query and its parameters.
+ tuple[str, list]: A tuple containing the SQL query and its parameters.
"""
- fields = ", ".join(
- [f"{key} = %({key})s" for key in data.model_dump(exclude_unset=True).keys()]
- )
- params = {key: value for key, value in data.model_dump(exclude_unset=True).items()}
-
- query = f"""
- UPDATE agents
- SET {fields}
- WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s
- RETURNING *;
- """
-
- params["agent_id"] = agent_id
- params["developer_id"] = developer_id
-
+ params = [
+ developer_id,
+ agent_id,
+ data.metadata or {},
+ data.name,
+ data.about,
+ data.model,
+ data.default_settings.model_dump() if data.default_settings else {},
+ ]
+ print("*" * 100)
+ print(query)
+ print(params)
+ print("*" * 100)
return (query, params)
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index e93135172..ef2d09027 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -1,5 +1,6 @@
import concurrent.futures
import inspect
+import re
import socket
import time
from functools import partialmethod, wraps
@@ -17,6 +18,19 @@
T = TypeVar("T")
ModelT = TypeVar("ModelT", bound=BaseModel)
+def generate_canonical_name(name: str) -> str:
+ """Convert a display name to a canonical name.
+ Example: "My Cool Agent!" -> "my_cool_agent"
+ """
+ # Remove special characters, replace spaces with underscores
+ canonical = re.sub(r"[^\w\s-]", "", name.lower())
+ canonical = re.sub(r"[-\s]+", "_", canonical)
+
+ # Ensure it starts with a letter (prepend 'a' if not)
+ if not canonical[0].isalpha():
+ canonical = f"a_{canonical}"
+
+ return canonical
def partialclass(cls, *args, **kwargs):
cls_signature = inspect.signature(cls)
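
Editor's note: for reference, the new helper behaves like this standalone copy; note it assumes a non-empty input, since an empty string would make `canonical[0]` raise an IndexError:

import re

def generate_canonical_name(name: str) -> str:
    # Drop punctuation, collapse spaces/hyphens to underscores,
    # and force a leading letter.
    canonical = re.sub(r"[^\w\s-]", "", name.lower())
    canonical = re.sub(r"[-\s]+", "_", canonical)
    if not canonical[0].isalpha():
        canonical = f"a_{canonical}"
    return canonical

assert generate_canonical_name("My Cool Agent!") == "my_cool_agent"
assert generate_canonical_name("2nd-Gen Agent") == "a_2nd_gen_agent"
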
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index e4ae60780..70e6aa2c5 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -111,19 +111,19 @@ def patch_embed_acompletion():
@fixture(scope="global")
async def test_agent(dsn=pg_dsn, developer=test_developer):
- pool = await asyncpg.create_pool(dsn=dsn)
-
- async with get_pg_client(pool=pool) as client:
- agent = await create_agent(
- developer_id=developer.id,
- data=CreateAgentRequest(
- model="gpt-4o-mini",
- name="test agent",
- about="test agent about",
- metadata={"test": "test"},
- ),
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+
+ agent = await create_agent(
+ developer_id=developer.id,
+ data=CreateAgentRequest(
+ model="gpt-4o-mini",
+ name="test agent",
+ canonical_name=f"test_agent_{str(int(time.time()))}",
+ about="test agent about",
+ metadata={"test": "test"},
+ ),
+ connection_pool=pool,
+ )
yield agent
await pool.close()
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index f8f75fd0b..4b8ccd959 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,7 +1,9 @@
# Tests for agent queries
from uuid import uuid4
+from uuid import UUID
import asyncpg
+from uuid_extensions import uuid7
from ward import raises, test
from agents_api.autogen.openapi_model import (
@@ -9,10 +11,11 @@
CreateAgentRequest,
CreateOrUpdateAgentRequest,
PatchAgentRequest,
+ ResourceDeletedResponse,
ResourceUpdatedResponse,
UpdateAgentRequest,
)
-from agents_api.clients.pg import get_pg_client
+from agents_api.clients.pg import create_db_pool
from agents_api.queries.agents import (
create_agent,
create_or_update_agent,
@@ -25,163 +28,141 @@
from tests.fixtures import pg_dsn, test_agent, test_developer_id
-@test("model: create agent")
+@test("query: create agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- await create_agent(
- developer_id=developer_id,
- data=CreateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- ),
- client=client,
- )
-
-
-@test("model: create agent with instructions")
+ """Test that an agent can be successfully created."""
+
+ pool = await create_db_pool(dsn=dsn)
+ await create_agent(
+ developer_id=developer_id,
+ data=CreateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ ),
+ connection_pool=pool,
+ )
+
+
+@test("query: create agent with instructions sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- await create_agent(
- developer_id=developer_id,
- data=CreateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- ),
- client=client,
- )
-
+ """Test that an agent can be successfully created or updated."""
+
+ pool = await create_db_pool(dsn=dsn)
+ await create_or_update_agent(
+ developer_id=developer_id,
+ agent_id=uuid4(),
+ data=CreateOrUpdateAgentRequest(
+ name="test agent",
+ canonical_name="test_agent2",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ ),
+ connection_pool=pool,
+ )
+
+
+@test("query: update agent sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ """Test that an existing agent's information can be successfully updated."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await update_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ data=UpdateAgentRequest(
+ name="updated agent",
+ about="updated agent about",
+ model="gpt-4o-mini",
+ default_settings={"temperature": 1.0},
+ metadata={"hello": "world"},
+ ),
+ connection_pool=pool,
+ )
-@test("model: create or update agent")
-async def _(dsn=pg_dsn, developer_id=test_developer_id):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- await create_or_update_agent(
- developer_id=developer_id,
- agent_id=uuid4(),
- data=CreateOrUpdateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- ),
- client=client,
- )
+ assert result is not None
+ assert isinstance(result, ResourceUpdatedResponse)
-@test("model: get agent not exists")
+@test("query: get agent not exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that retrieving a non-existent agent raises an exception."""
+
agent_id = uuid4()
- pool = await asyncpg.create_pool(dsn=dsn)
+ pool = await create_db_pool(dsn=dsn)
with raises(Exception):
- async with get_pg_client(pool=pool) as client:
- await get_agent(agent_id=agent_id, developer_id=developer_id, client=client)
+ await get_agent(agent_id=agent_id, developer_id=developer_id, connection_pool=pool)
-@test("model: get agent exists")
+@test("query: get agent exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- result = await get_agent(agent_id=agent.id, developer_id=developer_id, client=client)
+ """Test that retrieving an existing agent returns the correct agent information."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await get_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
assert result is not None
assert isinstance(result, Agent)
-@test("model: delete agent")
+@test("query: list agents sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- temp_agent = await create_agent(
- developer_id=developer_id,
- data=CreateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- ),
- client=client,
- )
-
- # Delete the agent
- await delete_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
+ """Test that listing agents returns a collection of agent information."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await list_agents(developer_id=developer_id, connection_pool=pool)
- # Check that the agent is deleted
- with raises(Exception):
- await get_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
+ assert isinstance(result, list)
+ assert all(isinstance(agent, Agent) for agent in result)
-@test("model: update agent")
+@test("query: patch agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- result = await update_agent(
- agent_id=agent.id,
- developer_id=developer_id,
- data=UpdateAgentRequest(
- name="updated agent",
- about="updated agent about",
- model="gpt-4o-mini",
- default_settings={"temperature": 1.0},
- metadata={"hello": "world"},
- ),
- client=client,
- )
+ """Test that an agent can be successfully patched."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await patch_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ data=PatchAgentRequest(
+ name="patched agent",
+ about="patched agent about",
+ default_settings={"temperature": 1.0},
+ metadata={"something": "else"},
+ ),
+ connection_pool=pool,
+ )
assert result is not None
assert isinstance(result, ResourceUpdatedResponse)
- async with get_pg_client(pool=pool) as client:
- agent = await get_agent(
- agent_id=agent.id,
- developer_id=developer_id,
- client=client,
- )
-
- assert "test" not in agent.metadata
-
-@test("model: patch agent")
+@test("query: delete agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- result = await patch_agent(
- agent_id=agent.id,
- developer_id=developer_id,
- data=PatchAgentRequest(
- name="patched agent",
- about="patched agent about",
- default_settings={"temperature": 1.0},
- metadata={"something": "else"},
- ),
- client=client,
- )
+ """Test that an agent can be successfully deleted."""
+
+ pool = await create_db_pool(dsn=dsn)
+ delete_result = await delete_agent(agent_id=agent.id, developer_id=developer_id, connection_pool=pool)
- assert result is not None
- assert isinstance(result, ResourceUpdatedResponse)
+ assert delete_result is not None
+ assert isinstance(delete_result, ResourceDeletedResponse)
- async with get_pg_client(pool=pool) as client:
- agent = await get_agent(
- agent_id=agent.id,
+ # Verify the agent no longer exists
+ try:
+ await get_agent(
developer_id=developer_id,
- client=client,
+ agent_id=agent.id,
+ connection_pool=pool,
)
-
- assert "hello" in agent.metadata
-
-
-@test("model: list agents")
-async def _(dsn=pg_dsn, developer_id=test_developer_id):
- """Tests listing all agents associated with a developer in the database. Verifies that the correct list of agents is retrieved."""
-
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- result = await list_agents(developer_id=developer_id, client=client)
-
- assert isinstance(result, list)
- assert all(isinstance(agent, Agent) for agent in result)
+ except Exception:
+ pass
+ else:
+ assert (
+ False
+ ), "Expected an exception to be raised when retrieving a deleted agent."
From 6f2ca23b967cd3a3c89d52c8f826f0aea2886925 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Wed, 18 Dec 2024 08:51:36 +0000
Subject: [PATCH 060/274] refactor: Lint agents-api (CI)
---
.../agents_api/queries/agents/create_agent.py | 3 ++-
.../queries/agents/create_or_update_agent.py | 5 ++--
.../agents_api/queries/agents/delete_agent.py | 10 +++----
.../agents_api/queries/agents/get_agent.py | 24 ++++++++---------
.../agents_api/queries/agents/list_agents.py | 19 +++++--------
.../agents_api/queries/agents/patch_agent.py | 11 ++++----
.../agents_api/queries/agents/update_agent.py | 9 ++++---
agents-api/agents_api/queries/utils.py | 2 ++
agents-api/tests/fixtures.py | 4 +--
agents-api/tests/test_agent_queries.py | 27 ++++++++++---------
10 files changed, 54 insertions(+), 60 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index cc6e1ea6d..a79596caf 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -6,10 +6,10 @@
from typing import Any, TypeVar
from uuid import UUID
-from sqlglot import parse_one
from beartype import beartype
from fastapi import HTTPException
from pydantic import ValidationError
+from sqlglot import parse_one
from uuid_extensions import uuid7
from ...autogen.openapi_model import Agent, CreateAgentRequest
@@ -53,6 +53,7 @@
query = parse_one(raw_query).sql(pretty=True)
+
# @rewrap_exceptions(
# {
# psycopg_errors.ForeignKeyViolation: partialclass(
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 5dfe94431..9df34c049 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -6,11 +6,10 @@
from typing import Any, TypeVar
from uuid import UUID
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-
from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
from ...metrics.counters import increase_counter
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index c376a9d6a..239498df3 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -8,6 +8,8 @@
from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
@@ -18,11 +20,6 @@
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
@@ -53,6 +50,7 @@
# Validate the raw query and pretty-print it with sqlglot
query = parse_one(raw_query).sql(pretty=True)
+
# @rewrap_exceptions(
# {
# psycopg_errors.ForeignKeyViolation: partialclass(
@@ -84,5 +82,5 @@ async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list
"""
# Note: We swap the parameter order because the queries use $1 for developer_id and $2 for agent_id
params = [developer_id, agent_id]
-
+
return (query, params)
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index 061d0b165..d630a2aeb 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -8,19 +8,17 @@
from beartype import beartype
from fastapi import HTTPException
-from ...autogen.openapi_model import Agent
-from ...metrics.counters import increase_counter
from sqlglot import parse_one
from sqlglot.optimizer import optimize
+
+from ...autogen.openapi_model import Agent
+from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
pg_query,
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-
-from ...autogen.openapi_model import Agent
raw_query = """
SELECT
@@ -48,14 +46,14 @@
# @rewrap_exceptions(
- # {
- # psycopg_errors.ForeignKeyViolation: partialclass(
- # HTTPException,
- # status_code=404,
- # detail="The specified developer does not exist.",
- # )
- # }
- # # TODO: Add more exceptions
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
# )
@wrap_in_class(Agent, one=True, transform=lambda d: {"id": d["agent_id"], **d})
# @increase_counter("get_agent")
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 6a8c3e986..6c6e7a0c5 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -8,6 +8,8 @@
from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import Agent
from ...metrics.counters import increase_counter
@@ -17,11 +19,6 @@
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-
-from ...autogen.openapi_model import Agent
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
@@ -100,18 +97,14 @@ async def list_agents(
final_query = query.replace("$7", "AND metadata @> $6::jsonb")
else:
final_query = query.replace("$7", "")
-
- params = [
- developer_id,
- limit,
- offset
- ]
-
+
+ params = [developer_id, limit, offset]
+
params.append(sort_by)
params.append(direction)
if metadata_filter:
params.append(metadata_filter)
-
+
print(final_query)
print(params)
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 647ea3e52..929fd9c34 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -8,11 +8,10 @@
from beartype import beartype
from fastapi import HTTPException
-
-from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
-from fastapi import HTTPException
from sqlglot import parse_one
from sqlglot.optimizer import optimize
+
+from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
@@ -23,7 +22,7 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
-
+
raw_query = """
UPDATE agents
SET
@@ -93,7 +92,7 @@ async def patch_agent(
data.about,
data.metadata,
data.model,
- data.default_settings.model_dump() if data.default_settings else None,
+ data.default_settings.model_dump() if data.default_settings else None,
]
-
+
return query, params
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index d65354fa1..3f413c78d 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -8,12 +8,11 @@
from beartype import beartype
from fastapi import HTTPException
-
-from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
-from ...metrics.counters import increase_counter
from sqlglot import parse_one
from sqlglot.optimizer import optimize
+from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
+from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
pg_query,
@@ -57,7 +56,9 @@
# @increase_counter("update_agent")
@pg_query
@beartype
-async def update_agent(*, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest) -> tuple[str, list]:
+async def update_agent(
+ *, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest
+) -> tuple[str, list]:
"""
Constructs the SQL query to fully update an agent's details.
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index ef2d09027..7a6c7b2d8 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -18,6 +18,7 @@
T = TypeVar("T")
ModelT = TypeVar("ModelT", bound=BaseModel)
+
def generate_canonical_name(name: str) -> str:
"""Convert a display name to a canonical name.
Example: "My Cool Agent!" -> "my_cool_agent"
@@ -32,6 +33,7 @@ def generate_canonical_name(name: str) -> str:
return canonical
+
def partialclass(cls, *args, **kwargs):
cls_signature = inspect.signature(cls)
bound = cls_signature.bind_partial(*args, **kwargs)
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 70e6aa2c5..fa00c98e3 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -23,9 +23,9 @@
)
from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
+from agents_api.queries.agents.create_agent import create_agent
from agents_api.queries.developers.create_developer import create_developer
-from agents_api.queries.agents.create_agent import create_agent
# from agents_api.queries.agents.delete_agent import delete_agent
from agents_api.queries.developers.get_developer import get_developer
@@ -116,7 +116,7 @@ async def test_agent(dsn=pg_dsn, developer=test_developer):
agent = await create_agent(
developer_id=developer.id,
data=CreateAgentRequest(
- model="gpt-4o-mini",
+ model="gpt-4o-mini",
name="test agent",
canonical_name=f"test_agent_{str(int(time.time()))}",
about="test agent about",
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index 4b8ccd959..b27f8abde 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,7 +1,6 @@
# Tests for agent queries
-from uuid import uuid4
+from uuid import UUID, uuid4
-from uuid import UUID
import asyncpg
from uuid_extensions import uuid7
from ward import raises, test
@@ -31,7 +30,7 @@
@test("query: create agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that an agent can be successfully created."""
-
+
pool = await create_db_pool(dsn=dsn)
await create_agent(
developer_id=developer_id,
@@ -47,7 +46,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
@test("query: create agent with instructions sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that an agent can be successfully created or updated."""
-
+
pool = await create_db_pool(dsn=dsn)
await create_or_update_agent(
developer_id=developer_id,
@@ -66,7 +65,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
@test("query: update agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that an existing agent's information can be successfully updated."""
-
+
pool = await create_db_pool(dsn=dsn)
result = await update_agent(
agent_id=agent.id,
@@ -88,18 +87,20 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: get agent not exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that retrieving a non-existent agent raises an exception."""
-
+
agent_id = uuid4()
pool = await create_db_pool(dsn=dsn)
with raises(Exception):
- await get_agent(agent_id=agent_id, developer_id=developer_id, connection_pool=pool)
+ await get_agent(
+ agent_id=agent_id, developer_id=developer_id, connection_pool=pool
+ )
@test("query: get agent exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that retrieving an existing agent returns the correct agent information."""
-
+
pool = await create_db_pool(dsn=dsn)
result = await get_agent(
agent_id=agent.id,
@@ -114,7 +115,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: list agents sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that listing agents returns a collection of agent information."""
-
+
pool = await create_db_pool(dsn=dsn)
result = await list_agents(developer_id=developer_id, connection_pool=pool)
@@ -125,7 +126,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
@test("query: patch agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that an agent can be successfully patched."""
-
+
pool = await create_db_pool(dsn=dsn)
result = await patch_agent(
agent_id=agent.id,
@@ -146,9 +147,11 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: delete agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that an agent can be successfully deleted."""
-
+
pool = await create_db_pool(dsn=dsn)
- delete_result = await delete_agent(agent_id=agent.id, developer_id=developer_id, connection_pool=pool)
+ delete_result = await delete_agent(
+ agent_id=agent.id, developer_id=developer_id, connection_pool=pool
+ )
assert delete_result is not None
assert isinstance(delete_result, ResourceDeletedResponse)
From 5f9d5cc42468a486478fd8a0a3e38061290e8ccd Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Wed, 18 Dec 2024 13:18:41 +0300
Subject: [PATCH 061/274] fix(agents-api): misc fixes
---
.../agents_api/queries/agents/create_agent.py | 2 +-
.../queries/agents/create_or_update_agent.py | 2 +-
.../agents_api/queries/agents/delete_agent.py | 2 +-
.../agents_api/queries/agents/get_agent.py | 2 +-
.../agents_api/queries/agents/list_agents.py | 29 +++++++++----------
.../agents_api/queries/agents/patch_agent.py | 2 +-
.../agents_api/queries/agents/update_agent.py | 7 ++---
agents-api/agents_api/queries/utils.py | 4 +++
agents-api/tests/test_agent_queries.py | 18 ++++--------
9 files changed, 29 insertions(+), 39 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index a79596caf..0ee250336 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -88,7 +88,7 @@
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
)
-# @increase_counter("create_agent")
+@increase_counter("create_agent")
@pg_query
@beartype
async def create_agent(
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 9df34c049..e2b3fc525 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -67,7 +67,7 @@
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
)
-# @increase_counter("create_or_update_agent")
+@increase_counter("create_or_update_agent")
@pg_query
@beartype
async def create_or_update_agent(
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index 239498df3..0a47bc0eb 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -66,7 +66,7 @@
one=True,
transform=lambda d: {**d, "id": d["agent_id"], "deleted_at": utcnow()},
)
-# @increase_counter("delete_agent")
+@increase_counter("delete_agent")
@pg_query
@beartype
async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index d630a2aeb..a9893d747 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -56,7 +56,7 @@
# # TODO: Add more exceptions
# )
@wrap_in_class(Agent, one=True, transform=lambda d: {"id": d["agent_id"], **d})
-# @increase_counter("get_agent")
+@increase_counter("get_agent")
@pg_query
@beartype
async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 6c6e7a0c5..37e82de2a 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -37,7 +37,7 @@
created_at,
updated_at
FROM agents
-WHERE developer_id = $1 $7
+WHERE developer_id = $1 {metadata_filter_query}
ORDER BY
CASE WHEN $4 = 'created_at' AND $5 = 'asc' THEN created_at END ASC NULLS LAST,
CASE WHEN $4 = 'created_at' AND $5 = 'desc' THEN created_at END DESC NULLS LAST,
@@ -46,8 +46,6 @@
LIMIT $2 OFFSET $3;
"""
-query = raw_query
-
# @rewrap_exceptions(
# {
@@ -60,7 +58,7 @@
# # TODO: Add more exceptions
# )
@wrap_in_class(Agent, transform=lambda d: {"id": d["agent_id"], **d})
-# @increase_counter("list_agents")
+@increase_counter("list_agents")
@pg_query
@beartype
async def list_agents(
@@ -92,20 +90,19 @@ async def list_agents(
# Build metadata filter clause if needed
- final_query = query
- if metadata_filter:
- final_query = query.replace("$7", "AND metadata @> $6::jsonb")
- else:
- final_query = query.replace("$7", "")
-
- params = [developer_id, limit, offset]
+ final_query = raw_query.format(
+ metadata_filter_query="AND metadata @> $6::jsonb" if metadata_filter else ""
+ )
+
+ params = [
+ developer_id,
+ limit,
+ offset,
+ sort_by,
+ direction,
+ ]
- params.append(sort_by)
- params.append(direction)
if metadata_filter:
params.append(metadata_filter)
- print(final_query)
- print(params)
-
return final_query, params
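
Editor's note: only a constant, developer-controlled fragment is spliced in with `str.format`; the user-supplied filter still binds through a `$n` parameter, so the statement remains parameterized. A condensed sketch of that split:

from typing import Any

raw_query = """
SELECT agent_id FROM agents
WHERE developer_id = $1 {metadata_filter_query}
LIMIT $2 OFFSET $3;
"""

def build_list_query(metadata_filter: dict[str, Any]) -> tuple[str, list]:
    # The interpolated text is one of two fixed strings; user data only
    # ever travels in the params list, never in the SQL text itself.
    final_query = raw_query.format(
        metadata_filter_query="AND metadata @> $4::jsonb" if metadata_filter else ""
    )
    params: list = ["developer-uuid", 100, 0]
    if metadata_filter:
        params.append(metadata_filter)
    return final_query, params

print(build_list_query({"env": "prod"})[0])
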
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 929fd9c34..d2a172838 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -68,7 +68,7 @@
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
)
-# @increase_counter("patch_agent")
+@increase_counter("patch_agent")
@pg_query
@beartype
async def patch_agent(
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index 3f413c78d..d03994e9c 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -53,7 +53,7 @@
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
)
-# @increase_counter("update_agent")
+@increase_counter("update_agent")
@pg_query
@beartype
async def update_agent(
@@ -79,8 +79,5 @@ async def update_agent(
data.model,
data.default_settings.model_dump() if data.default_settings else {},
]
- print("*" * 100)
- print(query)
- print(params)
- print("*" * 100)
+
return (query, params)
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 7a6c7b2d8..1bd72dd5b 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -1,5 +1,6 @@
import concurrent.futures
import inspect
+import random
import re
import socket
import time
@@ -31,6 +32,9 @@ def generate_canonical_name(name: str) -> str:
if not canonical[0].isalpha():
canonical = f"a_{canonical}"
+ # Add 3 random numbers to the end
+ canonical = f"{canonical}_{random.randint(100, 999)}"
+
return canonical
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index b27f8abde..18d95b743 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,6 +1,5 @@
# Tests for agent queries
-from uuid import UUID, uuid4
-
+from uuid import UUID
import asyncpg
from uuid_extensions import uuid7
from ward import raises, test
@@ -50,7 +49,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
pool = await create_db_pool(dsn=dsn)
await create_or_update_agent(
developer_id=developer_id,
- agent_id=uuid4(),
+ agent_id=uuid7(),
data=CreateOrUpdateAgentRequest(
name="test agent",
canonical_name="test_agent2",
@@ -87,8 +86,8 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: get agent not exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that retrieving a non-existent agent raises an exception."""
-
- agent_id = uuid4()
+
+ agent_id = uuid7()
pool = await create_db_pool(dsn=dsn)
with raises(Exception):
@@ -156,16 +155,9 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
assert delete_result is not None
assert isinstance(delete_result, ResourceDeletedResponse)
- # Verify the agent no longer exists
- try:
+ with raises(Exception):
await get_agent(
developer_id=developer_id,
agent_id=agent.id,
connection_pool=pool,
)
- except Exception:
- pass
- else:
- assert (
- False
- ), "Expected an exception to be raised when retrieving a deleted agent."
From 451a88fe27747441399a2dc0e19fc37b5fa1ee1d Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Wed, 18 Dec 2024 10:27:40 +0000
Subject: [PATCH 062/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/agents/list_agents.py | 2 +-
agents-api/tests/test_agent_queries.py | 3 ++-
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 37e82de2a..3613268c5 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -93,7 +93,7 @@ async def list_agents(
final_query = raw_query.format(
metadata_filter_query="AND metadata @> $6::jsonb" if metadata_filter else ""
)
-
+
params = [
developer_id,
limit,
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index 18d95b743..56a07ed03 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,5 +1,6 @@
# Tests for agent queries
from uuid import UUID
+
import asyncpg
from uuid_extensions import uuid7
from ward import raises, test
@@ -86,7 +87,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: get agent not exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that retrieving a non-existent agent raises an exception."""
-
+
agent_id = uuid7()
pool = await create_db_pool(dsn=dsn)
From 2b907eff42c33f8fc5fcc3acc30350e5c3af99cd Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Wed, 18 Dec 2024 19:56:31 +0530
Subject: [PATCH 063/274] wip(agents-api): Entry queries
Signed-off-by: Diwank Singh Tomer
---
agents-api/agents_api/env.py | 2 +
.../agents_api/queries/agents/create_agent.py | 1 -
.../queries/agents/create_or_update_agent.py | 1 -
.../agents_api/queries/agents/delete_agent.py | 2 -
.../agents_api/queries/agents/get_agent.py | 1 -
.../agents_api/queries/agents/list_agents.py | 1 -
.../agents_api/queries/agents/patch_agent.py | 1 -
.../agents_api/queries/agents/update_agent.py | 1 -
.../agents_api/queries/entries/__init__.py | 6 +-
.../queries/entries/create_entries.py | 181 ++++++++++++++++
.../queries/entries/create_entry.py | 196 ------------------
.../queries/entries/delete_entries.py | 128 ++++++++++++
.../queries/entries/delete_entry.py | 96 ---------
.../agents_api/queries/entries/get_history.py | 2 +-
.../queries/entries/list_entries.py | 112 ++++++++++
.../agents_api/queries/entries/list_entry.py | 79 -------
agents-api/agents_api/queries/utils.py | 108 +++++++---
agents-api/agents_api/web.py | 1 -
agents-api/tests/fixtures.py | 13 --
agents-api/tests/test_entry_queries.py | 53 +++--
agents-api/tests/test_session_queries.py | 15 --
21 files changed, 538 insertions(+), 462 deletions(-)
create mode 100644 agents-api/agents_api/queries/entries/create_entries.py
delete mode 100644 agents-api/agents_api/queries/entries/create_entry.py
create mode 100644 agents-api/agents_api/queries/entries/delete_entries.py
delete mode 100644 agents-api/agents_api/queries/entries/delete_entry.py
create mode 100644 agents-api/agents_api/queries/entries/list_entries.py
delete mode 100644 agents-api/agents_api/queries/entries/list_entry.py
diff --git a/agents-api/agents_api/env.py b/agents-api/agents_api/env.py
index 48623b771..8b9fd4dae 100644
--- a/agents-api/agents_api/env.py
+++ b/agents-api/agents_api/env.py
@@ -66,6 +66,8 @@
default="postgres://postgres:postgres@0.0.0.0:5432/postgres?sslmode=disable",
)
+query_timeout: float = env.float("QUERY_TIMEOUT", default=90.0)
+
# Auth
# ----
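
Editor's note: `env.float` parses and validates the variable via the `environs` helper this module already builds its other settings on. A standalone sketch:

import os

from environs import Env

os.environ["QUERY_TIMEOUT"] = "30.5"  # simulate deployment configuration

env = Env()
# Returns the parsed float, or the default when the variable is unset;
# a set-but-unparseable value raises an environs validation error.
query_timeout: float = env.float("QUERY_TIMEOUT", default=90.0)
print(query_timeout)  # 30.5
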
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 46dc453f9..4c731d3dd 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -13,7 +13,6 @@
from uuid_extensions import uuid7
from ...autogen.openapi_model import Agent, CreateAgentRequest
-from ...metrics.counters import increase_counter
from ..utils import (
# generate_canonical_name,
partialclass,
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 261508237..96681255c 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -11,7 +11,6 @@
from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
-from ...metrics.counters import increase_counter
from ..utils import (
# generate_canonical_name,
partialclass,
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index 9d6869a94..f3c64fd18 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -11,8 +11,6 @@
from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
-from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
pg_query,
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index 9061db7cf..5e0edbb98 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -11,7 +11,6 @@
from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import Agent
-from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
pg_query,
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 62aed6536..5fda7c626 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -11,7 +11,6 @@
from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import Agent
-from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
pg_query,
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index c418f5c26..450cbf8cc 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -11,7 +11,6 @@
from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
-from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
pg_query,
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index 4e38adfac..61548de70 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -11,7 +11,6 @@
from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
-from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
pg_query,
diff --git a/agents-api/agents_api/queries/entries/__init__.py b/agents-api/agents_api/queries/entries/__init__.py
index 7c196dd62..e6db0efed 100644
--- a/agents-api/agents_api/queries/entries/__init__.py
+++ b/agents-api/agents_api/queries/entries/__init__.py
@@ -8,10 +8,10 @@
- Listing entries with filtering and pagination
"""
-from .create_entry import create_entries
-from .delete_entry import delete_entries
+from .create_entries import create_entries
+from .delete_entries import delete_entries
from .get_history import get_history
-from .list_entry import list_entries
+from .list_entries import list_entries
__all__ = [
"create_entries",
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
new file mode 100644
index 000000000..ffbd2de22
--- /dev/null
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -0,0 +1,181 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from uuid_extensions import uuid7
+
+from ...autogen.openapi_model import CreateEntryRequest, Entry, Relation
+from ...common.utils.datetime import utcnow
+from ...common.utils.messages import content_to_json
+from ...metrics.counters import increase_counter
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class
+
+# Query for checking if the session exists
+session_exists_query = """
+SELECT CASE
+ WHEN EXISTS (
+ SELECT 1 FROM sessions
+ WHERE session_id = $1 AND developer_id = $2
+ )
+ THEN TRUE
+ ELSE (SELECT NULL::boolean WHERE FALSE) -- NOTE: a zero-row scalar subquery yields NULL here rather than raising NO_DATA_FOUND
+END;
+"""
+
+# Define the raw SQL query for creating entries
+entry_query = """
+INSERT INTO entries (
+ session_id,
+ entry_id,
+ source,
+ role,
+ event_type,
+ name,
+ content,
+ tool_call_id,
+ tool_calls,
+ model,
+ token_count,
+ created_at,
+ timestamp
+) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
+RETURNING *;
+"""
+
+# Define the raw SQL query for creating entry relations
+entry_relation_query = """
+INSERT INTO entry_relations (
+ session_id,
+ head,
+ relation,
+ tail,
+ is_leaf
+) VALUES ($1, $2, $3, $4, $5)
+RETURNING *;
+"""
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail=str(exc),
+ ),
+ asyncpg.UniqueViolationError: lambda exc: HTTPException(
+ status_code=409,
+ detail=str(exc),
+ ),
+ asyncpg.NotNullViolationError: lambda exc: HTTPException(
+ status_code=400,
+ detail=str(exc),
+ ),
+ }
+)
+@wrap_in_class(
+ Entry,
+ transform=lambda d: {
+ "id": UUID(d.pop("entry_id")),
+ **d,
+ },
+)
+@increase_counter("create_entries")
+@pg_query
+@beartype
+async def create_entries(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ data: list[CreateEntryRequest],
+) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
+ # Convert the data to a list of dictionaries
+ data_dicts = [item.model_dump(mode="json") for item in data]
+
+ # Prepare the parameters for the query
+ params = []
+
+ for item in data_dicts:
+ params.append(
+ [
+ session_id, # $1
+ item.pop("id", None) or str(uuid7()), # $2
+ item.get("source"), # $3
+ item.get("role"), # $4
+ item.get("event_type") or "message.create", # $5
+ item.get("name"), # $6
+ content_to_json(item.get("content") or {}), # $7
+ item.get("tool_call_id"), # $8
+ content_to_json(item.get("tool_calls") or {}), # $9
+ item.get("modelname"), # $10
+ item.get("token_count"), # $11
+ item.get("created_at") or utcnow(), # $12
+ utcnow(), # $13
+ ]
+ )
+
+ return [
+ (
+ session_exists_query,
+ [session_id, developer_id],
+ "fetch",
+ ),
+ (
+ entry_query,
+ params,
+ "fetchmany",
+ ),
+ ]
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail=str(exc),
+ ),
+ asyncpg.UniqueViolationError: lambda exc: HTTPException(
+ status_code=409,
+ detail=str(exc),
+ ),
+ }
+)
+@wrap_in_class(Relation)
+@increase_counter("add_entry_relations")
+@pg_query
+@beartype
+async def add_entry_relations(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ data: list[Relation],
+) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
+ # Convert the data to a list of dictionaries
+ data_dicts = [item.model_dump(mode="json") for item in data]
+
+ # Prepare the parameters for the query
+ params = []
+
+ for item in data_dicts:
+ params.append(
+ [
+ item.get("session_id"), # $1
+ item.get("head"), # $2
+ item.get("relation"), # $3
+ item.get("tail"), # $4
+ item.get("is_leaf", False), # $5
+ ]
+ )
+
+ return [
+ (
+ session_exists_query,
+ [session_id, developer_id],
+ "fetch",
+ ),
+ (
+ entry_relation_query,
+ params,
+ "fetchmany",
+ ),
+ ]
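Query functions in this file now return a batch of (sql, params, fetch_method) tuples that the pg_query decorator runs inside a single transaction, with the session-existence check ordered first. A hedged usage sketch (developer_id, session_id, and pool are illustrative, not defined here; connection_pool is the keyword injected by the pg_query wrapper):

    from agents_api.autogen.openapi_model import CreateEntryRequest

    entries = await create_entries(
        developer_id=developer_id,   # an existing developer's UUID
        session_id=session_id,       # must reference an existing session
        data=[CreateEntryRequest(role="user", source="api_request", content="hello")],
        connection_pool=pool,        # asyncpg pool from create_db_pool(dsn=...)
    )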
diff --git a/agents-api/agents_api/queries/entries/create_entry.py b/agents-api/agents_api/queries/entries/create_entry.py
deleted file mode 100644
index ea0e7e97d..000000000
--- a/agents-api/agents_api/queries/entries/create_entry.py
+++ /dev/null
@@ -1,196 +0,0 @@
-from uuid import UUID
-
-import asyncpg
-from beartype import beartype
-from fastapi import HTTPException
-from sqlglot import parse_one
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import CreateEntryRequest, Entry, Relation
-from ...common.utils.datetime import utcnow
-from ...common.utils.messages import content_to_json
-from ...metrics.counters import increase_counter
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class
-
-# Define the raw SQL query for creating entries with a developer check
-entry_query = """
-WITH data AS (
- SELECT
- unnest($1::uuid[]) AS session_id,
- unnest($2::uuid[]) AS entry_id,
- unnest($3::text[]) AS source,
- unnest($4::text[])::chat_role AS role,
- unnest($5::text[]) AS event_type,
- unnest($6::text[]) AS name,
- array[unnest($7::jsonb[])] AS content,
- unnest($8::text[]) AS tool_call_id,
- array[unnest($9::jsonb[])] AS tool_calls,
- unnest($10::text[]) AS model,
- unnest($11::int[]) AS token_count,
- unnest($12::timestamptz[]) AS created_at,
- unnest($13::timestamptz[]) AS timestamp
-)
-INSERT INTO entries (
- session_id,
- entry_id,
- source,
- role,
- event_type,
- name,
- content,
- tool_call_id,
- tool_calls,
- model,
- token_count,
- created_at,
- timestamp
-)
-SELECT
- d.session_id,
- d.entry_id,
- d.source,
- d.role,
- d.event_type,
- d.name,
- d.content,
- d.tool_call_id,
- d.tool_calls,
- d.model,
- d.token_count,
- d.created_at,
- d.timestamp
-FROM
- data d
-JOIN
- developers ON developers.developer_id = $14
-RETURNING *;
-"""
-
-# Define the raw SQL query for creating entry relations
-entry_relation_query = """
-WITH data AS (
- SELECT
- unnest($1::uuid[]) AS session_id,
- unnest($2::uuid[]) AS head,
- unnest($3::text[]) AS relation,
- unnest($4::uuid[]) AS tail,
- unnest($5::boolean[]) AS is_leaf
-)
-INSERT INTO entry_relations (
- session_id,
- head,
- relation,
- tail,
- is_leaf
-)
-SELECT
- d.session_id,
- d.head,
- d.relation,
- d.tail,
- d.is_leaf
-FROM
- data d
-JOIN
- developers ON developers.developer_id = $6
-RETURNING *;
-"""
-
-
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
- status_code=404,
- detail=str(exc),
- ),
- asyncpg.UniqueViolationError: lambda exc: HTTPException(
- status_code=409,
- detail=str(exc),
- ),
- asyncpg.NotNullViolationError: lambda exc: HTTPException(
- status_code=400,
- detail=str(exc),
- ),
- }
-)
-@wrap_in_class(
- Entry,
- transform=lambda d: {
- "id": UUID(d.pop("entry_id")),
- **d,
- },
-)
-@increase_counter("create_entries")
-@pg_query
-@beartype
-async def create_entries(
- *,
- developer_id: UUID,
- session_id: UUID,
- data: list[CreateEntryRequest],
-) -> tuple[str, list]:
- # Convert the data to a list of dictionaries
- data_dicts = [item.model_dump(mode="json") for item in data]
-
- # Prepare the parameters for the query
- params = [
- [session_id] * len(data_dicts), # $1
- [item.pop("id", None) or str(uuid7()) for item in data_dicts], # $2
- [item.get("source") for item in data_dicts], # $3
- [item.get("role") for item in data_dicts], # $4
- [item.get("event_type") or "message.create" for item in data_dicts], # $5
- [item.get("name") for item in data_dicts], # $6
- [content_to_json(item.get("content") or {}) for item in data_dicts], # $7
- [item.get("tool_call_id") for item in data_dicts], # $8
- [content_to_json(item.get("tool_calls") or {}) for item in data_dicts], # $9
- [item.get("modelname") for item in data_dicts], # $10
- [item.get("token_count") for item in data_dicts], # $11
- [item.get("created_at") or utcnow() for item in data_dicts], # $12
- [utcnow() for _ in data_dicts], # $13
- developer_id, # $14
- ]
-
- return (
- entry_query,
- params,
- )
-
-
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
- status_code=404,
- detail=str(exc),
- ),
- asyncpg.UniqueViolationError: lambda exc: HTTPException(
- status_code=409,
- detail=str(exc),
- ),
- }
-)
-@wrap_in_class(Relation)
-@increase_counter("add_entry_relations")
-@pg_query
-@beartype
-async def add_entry_relations(
- *,
- developer_id: UUID,
- data: list[Relation],
-) -> tuple[str, list]:
- # Convert the data to a list of dictionaries
- data_dicts = [item.model_dump(mode="json") for item in data]
-
- # Prepare the parameters for the query
- params = [
- [item.get("session_id") for item in data_dicts], # $1
- [item.get("head") for item in data_dicts], # $2
- [item.get("relation") for item in data_dicts], # $3
- [item.get("tail") for item in data_dicts], # $4
- [item.get("is_leaf", False) for item in data_dicts], # $5
- developer_id, # $6
- ]
-
- return (
- entry_relation_query,
- params,
- )
diff --git a/agents-api/agents_api/queries/entries/delete_entries.py b/agents-api/agents_api/queries/entries/delete_entries.py
new file mode 100644
index 000000000..9a5d6faa3
--- /dev/null
+++ b/agents-api/agents_api/queries/entries/delete_entries.py
@@ -0,0 +1,128 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query for deleting entries with a developer check
+delete_entry_query = parse_one("""
+DELETE FROM entries
+USING developers
+WHERE entries.session_id = $1 -- session_id
+ AND developers.developer_id = $2 -- developer_id
+
+RETURNING entries.session_id as session_id;
+""").sql(pretty=True)
+
+# Define the raw SQL query for deleting all entry relations for a session
+delete_entry_relations_query = parse_one("""
+DELETE FROM entry_relations
+WHERE entry_relations.session_id = $1 -- session_id
+""").sql(pretty=True)
+
+# Define the raw SQL query for deleting entry relations by entry ids
+delete_entry_relations_by_ids_query = parse_one("""
+DELETE FROM entry_relations
+WHERE entry_relations.session_id = $1 -- session_id
+ AND (entry_relations.head = ANY($2) -- entry_ids
+ OR entry_relations.tail = ANY($2)) -- entry_ids
+""").sql(pretty=True)
+
+# Define the raw SQL query for deleting entries by entry_ids with a developer check
+delete_entry_by_ids_query = parse_one("""
+DELETE FROM entries
+USING developers
+WHERE entries.entry_id = ANY($1) -- entry_ids
+ AND developers.developer_id = $2 -- developer_id
+ AND entries.session_id = $3 -- session_id
+
+RETURNING entries.entry_id as entry_id;
+""").sql(pretty=True)
+
+# Query for checking if the session exists (mirrors create_entries.py)
+session_exists_query = """
+SELECT EXISTS (
+ SELECT 1
+ FROM sessions
+ WHERE session_id = $1
+ AND developer_id = $2
+);
+"""
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail="The specified session or developer does not exist.",
+ ),
+ asyncpg.UniqueViolationError: lambda exc: HTTPException(
+ status_code=409,
+ detail="The specified session has already been deleted.",
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceDeletedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": d["session_id"],
+ "deleted_at": utcnow(),
+ "jobs": [],
+ },
+)
+@increase_counter("delete_entries_for_session")
+@pg_query
+@beartype
+async def delete_entries_for_session(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
+ """Delete all entries for a given session."""
+ return [
+ (session_exists_query, [session_id, developer_id], "fetch"),
+ (delete_entry_relations_query, [session_id], "fetchmany"),
+ (delete_entry_query, [session_id, developer_id], "fetchmany"),
+ ]
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail="The specified entries, session, or developer does not exist.",
+ ),
+ asyncpg.UniqueViolationError: lambda exc: HTTPException(
+ status_code=409,
+ detail="One or more specified entries have already been deleted.",
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceDeletedResponse,
+ transform=lambda d: {
+ "id": d["entry_id"],
+ "deleted_at": utcnow(),
+ "jobs": [],
+ },
+)
+@increase_counter("delete_entries")
+@pg_query
+@beartype
+async def delete_entries(
+ *, developer_id: UUID, session_id: UUID, entry_ids: list[UUID]
+) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
+ """Delete specific entries by their IDs."""
+ return [
+ (session_exists_query, [session_id, developer_id], "fetch"),
+ (delete_entry_relations_by_ids_query, [session_id, entry_ids], "fetchmany"),
+ (delete_entry_by_ids_query, [entry_ids, developer_id, session_id], "fetchmany"),
+ ]
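The batch order here is deliberate: the session check runs first, then entry_relations rows are removed before the entries they point at, presumably so the relation rows never dangle mid-transaction. A rough trace of the intended transaction (correct asyncpg fetchmany usage shown, where the second argument is an iterable of per-execution parameter lists):

    async with pool.acquire() as conn:
        async with conn.transaction():
            await conn.fetch(session_exists_query, session_id, developer_id)
            await conn.fetchmany(delete_entry_relations_query, [[session_id]])
            await conn.fetchmany(delete_entry_query, [[session_id, developer_id]])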
diff --git a/agents-api/agents_api/queries/entries/delete_entry.py b/agents-api/agents_api/queries/entries/delete_entry.py
deleted file mode 100644
index d6cdc6e87..000000000
--- a/agents-api/agents_api/queries/entries/delete_entry.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from uuid import UUID
-
-import asyncpg
-from beartype import beartype
-from fastapi import HTTPException
-from sqlglot import parse_one
-
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-
-# Define the raw SQL query for deleting entries with a developer check
-entry_query = parse_one("""
-DELETE FROM entries
-USING developers
-WHERE entries.session_id = $1 -- session_id
-AND developers.developer_id = $2
-RETURNING entries.session_id as session_id;
-""").sql(pretty=True)
-
-# Define the raw SQL query for deleting entries by entry_ids with a developer check
-delete_entry_by_ids_query = parse_one("""
-DELETE FROM entries
-USING developers
-WHERE entries.entry_id = ANY($1) -- entry_ids
-AND developers.developer_id = $2
-AND entries.session_id = $3 -- session_id
-RETURNING entries.entry_id as entry_id;
-""").sql(pretty=True)
-
-
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
- status_code=400,
- detail=str(exc),
- ),
- asyncpg.UniqueViolationError: lambda exc: HTTPException(
- status_code=404,
- detail=str(exc),
- ),
- }
-)
-@wrap_in_class(
- ResourceDeletedResponse,
- one=True,
- transform=lambda d: {
- "id": d["session_id"], # Only return session cleared
- "deleted_at": utcnow(),
- "jobs": [],
- },
-)
-@pg_query
-@beartype
-async def delete_entries_for_session(
- *,
- developer_id: UUID,
- session_id: UUID,
-) -> tuple[str, list]:
- return (
- entry_query,
- [session_id, developer_id],
- )
-
-
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: partialclass(
- HTTPException,
- status_code=400,
- detail="The specified developer does not exist.",
- ),
- asyncpg.UniqueViolationError: partialclass(
- HTTPException,
- status_code=404,
- detail="One or more specified entries do not exist.",
- ),
- }
-)
-@wrap_in_class(
- ResourceDeletedResponse,
- transform=lambda d: {
- "id": d["entry_id"],
- "deleted_at": utcnow(),
- "jobs": [],
- },
-)
-@pg_query
-@beartype
-async def delete_entries(
- *, developer_id: UUID, session_id: UUID, entry_ids: list[UUID]
-) -> tuple[str, list]:
- return (
- delete_entry_by_ids_query,
- [entry_ids, developer_id, session_id],
- )
diff --git a/agents-api/agents_api/queries/entries/get_history.py b/agents-api/agents_api/queries/entries/get_history.py
index c6c38d366..8f0ddf4a1 100644
--- a/agents-api/agents_api/queries/entries/get_history.py
+++ b/agents-api/agents_api/queries/entries/get_history.py
@@ -6,7 +6,7 @@
from sqlglot import parse_one
from ...autogen.openapi_model import History
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for getting history with a developer check
history_query = parse_one("""
diff --git a/agents-api/agents_api/queries/entries/list_entries.py b/agents-api/agents_api/queries/entries/list_entries.py
new file mode 100644
index 000000000..a3fa6d0a0
--- /dev/null
+++ b/agents-api/agents_api/queries/entries/list_entries.py
@@ -0,0 +1,112 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+
+from ...autogen.openapi_model import Entry
+from ...metrics.counters import increase_counter
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class
+
+# Query for checking if the session exists
+session_exists_query = """
+SELECT CASE
+ WHEN EXISTS (
+ SELECT 1 FROM sessions
+ WHERE session_id = $1 AND developer_id = $2
+ )
+ THEN TRUE
+ ELSE (SELECT NULL::boolean WHERE FALSE) -- NOTE: a zero-row scalar subquery yields NULL here rather than raising NO_DATA_FOUND
+END;
+"""
+
+list_entries_query = """
+SELECT
+ e.entry_id as id,
+ e.session_id,
+ e.role,
+ e.name,
+ e.content,
+ e.source,
+ e.token_count,
+ e.created_at,
+ e.timestamp,
+ e.event_type,
+ e.tool_call_id,
+ e.tool_calls,
+ e.model
+FROM entries e
+JOIN developers d ON d.developer_id = $5
+LEFT JOIN entry_relations er ON er.head = e.entry_id AND er.session_id = e.session_id
+WHERE e.session_id = $1
+AND e.source = ANY($2)
+AND (er.relation IS NULL OR er.relation != ALL($6))
+ORDER BY e.{sort_by} {direction} -- safe to interpolate: both values are Literal-constrained
+LIMIT $3
+OFFSET $4;
+"""
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
+ status_code=404,
+ detail=str(exc),
+ ),
+ asyncpg.UniqueViolationError: lambda exc: HTTPException(
+ status_code=409,
+ detail=str(exc),
+ ),
+ asyncpg.NotNullViolationError: lambda exc: HTTPException(
+ status_code=400,
+ detail=str(exc),
+ ),
+ }
+)
+@wrap_in_class(Entry)
+@increase_counter("list_entries")
+@pg_query
+@beartype
+async def list_entries(
+ *,
+ developer_id: UUID,
+ session_id: UUID,
+ allowed_sources: list[str] = ["api_request", "api_response"],
+ limit: int = 100,
+ offset: int = 0,
+ sort_by: Literal["created_at", "timestamp"] = "timestamp",
+ direction: Literal["asc", "desc"] = "asc",
+ exclude_relations: list[str] = [],
+) -> list[tuple[str, list]]:
+ if limit < 1 or limit > 1000:
+ raise HTTPException(status_code=400, detail="Limit must be between 1 and 1000")
+ if offset < 0:
+ raise HTTPException(status_code=400, detail="Offset must be non-negative")
+
+ query = list_entries_query.format(
+ sort_by=sort_by,
+ direction=direction,
+ )
+
+ # Parameters for the entry query
+ entry_params = [
+ session_id, # $1
+ allowed_sources, # $2
+ limit, # $3
+ offset, # $4
+ developer_id, # $5
+ exclude_relations, # $6
+ ]
+
+ return [
+ (
+ session_exists_query,
+ [session_id, developer_id],
+ ),
+ (
+ query,
+ entry_params,
+ ),
+ ]
+
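PostgreSQL cannot bind identifiers or keywords as parameters, which is why the old ORDER BY e.$3 $4 form could never work; interpolating via str.format is safe here only because sort_by and direction are Literal-constrained (and beartype enforces that at call time). The guard, reduced to an illustrative sketch:

    from typing import Literal

    def render_order_by(
        sort_by: Literal["created_at", "timestamp"],
        direction: Literal["asc", "desc"],
    ) -> str:
        # only these fixed strings can ever reach the format call
        assert sort_by in ("created_at", "timestamp")
        assert direction in ("asc", "desc")
        return f"ORDER BY e.{sort_by} {direction}"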
diff --git a/agents-api/agents_api/queries/entries/list_entry.py b/agents-api/agents_api/queries/entries/list_entry.py
deleted file mode 100644
index 1fa6479d1..000000000
--- a/agents-api/agents_api/queries/entries/list_entry.py
+++ /dev/null
@@ -1,79 +0,0 @@
-from typing import Literal
-from uuid import UUID
-
-import asyncpg
-from beartype import beartype
-from fastapi import HTTPException
-
-from ...autogen.openapi_model import Entry
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class
-
-entry_query = """
-SELECT
- e.entry_id as id, -- entry_id
- e.session_id, -- session_id
- e.role, -- role
- e.name, -- name
- e.content, -- content
- e.source, -- source
- e.token_count, -- token_count
- e.created_at, -- created_at
- e.timestamp -- timestamp
-FROM entries e
-JOIN developers d ON d.developer_id = $7
-LEFT JOIN entry_relations er ON er.head = e.entry_id AND er.session_id = e.session_id
-WHERE e.session_id = $1
-AND e.source = ANY($2)
-AND (er.relation IS NULL OR er.relation != ALL($8))
-ORDER BY e.$3 $4
-LIMIT $5
-OFFSET $6;
-"""
-
-
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
- status_code=404,
- detail=str(exc),
- ),
- asyncpg.UniqueViolationError: lambda exc: HTTPException(
- status_code=404,
- detail=str(exc),
- ),
- }
-)
-@wrap_in_class(Entry)
-@pg_query
-@beartype
-async def list_entries(
- *,
- developer_id: UUID,
- session_id: UUID,
- allowed_sources: list[str] = ["api_request", "api_response"],
- limit: int = 1,
- offset: int = 0,
- sort_by: Literal["created_at", "timestamp"] = "timestamp",
- direction: Literal["asc", "desc"] = "asc",
- exclude_relations: list[str] = [],
-) -> tuple[str, list]:
- if limit < 1 or limit > 1000:
- raise HTTPException(status_code=400, detail="Limit must be between 1 and 1000")
- if offset < 0:
- raise HTTPException(status_code=400, detail="Offset must be non-negative")
-
- # making the parameters for the query
- params = [
- session_id, # $1
- allowed_sources, # $2
- sort_by, # $3
- direction, # $4
- limit, # $5
- offset, # $6
- developer_id, # $7
- exclude_relations, # $8
- ]
- return (
- entry_query,
- params,
- )
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 3b5dc0bb0..db583e08f 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -3,16 +3,27 @@
import socket
import time
from functools import partialmethod, wraps
-from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar, cast
+from typing import (
+ Any,
+ Awaitable,
+ Callable,
+ Literal,
+ NotRequired,
+ ParamSpec,
+ Type,
+ TypeVar,
+ cast,
+)
import asyncpg
-import pandas as pd
from asyncpg import Record
from beartype import beartype
from fastapi import HTTPException
from pydantic import BaseModel
+from typing_extensions import TypedDict
from ..app import app
+from ..env import query_timeout
P = ParamSpec("P")
T = TypeVar("T")
@@ -31,15 +42,61 @@ class NewCls(cls):
return NewCls
+class AsyncPGFetchArgs(TypedDict):
+ query: str
+ args: list[Any]
+ timeout: NotRequired[float | None]
+
+
+type SQLQuery = str
+type FetchMethod = Literal["fetch", "fetchmany"]
+type PGQueryArgs = tuple[SQLQuery, list[Any]] | tuple[SQLQuery, list[Any], FetchMethod]
+type PreparedPGQueryArgs = tuple[FetchMethod, AsyncPGFetchArgs]
+type BatchedPreparedPGQueryArgs = list[PreparedPGQueryArgs]
+
+
+@beartype
+def prepare_pg_query_args(
+ query_args: PGQueryArgs | list[PGQueryArgs],
+) -> BatchedPreparedPGQueryArgs:
+ batch = []
+ query_args = [query_args] if isinstance(query_args, tuple) else query_args
+
+ for query_arg in query_args:
+ match query_arg:
+ case (query, variables) | (query, variables, "fetch"):
+ batch.append(
+ (
+ "fetch",
+ AsyncPGFetchArgs(
+ query=query, args=variables, timeout=query_timeout
+ ),
+ )
+ )
+ case (query, variables, "fetchmany"):
+ batch.append(
+ (
+ "fetchmany",
+ AsyncPGFetchArgs(
+ query=query, args=[variables], timeout=query_timeout
+ ),
+ )
+ )
+ case _:
+ raise ValueError("Invalid query arguments")
+
+ return batch
+
+
@beartype
def pg_query(
- func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
+ func: Callable[P, PGQueryArgs | list[PGQueryArgs]] | None = None,
debug: bool | None = None,
only_on_error: bool = False,
timeit: bool = False,
) -> Callable[..., Callable[P, list[Record]]] | Callable[P, list[Record]]:
def pg_query_dec(
- func: Callable[P, tuple[str, list[Any]] | list[tuple[str, list[Any]]]],
+ func: Callable[P, PGQueryArgs | list[PGQueryArgs]],
) -> Callable[..., Callable[P, list[Record]]]:
"""
Decorator that wraps a function that takes arbitrary arguments, and
@@ -57,14 +114,10 @@ async def wrapper(
connection_pool: asyncpg.Pool | None = None,
**kwargs: P.kwargs,
) -> list[Record]:
- query, variables = await func(*args, **kwargs)
+ query_args = await func(*args, **kwargs)
+ batch = prepare_pg_query_args(query_args)
- not only_on_error and debug and print(query)
- not only_on_error and debug and pprint(
- dict(
- variables=variables,
- )
- )
+ not only_on_error and debug and pprint(batch)
# Run the query
pool = (
@@ -73,20 +126,20 @@ async def wrapper(
else cast(asyncpg.Pool, app.state.postgres_pool)
)
- assert isinstance(variables, list) and len(variables) > 0
-
- queries = query if isinstance(query, list) else [query]
- variables_list = (
- variables if isinstance(variables[0], list) else [variables]
- )
- zipped = zip(queries, variables_list)
-
try:
async with pool.acquire() as conn:
async with conn.transaction():
start = timeit and time.perf_counter()
- for query, variables in zipped:
- results: list[Record] = await conn.fetch(query, *variables)
+ for method_name, payload in batch:
+ method = getattr(conn, method_name)
+
+ query = payload["query"]
+ args = payload["args"]
+ timeout = payload.get("timeout")
+
+ results: list[Record] = await method(
+ query, *args, timeout=timeout
+ )
end = timeit and time.perf_counter()
@@ -96,8 +149,7 @@ async def wrapper(
except Exception as e:
if only_on_error and debug:
- print(query)
- pprint(variables)
+ pprint(batch)
debug and print(repr(e))
connection_error = isinstance(
@@ -113,11 +165,7 @@ async def wrapper(
raise
- not only_on_error and debug and pprint(
- dict(
- results=[dict(result.items()) for result in results],
- )
- )
+ not only_on_error and debug and pprint(results)
return results
@@ -210,7 +258,7 @@ async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
result: T = await func(*args, **kwargs)
except BaseException as error:
_check_error(error)
- raise
+ raise error
return result
@@ -220,7 +268,7 @@ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
result: T = func(*args, **kwargs)
except BaseException as error:
_check_error(error)
- raise
+ raise error
return result
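prepare_pg_query_args normalizes every shape a query function may return -- a bare (sql, args) tuple, or a list of (sql, args[, method]) tuples -- into (method, kwargs) pairs with the new query_timeout attached; the wrapper then dispatches on the method name via getattr(conn, ...). Illustrative input/output (timeout shown at its 90-second default):

    batch = prepare_pg_query_args(("SELECT 1", []))
    # [("fetch", {"query": "SELECT 1", "args": [], "timeout": 90.0})]

    batch = prepare_pg_query_args([("INSERT ...", [[1], [2]], "fetchmany")])
    # [("fetchmany", {"query": "INSERT ...", "args": [[[1], [2]]], "timeout": 90.0})]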
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index 379526e0f..a04a7fc66 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -20,7 +20,6 @@
from .app import app
from .common.exceptions import BaseCommonException
-from .dependencies.auth import get_api_key
from .env import api_prefix, hostname, protocol, public_port, sentry_dsn
from .exceptions import PromptTooBigError
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index c2aa350a8..4a02efac4 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,24 +1,12 @@
-import json
import random
import string
-import time
from uuid import UUID
-import asyncpg
from fastapi.testclient import TestClient
-from temporalio.client import WorkflowHandle
from uuid_extensions import uuid7
from ward import fixture
from agents_api.autogen.openapi_model import (
- CreateAgentRequest,
- CreateDocRequest,
- CreateExecutionRequest,
- CreateFileRequest,
- CreateSessionRequest,
- CreateTaskRequest,
- CreateToolRequest,
- CreateTransitionRequest,
CreateUserRequest,
)
from agents_api.clients.pg import create_db_pool
@@ -43,7 +31,6 @@
# from agents_api.queries.tools.create_tools import create_tools
# from agents_api.queries.tools.delete_tool import delete_tool
from agents_api.queries.users.create_user import create_user
-from agents_api.queries.users.delete_user import delete_user
from agents_api.web import app
from .utils import (
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index c07891305..87d9cdb4f 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -3,27 +3,21 @@
It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
"""
-from uuid import UUID
+from uuid import uuid4
-from ward import test
+from fastapi import HTTPException
+from ward import raises, test
-from agents_api.autogen.openapi_model import CreateEntryRequest, Entry
+from agents_api.autogen.openapi_model import CreateEntryRequest
from agents_api.clients.pg import create_db_pool
-from agents_api.queries.entries.create_entry import create_entries
-from agents_api.queries.entries.delete_entry import delete_entries
-from agents_api.queries.entries.get_history import get_history
-from agents_api.queries.entries.list_entry import list_entries
-from tests.fixtures import pg_dsn, test_developer_id # , test_session
+from agents_api.queries.entries import create_entries, list_entries
+from tests.fixtures import pg_dsn, test_developer # , test_session
-# Test UUIDs for consistent testing
MODEL = "gpt-4o-mini"
-SESSION_ID = UUID("123e4567-e89b-12d3-a456-426614174001")
-TEST_DEVELOPER_ID = UUID("123e4567-e89b-12d3-a456-426614174000")
-TEST_USER_ID = UUID("987e6543-e21b-12d3-a456-426614174000")
-@test("query: create entry")
-async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
+@test("query: create entry no session")
+async def _(dsn=pg_dsn, developer=test_developer):
"""Test the addition of a new entry to the database."""
pool = await create_db_pool(dsn=dsn)
@@ -34,12 +28,31 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_sessi
content="test entry content",
)
- await create_entries(
- developer_id=TEST_DEVELOPER_ID,
- session_id=SESSION_ID,
- data=[test_entry],
- connection_pool=pool,
- )
+ with raises(HTTPException) as exc_info:
+ await create_entries(
+ developer_id=developer.id,
+ session_id=uuid4(),
+ data=[test_entry],
+ connection_pool=pool,
+ )
+
+ assert exc_info.raised.status_code == 404
+
+
+@test("query: list entries no session")
+async def _(dsn=pg_dsn, developer=test_developer):
+ """Test the retrieval of entries from the database."""
+
+ pool = await create_db_pool(dsn=dsn)
+
+ with raises(HTTPException) as exc_info:
+ await list_entries(
+ developer_id=developer.id,
+ session_id=uuid4(),
+ connection_pool=pool,
+ )
+
+ assert exc_info.raised.status_code == 404
# @test("query: get entries")
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index d182586dc..4fdc7e6e4 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -3,36 +3,21 @@
Tests verify the SQL queries without actually executing them against a database.
"""
-from uuid import UUID
-
-import asyncpg
from uuid_extensions import uuid7
from ward import raises, test
from agents_api.autogen.openapi_model import (
- CreateOrUpdateSessionRequest,
- CreateSessionRequest,
- PatchSessionRequest,
- ResourceDeletedResponse,
- ResourceUpdatedResponse,
Session,
- UpdateSessionRequest,
)
from agents_api.clients.pg import create_db_pool
from agents_api.queries.sessions import (
count_sessions,
- create_or_update_session,
- create_session,
- delete_session,
get_session,
list_sessions,
- patch_session,
- update_session,
)
from tests.fixtures import (
pg_dsn,
test_developer_id,
- test_user,
) # , test_session, test_agent, test_user
# @test("query: create session sql")
From 2b8686c2f52996899eb41cf35a0dbacbc0d07d06 Mon Sep 17 00:00:00 2001
From: creatorrr
Date: Wed, 18 Dec 2024 14:27:48 +0000
Subject: [PATCH 064/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/entries/list_entries.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/agents-api/agents_api/queries/entries/list_entries.py b/agents-api/agents_api/queries/entries/list_entries.py
index a3fa6d0a0..0aeb92a25 100644
--- a/agents-api/agents_api/queries/entries/list_entries.py
+++ b/agents-api/agents_api/queries/entries/list_entries.py
@@ -109,4 +109,3 @@ async def list_entries(
entry_params,
),
]
-
From 94aa3ce1684b0a058d4b3bd0cf68e630918fb2cb Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Wed, 18 Dec 2024 18:11:02 +0300
Subject: [PATCH 065/274] fix(agents-api): change modelname to model in
BaseEntry
---
agents-api/agents_api/autogen/Entries.py | 2 +-
agents-api/agents_api/autogen/openapi_model.py | 2 +-
agents-api/agents_api/queries/entries/create_entries.py | 2 +-
agents-api/agents_api/queries/entries/delete_entries.py | 2 +-
integrations-service/integrations/autogen/Entries.py | 2 +-
typespec/entries/models.tsp | 2 +-
typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml | 6 +++---
7 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/agents-api/agents_api/autogen/Entries.py b/agents-api/agents_api/autogen/Entries.py
index d195b518f..867b10192 100644
--- a/agents-api/agents_api/autogen/Entries.py
+++ b/agents-api/agents_api/autogen/Entries.py
@@ -52,7 +52,7 @@ class BaseEntry(BaseModel):
]
tokenizer: str
token_count: int
- modelname: str = "gpt-40-mini"
+ model: str = "gpt-4o-mini"
tool_calls: (
list[
ChosenFunctionCall
diff --git a/agents-api/agents_api/autogen/openapi_model.py b/agents-api/agents_api/autogen/openapi_model.py
index 01042c58c..af73e8015 100644
--- a/agents-api/agents_api/autogen/openapi_model.py
+++ b/agents-api/agents_api/autogen/openapi_model.py
@@ -400,7 +400,7 @@ def from_model_input(
source=source,
tokenizer=tokenizer["type"],
token_count=token_count,
- modelname=model,
+ model=model,
**kwargs,
)
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index ffbd2de22..24c0be26e 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -107,7 +107,7 @@ async def create_entries(
content_to_json(item.get("content") or {}), # $7
item.get("tool_call_id"), # $8
content_to_json(item.get("tool_calls") or {}), # $9
- item.get("modelname"), # $10
+ item.get("model"), # $10
item.get("token_count"), # $11
item.get("created_at") or utcnow(), # $12
utcnow(), # $13
diff --git a/agents-api/agents_api/queries/entries/delete_entries.py b/agents-api/agents_api/queries/entries/delete_entries.py
index 9a5d6faa3..dfdadb8da 100644
--- a/agents-api/agents_api/queries/entries/delete_entries.py
+++ b/agents-api/agents_api/queries/entries/delete_entries.py
@@ -9,7 +9,7 @@
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for deleting entries with a developer check
delete_entry_query = parse_one("""
diff --git a/integrations-service/integrations/autogen/Entries.py b/integrations-service/integrations/autogen/Entries.py
index d195b518f..867b10192 100644
--- a/integrations-service/integrations/autogen/Entries.py
+++ b/integrations-service/integrations/autogen/Entries.py
@@ -52,7 +52,7 @@ class BaseEntry(BaseModel):
]
tokenizer: str
token_count: int
- modelname: str = "gpt-40-mini"
+ model: str = "gpt-4o-mini"
tool_calls: (
list[
ChosenFunctionCall
diff --git a/typespec/entries/models.tsp b/typespec/entries/models.tsp
index 640e6831d..d7eae55e7 100644
--- a/typespec/entries/models.tsp
+++ b/typespec/entries/models.tsp
@@ -107,7 +107,7 @@ model BaseEntry {
tokenizer: string;
token_count: uint16;
- modelname: string = "gpt-40-mini";
+ "model": string = "gpt-4o-mini";
/** Tool calls generated by the model. */
tool_calls?: ChosenToolCall[] | null = null;
diff --git a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
index 9b36baa2b..9298ab458 100644
--- a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
+++ b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
@@ -3064,7 +3064,7 @@ components:
- source
- tokenizer
- token_count
- - modelname
+ - model
- timestamp
properties:
role:
@@ -3308,9 +3308,9 @@ components:
token_count:
type: integer
format: uint16
- modelname:
+ model:
type: string
- default: gpt-40-mini
+ default: gpt-4o-mini
tool_calls:
type: array
items:
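Net effect of this patch, abridged to the regenerated pydantic field (both autogen copies and the TypeSpec source now agree):

    class BaseEntry(BaseModel):
        ...
        model: str = "gpt-4o-mini"  # was: modelname: str = "gpt-40-mini"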
From 64a34cdac3883d63d1764e9473fcab982ab346bd Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Tue, 17 Dec 2024 13:39:21 +0300
Subject: [PATCH 066/274] feat(agents-api): add agent queries tests
---
.../agents_api/queries/agents/__init__.py | 12 +-
.../agents_api/queries/agents/create_agent.py | 61 ++-
.../queries/agents/create_or_update_agent.py | 21 +-
.../agents_api/queries/agents/delete_agent.py | 23 +-
.../agents_api/queries/agents/get_agent.py | 24 +-
.../agents_api/queries/agents/list_agents.py | 23 +-
.../agents_api/queries/agents/patch_agent.py | 23 +-
.../agents_api/queries/agents/update_agent.py | 23 +-
agents-api/tests/fixtures.py | 34 +-
agents-api/tests/test_agent_queries.py | 350 ++++++++++--------
10 files changed, 307 insertions(+), 287 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/__init__.py b/agents-api/agents_api/queries/agents/__init__.py
index 709b051ea..ebd169040 100644
--- a/agents-api/agents_api/queries/agents/__init__.py
+++ b/agents-api/agents_api/queries/agents/__init__.py
@@ -13,9 +13,9 @@
# ruff: noqa: F401, F403, F405
from .create_agent import create_agent
-from .create_or_update_agent import create_or_update_agent_query
-from .delete_agent import delete_agent_query
-from .get_agent import get_agent_query
-from .list_agents import list_agents_query
-from .patch_agent import patch_agent_query
-from .update_agent import update_agent_query
+from .create_or_update_agent import create_or_update_agent
+from .delete_agent import delete_agent
+from .get_agent import get_agent
+from .list_agents import list_agents
+from .patch_agent import patch_agent
+from .update_agent import update_agent
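With the _query suffix dropped, call sites import the public names straight from the package, e.g.:

    from agents_api.queries.agents import (
        create_agent,
        create_or_update_agent,
        delete_agent,
        get_agent,
        list_agents,
        patch_agent,
        update_agent,
    )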
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 4c731d3dd..cbdb32972 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from pydantic import ValidationError
from uuid_extensions import uuid7
@@ -25,35 +24,35 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- ),
- psycopg_errors.UniqueViolation: partialclass(
- HTTPException,
- status_code=409,
- detail="An agent with this canonical name already exists for this developer.",
- ),
- psycopg_errors.CheckViolation: partialclass(
- HTTPException,
- status_code=400,
- detail="The provided data violates one or more constraints. Please check the input values.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. Please review the input.",
- ),
- }
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# ),
+# psycopg_errors.UniqueViolation: partialclass(
+# HTTPException,
+# status_code=409,
+# detail="An agent with this canonical name already exists for this developer.",
+# ),
+# psycopg_errors.CheckViolation: partialclass(
+# HTTPException,
+# status_code=400,
+# detail="The provided data violates one or more constraints. Please check the input values.",
+# ),
+# ValidationError: partialclass(
+# HTTPException,
+# status_code=400,
+# detail="Input validation failed. Please check the provided data.",
+# ),
+# TypeError: partialclass(
+# HTTPException,
+# status_code=400,
+# detail="A type mismatch occurred. Please review the input.",
+# ),
+# }
+# )
@wrap_in_class(
Agent,
one=True,
@@ -63,7 +62,7 @@
@pg_query
# @increase_counter("create_agent")
@beartype
-def create_agent(
+async def create_agent(
*,
developer_id: UUID,
agent_id: UUID | None = None,
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 96681255c..9c92f0b46 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
from ..utils import (
@@ -23,15 +22,15 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# )
@wrap_in_class(
Agent,
one=True,
@@ -41,7 +40,7 @@
@pg_query
# @increase_counter("create_or_update_agent1")
@beartype
-def create_or_update_agent_query(
+async def create_or_update_agent(
*, agent_id: UUID, developer_id: UUID, data: CreateOrUpdateAgentRequest
) -> tuple[list[str], dict]:
"""
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index f3c64fd18..545a976d5 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import ResourceDeletedResponse
from ..utils import (
@@ -22,16 +21,16 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
- # TODO: Add more exceptions
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
+# )
@wrap_in_class(
ResourceDeletedResponse,
one=True,
@@ -42,7 +41,7 @@
@pg_query
# @increase_counter("delete_agent1")
@beartype
-def delete_agent_query(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
+async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
"""
Constructs the SQL queries to delete an agent and its related settings.
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index 5e0edbb98..18d253e8d 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -8,8 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
-
from ...autogen.openapi_model import Agent
from ..utils import (
partialclass,
@@ -22,21 +20,21 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
- # TODO: Add more exceptions
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
+# )
@wrap_in_class(Agent, one=True)
@pg_query
# @increase_counter("get_agent1")
@beartype
-def get_agent_query(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
+async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
"""
Constructs the SQL query to retrieve an agent's details.
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 5fda7c626..c24276a97 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import Agent
from ..utils import (
@@ -22,21 +21,21 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
- # TODO: Add more exceptions
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
+# )
@wrap_in_class(Agent)
@pg_query
# @increase_counter("list_agents1")
@beartype
-def list_agents_query(
+async def list_agents(
*,
developer_id: UUID,
limit: int = 100,
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 450cbf8cc..d4adff092 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
from ..utils import (
@@ -22,16 +21,16 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
- # TODO: Add more exceptions
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
+# )
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
@@ -41,7 +40,7 @@
@pg_query
# @increase_counter("patch_agent1")
@beartype
-def patch_agent_query(
+async def patch_agent(
*, agent_id: UUID, developer_id: UUID, data: PatchAgentRequest
) -> tuple[str, dict]:
"""
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index 61548de70..2116e49b0 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -8,7 +8,6 @@
from beartype import beartype
from fastapi import HTTPException
-from psycopg import errors as psycopg_errors
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
from ..utils import (
@@ -22,16 +21,16 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- psycopg_errors.ForeignKeyViolation: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
- )
- }
- # TODO: Add more exceptions
-)
+# @rewrap_exceptions(
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
+# )
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
@@ -41,7 +40,7 @@
@pg_query
# @increase_counter("update_agent1")
@beartype
-def update_agent_query(
+async def update_agent(
*, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest
) -> tuple[str, dict]:
"""
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 4a02efac4..1151b433d 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -13,7 +13,7 @@
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
from agents_api.queries.developers.create_developer import create_developer
-# from agents_api.queries.agents.create_agent import create_agent
+from agents_api.queries.agents.create_agent import create_agent
# from agents_api.queries.agents.delete_agent import delete_agent
from agents_api.queries.developers.get_developer import get_developer
@@ -93,20 +93,24 @@ def patch_embed_acompletion():
yield embed, acompletion
-# @fixture(scope="global")
-# async def test_agent(dsn=pg_dsn, developer_id=test_developer_id):
-# async with get_pg_client(dsn=dsn) as client:
-# agent = await create_agent(
-# developer_id=developer_id,
-# data=CreateAgentRequest(
-# model="gpt-4o-mini",
-# name="test agent",
-# about="test agent about",
-# metadata={"test": "test"},
-# ),
-# client=client,
-# )
-# yield agent
+@fixture(scope="global")
+async def test_agent(dsn=pg_dsn, developer=test_developer):
+ pool = await asyncpg.create_pool(dsn=dsn)
+
+ async with get_pg_client(pool=pool) as client:
+ agent = await create_agent(
+ developer_id=developer.id,
+ data=CreateAgentRequest(
+ model="gpt-4o-mini",
+ name="test agent",
+ about="test agent about",
+ metadata={"test": "test"},
+ ),
+ client=client,
+ )
+
+ yield agent
+ await pool.close()
@fixture(scope="global")
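The global-scoped fixture owns its pool for the whole suite; the code after yield is ward's teardown and is what releases the connections. The same lifecycle, reduced to a hedged sketch (assuming asyncpg):

    import asyncpg

    async def pooled_fixture(dsn: str):
        pool = await asyncpg.create_pool(dsn=dsn)
        try:
            yield pool           # tests run while the pool is open
        finally:
            await pool.close()   # teardown: release all connections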
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index f079642b3..f8f75fd0b 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,163 +1,187 @@
-# # Tests for agent queries
-
-# from uuid_extensions import uuid7
-# from ward import raises, test
-
-# from agents_api.autogen.openapi_model import (
-# Agent,
-# CreateAgentRequest,
-# CreateOrUpdateAgentRequest,
-# PatchAgentRequest,
-# ResourceUpdatedResponse,
-# UpdateAgentRequest,
-# )
-# from agents_api.queries.agent.create_agent import create_agent
-# from agents_api.queries.agent.create_or_update_agent import create_or_update_agent
-# from agents_api.queries.agent.delete_agent import delete_agent
-# from agents_api.queries.agent.get_agent import get_agent
-# from agents_api.queries.agent.list_agents import list_agents
-# from agents_api.queries.agent.patch_agent import patch_agent
-# from agents_api.queries.agent.update_agent import update_agent
-# from tests.fixtures import cozo_client, test_agent, test_developer_id
-
-
-# @test("query: create agent")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# create_agent(
-# developer_id=developer_id,
-# data=CreateAgentRequest(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# ),
-# client=client,
-# )
-
-
-# @test("query: create agent with instructions")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# create_agent(
-# developer_id=developer_id,
-# data=CreateAgentRequest(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# instructions=["test instruction"],
-# ),
-# client=client,
-# )
-
-
-# @test("query: create or update agent")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# create_or_update_agent(
-# developer_id=developer_id,
-# agent_id=uuid7(),
-# data=CreateOrUpdateAgentRequest(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# instructions=["test instruction"],
-# ),
-# client=client,
-# )
-
-
-# @test("query: get agent not exists")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# agent_id = uuid7()
-
-# with raises(Exception):
-# get_agent(agent_id=agent_id, developer_id=developer_id, client=client)
-
-
-# @test("query: get agent exists")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# result = get_agent(agent_id=agent.id, developer_id=developer_id, client=client)
-
-# assert result is not None
-# assert isinstance(result, Agent)
-
-
-# @test("query: delete agent")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# temp_agent = create_agent(
-# developer_id=developer_id,
-# data=CreateAgentRequest(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# instructions=["test instruction"],
-# ),
-# client=client,
-# )
-
-# # Delete the agent
-# delete_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
-
-# # Check that the agent is deleted
-# with raises(Exception):
-# get_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
-
-
-# @test("query: update agent")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# result = update_agent(
-# agent_id=agent.id,
-# developer_id=developer_id,
-# data=UpdateAgentRequest(
-# name="updated agent",
-# about="updated agent about",
-# model="gpt-4o-mini",
-# default_settings={"temperature": 1.0},
-# metadata={"hello": "world"},
-# ),
-# client=client,
-# )
-
-# assert result is not None
-# assert isinstance(result, ResourceUpdatedResponse)
-
-# agent = get_agent(
-# agent_id=agent.id,
-# developer_id=developer_id,
-# client=client,
-# )
-
-# assert "test" not in agent.metadata
-
-
-# @test("query: patch agent")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# result = patch_agent(
-# agent_id=agent.id,
-# developer_id=developer_id,
-# data=PatchAgentRequest(
-# name="patched agent",
-# about="patched agent about",
-# default_settings={"temperature": 1.0},
-# metadata={"something": "else"},
-# ),
-# client=client,
-# )
-
-# assert result is not None
-# assert isinstance(result, ResourceUpdatedResponse)
-
-# agent = get_agent(
-# agent_id=agent.id,
-# developer_id=developer_id,
-# client=client,
-# )
-
-# assert "hello" in agent.metadata
-
-
-# @test("query: list agents")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# """Tests listing all agents associated with a developer in the database. Verifies that the correct list of agents is retrieved."""
-
-# result = list_agents(developer_id=developer_id, client=client)
-
-# assert isinstance(result, list)
-# assert all(isinstance(agent, Agent) for agent in result)
+# Tests for agent queries
+from uuid import uuid4
+
+import asyncpg
+from ward import raises, test
+
+from agents_api.autogen.openapi_model import (
+ Agent,
+ CreateAgentRequest,
+ CreateOrUpdateAgentRequest,
+ PatchAgentRequest,
+ ResourceUpdatedResponse,
+ UpdateAgentRequest,
+)
+from agents_api.clients.pg import get_pg_client
+from agents_api.queries.agents import (
+ create_agent,
+ create_or_update_agent,
+ delete_agent,
+ get_agent,
+ list_agents,
+ patch_agent,
+ update_agent,
+)
+from tests.fixtures import pg_dsn, test_agent, test_developer_id
+
+
+@test("model: create agent")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ await create_agent(
+ developer_id=developer_id,
+ data=CreateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ ),
+ client=client,
+ )
+
+
+@test("model: create agent with instructions")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ await create_agent(
+ developer_id=developer_id,
+ data=CreateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ ),
+ client=client,
+ )
+
+
+@test("model: create or update agent")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ await create_or_update_agent(
+ developer_id=developer_id,
+ agent_id=uuid4(),
+ data=CreateOrUpdateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ ),
+ client=client,
+ )
+
+
+@test("model: get agent not exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ agent_id = uuid4()
+ pool = await asyncpg.create_pool(dsn=dsn)
+
+ with raises(Exception):
+ async with get_pg_client(pool=pool) as client:
+ await get_agent(agent_id=agent_id, developer_id=developer_id, client=client)
+
+
+@test("model: get agent exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ result = await get_agent(agent_id=agent.id, developer_id=developer_id, client=client)
+
+ assert result is not None
+ assert isinstance(result, Agent)
+
+
+@test("model: delete agent")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ temp_agent = await create_agent(
+ developer_id=developer_id,
+ data=CreateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ ),
+ client=client,
+ )
+
+ # Delete the agent
+ await delete_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
+
+ # Check that the agent is deleted
+ with raises(Exception):
+ await get_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
+
+
+@test("model: update agent")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ result = await update_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ data=UpdateAgentRequest(
+ name="updated agent",
+ about="updated agent about",
+ model="gpt-4o-mini",
+ default_settings={"temperature": 1.0},
+ metadata={"hello": "world"},
+ ),
+ client=client,
+ )
+
+ assert result is not None
+ assert isinstance(result, ResourceUpdatedResponse)
+
+ async with get_pg_client(pool=pool) as client:
+ agent = await get_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ client=client,
+ )
+
+ assert "test" not in agent.metadata
+
+
+@test("model: patch agent")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ result = await patch_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ data=PatchAgentRequest(
+ name="patched agent",
+ about="patched agent about",
+ default_settings={"temperature": 1.0},
+ metadata={"something": "else"},
+ ),
+ client=client,
+ )
+
+ assert result is not None
+ assert isinstance(result, ResourceUpdatedResponse)
+
+ async with get_pg_client(pool=pool) as client:
+ agent = await get_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ client=client,
+ )
+
+ assert "hello" in agent.metadata
+
+
+@test("model: list agents")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Tests listing all agents associated with a developer in the database. Verifies that the correct list of agents is retrieved."""
+
+ pool = await asyncpg.create_pool(dsn=dsn)
+ async with get_pg_client(pool=pool) as client:
+ result = await list_agents(developer_id=developer_id, client=client)
+
+ assert isinstance(result, list)
+ assert all(isinstance(agent, Agent) for agent in result)
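The rewritten tests above open an asyncpg pool per test and borrow a connection through get_pg_client(pool=...). A minimal sketch of such a helper, assuming it only wraps pool acquire/release (the real implementation lives in agents_api/clients/pg.py and may differ):

    # Hypothetical sketch of a pool-backed connection helper.
    from contextlib import asynccontextmanager

    import asyncpg

    @asynccontextmanager
    async def get_pg_client(*, pool: asyncpg.Pool):
        # Borrow a connection; always return it, even if the body raises.
        connection = await pool.acquire()
        try:
            yield connection
        finally:
            await pool.release(connection)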
From 8cc2ae31b95e596edc69f0ccf80f7695afd52a24 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Wed, 18 Dec 2024 11:48:48 +0300
Subject: [PATCH 067/274] feat(agents-api): implement agent queries and tests
---
.../agents_api/queries/agents/create_agent.py | 85 ++++---
.../queries/agents/create_or_update_agent.py | 88 ++++---
.../agents_api/queries/agents/delete_agent.py | 82 +++---
.../agents_api/queries/agents/get_agent.py | 53 ++--
.../agents_api/queries/agents/list_agents.py | 82 +++---
.../agents_api/queries/agents/patch_agent.py | 73 ++++--
.../agents_api/queries/agents/update_agent.py | 57 +++--
agents-api/agents_api/queries/utils.py | 14 +
agents-api/tests/fixtures.py | 26 +-
agents-api/tests/test_agent_queries.py | 239 ++++++++----------
10 files changed, 430 insertions(+), 369 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index cbdb32972..63ac4870f 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -6,6 +6,7 @@
from typing import Any, TypeVar
from uuid import UUID
+from sqlglot import parse_one
from beartype import beartype
from fastapi import HTTPException
from pydantic import ValidationError
@@ -13,7 +14,7 @@
from ...autogen.openapi_model import Agent, CreateAgentRequest
from ..utils import (
- # generate_canonical_name,
+ generate_canonical_name,
partialclass,
pg_query,
rewrap_exceptions,
@@ -23,6 +24,33 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+raw_query = """
+INSERT INTO agents (
+ developer_id,
+ agent_id,
+ canonical_name,
+ name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings
+)
+VALUES (
+ $1,
+ $2,
+ $3,
+ $4,
+ $5,
+ $6,
+ $7,
+ $8,
+ $9
+)
+RETURNING *;
+"""
+
+query = parse_one(raw_query).sql(pretty=True)
# @rewrap_exceptions(
# {
@@ -57,17 +85,16 @@
Agent,
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
- _kind="inserted",
)
-@pg_query
# @increase_counter("create_agent")
+@pg_query
@beartype
async def create_agent(
*,
developer_id: UUID,
agent_id: UUID | None = None,
data: CreateAgentRequest,
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs and executes a SQL query to create a new agent in the database.
@@ -90,49 +117,23 @@ async def create_agent(
# Convert default_settings to dict if it exists
default_settings = (
- data.default_settings.model_dump() if data.default_settings else None
+ data.default_settings.model_dump() if data.default_settings else {}
)
# Set default values
- data.metadata = data.metadata or None
- # data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
+ data.metadata = data.metadata or {}
+ data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
- query = """
- INSERT INTO agents (
+ params = [
developer_id,
agent_id,
- canonical_name,
- name,
- about,
- instructions,
- model,
- metadata,
- default_settings
- )
- VALUES (
- %(developer_id)s,
- %(agent_id)s,
- %(canonical_name)s,
- %(name)s,
- %(about)s,
- %(instructions)s,
- %(model)s,
- %(metadata)s,
- %(default_settings)s
- )
- RETURNING *;
- """
-
- params = {
- "developer_id": developer_id,
- "agent_id": agent_id,
- "canonical_name": data.canonical_name,
- "name": data.name,
- "about": data.about,
- "instructions": data.instructions,
- "model": data.model,
- "metadata": data.metadata,
- "default_settings": default_settings,
- }
+ data.canonical_name,
+ data.name,
+ data.about,
+ data.instructions,
+ data.model,
+ data.metadata,
+ default_settings,
+ ]
return query, params
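This hunk swaps psycopg-style %(name)s placeholders for asyncpg's positional $1..$9 parameters and routes the raw SQL through sqlglot's parse_one(...).sql(pretty=True), so a malformed query fails at import time instead of at request time. How asyncpg binds positional parameters, trimmed to three columns for brevity (the DSN is a placeholder, and the sketch assumes the remaining agent columns are nullable or defaulted):

    import asyncio
    from uuid import uuid4

    import asyncpg

    async def main() -> None:
        conn = await asyncpg.connect("postgresql://user:pass@localhost/db")  # placeholder DSN
        row = await conn.fetchrow(
            "INSERT INTO agents (developer_id, agent_id, name)"
            " VALUES ($1, $2, $3) RETURNING *",
            uuid4(),       # $1 developer_id
            uuid4(),       # $2 agent_id
            "test agent",  # $3 name
        )
        print(dict(row))
        await conn.close()

    asyncio.run(main())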
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 9c92f0b46..bbb897fe5 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -6,12 +6,15 @@
from typing import Any, TypeVar
from uuid import UUID
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+
from beartype import beartype
from fastapi import HTTPException
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
from ..utils import (
- # generate_canonical_name,
+ generate_canonical_name,
partialclass,
pg_query,
rewrap_exceptions,
@@ -21,6 +24,34 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+raw_query = """
+INSERT INTO agents (
+ developer_id,
+ agent_id,
+ canonical_name,
+ name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings
+)
+VALUES (
+ $1,
+ $2,
+ $3,
+ $4,
+ $5,
+ $6,
+ $7,
+ $8,
+ $9
+)
+RETURNING *;
+"""
+
+query = parse_one(raw_query).sql(pretty=True)
+
# @rewrap_exceptions(
# {
@@ -35,14 +66,13 @@
Agent,
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
- _kind="inserted",
)
+# @increase_counter("create_or_update_agent")
@pg_query
-# @increase_counter("create_or_update_agent1")
@beartype
async def create_or_update_agent(
*, agent_id: UUID, developer_id: UUID, data: CreateOrUpdateAgentRequest
-) -> tuple[list[str], dict]:
+) -> tuple[str, list]:
"""
Constructs the SQL queries to create a new agent or update an existing agent's details.
@@ -64,49 +94,23 @@ async def create_or_update_agent(
# Convert default_settings to dict if it exists
default_settings = (
- data.default_settings.model_dump() if data.default_settings else None
+ data.default_settings.model_dump() if data.default_settings else {}
)
# Set default values
- data.metadata = data.metadata or None
- # data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
+ data.metadata = data.metadata or {}
+ data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
- query = """
- INSERT INTO agents (
+ params = [
developer_id,
agent_id,
- canonical_name,
- name,
- about,
- instructions,
- model,
- metadata,
- default_settings
- )
- VALUES (
- %(developer_id)s,
- %(agent_id)s,
- %(canonical_name)s,
- %(name)s,
- %(about)s,
- %(instructions)s,
- %(model)s,
- %(metadata)s,
- %(default_settings)s
- )
- RETURNING *;
- """
-
- params = {
- "developer_id": developer_id,
- "agent_id": agent_id,
- "canonical_name": data.canonical_name,
- "name": data.name,
- "about": data.about,
- "instructions": data.instructions,
- "model": data.model,
- "metadata": data.metadata,
- "default_settings": default_settings,
- }
+ data.canonical_name,
+ data.name,
+ data.about,
+ data.instructions,
+ data.model,
+ data.metadata,
+ default_settings,
+ ]
return (query, params)
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index 545a976d5..a5062f783 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -16,10 +16,40 @@
rewrap_exceptions,
wrap_in_class,
)
+from beartype import beartype
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+raw_query = """
+WITH deleted_docs AS (
+ DELETE FROM docs
+ WHERE developer_id = $1
+ AND doc_id IN (
+ SELECT ad.doc_id
+ FROM agent_docs ad
+ WHERE ad.agent_id = $2
+ AND ad.developer_id = $1
+ )
+), deleted_agent_docs AS (
+ DELETE FROM agent_docs
+ WHERE agent_id = $2 AND developer_id = $1
+), deleted_tools AS (
+ DELETE FROM tools
+ WHERE agent_id = $2 AND developer_id = $1
+)
+DELETE FROM agents
+WHERE agent_id = $2 AND developer_id = $1
+RETURNING developer_id, agent_id;
+"""
+
+
+# Parse and pretty-print the raw query (a single statement replaces the old list of queries)
+query = parse_one(raw_query).sql(pretty=True)
# @rewrap_exceptions(
# {
@@ -34,57 +64,23 @@
@wrap_in_class(
ResourceDeletedResponse,
one=True,
- transform=lambda d: {
- "id": d["agent_id"],
- },
+ transform=lambda d: {**d, "id": d["agent_id"], "deleted_at": utcnow()},
)
+# @increase_counter("delete_agent")
@pg_query
-# @increase_counter("delete_agent1")
@beartype
-async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
+async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
"""
- Constructs the SQL queries to delete an agent and its related settings.
+    Constructs the SQL query to delete an agent and its related resources (docs and tools).
Args:
agent_id (UUID): The UUID of the agent to be deleted.
developer_id (UUID): The UUID of the developer owning the agent.
Returns:
- tuple[list[str], dict]: A tuple containing the list of SQL queries and their parameters.
+ tuple[str, list]: A tuple containing the SQL query and its parameters.
"""
-
- queries = [
- """
- -- Delete docs that were only associated with this agent
- DELETE FROM docs
- WHERE developer_id = %(developer_id)s
- AND doc_id IN (
- SELECT ad.doc_id
- FROM agent_docs ad
- WHERE ad.agent_id = %(agent_id)s
- AND ad.developer_id = %(developer_id)s
- );
- """,
- """
- -- Delete agent_docs entries
- DELETE FROM agent_docs
- WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
- """,
- """
- -- Delete tools related to the agent
- DELETE FROM tools
- WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
- """,
- """
- -- Delete the agent
- DELETE FROM agents
- WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
- """,
- ]
-
- params = {
- "agent_id": agent_id,
- "developer_id": developer_id,
- }
-
- return (queries, params)
+ # Note: We swap the parameter order because the queries use $1 for developer_id and $2 for agent_id
+ params = [developer_id, agent_id]
+
+ return (query, params)
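The four sequential DELETE statements collapse into one statement whose data-modifying CTEs execute atomically, so no concurrent reader can observe a half-deleted agent and no explicit transaction block is needed. A trimmed sketch of the same pattern, reduced to two steps:

    import asyncpg

    async def delete_agent_cascade(pool: asyncpg.Pool, developer_id, agent_id):
        # $1 = developer_id, $2 = agent_id, matching the parameter-order note above.
        query = """
        WITH deleted_tools AS (
            DELETE FROM tools WHERE agent_id = $2 AND developer_id = $1
        )
        DELETE FROM agents WHERE agent_id = $2 AND developer_id = $1
        RETURNING developer_id, agent_id;
        """
        async with pool.acquire() as conn:
            return await conn.fetchrow(query, developer_id, agent_id)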
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index 18d253e8d..061d0b165 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -9,12 +9,39 @@
from beartype import beartype
from fastapi import HTTPException
from ...autogen.openapi_model import Agent
+from ...metrics.counters import increase_counter
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ..utils import (
partialclass,
pg_query,
rewrap_exceptions,
wrap_in_class,
)
+from beartype import beartype
+
+from ...autogen.openapi_model import Agent
+
+raw_query = """
+SELECT
+ agent_id,
+ developer_id,
+ name,
+ canonical_name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings,
+ created_at,
+ updated_at
+FROM
+ agents
+WHERE
+ agent_id = $2 AND developer_id = $1;
+"""
+
+query = parse_one(raw_query).sql(pretty=True)
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
@@ -30,11 +57,11 @@
# }
# # TODO: Add more exceptions
# )
-@wrap_in_class(Agent, one=True)
+@wrap_in_class(Agent, one=True, transform=lambda d: {"id": d["agent_id"], **d})
+# @increase_counter("get_agent")
@pg_query
-# @increase_counter("get_agent1")
@beartype
-async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], dict]:
+async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
"""
Constructs the SQL query to retrieve an agent's details.
@@ -45,23 +72,5 @@ async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[list[str], d
Returns:
tuple[str, list]: A tuple containing the SQL query and its parameters.
"""
- query = """
- SELECT
- agent_id,
- developer_id,
- name,
- canonical_name,
- about,
- instructions,
- model,
- metadata,
- default_settings,
- created_at,
- updated_at
- FROM
- agents
- WHERE
- agent_id = %(agent_id)s AND developer_id = %(developer_id)s;
- """
- return (query, {"agent_id": agent_id, "developer_id": developer_id})
+ return (query, [developer_id, agent_id])
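The transform passed to wrap_in_class only remaps the database column agent_id onto the API model's id field; everything else in the row passes through unchanged:

    # Illustrative row; values are placeholders.
    row = {"agent_id": "a1b2", "developer_id": "d3f4", "name": "test agent"}
    transform = lambda d: {"id": d["agent_id"], **d}
    print(transform(row))
    # {'id': 'a1b2', 'agent_id': 'a1b2', 'developer_id': 'd3f4', 'name': 'test agent'}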
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index c24276a97..92165e414 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -16,12 +16,42 @@
rewrap_exceptions,
wrap_in_class,
)
+from beartype import beartype
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+
+from ...autogen.openapi_model import Agent
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+raw_query = """
+SELECT
+ agent_id,
+ developer_id,
+ name,
+ canonical_name,
+ about,
+ instructions,
+ model,
+ metadata,
+ default_settings,
+ created_at,
+ updated_at
+FROM agents
+WHERE developer_id = $1 $7
+ORDER BY
+ CASE WHEN $4 = 'created_at' AND $5 = 'asc' THEN created_at END ASC NULLS LAST,
+ CASE WHEN $4 = 'created_at' AND $5 = 'desc' THEN created_at END DESC NULLS LAST,
+ CASE WHEN $4 = 'updated_at' AND $5 = 'asc' THEN updated_at END ASC NULLS LAST,
+ CASE WHEN $4 = 'updated_at' AND $5 = 'desc' THEN updated_at END DESC NULLS LAST
+LIMIT $2 OFFSET $3;
+"""
+
+query = raw_query
+
-# @rewrap_exceptions(
+# @rewrap_exceptions(
# {
# psycopg_errors.ForeignKeyViolation: partialclass(
# HTTPException,
@@ -31,9 +61,9 @@
# }
# # TODO: Add more exceptions
# )
-@wrap_in_class(Agent)
+@wrap_in_class(Agent, transform=lambda d: {"id": d["agent_id"], **d})
+# @increase_counter("list_agents")
@pg_query
-# @increase_counter("list_agents1")
@beartype
async def list_agents(
*,
@@ -43,7 +73,7 @@ async def list_agents(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
metadata_filter: dict[str, Any] = {},
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs a query to list agents for a developer with pagination.
@@ -63,33 +93,25 @@ async def list_agents(
raise HTTPException(status_code=400, detail="Invalid sort direction")
# Build metadata filter clause if needed
- metadata_clause = ""
- if metadata_filter:
- metadata_clause = "AND metadata @> %(metadata_filter)s::jsonb"
- query = f"""
- SELECT
- agent_id,
+ final_query = query
+ if metadata_filter:
+ final_query = query.replace("$7", "AND metadata @> $6::jsonb")
+ else:
+ final_query = query.replace("$7", "")
+
+ params = [
developer_id,
- name,
- canonical_name,
- about,
- instructions,
- model,
- metadata,
- default_settings,
- created_at,
- updated_at
- FROM agents
- WHERE developer_id = %(developer_id)s
- {metadata_clause}
- ORDER BY {sort_by} {direction}
- LIMIT %(limit)s OFFSET %(offset)s;
- """
-
- params = {"developer_id": developer_id, "limit": limit, "offset": offset}
-
+ limit,
+ offset
+ ]
+
+ params.append(sort_by)
+ params.append(direction)
if metadata_filter:
- params["metadata_filter"] = metadata_filter
+ params.append(metadata_filter)
+
+ print(final_query)
+ print(params)
- return query, params
+ return final_query, params
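Two details in this hunk deserve a note. The CASE WHEN ladder parameterizes the sort column and direction without interpolating user input into the SQL: only the branch matching $4/$5 yields a sort key, and the rest evaluate to NULL. Meanwhile $7 is not a real bind parameter; it is a textual marker that query.replace(...) swaps for the metadata clause or for nothing, a stopgap that a later patch in this series replaces with str.format. A standalone sketch of the sort technique:

    # Sort column and direction arrive as bind parameters, not via string interpolation.
    query = """
    SELECT agent_id, created_at, updated_at
    FROM agents
    WHERE developer_id = $1
    ORDER BY
        CASE WHEN $2 = 'created_at' AND $3 = 'asc'  THEN created_at END ASC  NULLS LAST,
        CASE WHEN $2 = 'created_at' AND $3 = 'desc' THEN created_at END DESC NULLS LAST
    LIMIT 10;
    """
    # rows = await conn.fetch(query, developer_id, "created_at", "desc")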
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index d4adff092..647ea3e52 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -10,6 +10,10 @@
from fastapi import HTTPException
from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
+from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
pg_query,
@@ -19,6 +23,35 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+
+raw_query = """
+UPDATE agents
+SET
+ name = CASE
+ WHEN $3::text IS NOT NULL THEN $3
+ ELSE name
+ END,
+ about = CASE
+ WHEN $4::text IS NOT NULL THEN $4
+ ELSE about
+ END,
+ metadata = CASE
+ WHEN $5::jsonb IS NOT NULL THEN metadata || $5
+ ELSE metadata
+ END,
+ model = CASE
+ WHEN $6::text IS NOT NULL THEN $6
+ ELSE model
+ END,
+ default_settings = CASE
+ WHEN $7::jsonb IS NOT NULL THEN $7
+ ELSE default_settings
+ END
+WHERE agent_id = $2 AND developer_id = $1
+RETURNING *;
+"""
+
+query = parse_one(raw_query).sql(pretty=True)
# @rewrap_exceptions(
@@ -35,14 +68,13 @@
ResourceUpdatedResponse,
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
- _kind="inserted",
)
+# @increase_counter("patch_agent")
@pg_query
-# @increase_counter("patch_agent1")
@beartype
async def patch_agent(
*, agent_id: UUID, developer_id: UUID, data: PatchAgentRequest
-) -> tuple[str, dict]:
+) -> tuple[str, list]:
"""
Constructs the SQL query to partially update an agent's details.
@@ -52,27 +84,16 @@ async def patch_agent(
data (PatchAgentRequest): A request object containing the fields to update.
Returns:
- tuple[str, dict]: A tuple containing the SQL query and its parameters.
+ tuple[str, list]: A tuple containing the SQL query and its parameters.
"""
- patch_fields = data.model_dump(exclude_unset=True)
- set_clauses = []
- params = {}
-
- for key, value in patch_fields.items():
- if value is not None: # Only update non-null values
- set_clauses.append(f"{key} = %({key})s")
- params[key] = value
-
- set_clause = ", ".join(set_clauses)
-
- query = f"""
- UPDATE agents
- SET {set_clause}
- WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s
- RETURNING *;
- """
-
- params["agent_id"] = agent_id
- params["developer_id"] = developer_id
-
- return (query, params)
+ params = [
+ developer_id,
+ agent_id,
+ data.name,
+ data.about,
+ data.metadata,
+ data.model,
+ data.default_settings.model_dump() if data.default_settings else None,
+ ]
+
+ return query, params
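Because every column update above is guarded by CASE WHEN $n::type IS NOT NULL, passing None for a field leaves that column untouched, and metadata is merged (metadata || $5) rather than replaced. The call shape, with hypothetical values:

    from uuid import uuid4

    developer_id, agent_id = uuid4(), uuid4()
    params = [
        developer_id,     # $1
        agent_id,         # $2
        None,             # $3 name: NULL, so the stored name is kept
        "new about",      # $4 about: overwritten
        {"env": "prod"},  # $5 metadata: merged into the existing jsonb
        None,             # $6 model: kept
        None,             # $7 default_settings: kept
    ]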
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index 2116e49b0..d65354fa1 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -10,6 +10,10 @@
from fastapi import HTTPException
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
+from ...metrics.counters import increase_counter
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
+
from ..utils import (
partialclass,
pg_query,
@@ -20,6 +24,20 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+raw_query = """
+UPDATE agents
+SET
+ metadata = $3,
+ name = $4,
+ about = $5,
+ model = $6,
+ default_settings = $7::jsonb
+WHERE agent_id = $2 AND developer_id = $1
+RETURNING *;
+"""
+
+query = parse_one(raw_query).sql(pretty=True)
+
# @rewrap_exceptions(
# {
@@ -34,15 +52,12 @@
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
- transform=lambda d: {"id": d["agent_id"], "jobs": [], **d},
- _kind="inserted",
+ transform=lambda d: {"id": d["agent_id"], **d},
)
+# @increase_counter("update_agent")
@pg_query
-# @increase_counter("update_agent1")
@beartype
-async def update_agent(
- *, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest
-) -> tuple[str, dict]:
+async def update_agent(*, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest) -> tuple[str, list]:
"""
Constructs the SQL query to fully update an agent's details.
@@ -52,21 +67,19 @@ async def update_agent(
data (UpdateAgentRequest): A request object containing all agent fields to update.
Returns:
- tuple[str, dict]: A tuple containing the SQL query and its parameters.
+ tuple[str, list]: A tuple containing the SQL query and its parameters.
"""
- fields = ", ".join(
- [f"{key} = %({key})s" for key in data.model_dump(exclude_unset=True).keys()]
- )
- params = {key: value for key, value in data.model_dump(exclude_unset=True).items()}
-
- query = f"""
- UPDATE agents
- SET {fields}
- WHERE agent_id = %(agent_id)s AND developer_id = %(developer_id)s
- RETURNING *;
- """
-
- params["agent_id"] = agent_id
- params["developer_id"] = developer_id
-
+ params = [
+ developer_id,
+ agent_id,
+ data.metadata or {},
+ data.name,
+ data.about,
+ data.model,
+ data.default_settings.model_dump() if data.default_settings else {},
+ ]
+ print("*" * 100)
+ print(query)
+ print(params)
+ print("*" * 100)
return (query, params)
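Unlike patch_agent, this full update writes every column unconditionally, so unset request fields collapse to {} instead of preserving the stored value (the stray print diagnostics are removed later in the series). The difference in miniature, with illustrative dictionaries standing in for a stored row and an incoming payload:

    stored   = {"metadata": {"a": 1}, "name": "old"}
    incoming = {"metadata": None, "name": "new"}

    # update semantics: every field overwritten; None becomes {} for jsonb fields
    updated = {"metadata": incoming["metadata"] or {}, "name": incoming["name"]}

    # patch semantics: None fields keep their stored value
    patched = {k: (incoming[k] if incoming[k] is not None else stored[k]) for k in stored}

    print(updated)  # {'metadata': {}, 'name': 'new'}
    print(patched)  # {'metadata': {'a': 1}, 'name': 'new'}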
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index db583e08f..152ab5ba9 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -1,5 +1,6 @@
import concurrent.futures
import inspect
+import re
import socket
import time
from functools import partialmethod, wraps
@@ -29,6 +30,19 @@
T = TypeVar("T")
ModelT = TypeVar("ModelT", bound=BaseModel)
+def generate_canonical_name(name: str) -> str:
+ """Convert a display name to a canonical name.
+ Example: "My Cool Agent!" -> "my_cool_agent"
+ """
+ # Remove special characters, replace spaces with underscores
+ canonical = re.sub(r"[^\w\s-]", "", name.lower())
+ canonical = re.sub(r"[-\s]+", "_", canonical)
+
+ # Ensure it starts with a letter (prepend 'a' if not)
+ if not canonical[0].isalpha():
+ canonical = f"a_{canonical}"
+
+ return canonical
def partialclass(cls, *args, **kwargs):
cls_signature = inspect.signature(cls)
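The new helper is small enough to exercise directly. A quick check of the behavior as written at this point in the series (it assumes a non-empty name, since canonical[0] raises IndexError on an empty string; a later patch also appends a random suffix):

    import re

    def generate_canonical_name(name: str) -> str:
        canonical = re.sub(r"[^\w\s-]", "", name.lower())
        canonical = re.sub(r"[-\s]+", "_", canonical)
        if not canonical[0].isalpha():
            canonical = f"a_{canonical}"
        return canonical

    assert generate_canonical_name("My Cool Agent!") == "my_cool_agent"
    assert generate_canonical_name("2nd Agent") == "a_2nd_agent"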
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 1151b433d..46e45dbc7 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -95,19 +95,19 @@ def patch_embed_acompletion():
@fixture(scope="global")
async def test_agent(dsn=pg_dsn, developer=test_developer):
- pool = await asyncpg.create_pool(dsn=dsn)
-
- async with get_pg_client(pool=pool) as client:
- agent = await create_agent(
- developer_id=developer.id,
- data=CreateAgentRequest(
- model="gpt-4o-mini",
- name="test agent",
- about="test agent about",
- metadata={"test": "test"},
- ),
- client=client,
- )
+ pool = await create_db_pool(dsn=dsn)
+
+ agent = await create_agent(
+ developer_id=developer.id,
+ data=CreateAgentRequest(
+ model="gpt-4o-mini",
+ name="test agent",
+ canonical_name=f"test_agent_{str(int(time.time()))}",
+ about="test agent about",
+ metadata={"test": "test"},
+ ),
+ connection_pool=pool,
+ )
yield agent
await pool.close()
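The fixture now hands connection_pool=pool straight to the query function instead of borrowing a client, and the pool is closed after the fixture's yield. For readers unfamiliar with ward's injection style used throughout these tests, a minimal self-contained example (the real fixtures are async and scoped globally):

    from ward import fixture, test

    @fixture
    def number():
        yield 42  # teardown code, if any, runs after the yield

    @test("fixtures are injected through default arguments")
    def _(n=number):
        assert n == 42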
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index f8f75fd0b..4b8ccd959 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,7 +1,9 @@
# Tests for agent queries
from uuid import uuid4
+from uuid import UUID
import asyncpg
+from uuid_extensions import uuid7
from ward import raises, test
from agents_api.autogen.openapi_model import (
@@ -9,10 +11,11 @@
CreateAgentRequest,
CreateOrUpdateAgentRequest,
PatchAgentRequest,
+ ResourceDeletedResponse,
ResourceUpdatedResponse,
UpdateAgentRequest,
)
-from agents_api.clients.pg import get_pg_client
+from agents_api.clients.pg import create_db_pool
from agents_api.queries.agents import (
create_agent,
create_or_update_agent,
@@ -25,163 +28,141 @@
from tests.fixtures import pg_dsn, test_agent, test_developer_id
-@test("model: create agent")
+@test("query: create agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- await create_agent(
- developer_id=developer_id,
- data=CreateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- ),
- client=client,
- )
-
-
-@test("model: create agent with instructions")
+ """Test that an agent can be successfully created."""
+
+ pool = await create_db_pool(dsn=dsn)
+ await create_agent(
+ developer_id=developer_id,
+ data=CreateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ ),
+ connection_pool=pool,
+ )
+
+
+@test("query: create agent with instructions sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- await create_agent(
- developer_id=developer_id,
- data=CreateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- ),
- client=client,
- )
-
+ """Test that an agent can be successfully created or updated."""
+
+ pool = await create_db_pool(dsn=dsn)
+ await create_or_update_agent(
+ developer_id=developer_id,
+ agent_id=uuid4(),
+ data=CreateOrUpdateAgentRequest(
+ name="test agent",
+ canonical_name="test_agent2",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ ),
+ connection_pool=pool,
+ )
+
+
+@test("query: update agent sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ """Test that an existing agent's information can be successfully updated."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await update_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ data=UpdateAgentRequest(
+ name="updated agent",
+ about="updated agent about",
+ model="gpt-4o-mini",
+ default_settings={"temperature": 1.0},
+ metadata={"hello": "world"},
+ ),
+ connection_pool=pool,
+ )
-@test("model: create or update agent")
-async def _(dsn=pg_dsn, developer_id=test_developer_id):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- await create_or_update_agent(
- developer_id=developer_id,
- agent_id=uuid4(),
- data=CreateOrUpdateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- ),
- client=client,
- )
+ assert result is not None
+ assert isinstance(result, ResourceUpdatedResponse)
-@test("model: get agent not exists")
+@test("query: get agent not exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that retrieving a non-existent agent raises an exception."""
+
agent_id = uuid4()
- pool = await asyncpg.create_pool(dsn=dsn)
+ pool = await create_db_pool(dsn=dsn)
with raises(Exception):
- async with get_pg_client(pool=pool) as client:
- await get_agent(agent_id=agent_id, developer_id=developer_id, client=client)
+ await get_agent(agent_id=agent_id, developer_id=developer_id, connection_pool=pool)
-@test("model: get agent exists")
+@test("query: get agent exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- result = await get_agent(agent_id=agent.id, developer_id=developer_id, client=client)
+ """Test that retrieving an existing agent returns the correct agent information."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await get_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
assert result is not None
assert isinstance(result, Agent)
-@test("model: delete agent")
+@test("query: list agents sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- temp_agent = await create_agent(
- developer_id=developer_id,
- data=CreateAgentRequest(
- name="test agent",
- about="test agent about",
- model="gpt-4o-mini",
- instructions=["test instruction"],
- ),
- client=client,
- )
-
- # Delete the agent
- await delete_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
+ """Test that listing agents returns a collection of agent information."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await list_agents(developer_id=developer_id, connection_pool=pool)
- # Check that the agent is deleted
- with raises(Exception):
- await get_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client)
+ assert isinstance(result, list)
+ assert all(isinstance(agent, Agent) for agent in result)
-@test("model: update agent")
+@test("query: patch agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- result = await update_agent(
- agent_id=agent.id,
- developer_id=developer_id,
- data=UpdateAgentRequest(
- name="updated agent",
- about="updated agent about",
- model="gpt-4o-mini",
- default_settings={"temperature": 1.0},
- metadata={"hello": "world"},
- ),
- client=client,
- )
+ """Test that an agent can be successfully patched."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await patch_agent(
+ agent_id=agent.id,
+ developer_id=developer_id,
+ data=PatchAgentRequest(
+ name="patched agent",
+ about="patched agent about",
+ default_settings={"temperature": 1.0},
+ metadata={"something": "else"},
+ ),
+ connection_pool=pool,
+ )
assert result is not None
assert isinstance(result, ResourceUpdatedResponse)
- async with get_pg_client(pool=pool) as client:
- agent = await get_agent(
- agent_id=agent.id,
- developer_id=developer_id,
- client=client,
- )
-
- assert "test" not in agent.metadata
-
-@test("model: patch agent")
+@test("query: delete agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- result = await patch_agent(
- agent_id=agent.id,
- developer_id=developer_id,
- data=PatchAgentRequest(
- name="patched agent",
- about="patched agent about",
- default_settings={"temperature": 1.0},
- metadata={"something": "else"},
- ),
- client=client,
- )
+ """Test that an agent can be successfully deleted."""
+
+ pool = await create_db_pool(dsn=dsn)
+ delete_result = await delete_agent(agent_id=agent.id, developer_id=developer_id, connection_pool=pool)
- assert result is not None
- assert isinstance(result, ResourceUpdatedResponse)
+ assert delete_result is not None
+ assert isinstance(delete_result, ResourceDeletedResponse)
- async with get_pg_client(pool=pool) as client:
- agent = await get_agent(
- agent_id=agent.id,
+ # Verify the agent no longer exists
+ try:
+ await get_agent(
developer_id=developer_id,
- client=client,
+ agent_id=agent.id,
+ connection_pool=pool,
)
-
- assert "hello" in agent.metadata
-
-
-@test("model: list agents")
-async def _(dsn=pg_dsn, developer_id=test_developer_id):
- """Tests listing all agents associated with a developer in the database. Verifies that the correct list of agents is retrieved."""
-
- pool = await asyncpg.create_pool(dsn=dsn)
- async with get_pg_client(pool=pool) as client:
- result = await list_agents(developer_id=developer_id, client=client)
-
- assert isinstance(result, list)
- assert all(isinstance(agent, Agent) for agent in result)
+ except Exception:
+ pass
+ else:
+ assert (
+ False
+ ), "Expected an exception to be raised when retrieving a deleted agent."
From e745acce3ea2dcd7a7fd49685371689c36e27f5d Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Wed, 18 Dec 2024 08:51:36 +0000
Subject: [PATCH 068/274] refactor: Lint agents-api (CI)
---
.../agents_api/queries/agents/create_agent.py | 3 ++-
.../queries/agents/create_or_update_agent.py | 5 ++--
.../agents_api/queries/agents/delete_agent.py | 10 +++----
.../agents_api/queries/agents/get_agent.py | 24 ++++++++---------
.../agents_api/queries/agents/list_agents.py | 19 +++++--------
.../agents_api/queries/agents/patch_agent.py | 11 ++++----
.../agents_api/queries/agents/update_agent.py | 9 ++++---
agents-api/agents_api/queries/utils.py | 2 ++
agents-api/tests/fixtures.py | 4 +--
agents-api/tests/test_agent_queries.py | 27 ++++++++++---------
10 files changed, 54 insertions(+), 60 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 63ac4870f..454b24e3b 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -6,10 +6,10 @@
from typing import Any, TypeVar
from uuid import UUID
-from sqlglot import parse_one
from beartype import beartype
from fastapi import HTTPException
from pydantic import ValidationError
+from sqlglot import parse_one
from uuid_extensions import uuid7
from ...autogen.openapi_model import Agent, CreateAgentRequest
@@ -52,6 +52,7 @@
query = parse_one(raw_query).sql(pretty=True)
+
# @rewrap_exceptions(
# {
# psycopg_errors.ForeignKeyViolation: partialclass(
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index bbb897fe5..745be3fb8 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -6,11 +6,10 @@
from typing import Any, TypeVar
from uuid import UUID
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-
from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
from ..utils import (
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index a5062f783..73da33261 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -8,6 +8,8 @@
from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceDeletedResponse
from ..utils import (
@@ -16,11 +18,6 @@
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
@@ -51,6 +48,7 @@
# Parse and pretty-print the raw query (a single statement replaces the old list of queries)
query = parse_one(raw_query).sql(pretty=True)
+
# @rewrap_exceptions(
# {
# psycopg_errors.ForeignKeyViolation: partialclass(
@@ -82,5 +80,5 @@ async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list
"""
# Note: We swap the parameter order because the queries use $1 for developer_id and $2 for agent_id
params = [developer_id, agent_id]
-
+
return (query, params)
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index 061d0b165..d630a2aeb 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -8,19 +8,17 @@
from beartype import beartype
from fastapi import HTTPException
-from ...autogen.openapi_model import Agent
-from ...metrics.counters import increase_counter
from sqlglot import parse_one
from sqlglot.optimizer import optimize
+
+from ...autogen.openapi_model import Agent
+from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
pg_query,
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-
-from ...autogen.openapi_model import Agent
raw_query = """
SELECT
@@ -48,14 +46,14 @@
# @rewrap_exceptions(
- # {
- # psycopg_errors.ForeignKeyViolation: partialclass(
- # HTTPException,
- # status_code=404,
- # detail="The specified developer does not exist.",
- # )
- # }
- # # TODO: Add more exceptions
+# {
+# psycopg_errors.ForeignKeyViolation: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist.",
+# )
+# }
+# # TODO: Add more exceptions
# )
@wrap_in_class(Agent, one=True, transform=lambda d: {"id": d["agent_id"], **d})
# @increase_counter("get_agent")
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 92165e414..b49e71886 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -8,6 +8,8 @@
from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
+from sqlglot.optimizer import optimize
from ...autogen.openapi_model import Agent
from ..utils import (
@@ -16,11 +18,6 @@
rewrap_exceptions,
wrap_in_class,
)
-from beartype import beartype
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
-
-from ...autogen.openapi_model import Agent
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
@@ -99,18 +96,14 @@ async def list_agents(
final_query = query.replace("$7", "AND metadata @> $6::jsonb")
else:
final_query = query.replace("$7", "")
-
- params = [
- developer_id,
- limit,
- offset
- ]
-
+
+ params = [developer_id, limit, offset]
+
params.append(sort_by)
params.append(direction)
if metadata_filter:
params.append(metadata_filter)
-
+
print(final_query)
print(params)
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 647ea3e52..929fd9c34 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -8,11 +8,10 @@
from beartype import beartype
from fastapi import HTTPException
-
-from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
-from fastapi import HTTPException
from sqlglot import parse_one
from sqlglot.optimizer import optimize
+
+from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
@@ -23,7 +22,7 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
-
+
raw_query = """
UPDATE agents
SET
@@ -93,7 +92,7 @@ async def patch_agent(
data.about,
data.metadata,
data.model,
- data.default_settings.model_dump() if data.default_settings else None,
+ data.default_settings.model_dump() if data.default_settings else None,
]
-
+
return query, params
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index d65354fa1..3f413c78d 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -8,12 +8,11 @@
from beartype import beartype
from fastapi import HTTPException
-
-from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
-from ...metrics.counters import increase_counter
from sqlglot import parse_one
from sqlglot.optimizer import optimize
+from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
+from ...metrics.counters import increase_counter
from ..utils import (
partialclass,
pg_query,
@@ -57,7 +56,9 @@
# @increase_counter("update_agent")
@pg_query
@beartype
-async def update_agent(*, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest) -> tuple[str, list]:
+async def update_agent(
+ *, agent_id: UUID, developer_id: UUID, data: UpdateAgentRequest
+) -> tuple[str, list]:
"""
Constructs the SQL query to fully update an agent's details.
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 152ab5ba9..a3ce89d98 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -30,6 +30,7 @@
T = TypeVar("T")
ModelT = TypeVar("ModelT", bound=BaseModel)
+
def generate_canonical_name(name: str) -> str:
"""Convert a display name to a canonical name.
Example: "My Cool Agent!" -> "my_cool_agent"
@@ -44,6 +45,7 @@ def generate_canonical_name(name: str) -> str:
return canonical
+
def partialclass(cls, *args, **kwargs):
cls_signature = inspect.signature(cls)
bound = cls_signature.bind_partial(*args, **kwargs)
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 46e45dbc7..25892d959 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -11,9 +11,9 @@
)
from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
+from agents_api.queries.agents.create_agent import create_agent
from agents_api.queries.developers.create_developer import create_developer
-from agents_api.queries.agents.create_agent import create_agent
# from agents_api.queries.agents.delete_agent import delete_agent
from agents_api.queries.developers.get_developer import get_developer
@@ -100,7 +100,7 @@ async def test_agent(dsn=pg_dsn, developer=test_developer):
agent = await create_agent(
developer_id=developer.id,
data=CreateAgentRequest(
- model="gpt-4o-mini",
+ model="gpt-4o-mini",
name="test agent",
canonical_name=f"test_agent_{str(int(time.time()))}",
about="test agent about",
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index 4b8ccd959..b27f8abde 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,7 +1,6 @@
# Tests for agent queries
-from uuid import uuid4
+from uuid import UUID, uuid4
-from uuid import UUID
import asyncpg
from uuid_extensions import uuid7
from ward import raises, test
@@ -31,7 +30,7 @@
@test("query: create agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that an agent can be successfully created."""
-
+
pool = await create_db_pool(dsn=dsn)
await create_agent(
developer_id=developer_id,
@@ -47,7 +46,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
@test("query: create agent with instructions sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that an agent can be successfully created or updated."""
-
+
pool = await create_db_pool(dsn=dsn)
await create_or_update_agent(
developer_id=developer_id,
@@ -66,7 +65,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
@test("query: update agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that an existing agent's information can be successfully updated."""
-
+
pool = await create_db_pool(dsn=dsn)
result = await update_agent(
agent_id=agent.id,
@@ -88,18 +87,20 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: get agent not exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that retrieving a non-existent agent raises an exception."""
-
+
agent_id = uuid4()
pool = await create_db_pool(dsn=dsn)
with raises(Exception):
- await get_agent(agent_id=agent_id, developer_id=developer_id, connection_pool=pool)
+ await get_agent(
+ agent_id=agent_id, developer_id=developer_id, connection_pool=pool
+ )
@test("query: get agent exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that retrieving an existing agent returns the correct agent information."""
-
+
pool = await create_db_pool(dsn=dsn)
result = await get_agent(
agent_id=agent.id,
@@ -114,7 +115,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: list agents sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that listing agents returns a collection of agent information."""
-
+
pool = await create_db_pool(dsn=dsn)
result = await list_agents(developer_id=developer_id, connection_pool=pool)
@@ -125,7 +126,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
@test("query: patch agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that an agent can be successfully patched."""
-
+
pool = await create_db_pool(dsn=dsn)
result = await patch_agent(
agent_id=agent.id,
@@ -146,9 +147,11 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: delete agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that an agent can be successfully deleted."""
-
+
pool = await create_db_pool(dsn=dsn)
- delete_result = await delete_agent(agent_id=agent.id, developer_id=developer_id, connection_pool=pool)
+ delete_result = await delete_agent(
+ agent_id=agent.id, developer_id=developer_id, connection_pool=pool
+ )
assert delete_result is not None
assert isinstance(delete_result, ResourceDeletedResponse)
From 2f392f745cf2f0420185f1179b7761b13866ff1f Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Wed, 18 Dec 2024 13:18:41 +0300
Subject: [PATCH 069/274] fix(agents-api): misc fixes
---
.../agents_api/queries/agents/create_agent.py | 2 +-
.../queries/agents/create_or_update_agent.py | 2 +-
.../agents_api/queries/agents/delete_agent.py | 2 +-
.../agents_api/queries/agents/get_agent.py | 2 +-
.../agents_api/queries/agents/list_agents.py | 29 +++++++++----------
.../agents_api/queries/agents/patch_agent.py | 2 +-
.../agents_api/queries/agents/update_agent.py | 7 ++---
agents-api/agents_api/queries/utils.py | 4 +++
agents-api/tests/test_agent_queries.py | 18 ++++--------
9 files changed, 29 insertions(+), 39 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 454b24e3b..81a408f30 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -87,7 +87,7 @@
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
)
-# @increase_counter("create_agent")
+@increase_counter("create_agent")
@pg_query
@beartype
async def create_agent(
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 745be3fb8..d74cd57c2 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -66,7 +66,7 @@
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
)
-# @increase_counter("create_or_update_agent")
+@increase_counter("create_or_update_agent")
@pg_query
@beartype
async def create_or_update_agent(
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index 73da33261..db4a3ab4f 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -64,7 +64,7 @@
one=True,
transform=lambda d: {**d, "id": d["agent_id"], "deleted_at": utcnow()},
)
-# @increase_counter("delete_agent")
+@increase_counter("delete_agent")
@pg_query
@beartype
async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index d630a2aeb..a9893d747 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -56,7 +56,7 @@
# # TODO: Add more exceptions
# )
@wrap_in_class(Agent, one=True, transform=lambda d: {"id": d["agent_id"], **d})
-# @increase_counter("get_agent")
+@increase_counter("get_agent")
@pg_query
@beartype
async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index b49e71886..48df01b90 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -36,7 +36,7 @@
created_at,
updated_at
FROM agents
-WHERE developer_id = $1 $7
+WHERE developer_id = $1 {metadata_filter_query}
ORDER BY
CASE WHEN $4 = 'created_at' AND $5 = 'asc' THEN created_at END ASC NULLS LAST,
CASE WHEN $4 = 'created_at' AND $5 = 'desc' THEN created_at END DESC NULLS LAST,
@@ -45,8 +45,6 @@
LIMIT $2 OFFSET $3;
"""
-query = raw_query
-
# @rewrap_exceptions(
# {
@@ -59,7 +57,7 @@
# # TODO: Add more exceptions
# )
@wrap_in_class(Agent, transform=lambda d: {"id": d["agent_id"], **d})
-# @increase_counter("list_agents")
+@increase_counter("list_agents")
@pg_query
@beartype
async def list_agents(
@@ -91,20 +89,19 @@ async def list_agents(
# Build metadata filter clause if needed
- final_query = query
- if metadata_filter:
- final_query = query.replace("$7", "AND metadata @> $6::jsonb")
- else:
- final_query = query.replace("$7", "")
-
- params = [developer_id, limit, offset]
+ final_query = raw_query.format(
+ metadata_filter_query="AND metadata @> $6::jsonb" if metadata_filter else ""
+ )
+
+ params = [
+ developer_id,
+ limit,
+ offset,
+ sort_by,
+ direction,
+ ]
- params.append(sort_by)
- params.append(direction)
if metadata_filter:
params.append(metadata_filter)
- print(final_query)
- print(params)
-
return final_query, params
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 929fd9c34..d2a172838 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -68,7 +68,7 @@
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
)
-# @increase_counter("patch_agent")
+@increase_counter("patch_agent")
@pg_query
@beartype
async def patch_agent(
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index 3f413c78d..d03994e9c 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -53,7 +53,7 @@
one=True,
transform=lambda d: {"id": d["agent_id"], **d},
)
-# @increase_counter("update_agent")
+@increase_counter("update_agent")
@pg_query
@beartype
async def update_agent(
@@ -79,8 +79,5 @@ async def update_agent(
data.model,
data.default_settings.model_dump() if data.default_settings else {},
]
- print("*" * 100)
- print(query)
- print(params)
- print("*" * 100)
+
return (query, params)
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index a3ce89d98..ba9bade9e 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -1,5 +1,6 @@
import concurrent.futures
import inspect
+import random
import re
import socket
import time
@@ -43,6 +44,9 @@ def generate_canonical_name(name: str) -> str:
if not canonical[0].isalpha():
canonical = f"a_{canonical}"
+    # Append a random three-digit suffix to reduce name collisions
+ canonical = f"{canonical}_{random.randint(100, 999)}"
+
return canonical
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index b27f8abde..18d95b743 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,6 +1,5 @@
# Tests for agent queries
-from uuid import UUID, uuid4
-
+from uuid import UUID
import asyncpg
from uuid_extensions import uuid7
from ward import raises, test
@@ -50,7 +49,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
pool = await create_db_pool(dsn=dsn)
await create_or_update_agent(
developer_id=developer_id,
- agent_id=uuid4(),
+ agent_id=uuid7(),
data=CreateOrUpdateAgentRequest(
name="test agent",
canonical_name="test_agent2",
@@ -87,8 +86,8 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: get agent not exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that retrieving a non-existent agent raises an exception."""
-
- agent_id = uuid4()
+
+ agent_id = uuid7()
pool = await create_db_pool(dsn=dsn)
with raises(Exception):
@@ -156,16 +155,9 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
assert delete_result is not None
assert isinstance(delete_result, ResourceDeletedResponse)
- # Verify the agent no longer exists
- try:
+ with raises(Exception):
await get_agent(
developer_id=developer_id,
agent_id=agent.id,
connection_pool=pool,
)
- except Exception:
- pass
- else:
- assert (
- False
- ), "Expected an exception to be raised when retrieving a deleted agent."
From 0579f3c03f62b1d02b597bd4918de5ab1eb4bd34 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Wed, 18 Dec 2024 10:27:40 +0000
Subject: [PATCH 070/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/agents/list_agents.py | 2 +-
agents-api/tests/test_agent_queries.py | 3 ++-
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 48df01b90..69e91f206 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -92,7 +92,7 @@ async def list_agents(
final_query = raw_query.format(
metadata_filter_query="AND metadata @> $6::jsonb" if metadata_filter else ""
)
-
+
params = [
developer_id,
limit,
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index 18d95b743..56a07ed03 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,5 +1,6 @@
# Tests for agent queries
from uuid import UUID
+
import asyncpg
from uuid_extensions import uuid7
from ward import raises, test
@@ -86,7 +87,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: get agent not exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that retrieving a non-existent agent raises an exception."""
-
+
agent_id = uuid7()
pool = await create_db_pool(dsn=dsn)
From 1b7a022d8d3aab446a683eed0914ffa021426b73 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Thu, 19 Dec 2024 01:14:40 +0300
Subject: [PATCH 071/274] wip
---
agents-api/agents_api/autogen/Sessions.py | 40 +++
.../agents_api/queries/agents/create_agent.py | 7 +-
.../queries/agents/create_or_update_agent.py | 6 +-
.../agents_api/queries/agents/delete_agent.py | 7 +-
.../agents_api/queries/agents/get_agent.py | 5 +-
.../agents_api/queries/agents/list_agents.py | 6 +-
.../agents_api/queries/agents/patch_agent.py | 5 +-
.../agents_api/queries/agents/update_agent.py | 5 +-
.../queries/developers/get_developer.py | 2 +-
.../queries/entries/create_entries.py | 18 +-
.../queries/entries/list_entries.py | 10 +-
.../queries/sessions/create_session.py | 28 +-
agents-api/agents_api/queries/utils.py | 17 +-
agents-api/tests/fixtures.py | 44 ++-
agents-api/tests/test_agent_queries.py | 2 -
agents-api/tests/test_entry_queries.py | 10 +-
agents-api/tests/test_messages_truncation.py | 2 +-
agents-api/tests/test_session_queries.py | 339 +++++++++++-------
.../integrations/autogen/Sessions.py | 40 +++
typespec/sessions/models.tsp | 6 +
.../@typespec/openapi3/openapi-1.0.0.yaml | 53 +++
21 files changed, 439 insertions(+), 213 deletions(-)
diff --git a/agents-api/agents_api/autogen/Sessions.py b/agents-api/agents_api/autogen/Sessions.py
index 460fd25ce..e2a9ce164 100644
--- a/agents-api/agents_api/autogen/Sessions.py
+++ b/agents-api/agents_api/autogen/Sessions.py
@@ -31,6 +31,10 @@ class CreateSessionRequest(BaseModel):
"""
A specific situation that sets the background for this session
"""
+ system_template: str | None = None
+ """
+ System prompt for this session
+ """
render_templates: StrictBool = True
"""
Render system and assistant message content as jinja templates
@@ -51,6 +55,10 @@ class CreateSessionRequest(BaseModel):
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
"""
+ forward_tool_calls: StrictBool = False
+ """
+ Whether to forward tool calls to the model
+ """
recall_options: RecallOptions | None = None
metadata: dict[str, Any] | None = None
@@ -67,6 +75,10 @@ class PatchSessionRequest(BaseModel):
"""
A specific situation that sets the background for this session
"""
+ system_template: str | None = None
+ """
+ System prompt for this session
+ """
render_templates: StrictBool = True
"""
Render system and assistant message content as jinja templates
@@ -87,6 +99,10 @@ class PatchSessionRequest(BaseModel):
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
"""
+ forward_tool_calls: StrictBool = False
+ """
+ Whether to forward tool calls to the model
+ """
recall_options: RecallOptionsUpdate | None = None
metadata: dict[str, Any] | None = None
@@ -121,6 +137,10 @@ class Session(BaseModel):
"""
A specific situation that sets the background for this session
"""
+ system_template: str | None = None
+ """
+ System prompt for this session
+ """
summary: Annotated[str | None, Field(json_schema_extra={"readOnly": True})] = None
"""
Summary (null at the beginning) - generated automatically after every interaction
@@ -145,6 +165,10 @@ class Session(BaseModel):
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
"""
+ forward_tool_calls: StrictBool = False
+ """
+ Whether to forward tool calls to the model
+ """
recall_options: RecallOptions | None = None
id: Annotated[UUID, Field(json_schema_extra={"readOnly": True})]
metadata: dict[str, Any] | None = None
@@ -197,6 +221,10 @@ class UpdateSessionRequest(BaseModel):
"""
A specific situation that sets the background for this session
"""
+ system_template: str | None = None
+ """
+ System prompt for this session
+ """
render_templates: StrictBool = True
"""
Render system and assistant message content as jinja templates
@@ -217,6 +245,10 @@ class UpdateSessionRequest(BaseModel):
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
"""
+ forward_tool_calls: StrictBool = False
+ """
+ Whether to forward tool calls to the model
+ """
recall_options: RecallOptions | None = None
metadata: dict[str, Any] | None = None
@@ -240,6 +272,10 @@ class CreateOrUpdateSessionRequest(CreateSessionRequest):
"""
A specific situation that sets the background for this session
"""
+ system_template: str | None = None
+ """
+ System prompt for this session
+ """
render_templates: StrictBool = True
"""
Render system and assistant message content as jinja templates
@@ -260,6 +296,10 @@ class CreateOrUpdateSessionRequest(CreateSessionRequest):
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
"""
+ forward_tool_calls: StrictBool = False
+ """
+ Whether to forward tool calls to the model
+ """
recall_options: RecallOptions | None = None
metadata: dict[str, Any] | None = None
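The new system_template and forward_tool_calls fields are optional with safe defaults (None and False), so existing payloads keep validating. A minimal sketch of exercising them, mirroring the test code later in this patch; the ids are placeholders:

    from uuid_extensions import uuid7
    from agents_api.autogen.Sessions import CreateSessionRequest

    req = CreateSessionRequest(
        agents=[uuid7()],  # placeholder agent id
        users=[uuid7()],   # placeholder user id
        situation="test session",
        system_template="You are a terse assistant.",  # new: per-session system prompt
        forward_tool_calls=True,                       # new: defaults to False
    )
    assert req.system_template is not None
    assert req.render_templates is True  # existing default, unchanged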
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 81a408f30..bb111b0df 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -7,18 +7,17 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
-from pydantic import ValidationError
from sqlglot import parse_one
from uuid_extensions import uuid7
+from ...metrics.counters import increase_counter
+
from ...autogen.openapi_model import Agent, CreateAgentRequest
from ..utils import (
generate_canonical_name,
- partialclass,
pg_query,
- rewrap_exceptions,
wrap_in_class,
+ rewrap_exceptions,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index d74cd57c2..6cfb83767 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -7,17 +7,15 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
+from ...metrics.counters import increase_counter
from ..utils import (
generate_canonical_name,
- partialclass,
pg_query,
- rewrap_exceptions,
wrap_in_class,
+ rewrap_exceptions,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index db4a3ab4f..9c3ee5585 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -7,16 +7,15 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceDeletedResponse
+from ...metrics.counters import increase_counter
+from ...common.utils.datetime import utcnow
from ..utils import (
- partialclass,
pg_query,
- rewrap_exceptions,
wrap_in_class,
+ rewrap_exceptions,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index a9893d747..dce424771 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -7,17 +7,14 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import Agent
from ...metrics.counters import increase_counter
from ..utils import (
- partialclass,
pg_query,
- rewrap_exceptions,
wrap_in_class,
+ rewrap_exceptions,
)
raw_query = """
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 69e91f206..3698c68f1 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -8,15 +8,13 @@
from beartype import beartype
from fastapi import HTTPException
-from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import Agent
+from ...metrics.counters import increase_counter
from ..utils import (
- partialclass,
pg_query,
- rewrap_exceptions,
wrap_in_class,
+ rewrap_exceptions,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index d2a172838..6f9cb3b9c 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -7,17 +7,14 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
from ...metrics.counters import increase_counter
from ..utils import (
- partialclass,
pg_query,
- rewrap_exceptions,
wrap_in_class,
+ rewrap_exceptions,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index d03994e9c..cd15313a2 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -7,17 +7,14 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
-from sqlglot.optimizer import optimize
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
- partialclass,
pg_query,
- rewrap_exceptions,
wrap_in_class,
+ rewrap_exceptions,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/developers/get_developer.py b/agents-api/agents_api/queries/developers/get_developer.py
index 373a2fb36..28be9a4b1 100644
--- a/agents-api/agents_api/queries/developers/get_developer.py
+++ b/agents-api/agents_api/queries/developers/get_developer.py
@@ -12,8 +12,8 @@
from ..utils import (
partialclass,
pg_query,
- rewrap_exceptions,
wrap_in_class,
+ rewrap_exceptions,
)
# TODO: Add verify_developer
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index 24c0be26e..a54104274 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -14,14 +14,10 @@
# Query for checking if the session exists
session_exists_query = """
-SELECT CASE
- WHEN EXISTS (
- SELECT 1 FROM sessions
- WHERE session_id = $1 AND developer_id = $2
- )
- THEN TRUE
- ELSE (SELECT NULL::boolean WHERE FALSE) -- This raises a NO_DATA_FOUND error
-END;
+SELECT EXISTS (
+ SELECT 1 FROM sessions
+ WHERE session_id = $1 AND developer_id = $2
+) AS exists;
"""
# Define the raw SQL query for creating entries
@@ -71,6 +67,10 @@
status_code=400,
detail=str(exc),
),
+ asyncpg.NoDataFoundError: lambda exc: HTTPException(
+ status_code=404,
+ detail="Session not found",
+ ),
}
)
@wrap_in_class(
@@ -166,7 +166,7 @@ async def add_entry_relations(
item.get("is_leaf", False), # $5
]
)
-
+
return [
(
session_exists_query,
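The session check now always returns exactly one row instead of forcing a server-side NO_DATA_FOUND, so the missing-session case becomes a false boolean that the new fetchrow path (added to utils.py below) surfaces as asyncpg.NoDataFoundError and maps to a 404. A sketch of the semantics, assuming an asyncpg connection named conn and UUID placeholders for session_id and developer_id; the real code routes this through pg_query with the "fetchrow" method rather than calling asyncpg directly:

    from fastapi import HTTPException

    row = await conn.fetchrow(
        """
        SELECT EXISTS (
            SELECT 1 FROM sessions
            WHERE session_id = $1 AND developer_id = $2
        ) AS exists
        """,
        session_id,
        developer_id,
    )
    if not row["exists"]:
        # mirrors the new NoDataFoundError -> 404 mapping above
        raise HTTPException(status_code=404, detail="Session not found")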
diff --git a/agents-api/agents_api/queries/entries/list_entries.py b/agents-api/agents_api/queries/entries/list_entries.py
index 0aeb92a25..3f4a0699e 100644
--- a/agents-api/agents_api/queries/entries/list_entries.py
+++ b/agents-api/agents_api/queries/entries/list_entries.py
@@ -62,6 +62,10 @@
status_code=400,
detail=str(exc),
),
+ asyncpg.NoDataFoundError: lambda exc: HTTPException(
+ status_code=404,
+ detail="Session not found",
+ ),
}
)
@wrap_in_class(Entry)
@@ -78,7 +82,7 @@ async def list_entries(
sort_by: Literal["created_at", "timestamp"] = "timestamp",
direction: Literal["asc", "desc"] = "asc",
exclude_relations: list[str] = [],
-) -> list[tuple[str, list]]:
+) -> list[tuple[str, list] | tuple[str, list, str]]:
if limit < 1 or limit > 1000:
raise HTTPException(status_code=400, detail="Limit must be between 1 and 1000")
if offset < 0:
@@ -98,14 +102,14 @@ async def list_entries(
developer_id, # $5
exclude_relations, # $6
]
-
return [
(
session_exists_query,
[session_id, developer_id],
+ "fetchrow",
),
(
query,
- entry_params,
+ entry_params
),
]
diff --git a/agents-api/agents_api/queries/sessions/create_session.py b/agents-api/agents_api/queries/sessions/create_session.py
index 3074f087b..baa3f09d1 100644
--- a/agents-api/agents_api/queries/sessions/create_session.py
+++ b/agents-api/agents_api/queries/sessions/create_session.py
@@ -45,11 +45,7 @@
participant_type,
participant_id
)
-SELECT
- $1 as developer_id,
- $2 as session_id,
- unnest($3::participant_type[]) as participant_type,
- unnest($4::uuid[]) as participant_id;
+VALUES ($1, $2, $3, $4);
""").sql(pretty=True)
@@ -67,7 +63,7 @@
),
}
)
-@wrap_in_class(Session, one=True, transform=lambda d: {**d, "id": d["session_id"]})
+@wrap_in_class(Session, transform=lambda d: {**d, "id": d["session_id"]})
@increase_counter("create_session")
@pg_query
@beartype
@@ -76,7 +72,7 @@ async def create_session(
developer_id: UUID,
session_id: UUID,
data: CreateSessionRequest,
-) -> list[tuple[str, list]]:
+) -> list[tuple[str, list] | tuple[str, list, str]]:
"""
Constructs SQL queries to create a new session and its participant lookups.
@@ -86,7 +82,7 @@ async def create_session(
data (CreateSessionRequest): Session creation data
Returns:
- list[tuple[str, list]]: SQL queries and their parameters
+ list[tuple[str, list] | tuple[str, list, str]]: SQL queries and their parameters
"""
# Handle participants
users = data.users or ([data.user] if data.user else [])
@@ -122,15 +118,15 @@ async def create_session(
data.recall_options or {}, # $10
]
- # Prepare lookup parameters
- lookup_params = [
- developer_id, # $1
- session_id, # $2
- participant_types, # $3
- participant_ids, # $4
- ]
+ # Prepare lookup parameters as a list of parameter lists
+ lookup_params = []
+ for ptype, pid in zip(participant_types, participant_ids):
+ lookup_params.append([developer_id, session_id, ptype, pid])
+ print("*" * 100)
+ print(lookup_params)
+ print("*" * 100)
return [
(session_query, session_params),
- (lookup_query, lookup_params),
+ (lookup_query, lookup_params, "fetchmany"),
]
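The lookup insert switches from one statement over unnested arrays to a plain VALUES row per participant, executed through the new fetchmany path. A sketch of the batch shape this function now returns, using the names from the function above:

    # session_params is a single parameter list; lookup_params is a list of
    # parameter lists, one per (participant_type, participant_id) pair.
    lookup_params = [
        [developer_id, session_id, ptype, pid]
        for ptype, pid in zip(participant_types, participant_ids)
    ]
    batch = [
        (session_query, session_params),             # executed once
        (lookup_query, lookup_params, "fetchmany"),  # executed once per inner list
    ]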
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index ba9bade9e..194cba7bc 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -69,7 +69,7 @@ class AsyncPGFetchArgs(TypedDict):
type SQLQuery = str
-type FetchMethod = Literal["fetch", "fetchmany"]
+type FetchMethod = Literal["fetch", "fetchmany", "fetchrow"]
type PGQueryArgs = tuple[SQLQuery, list[Any]] | tuple[SQLQuery, list[Any], FetchMethod]
type PreparedPGQueryArgs = tuple[FetchMethod, AsyncPGFetchArgs]
type BatchedPreparedPGQueryArgs = list[PreparedPGQueryArgs]
@@ -102,6 +102,13 @@ def prepare_pg_query_args(
),
)
)
+ case (query, variables, "fetchrow"):
+ batch.append(
+ (
+ "fetchrow",
+ AsyncPGFetchArgs(query=query, args=variables, timeout=query_timeout),
+ )
+ )
case _:
raise ValueError("Invalid query arguments")
@@ -161,6 +168,14 @@ async def wrapper(
query, *args, timeout=timeout
)
+ print("%" * 100)
+ print(results)
+ print(*args)
+ print("%" * 100)
+
+ if method_name == "fetchrow" and (len(results) == 0 or results.get("bool") is None):
+ raise asyncpg.NoDataFoundError
+
end = timeit and time.perf_counter()
timeit and print(
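With fetchrow added, PGQueryArgs admits three tuple shapes, and the optional third element selects the asyncpg method. A sketch of constructing such a batch; the table in the last entry is hypothetical:

    from uuid_extensions import uuid7

    session_id = uuid7()  # placeholder
    batch = [
        ("SELECT 1", []),  # no method given: defaults to "fetch"
        ("SELECT * FROM sessions WHERE session_id = $1",
         [session_id], "fetchrow"),  # single row; the wrapper above may raise NoDataFoundError
        ("INSERT INTO lookup (a) VALUES ($1)",
         [[1], [2]], "fetchmany"),   # hypothetical table; executed per inner list
    ]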
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 25892d959..9153785a4 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,5 +1,6 @@
import random
import string
+import time
from uuid import UUID
from fastapi.testclient import TestClient
@@ -7,6 +8,8 @@
from ward import fixture
from agents_api.autogen.openapi_model import (
+ CreateAgentRequest,
+ CreateSessionRequest,
CreateUserRequest,
)
from agents_api.clients.pg import create_db_pool
@@ -24,8 +27,8 @@
# from agents_api.queries.execution.create_temporal_lookup import create_temporal_lookup
# from agents_api.queries.files.create_file import create_file
# from agents_api.queries.files.delete_file import delete_file
-# from agents_api.queries.session.create_session import create_session
-# from agents_api.queries.session.delete_session import delete_session
+from agents_api.queries.sessions.create_session import create_session
+
# from agents_api.queries.task.create_task import create_task
# from agents_api.queries.task.delete_task import delete_task
# from agents_api.queries.tools.create_tools import create_tools
@@ -150,22 +153,27 @@ async def test_new_developer(dsn=pg_dsn, email=random_email):
return developer
-# @fixture(scope="global")
-# async def test_session(
-# dsn=pg_dsn,
-# developer_id=test_developer_id,
-# test_user=test_user,
-# test_agent=test_agent,
-# ):
-# async with get_pg_client(dsn=dsn) as client:
-# session = await create_session(
-# developer_id=developer_id,
-# data=CreateSessionRequest(
-# agent=test_agent.id, user=test_user.id, metadata={"test": "test"}
-# ),
-# client=client,
-# )
-# yield session
+@fixture(scope="global")
+async def test_session(
+ dsn=pg_dsn,
+ developer_id=test_developer_id,
+ test_user=test_user,
+ test_agent=test_agent,
+):
+ pool = await create_db_pool(dsn=dsn)
+
+ session = await create_session(
+ developer_id=developer_id,
+ data=CreateSessionRequest(
+ agent=test_agent.id,
+ user=test_user.id,
+ metadata={"test": "test"},
+ system_template="test system template",
+ ),
+ connection_pool=pool,
+ )
+
+ return session
# @fixture(scope="global")
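A usage sketch for the re-enabled fixture, following the same pattern as the session tests later in this patch:

    from ward import test
    from tests.fixtures import pg_dsn, test_developer_id, test_session

    @test("example: session fixture wires up a real session")
    async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
        # ward injects the resolved fixture values as default arguments
        assert session.id is not None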
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index 56a07ed03..b6cb7aedc 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -1,7 +1,5 @@
# Tests for agent queries
-from uuid import UUID
-import asyncpg
from uuid_extensions import uuid7
from ward import raises, test
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 87d9cdb4f..da53ce06d 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -3,7 +3,7 @@
It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
"""
-from uuid import uuid4
+from uuid_extensions import uuid7
from fastapi import HTTPException
from ward import raises, test
@@ -11,7 +11,7 @@
from agents_api.autogen.openapi_model import CreateEntryRequest
from agents_api.clients.pg import create_db_pool
from agents_api.queries.entries import create_entries, list_entries
-from tests.fixtures import pg_dsn, test_developer # , test_session
+from tests.fixtures import pg_dsn, test_developer, test_session
MODEL = "gpt-4o-mini"
@@ -31,11 +31,10 @@ async def _(dsn=pg_dsn, developer=test_developer):
with raises(HTTPException) as exc_info:
await create_entries(
developer_id=developer.id,
- session_id=uuid4(),
+ session_id=uuid7(),
data=[test_entry],
connection_pool=pool,
)
-
assert exc_info.raised.status_code == 404
@@ -48,10 +47,9 @@ async def _(dsn=pg_dsn, developer=test_developer):
with raises(HTTPException) as exc_info:
await list_entries(
developer_id=developer.id,
- session_id=uuid4(),
+ session_id=uuid7(),
connection_pool=pool,
)
-
assert exc_info.raised.status_code == 404
diff --git a/agents-api/tests/test_messages_truncation.py b/agents-api/tests/test_messages_truncation.py
index 39cc02c2c..bb1eaee30 100644
--- a/agents-api/tests/test_messages_truncation.py
+++ b/agents-api/tests/test_messages_truncation.py
@@ -1,4 +1,4 @@
-# from uuid import uuid4
+
# from uuid_extensions import uuid7
# from ward import raises, test
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 4fdc7e6e4..b85268434 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -8,79 +8,116 @@
from agents_api.autogen.openapi_model import (
Session,
+ CreateSessionRequest,
+ CreateOrUpdateSessionRequest,
+ UpdateSessionRequest,
+ PatchSessionRequest,
+ ResourceUpdatedResponse,
+ ResourceDeletedResponse,
)
from agents_api.clients.pg import create_db_pool
from agents_api.queries.sessions import (
count_sessions,
get_session,
list_sessions,
+ create_session,
+ create_or_update_session,
+ update_session,
+ patch_session,
+ delete_session,
)
from tests.fixtures import (
pg_dsn,
test_developer_id,
-) # , test_session, test_agent, test_user
-
-# @test("query: create session sql")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user):
-# """Test that a session can be successfully created."""
-
-# pool = await create_db_pool(dsn=dsn)
-# await create_session(
-# developer_id=developer_id,
-# session_id=uuid7(),
-# data=CreateSessionRequest(
-# users=[user.id],
-# agents=[agent.id],
-# situation="test session",
-# ),
-# connection_pool=pool,
-# )
-
-
-# @test("query: create or update session sql")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user):
-# """Test that a session can be successfully created or updated."""
-
-# pool = await create_db_pool(dsn=dsn)
-# await create_or_update_session(
-# developer_id=developer_id,
-# session_id=uuid7(),
-# data=CreateOrUpdateSessionRequest(
-# users=[user.id],
-# agents=[agent.id],
-# situation="test session",
-# ),
-# connection_pool=pool,
-# )
-
-
-# @test("query: update session sql")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent):
-# """Test that an existing session's information can be successfully updated."""
-
-# pool = await create_db_pool(dsn=dsn)
-# update_result = await update_session(
-# session_id=session.id,
-# developer_id=developer_id,
-# data=UpdateSessionRequest(
-# agents=[agent.id],
-# situation="updated session",
-# ),
-# connection_pool=pool,
-# )
-
-# assert update_result is not None
-# assert isinstance(update_result, ResourceUpdatedResponse)
-# assert update_result.updated_at > session.created_at
-
-
-@test("query: get session not exists sql")
-async def _(dsn=pg_dsn, developer_id=test_developer_id):
- """Test that retrieving a non-existent session returns an empty result."""
+ test_developer,
+ test_user,
+ test_agent,
+ test_session,
+)
+
+@test("query: create session sql")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user
+):
+ """Test that a session can be successfully created."""
+
+ pool = await create_db_pool(dsn=dsn)
session_id = uuid7()
+ data = CreateSessionRequest(
+ users=[user.id],
+ agents=[agent.id],
+ situation="test session",
+ system_template="test system template",
+ )
+ result = await create_session(
+ developer_id=developer_id,
+ session_id=session_id,
+ data=data,
+ connection_pool=pool,
+ )
+
+ assert result is not None
+ assert isinstance(result, Session), f"Result is not a Session, {result}"
+ assert result.id == session_id
+ assert result.developer_id == developer_id
+ assert result.situation == "test session"
+ assert set(result.users) == {user.id}
+ assert set(result.agents) == {agent.id}
+
+
+@test("query: create or update session sql")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user
+):
+ """Test that a session can be successfully created or updated."""
+
pool = await create_db_pool(dsn=dsn)
+ session_id = uuid7()
+ data = CreateOrUpdateSessionRequest(
+ users=[user.id],
+ agents=[agent.id],
+ situation="test session",
+ )
+ result = await create_or_update_session(
+ developer_id=developer_id,
+ session_id=session_id,
+ data=data,
+ connection_pool=pool,
+ )
+
+ assert result is not None
+ assert isinstance(result, Session)
+ assert result.id == session_id
+ assert result.developer_id == developer_id
+ assert result.situation == "test session"
+ assert set(result.users) == {user.id}
+ assert set(result.agents) == {agent.id}
+
+
+@test("query: get session exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test retrieving an existing session."""
+ pool = await create_db_pool(dsn=dsn)
+ result = await get_session(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
+
+ assert result is not None
+ assert isinstance(result, Session)
+ assert result.id == session.id
+ assert result.developer_id == developer_id
+
+
+@test("query: get session does not exist")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test retrieving a non-existent session."""
+
+ session_id = uuid7()
+ pool = await create_db_pool(dsn=dsn)
with raises(Exception):
await get_session(
session_id=session_id,
@@ -89,90 +126,136 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
)
-# @test("query: get session exists sql")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
-# """Test that retrieving an existing session returns the correct session information."""
+@test("query: list sessions")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test listing sessions with default pagination."""
-# pool = await create_db_pool(dsn=dsn)
-# result = await get_session(
-# session_id=session.id,
-# developer_id=developer_id,
-# connection_pool=pool,
-# )
+ pool = await create_db_pool(dsn=dsn)
+ result, _ = await list_sessions(
+ developer_id=developer_id,
+ limit=10,
+ offset=0,
+ connection_pool=pool,
+ )
-# assert result is not None
-# assert isinstance(result, Session)
+ assert isinstance(result, list)
+ assert len(result) >= 1
+ assert any(s.id == session.id for s in result)
-@test("query: list sessions when none exist sql")
-async def _(dsn=pg_dsn, developer_id=test_developer_id):
- """Test that listing sessions returns a collection of session information."""
+@test("query: list sessions with filters")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test listing sessions with specific filters."""
pool = await create_db_pool(dsn=dsn)
- result = await list_sessions(
+ result, _ = await list_sessions(
developer_id=developer_id,
+ limit=10,
+ offset=0,
+ filters={"situation": "test session"},
connection_pool=pool,
)
assert isinstance(result, list)
assert len(result) >= 1
- assert all(isinstance(session, Session) for session in result)
-
-
-# @test("query: patch session sql")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent):
-# """Test that a session can be successfully patched."""
-
-# pool = await create_db_pool(dsn=dsn)
-# patch_result = await patch_session(
-# developer_id=developer_id,
-# session_id=session.id,
-# data=PatchSessionRequest(
-# agents=[agent.id],
-# situation="patched session",
-# metadata={"test": "metadata"},
-# ),
-# connection_pool=pool,
-# )
-
-# assert patch_result is not None
-# assert isinstance(patch_result, ResourceUpdatedResponse)
-# assert patch_result.updated_at > session.created_at
-
-
-# @test("query: delete session sql")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
-# """Test that a session can be successfully deleted."""
-
-# pool = await create_db_pool(dsn=dsn)
-# delete_result = await delete_session(
-# developer_id=developer_id,
-# session_id=session.id,
-# connection_pool=pool,
-# )
-
-# assert delete_result is not None
-# assert isinstance(delete_result, ResourceDeletedResponse)
-
-# # Verify the session no longer exists
-# with raises(Exception):
-# await get_session(
-# developer_id=developer_id,
-# session_id=session.id,
-# connection_pool=pool,
-# )
-
-
-@test("query: count sessions sql")
-async def _(dsn=pg_dsn, developer_id=test_developer_id):
- """Test that sessions can be counted."""
+ assert all(s.situation == "test session" for s in result)
+
+
+@test("query: count sessions")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test counting the number of sessions for a developer."""
pool = await create_db_pool(dsn=dsn)
- result = await count_sessions(
+ count = await count_sessions(
developer_id=developer_id,
connection_pool=pool,
)
- assert isinstance(result, dict)
- assert "count" in result
- assert isinstance(result["count"], int)
+ assert isinstance(count, int)
+ assert count >= 1
+
+
+@test("query: update session sql")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent
+):
+ """Test that an existing session's information can be successfully updated."""
+
+ pool = await create_db_pool(dsn=dsn)
+ data = UpdateSessionRequest(
+ agents=[agent.id],
+ situation="updated session",
+ )
+ result = await update_session(
+ session_id=session.id,
+ developer_id=developer_id,
+ data=data,
+ connection_pool=pool,
+ )
+
+ assert result is not None
+ assert isinstance(result, ResourceUpdatedResponse)
+ assert result.updated_at > session.created_at
+
+ updated_session = await get_session(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
+ assert updated_session.situation == "updated session"
+ assert set(updated_session.agents) == {agent.id}
+
+
+@test("query: patch session sql")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent
+):
+ """Test that a session can be successfully patched."""
+
+ pool = await create_db_pool(dsn=dsn)
+ data = PatchSessionRequest(
+ agents=[agent.id],
+ situation="patched session",
+ metadata={"test": "metadata"},
+ )
+ result = await patch_session(
+ developer_id=developer_id,
+ session_id=session.id,
+ data=data,
+ connection_pool=pool,
+ )
+
+ assert result is not None
+ assert isinstance(result, ResourceUpdatedResponse)
+ assert result.updated_at > session.created_at
+
+ patched_session = await get_session(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
+ assert patched_session.situation == "patched session"
+ assert set(patched_session.agents) == {agent.id}
+ assert patched_session.metadata == {"test": "metadata"}
+
+
+@test("query: delete session sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test that a session can be successfully deleted."""
+
+ pool = await create_db_pool(dsn=dsn)
+ delete_result = await delete_session(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
+
+ assert delete_result is not None
+ assert isinstance(delete_result, ResourceDeletedResponse)
+
+ with raises(Exception):
+ await get_session(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
diff --git a/integrations-service/integrations/autogen/Sessions.py b/integrations-service/integrations/autogen/Sessions.py
index 460fd25ce..e2a9ce164 100644
--- a/integrations-service/integrations/autogen/Sessions.py
+++ b/integrations-service/integrations/autogen/Sessions.py
@@ -31,6 +31,10 @@ class CreateSessionRequest(BaseModel):
"""
A specific situation that sets the background for this session
"""
+ system_template: str | None = None
+ """
+ System prompt for this session
+ """
render_templates: StrictBool = True
"""
Render system and assistant message content as jinja templates
@@ -51,6 +55,10 @@ class CreateSessionRequest(BaseModel):
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
"""
+ forward_tool_calls: StrictBool = False
+ """
+ Whether to forward tool calls to the model
+ """
recall_options: RecallOptions | None = None
metadata: dict[str, Any] | None = None
@@ -67,6 +75,10 @@ class PatchSessionRequest(BaseModel):
"""
A specific situation that sets the background for this session
"""
+ system_template: str | None = None
+ """
+ System prompt for this session
+ """
render_templates: StrictBool = True
"""
Render system and assistant message content as jinja templates
@@ -87,6 +99,10 @@ class PatchSessionRequest(BaseModel):
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
"""
+ forward_tool_calls: StrictBool = False
+ """
+ Whether to forward tool calls to the model
+ """
recall_options: RecallOptionsUpdate | None = None
metadata: dict[str, Any] | None = None
@@ -121,6 +137,10 @@ class Session(BaseModel):
"""
A specific situation that sets the background for this session
"""
+ system_template: str | None = None
+ """
+ System prompt for this session
+ """
summary: Annotated[str | None, Field(json_schema_extra={"readOnly": True})] = None
"""
Summary (null at the beginning) - generated automatically after every interaction
@@ -145,6 +165,10 @@ class Session(BaseModel):
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
"""
+ forward_tool_calls: StrictBool = False
+ """
+ Whether to forward tool calls to the model
+ """
recall_options: RecallOptions | None = None
id: Annotated[UUID, Field(json_schema_extra={"readOnly": True})]
metadata: dict[str, Any] | None = None
@@ -197,6 +221,10 @@ class UpdateSessionRequest(BaseModel):
"""
A specific situation that sets the background for this session
"""
+ system_template: str | None = None
+ """
+ System prompt for this session
+ """
render_templates: StrictBool = True
"""
Render system and assistant message content as jinja templates
@@ -217,6 +245,10 @@ class UpdateSessionRequest(BaseModel):
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
"""
+ forward_tool_calls: StrictBool = False
+ """
+ Whether to forward tool calls to the model
+ """
recall_options: RecallOptions | None = None
metadata: dict[str, Any] | None = None
@@ -240,6 +272,10 @@ class CreateOrUpdateSessionRequest(CreateSessionRequest):
"""
A specific situation that sets the background for this session
"""
+ system_template: str | None = None
+ """
+ System prompt for this session
+ """
render_templates: StrictBool = True
"""
Render system and assistant message content as jinja templates
@@ -260,6 +296,10 @@ class CreateOrUpdateSessionRequest(CreateSessionRequest):
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
"""
+ forward_tool_calls: StrictBool = False
+ """
+ Whether to forward tool calls to the model
+ """
recall_options: RecallOptions | None = None
metadata: dict[str, Any] | None = None
diff --git a/typespec/sessions/models.tsp b/typespec/sessions/models.tsp
index f15453a5f..720625f3b 100644
--- a/typespec/sessions/models.tsp
+++ b/typespec/sessions/models.tsp
@@ -63,6 +63,9 @@ model Session {
/** A specific situation that sets the background for this session */
situation: string = defaultSessionSystemMessage;
+ /** System prompt for this session */
+ system_template: string | null = null;
+
/** Summary (null at the beginning) - generated automatically after every interaction */
@visibility("read")
summary: string | null = null;
@@ -83,6 +86,9 @@ model Session {
* If a tool call is not made, the model's output will be returned as is. */
auto_run_tools: boolean = false;
+ /** Whether to forward tool calls to the model */
+ forward_tool_calls: boolean = false;
+
recall_options?: RecallOptions | null = null;
...HasId;
diff --git a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
index 9298ab458..d4835a695 100644
--- a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
+++ b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
@@ -3761,10 +3761,12 @@ components:
required:
- id
- situation
+ - system_template
- render_templates
- token_budget
- context_overflow
- auto_run_tools
+ - forward_tool_calls
properties:
id:
$ref: '#/components/schemas/Common.uuid'
@@ -3840,6 +3842,11 @@ components:
{{"---"}}
{%- endfor -%}
{%- endif -%}
+ system_template:
+ type: string
+ nullable: true
+ description: System prompt for this session
+ default: null
render_templates:
type: boolean
description: Render system and assistant message content as jinja templates
@@ -3865,6 +3872,10 @@ components:
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
default: false
+ forward_tool_calls:
+ type: boolean
+ description: Whether to forward tool calls to the model
+ default: false
recall_options:
type: object
allOf:
@@ -3880,10 +3891,12 @@ components:
type: object
required:
- situation
+ - system_template
- render_templates
- token_budget
- context_overflow
- auto_run_tools
+ - forward_tool_calls
properties:
user:
allOf:
@@ -3957,6 +3970,11 @@ components:
{{"---"}}
{%- endfor -%}
{%- endif -%}
+ system_template:
+ type: string
+ nullable: true
+ description: System prompt for this session
+ default: null
render_templates:
type: boolean
description: Render system and assistant message content as jinja templates
@@ -3982,6 +4000,10 @@ components:
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
default: false
+ forward_tool_calls:
+ type: boolean
+ description: Whether to forward tool calls to the model
+ default: false
recall_options:
type: object
allOf:
@@ -4096,6 +4118,11 @@ components:
{{"---"}}
{%- endfor -%}
{%- endif -%}
+ system_template:
+ type: string
+ nullable: true
+ description: System prompt for this session
+ default: null
render_templates:
type: boolean
description: Render system and assistant message content as jinja templates
@@ -4121,6 +4148,10 @@ components:
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
default: false
+ forward_tool_calls:
+ type: boolean
+ description: Whether to forward tool calls to the model
+ default: false
recall_options:
type: object
allOf:
@@ -4189,11 +4220,13 @@ components:
type: object
required:
- situation
+ - system_template
- summary
- render_templates
- token_budget
- context_overflow
- auto_run_tools
+ - forward_tool_calls
- id
- created_at
- updated_at
@@ -4254,6 +4287,11 @@ components:
{{"---"}}
{%- endfor -%}
{%- endif -%}
+ system_template:
+ type: string
+ nullable: true
+ description: System prompt for this session
+ default: null
summary:
type: string
nullable: true
@@ -4285,6 +4323,10 @@ components:
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
default: false
+ forward_tool_calls:
+ type: boolean
+ description: Whether to forward tool calls to the model
+ default: false
recall_options:
type: object
allOf:
@@ -4360,10 +4402,12 @@ components:
type: object
required:
- situation
+ - system_template
- render_templates
- token_budget
- context_overflow
- auto_run_tools
+ - forward_tool_calls
properties:
situation:
type: string
@@ -4421,6 +4465,11 @@ components:
{{"---"}}
{%- endfor -%}
{%- endif -%}
+ system_template:
+ type: string
+ nullable: true
+ description: System prompt for this session
+ default: null
render_templates:
type: boolean
description: Render system and assistant message content as jinja templates
@@ -4446,6 +4495,10 @@ components:
If a tool call is made, the tool's output will be sent back to the model as the model's input.
If a tool call is not made, the model's output will be returned as is.
default: false
+ forward_tool_calls:
+ type: boolean
+ description: Whether to forward tool calls to the model
+ default: false
recall_options:
type: object
allOf:
From db318013484ef0eeab5171b9456c8c221e545867 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Wed, 18 Dec 2024 22:15:29 +0000
Subject: [PATCH 072/274] refactor: Lint agents-api (CI)
---
.../agents_api/queries/agents/create_agent.py | 5 ++---
.../queries/agents/create_or_update_agent.py | 2 +-
.../agents_api/queries/agents/delete_agent.py | 4 ++--
.../agents_api/queries/agents/get_agent.py | 2 +-
.../agents_api/queries/agents/list_agents.py | 2 +-
.../agents_api/queries/agents/patch_agent.py | 2 +-
.../agents_api/queries/agents/update_agent.py | 2 +-
.../queries/developers/get_developer.py | 2 +-
.../queries/entries/create_entries.py | 2 +-
.../queries/entries/list_entries.py | 5 +----
agents-api/agents_api/queries/utils.py | 8 +++++--
agents-api/tests/test_entry_queries.py | 3 +--
agents-api/tests/test_messages_truncation.py | 1 -
agents-api/tests/test_session_queries.py | 22 +++++++++----------
14 files changed, 30 insertions(+), 32 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index bb111b0df..a6b56d84f 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -10,14 +10,13 @@
from sqlglot import parse_one
from uuid_extensions import uuid7
-from ...metrics.counters import increase_counter
-
from ...autogen.openapi_model import Agent, CreateAgentRequest
+from ...metrics.counters import increase_counter
from ..utils import (
generate_canonical_name,
pg_query,
- wrap_in_class,
rewrap_exceptions,
+ wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 6cfb83767..2aa0d1501 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -14,8 +14,8 @@
from ..utils import (
generate_canonical_name,
pg_query,
- wrap_in_class,
rewrap_exceptions,
+ wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index 9c3ee5585..df0f0c325 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -10,12 +10,12 @@
from sqlglot import parse_one
from ...autogen.openapi_model import ResourceDeletedResponse
-from ...metrics.counters import increase_counter
from ...common.utils.datetime import utcnow
+from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
- wrap_in_class,
rewrap_exceptions,
+ wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index dce424771..2cf1ef28d 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -13,8 +13,8 @@
from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
- wrap_in_class,
rewrap_exceptions,
+ wrap_in_class,
)
raw_query = """
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 3698c68f1..306b7465b 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -13,8 +13,8 @@
from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
- wrap_in_class,
rewrap_exceptions,
+ wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 6f9cb3b9c..8d17c9f49 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -13,8 +13,8 @@
from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
- wrap_in_class,
rewrap_exceptions,
+ wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index cd15313a2..fe5e31ac6 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -13,8 +13,8 @@
from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
- wrap_in_class,
rewrap_exceptions,
+ wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
diff --git a/agents-api/agents_api/queries/developers/get_developer.py b/agents-api/agents_api/queries/developers/get_developer.py
index 28be9a4b1..373a2fb36 100644
--- a/agents-api/agents_api/queries/developers/get_developer.py
+++ b/agents-api/agents_api/queries/developers/get_developer.py
@@ -12,8 +12,8 @@
from ..utils import (
partialclass,
pg_query,
- wrap_in_class,
rewrap_exceptions,
+ wrap_in_class,
)
# TODO: Add verify_developer
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index a54104274..4c1f7bfa7 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -166,7 +166,7 @@ async def add_entry_relations(
item.get("is_leaf", False), # $5
]
)
-
+
return [
(
session_exists_query,
diff --git a/agents-api/agents_api/queries/entries/list_entries.py b/agents-api/agents_api/queries/entries/list_entries.py
index 3f4a0699e..1c398f0ab 100644
--- a/agents-api/agents_api/queries/entries/list_entries.py
+++ b/agents-api/agents_api/queries/entries/list_entries.py
@@ -108,8 +108,5 @@ async def list_entries(
[session_id, developer_id],
"fetchrow",
),
- (
- query,
- entry_params
- ),
+ (query, entry_params),
]
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 194cba7bc..73113580d 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -106,7 +106,9 @@ def prepare_pg_query_args(
batch.append(
(
"fetchrow",
- AsyncPGFetchArgs(query=query, args=variables, timeout=query_timeout),
+ AsyncPGFetchArgs(
+ query=query, args=variables, timeout=query_timeout
+ ),
)
)
case _:
@@ -173,7 +175,9 @@ async def wrapper(
print(*args)
print("%" * 100)
- if method_name == "fetchrow" and (len(results) == 0 or results.get("bool") is None):
+ if method_name == "fetchrow" and (
+ len(results) == 0 or results.get("bool") is None
+ ):
raise asyncpg.NoDataFoundError
end = timeit and time.perf_counter()
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index da53ce06d..60a387591 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -3,9 +3,8 @@
It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
"""
-from uuid_extensions import uuid7
-
from fastapi import HTTPException
+from uuid_extensions import uuid7
from ward import raises, test
from agents_api.autogen.openapi_model import CreateEntryRequest
diff --git a/agents-api/tests/test_messages_truncation.py b/agents-api/tests/test_messages_truncation.py
index bb1eaee30..1a6c344e6 100644
--- a/agents-api/tests/test_messages_truncation.py
+++ b/agents-api/tests/test_messages_truncation.py
@@ -1,4 +1,3 @@
-
# from uuid_extensions import uuid7
# from ward import raises, test
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index b85268434..8e512379f 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -7,32 +7,32 @@
from ward import raises, test
from agents_api.autogen.openapi_model import (
- Session,
- CreateSessionRequest,
CreateOrUpdateSessionRequest,
- UpdateSessionRequest,
+ CreateSessionRequest,
PatchSessionRequest,
- ResourceUpdatedResponse,
ResourceDeletedResponse,
+ ResourceUpdatedResponse,
+ Session,
+ UpdateSessionRequest,
)
from agents_api.clients.pg import create_db_pool
from agents_api.queries.sessions import (
count_sessions,
+ create_or_update_session,
+ create_session,
+ delete_session,
get_session,
list_sessions,
- create_session,
- create_or_update_session,
- update_session,
patch_session,
- delete_session,
+ update_session,
)
from tests.fixtures import (
pg_dsn,
- test_developer_id,
- test_developer,
- test_user,
test_agent,
+ test_developer,
+ test_developer_id,
test_session,
+ test_user,
)
From 638fefb6b2a5c79729db03be298f7c47c243de25 Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Wed, 18 Dec 2024 18:18:39 -0500
Subject: [PATCH 073/274] chore: minor refactors: inline parse_one queries, add agents __all__, stub out rewrap_exceptions
---
.../agents_api/queries/agents/__init__.py | 10 +++
.../agents_api/queries/agents/create_agent.py | 15 ++--
.../queries/agents/create_or_update_agent.py | 15 ++--
.../agents_api/queries/agents/delete_agent.py | 20 +++---
.../agents_api/queries/agents/get_agent.py | 17 ++---
.../agents_api/queries/agents/list_agents.py | 13 ++--
.../agents_api/queries/agents/patch_agent.py | 14 ++--
.../agents_api/queries/agents/update_agent.py | 15 ++--
.../queries/entries/create_entries.py | 72 ++++++++++---------
.../queries/entries/delete_entries.py | 54 +++++++-------
.../agents_api/queries/entries/get_history.py | 28 ++++----
.../queries/entries/list_entries.py | 51 +++++++------
12 files changed, 171 insertions(+), 153 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/__init__.py b/agents-api/agents_api/queries/agents/__init__.py
index ebd169040..c0712c47c 100644
--- a/agents-api/agents_api/queries/agents/__init__.py
+++ b/agents-api/agents_api/queries/agents/__init__.py
@@ -19,3 +19,13 @@
from .list_agents import list_agents
from .patch_agent import patch_agent
from .update_agent import update_agent
+
+__all__ = [
+ "create_agent",
+ "create_or_update_agent",
+ "delete_agent",
+ "get_agent",
+ "list_agents",
+ "patch_agent",
+ "update_agent",
+]
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index a6b56d84f..2d8df7978 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -19,10 +19,8 @@
wrap_in_class,
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-raw_query = """
+# Define the raw SQL query
+agent_query = parse_one("""
INSERT INTO agents (
developer_id,
agent_id,
@@ -46,9 +44,7 @@
$9
)
RETURNING *;
-"""
-
-query = parse_one(raw_query).sql(pretty=True)
+""").sql(pretty=True)
# @rewrap_exceptions(
@@ -135,4 +131,7 @@ async def create_agent(
default_settings,
]
- return query, params
+ return (
+ agent_query,
+ params,
+ )
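The parse_one("""...""").sql(pretty=True) idiom validates the SQL via sqlglot at import time and stores a normalized, pretty-printed string. A quick illustration; output shown approximately:

    from sqlglot import parse_one

    agent_query = parse_one("""
    SELECT agent_id, name FROM agents
    WHERE agent_id = $2 AND developer_id = $1
    """).sql(pretty=True)

    print(agent_query)
    # SELECT
    #   agent_id,
    #   name
    # FROM agents
    # WHERE
    #   agent_id = $2 AND developer_id = $1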
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 2aa0d1501..e96b30c77 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -18,10 +18,8 @@
wrap_in_class,
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-raw_query = """
+# Define the raw SQL query
+agent_query = parse_one("""
INSERT INTO agents (
developer_id,
agent_id,
@@ -45,9 +43,7 @@
$9
)
RETURNING *;
-"""
-
-query = parse_one(raw_query).sql(pretty=True)
+""").sql(pretty=True)
# @rewrap_exceptions(
@@ -110,4 +106,7 @@ async def create_or_update_agent(
default_settings,
]
- return (query, params)
+ return (
+ agent_query,
+ params,
+ )
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index df0f0c325..6738374db 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -11,17 +11,14 @@
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
-from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
rewrap_exceptions,
wrap_in_class,
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-raw_query = """
+# Define the raw SQL query
+agent_query = parse_one("""
WITH deleted_docs AS (
DELETE FROM docs
WHERE developer_id = $1
@@ -41,13 +38,10 @@
DELETE FROM agents
WHERE agent_id = $2 AND developer_id = $1
RETURNING developer_id, agent_id;
-"""
-
-
-# Convert the list of queries into a single query string
-query = parse_one(raw_query).sql(pretty=True)
+""").sql(pretty=True)
+# @rewrap_exceptions(
# @rewrap_exceptions(
# {
# psycopg_errors.ForeignKeyViolation: partialclass(
@@ -63,7 +57,6 @@
one=True,
transform=lambda d: {**d, "id": d["agent_id"], "deleted_at": utcnow()},
)
-@increase_counter("delete_agent")
@pg_query
@beartype
async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
@@ -80,4 +73,7 @@ async def delete_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list
# Note: We swap the parameter order because the queries use $1 for developer_id and $2 for agent_id
params = [developer_id, agent_id]
- return (query, params)
+ return (
+ agent_query,
+ params,
+ )
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index 2cf1ef28d..916572db1 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -10,14 +10,14 @@
from sqlglot import parse_one
from ...autogen.openapi_model import Agent
-from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
rewrap_exceptions,
wrap_in_class,
)
-raw_query = """
+# Define the raw SQL query
+agent_query = parse_one("""
SELECT
agent_id,
developer_id,
@@ -34,12 +34,7 @@
agents
WHERE
agent_id = $2 AND developer_id = $1;
-"""
-
-query = parse_one(raw_query).sql(pretty=True)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
+""").sql(pretty=True)
# @rewrap_exceptions(
@@ -53,7 +48,6 @@
# # TODO: Add more exceptions
# )
@wrap_in_class(Agent, one=True, transform=lambda d: {"id": d["agent_id"], **d})
-@increase_counter("get_agent")
@pg_query
@beartype
async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
@@ -68,4 +62,7 @@ async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
tuple[list[str], dict]: A tuple containing the SQL query and its parameters.
"""
- return (query, [developer_id, agent_id])
+ return (
+ agent_query,
+ [developer_id, agent_id],
+ )
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 306b7465b..ce12b32b3 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -10,16 +10,13 @@
from fastapi import HTTPException
from ...autogen.openapi_model import Agent
-from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
rewrap_exceptions,
wrap_in_class,
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
+# Define the raw SQL query
raw_query = """
SELECT
agent_id,
@@ -55,7 +52,6 @@
# # TODO: Add more exceptions
# )
@wrap_in_class(Agent, transform=lambda d: {"id": d["agent_id"], **d})
-@increase_counter("list_agents")
@pg_query
@beartype
async def list_agents(
@@ -87,7 +83,7 @@ async def list_agents(
# Build metadata filter clause if needed
- final_query = raw_query.format(
+ agent_query = raw_query.format(
metadata_filter_query="AND metadata @> $6::jsonb" if metadata_filter else ""
)
@@ -102,4 +98,7 @@ async def list_agents(
if metadata_filter:
params.append(metadata_filter)
- return final_query, params
+ return (
+ agent_query,
+ params,
+ )
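list_agents is the one query left as a plain string because the metadata clause is spliced in at call time. A minimal sketch of that branch with a trimmed-down query; the real one selects the full agent row:

    raw_query = "SELECT agent_id FROM agents WHERE developer_id = $1 {metadata_filter_query}"

    # With a filter, the JSONB containment clause and its parameter are appended.
    with_filter = raw_query.format(metadata_filter_query="AND metadata @> $6::jsonb")
    # Without one, the placeholder collapses to an empty string.
    without_filter = raw_query.format(metadata_filter_query="")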
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 8d17c9f49..7fb63feda 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -17,10 +17,9 @@
wrap_in_class,
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-raw_query = """
+# Define the raw SQL query
+agent_query = parse_one("""
UPDATE agents
SET
name = CASE
@@ -45,9 +44,7 @@
END
WHERE agent_id = $2 AND developer_id = $1
RETURNING *;
-"""
-
-query = parse_one(raw_query).sql(pretty=True)
+""").sql(pretty=True)
# @rewrap_exceptions(
@@ -92,4 +89,7 @@ async def patch_agent(
data.default_settings.model_dump() if data.default_settings else None,
]
- return query, params
+ return (
+ agent_query,
+ params,
+ )
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index fe5e31ac6..79b520cb8 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -17,10 +17,8 @@
wrap_in_class,
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-raw_query = """
+# Define the raw SQL query
+agent_query = parse_one("""
UPDATE agents
SET
metadata = $3,
@@ -30,9 +28,7 @@
default_settings = $7::jsonb
WHERE agent_id = $2 AND developer_id = $1
RETURNING *;
-"""
-
-query = parse_one(raw_query).sql(pretty=True)
+""").sql(pretty=True)
# @rewrap_exceptions(
@@ -77,4 +73,7 @@ async def update_agent(
data.default_settings.model_dump() if data.default_settings else {},
]
- return (query, params)
+ return (
+ agent_query,
+ params,
+ )
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index 4c1f7bfa7..7f6e2d4d7 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -10,7 +10,7 @@
from ...common.utils.datetime import utcnow
from ...common.utils.messages import content_to_json
from ...metrics.counters import increase_counter
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
# Query for checking if the session exists
session_exists_query = """
@@ -53,26 +53,30 @@
"""
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
- status_code=404,
- detail=str(exc),
- ),
- asyncpg.UniqueViolationError: lambda exc: HTTPException(
- status_code=409,
- detail=str(exc),
- ),
- asyncpg.NotNullViolationError: lambda exc: HTTPException(
- status_code=400,
- detail=str(exc),
- ),
- asyncpg.NoDataFoundError: lambda exc: HTTPException(
- status_code=404,
- detail="Session not found",
- ),
- }
-)
+# @rewrap_exceptions(
+# {
+# asyncpg.ForeignKeyViolationError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="Session not found",
+# ),
+# asyncpg.UniqueViolationError: partialclass(
+# HTTPException,
+# status_code=409,
+# detail="Entry already exists",
+# ),
+# asyncpg.NotNullViolationError: partialclass(
+# HTTPException,
+# status_code=400,
+# detail="Not null violation",
+# ),
+# asyncpg.NoDataFoundError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="Session not found",
+# ),
+# }
+# )
@wrap_in_class(
Entry,
transform=lambda d: {
@@ -128,18 +132,20 @@ async def create_entries(
]
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
- status_code=404,
- detail=str(exc),
- ),
- asyncpg.UniqueViolationError: lambda exc: HTTPException(
- status_code=409,
- detail=str(exc),
- ),
- }
-)
+# @rewrap_exceptions(
+# {
+# asyncpg.ForeignKeyViolationError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="Session not found",
+# ),
+# asyncpg.UniqueViolationError: partialclass(
+# HTTPException,
+# status_code=409,
+# detail="Entry already exists",
+# ),
+# }
+# )
@wrap_in_class(Relation)
@increase_counter("add_entry_relations")
@pg_query
diff --git a/agents-api/agents_api/queries/entries/delete_entries.py b/agents-api/agents_api/queries/entries/delete_entries.py
index dfdadb8da..ce1590fd4 100644
--- a/agents-api/agents_api/queries/entries/delete_entries.py
+++ b/agents-api/agents_api/queries/entries/delete_entries.py
@@ -9,7 +9,7 @@
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
# Define the raw SQL query for deleting entries with a developer check
delete_entry_query = parse_one("""
@@ -57,18 +57,20 @@
"""
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
- status_code=404,
- detail="The specified session or developer does not exist.",
- ),
- asyncpg.UniqueViolationError: lambda exc: HTTPException(
- status_code=409,
- detail="The specified session has already been deleted.",
- ),
- }
-)
+# @rewrap_exceptions(
+# {
+# asyncpg.ForeignKeyViolationError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified session or developer does not exist.",
+# ),
+# asyncpg.UniqueViolationError: partialclass(
+# HTTPException,
+# status_code=409,
+# detail="The specified session has already been deleted.",
+# ),
+# }
+# )
@wrap_in_class(
ResourceDeletedResponse,
one=True,
@@ -94,18 +96,20 @@ async def delete_entries_for_session(
]
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
- status_code=404,
- detail="The specified entries, session, or developer does not exist.",
- ),
- asyncpg.UniqueViolationError: lambda exc: HTTPException(
- status_code=409,
- detail="One or more specified entries have already been deleted.",
- ),
- }
-)
+# @rewrap_exceptions(
+# {
+# asyncpg.ForeignKeyViolationError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified entries, session, or developer does not exist.",
+# ),
+# asyncpg.UniqueViolationError: partialclass(
+# HTTPException,
+# status_code=409,
+# detail="One or more specified entries have already been deleted.",
+# ),
+# }
+# )
@wrap_in_class(
ResourceDeletedResponse,
transform=lambda d: {
diff --git a/agents-api/agents_api/queries/entries/get_history.py b/agents-api/agents_api/queries/entries/get_history.py
index 8f0ddf4a1..2c28b4f21 100644
--- a/agents-api/agents_api/queries/entries/get_history.py
+++ b/agents-api/agents_api/queries/entries/get_history.py
@@ -6,7 +6,7 @@
from sqlglot import parse_one
from ...autogen.openapi_model import History
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
# Define the raw SQL query for getting history with a developer check
history_query = parse_one("""
@@ -30,18 +30,20 @@
""").sql(pretty=True)
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
- status_code=404,
- detail=str(exc),
- ),
- asyncpg.UniqueViolationError: lambda exc: HTTPException(
- status_code=404,
- detail=str(exc),
- ),
- }
-)
+# @rewrap_exceptions(
+# {
+# asyncpg.ForeignKeyViolationError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="Session not found",
+# ),
+# asyncpg.UniqueViolationError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="Session not found",
+# ),
+# }
+# )
@wrap_in_class(
History,
one=True,
diff --git a/agents-api/agents_api/queries/entries/list_entries.py b/agents-api/agents_api/queries/entries/list_entries.py
index 1c398f0ab..657f5563b 100644
--- a/agents-api/agents_api/queries/entries/list_entries.py
+++ b/agents-api/agents_api/queries/entries/list_entries.py
@@ -7,7 +7,7 @@
from ...autogen.openapi_model import Entry
from ...metrics.counters import increase_counter
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
# Query for checking if the session exists
session_exists_query = """
@@ -48,26 +48,30 @@
"""
-@rewrap_exceptions(
- {
- asyncpg.ForeignKeyViolationError: lambda exc: HTTPException(
- status_code=404,
- detail=str(exc),
- ),
- asyncpg.UniqueViolationError: lambda exc: HTTPException(
- status_code=409,
- detail=str(exc),
- ),
- asyncpg.NotNullViolationError: lambda exc: HTTPException(
- status_code=400,
- detail=str(exc),
- ),
- asyncpg.NoDataFoundError: lambda exc: HTTPException(
- status_code=404,
- detail="Session not found",
- ),
- }
-)
+# @rewrap_exceptions(
+# {
+# asyncpg.ForeignKeyViolationError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="Session not found",
+# ),
+# asyncpg.UniqueViolationError: partialclass(
+# HTTPException,
+# status_code=409,
+# detail="Entry already exists",
+# ),
+# asyncpg.NotNullViolationError: partialclass(
+# HTTPException,
+# status_code=400,
+# detail="Entry is required",
+# ),
+# asyncpg.NoDataFoundError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="Session not found",
+# ),
+# }
+# )
@wrap_in_class(Entry)
@increase_counter("list_entries")
@pg_query
@@ -108,5 +112,8 @@ async def list_entries(
[session_id, developer_id],
"fetchrow",
),
- (query, entry_params),
+ (
+ query,
+ entry_params,
+ ),
]
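Note: the commented-out handlers in this patch replace per-exception lambdas with `partialclass(HTTPException, status_code=..., detail=...)`. The helper itself lives in `..utils` and is not shown in this series; a minimal sketch consistent with how it is called:

    from functools import partialmethod

    def partialclass(cls, *args, **kwargs):
        # Returns a subclass whose __init__ has the given arguments pre-bound,
        # so partialclass(HTTPException, status_code=404, detail="...")()
        # behaves like HTTPException(status_code=404, detail="...").
        class _Partial(cls):
            __init__ = partialmethod(cls.__init__, *args, **kwargs)

        return _Partial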
From 2ba91ad2eeb66ff039d184dd28324e8f99672bc0 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Wed, 18 Dec 2024 23:19:36 +0000
Subject: [PATCH 074/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/agents/patch_agent.py | 1 -
agents-api/agents_api/queries/entries/create_entries.py | 2 +-
agents-api/agents_api/queries/entries/delete_entries.py | 2 +-
agents-api/agents_api/queries/entries/get_history.py | 2 +-
agents-api/agents_api/queries/entries/list_entries.py | 2 +-
5 files changed, 4 insertions(+), 5 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 7fb63feda..2325ab33f 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -17,7 +17,6 @@
wrap_in_class,
)
-
# Define the raw SQL query
agent_query = parse_one("""
UPDATE agents
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index 7f6e2d4d7..72de8db90 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -10,7 +10,7 @@
from ...common.utils.datetime import utcnow
from ...common.utils.messages import content_to_json
from ...metrics.counters import increase_counter
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Query for checking if the session exists
session_exists_query = """
diff --git a/agents-api/agents_api/queries/entries/delete_entries.py b/agents-api/agents_api/queries/entries/delete_entries.py
index ce1590fd4..4539ae4df 100644
--- a/agents-api/agents_api/queries/entries/delete_entries.py
+++ b/agents-api/agents_api/queries/entries/delete_entries.py
@@ -9,7 +9,7 @@
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for deleting entries with a developer check
delete_entry_query = parse_one("""
diff --git a/agents-api/agents_api/queries/entries/get_history.py b/agents-api/agents_api/queries/entries/get_history.py
index 2c28b4f21..7ad940c0a 100644
--- a/agents-api/agents_api/queries/entries/get_history.py
+++ b/agents-api/agents_api/queries/entries/get_history.py
@@ -6,7 +6,7 @@
from sqlglot import parse_one
from ...autogen.openapi_model import History
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for getting history with a developer check
history_query = parse_one("""
diff --git a/agents-api/agents_api/queries/entries/list_entries.py b/agents-api/agents_api/queries/entries/list_entries.py
index 657f5563b..4920e39c1 100644
--- a/agents-api/agents_api/queries/entries/list_entries.py
+++ b/agents-api/agents_api/queries/entries/list_entries.py
@@ -7,7 +7,7 @@
from ...autogen.openapi_model import Entry
from ...metrics.counters import increase_counter
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Query for checking if the session exists
session_exists_query = """
From 30b57633aafd9b5152fe88cfe104ba60c03fe6bc Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Thu, 19 Dec 2024 10:11:48 +0530
Subject: [PATCH 075/274] fix(memory-store): Change association structure of
files and docs
Signed-off-by: Diwank Singh Tomer
---
memory-store/migrations/000005_files.down.sql | 10 ++--
memory-store/migrations/000005_files.up.sql | 56 ++++++++++++-------
memory-store/migrations/000006_docs.down.sql | 42 +++++---------
memory-store/migrations/000006_docs.up.sql | 56 +++++++++++++------
4 files changed, 93 insertions(+), 71 deletions(-)
diff --git a/memory-store/migrations/000005_files.down.sql b/memory-store/migrations/000005_files.down.sql
index 80bf6fecd..c582f7b67 100644
--- a/memory-store/migrations/000005_files.down.sql
+++ b/memory-store/migrations/000005_files.down.sql
@@ -1,14 +1,12 @@
BEGIN;
--- Drop agent_files table and its dependencies
-DROP TABLE IF EXISTS agent_files;
-
--- Drop user_files table and its dependencies
-DROP TABLE IF EXISTS user_files;
+-- Drop file_owners table and its dependencies
+DROP TRIGGER IF EXISTS trg_validate_file_owner ON file_owners;
+DROP FUNCTION IF EXISTS validate_file_owner();
+DROP TABLE IF EXISTS file_owners;
-- Drop files table and its dependencies
DROP TRIGGER IF EXISTS trg_files_updated_at ON files;
-
DROP TABLE IF EXISTS files;
COMMIT;
diff --git a/memory-store/migrations/000005_files.up.sql b/memory-store/migrations/000005_files.up.sql
index ef4c22b3d..40a2cbccf 100644
--- a/memory-store/migrations/000005_files.up.sql
+++ b/memory-store/migrations/000005_files.up.sql
@@ -56,30 +56,48 @@ DO $$ BEGIN
END IF;
END $$;
--- Create the user_files table
-CREATE TABLE IF NOT EXISTS user_files (
+-- Create the file_owners table
+CREATE TABLE IF NOT EXISTS file_owners (
developer_id UUID NOT NULL,
- user_id UUID NOT NULL,
file_id UUID NOT NULL,
- CONSTRAINT pk_user_files PRIMARY KEY (developer_id, user_id, file_id),
- CONSTRAINT fk_user_files_user FOREIGN KEY (developer_id, user_id) REFERENCES users (developer_id, user_id),
- CONSTRAINT fk_user_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id)
+ owner_type TEXT NOT NULL, -- 'user' or 'agent'
+ owner_id UUID NOT NULL,
+ CONSTRAINT pk_file_owners PRIMARY KEY (developer_id, file_id),
+ CONSTRAINT fk_file_owners_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id),
+ CONSTRAINT ct_file_owners_owner_type CHECK (owner_type IN ('user', 'agent'))
);
--- Create index if it doesn't exist
-CREATE INDEX IF NOT EXISTS idx_user_files_user ON user_files (developer_id, user_id);
+-- Create indexes
+CREATE INDEX IF NOT EXISTS idx_file_owners_owner
+ ON file_owners (developer_id, owner_type, owner_id);
--- Create the agent_files table
-CREATE TABLE IF NOT EXISTS agent_files (
- developer_id UUID NOT NULL,
- agent_id UUID NOT NULL,
- file_id UUID NOT NULL,
- CONSTRAINT pk_agent_files PRIMARY KEY (developer_id, agent_id, file_id),
- CONSTRAINT fk_agent_files_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id),
- CONSTRAINT fk_agent_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id)
-);
+-- Create function to validate owner reference
+CREATE OR REPLACE FUNCTION validate_file_owner()
+RETURNS TRIGGER AS $$
+BEGIN
+ IF NEW.owner_type = 'user' THEN
+ IF NOT EXISTS (
+ SELECT 1 FROM users
+ WHERE developer_id = NEW.developer_id AND user_id = NEW.owner_id
+ ) THEN
+ RAISE EXCEPTION 'Invalid user reference';
+ END IF;
+ ELSIF NEW.owner_type = 'agent' THEN
+ IF NOT EXISTS (
+ SELECT 1 FROM agents
+ WHERE developer_id = NEW.developer_id AND agent_id = NEW.owner_id
+ ) THEN
+ RAISE EXCEPTION 'Invalid agent reference';
+ END IF;
+ END IF;
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
--- Create index if it doesn't exist
-CREATE INDEX IF NOT EXISTS idx_agent_files_agent ON agent_files (developer_id, agent_id);
+-- Create trigger for validation
+CREATE TRIGGER trg_validate_file_owner
+BEFORE INSERT OR UPDATE ON file_owners
+FOR EACH ROW
+EXECUTE FUNCTION validate_file_owner();
COMMIT;
\ No newline at end of file
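Note: a BEFORE ROW trigger fires ahead of foreign-key checks, so an owner row pointing at a missing user or agent is rejected with the custom message from `validate_file_owner()`. A sketch of the observable behavior with asyncpg (DSN and UUIDs hypothetical):

    import asyncio
    import uuid

    import asyncpg
    from asyncpg.exceptions import RaiseError

    async def main() -> None:
        conn = await asyncpg.connect("postgresql://localhost/memory_store")  # hypothetical DSN
        try:
            # owner_id does not exist in users, so validate_file_owner() raises
            # before the row (or its FK checks) is ever evaluated.
            await conn.execute(
                """
                INSERT INTO file_owners (developer_id, file_id, owner_type, owner_id)
                VALUES ($1, $2, 'user', $3)
                """,
                uuid.uuid4(), uuid.uuid4(), uuid.uuid4(),
            )
        except RaiseError as exc:  # plpgsql RAISE EXCEPTION surfaces as RaiseError
            print(exc)  # Invalid user reference
        finally:
            await conn.close()

    asyncio.run(main())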
diff --git a/memory-store/migrations/000006_docs.down.sql b/memory-store/migrations/000006_docs.down.sql
index 468b1b483..ea67b0005 100644
--- a/memory-store/migrations/000006_docs.down.sql
+++ b/memory-store/migrations/000006_docs.down.sql
@@ -1,41 +1,27 @@
BEGIN;
+-- Drop doc_owners table and its dependencies
+DROP TRIGGER IF EXISTS trg_validate_doc_owner ON doc_owners;
+DROP FUNCTION IF EXISTS validate_doc_owner();
+DROP TABLE IF EXISTS doc_owners;
+
+-- Drop docs table and its dependencies
+DROP TRIGGER IF EXISTS trg_docs_search_tsv ON docs;
+DROP TRIGGER IF EXISTS trg_docs_updated_at ON docs;
+DROP FUNCTION IF EXISTS docs_update_search_tsv();
+
-- Drop indexes
DROP INDEX IF EXISTS idx_docs_content_trgm;
-
DROP INDEX IF EXISTS idx_docs_title_trgm;
-
DROP INDEX IF EXISTS idx_docs_search_tsv;
-
DROP INDEX IF EXISTS idx_docs_metadata;
-
-DROP INDEX IF EXISTS idx_agent_docs_agent;
-
-DROP INDEX IF EXISTS idx_user_docs_user;
-
DROP INDEX IF EXISTS idx_docs_developer;
-
DROP INDEX IF EXISTS idx_docs_id_sorted;
--- Drop triggers
-DROP TRIGGER IF EXISTS trg_docs_search_tsv ON docs;
-
-DROP TRIGGER IF EXISTS trg_docs_updated_at ON docs;
-
--- Drop the constraint that depends on is_valid_language function
-ALTER TABLE IF EXISTS docs
-DROP CONSTRAINT IF EXISTS ct_docs_valid_language;
-
--- Drop functions
-DROP FUNCTION IF EXISTS docs_update_search_tsv ();
-
-DROP FUNCTION IF EXISTS is_valid_language (text);
-
--- Drop tables (in correct order due to foreign key constraints)
-DROP TABLE IF EXISTS agent_docs;
-
-DROP TABLE IF EXISTS user_docs;
-
+-- Drop docs table
DROP TABLE IF EXISTS docs;
+-- Drop language validation function
+DROP FUNCTION IF EXISTS is_valid_language(text);
+
COMMIT;
diff --git a/memory-store/migrations/000006_docs.up.sql b/memory-store/migrations/000006_docs.up.sql
index 5b532bbef..193fae122 100644
--- a/memory-store/migrations/000006_docs.up.sql
+++ b/memory-store/migrations/000006_docs.up.sql
@@ -63,31 +63,51 @@ BEGIN
END IF;
END $$;
--- Create the user_docs table
-CREATE TABLE IF NOT EXISTS user_docs (
+-- Create the doc_owners table
+CREATE TABLE IF NOT EXISTS doc_owners (
developer_id UUID NOT NULL,
- user_id UUID NOT NULL,
doc_id UUID NOT NULL,
- CONSTRAINT pk_user_docs PRIMARY KEY (developer_id, user_id, doc_id),
- CONSTRAINT fk_user_docs_user FOREIGN KEY (developer_id, user_id) REFERENCES users (developer_id, user_id),
- CONSTRAINT fk_user_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id)
+ owner_type TEXT NOT NULL, -- 'user' or 'agent'
+ owner_id UUID NOT NULL,
+ CONSTRAINT pk_doc_owners PRIMARY KEY (developer_id, doc_id),
+ CONSTRAINT fk_doc_owners_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id),
+ CONSTRAINT ct_doc_owners_owner_type CHECK (owner_type IN ('user', 'agent'))
);
--- Create the agent_docs table
-CREATE TABLE IF NOT EXISTS agent_docs (
- developer_id UUID NOT NULL,
- agent_id UUID NOT NULL,
- doc_id UUID NOT NULL,
- CONSTRAINT pk_agent_docs PRIMARY KEY (developer_id, agent_id, doc_id),
- CONSTRAINT fk_agent_docs_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id),
- CONSTRAINT fk_agent_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id)
-);
+-- Create indexes
+CREATE INDEX IF NOT EXISTS idx_doc_owners_owner
+ ON doc_owners (developer_id, owner_type, owner_id);
--- Create indexes if not exists
-CREATE INDEX IF NOT EXISTS idx_user_docs_user ON user_docs (developer_id, user_id);
+-- Create function to validate owner reference
+CREATE OR REPLACE FUNCTION validate_doc_owner()
+RETURNS TRIGGER AS $$
+BEGIN
+ IF NEW.owner_type = 'user' THEN
+ IF NOT EXISTS (
+ SELECT 1 FROM users
+ WHERE developer_id = NEW.developer_id AND user_id = NEW.owner_id
+ ) THEN
+ RAISE EXCEPTION 'Invalid user reference';
+ END IF;
+ ELSIF NEW.owner_type = 'agent' THEN
+ IF NOT EXISTS (
+ SELECT 1 FROM agents
+ WHERE developer_id = NEW.developer_id AND agent_id = NEW.owner_id
+ ) THEN
+ RAISE EXCEPTION 'Invalid agent reference';
+ END IF;
+ END IF;
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
-CREATE INDEX IF NOT EXISTS idx_agent_docs_agent ON agent_docs (developer_id, agent_id);
+-- Create trigger for validation
+CREATE TRIGGER trg_validate_doc_owner
+BEFORE INSERT OR UPDATE ON doc_owners
+FOR EACH ROW
+EXECUTE FUNCTION validate_doc_owner();
+-- Create indexes if not exists
CREATE INDEX IF NOT EXISTS idx_docs_metadata ON docs USING GIN (metadata);
-- Enable necessary PostgreSQL extensions
From 116edf8d3c57558ea57409521996f018b163712a Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Wed, 18 Dec 2024 23:43:07 -0500
Subject: [PATCH 076/274] wip(agents-api): Add file SQL queries
---
.../agents_api/queries/files/__init__.py | 21 +++
.../agents_api/queries/files/create_file.py | 150 ++++++++++++++++
.../agents_api/queries/files/delete_file.py | 118 +++++++++++++
.../agents_api/queries/files/get_file.py | 69 ++++++++
.../agents_api/queries/files/list_files.py | 161 ++++++++++++++++++
agents-api/tests/test_files_queries.py | 73 +++++---
6 files changed, 567 insertions(+), 25 deletions(-)
create mode 100644 agents-api/agents_api/queries/files/__init__.py
create mode 100644 agents-api/agents_api/queries/files/create_file.py
create mode 100644 agents-api/agents_api/queries/files/delete_file.py
create mode 100644 agents-api/agents_api/queries/files/get_file.py
create mode 100644 agents-api/agents_api/queries/files/list_files.py
diff --git a/agents-api/agents_api/queries/files/__init__.py b/agents-api/agents_api/queries/files/__init__.py
new file mode 100644
index 000000000..1da09114a
--- /dev/null
+++ b/agents-api/agents_api/queries/files/__init__.py
@@ -0,0 +1,21 @@
+"""
+The `files` module within the `queries` package provides SQL query functions for managing files
+in the PostgreSQL database. This includes operations for:
+
+- Creating new files
+- Retrieving file details
+- Listing files with filtering and pagination
+- Deleting files and their associations
+"""
+
+from .create_file import create_file
+from .delete_file import delete_file
+from .get_file import get_file
+from .list_files import list_files
+
+__all__ = [
+ "create_file",
+ "delete_file",
+ "get_file",
+ "list_files"
+]
\ No newline at end of file
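Note: given the `__all__` above, downstream code imports the four query functions straight from the package; a usage sketch:

    # The public surface re-exported via __all__ above:
    from agents_api.queries.files import create_file, delete_file, get_file, list_files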
diff --git a/agents-api/agents_api/queries/files/create_file.py b/agents-api/agents_api/queries/files/create_file.py
new file mode 100644
index 000000000..77e065433
--- /dev/null
+++ b/agents-api/agents_api/queries/files/create_file.py
@@ -0,0 +1,150 @@
+"""
+This module contains the functionality for creating files in the PostgreSQL database.
+It includes functions to construct and execute SQL queries for inserting new file records.
+"""
+
+from typing import Any, Literal
+from uuid import UUID
+
+from beartype import beartype
+from sqlglot import parse_one
+from uuid_extensions import uuid7
+import asyncpg
+from fastapi import HTTPException
+import base64
+import hashlib
+
+from ...autogen.openapi_model import CreateFileRequest, File
+from ...metrics.counters import increase_counter
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+
+# Create file
+file_query = parse_one("""
+INSERT INTO files (
+ developer_id,
+ file_id,
+ name,
+ description,
+ mime_type,
+ size,
+ hash,
+)
+VALUES (
+ $1, -- developer_id
+ $2, -- file_id
+ $3, -- name
+ $4, -- description
+ $5, -- mime_type
+ $6, -- size
+ $7, -- hash
+)
+RETURNING *;
+""").sql(pretty=True)
+
+# Create user file association
+user_file_query = parse_one("""
+INSERT INTO user_files (
+ developer_id,
+ user_id,
+ file_id
+)
+VALUES ($1, $2, $3)
+ON CONFLICT (developer_id, user_id, file_id) DO NOTHING; -- Uses primary key index
+""").sql(pretty=True)
+
+# Create agent file association
+agent_file_query = parse_one("""
+INSERT INTO agent_files (
+ developer_id,
+ agent_id,
+ file_id
+)
+VALUES ($1, $2, $3)
+ON CONFLICT (developer_id, agent_id, file_id) DO NOTHING; -- Uses primary key index
+""").sql(pretty=True)
+
+# Add error handling decorator
+# @rewrap_exceptions(
+# {
+# asyncpg.UniqueViolationError: partialclass(
+# HTTPException,
+# status_code=409,
+# detail="A file with this name already exists for this developer",
+# ),
+# asyncpg.NoDataFoundError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified owner does not exist",
+# ),
+# asyncpg.ForeignKeyViolationError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="The specified developer does not exist",
+# ),
+# }
+# )
+@wrap_in_class(
+ File,
+ one=True,
+ transform=lambda d: {
+ **d,
+ "id": d["file_id"],
+ "content": "DUMMY: NEED TO FETCH CONTENT FROM BLOB STORAGE",
+ },
+)
+@increase_counter("create_file")
+@pg_query
+@beartype
+async def create_file(
+ *,
+ developer_id: UUID,
+ file_id: UUID | None = None,
+ data: CreateFileRequest,
+ owner_type: Literal["user", "agent"] | None = None,
+ owner_id: UUID | None = None,
+) -> list[tuple[str, list] | tuple[str, list, str]]:
+ """
+ Constructs and executes SQL queries to create a new file and optionally associate it with an owner.
+
+ Parameters:
+ developer_id (UUID): The unique identifier for the developer.
+ file_id (UUID | None): Optional unique identifier for the file.
+ data (CreateFileRequest): The file data to insert.
+ owner_type (Literal["user", "agent"] | None): Optional type of owner
+ owner_id (UUID | None): Optional ID of the owner
+
+ Returns:
+ list[tuple[str, list] | tuple[str, list, str]]: List of SQL queries, their parameters, and fetch type
+ """
+ file_id = file_id or uuid7()
+
+ # Calculate size and hash
+ content_bytes = base64.b64decode(data.content)
+ data.size = len(content_bytes)
+ data.hash = hashlib.sha256(content_bytes).digest()
+
+ # Base file parameters
+ file_params = [
+ developer_id,
+ file_id,
+ data.name,
+ data.description,
+ data.mime_type,
+ data.size,
+ data.hash,
+ ]
+
+ queries = []
+
+ # Create the file
+ queries.append((file_query, file_params))
+
+ # Create the association only if both owner_type and owner_id are provided
+ if owner_type and owner_id:
+ assoc_params = [developer_id, owner_id, file_id]
+ if owner_type == "user":
+ queries.append((user_file_query, assoc_params))
+ else: # agent
+ queries.append((agent_file_query, assoc_params))
+
+ return queries
\ No newline at end of file
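Note: `create_file` derives `size` and `hash` from the base64 payload before binding parameters. The same arithmetic, using the sample payload that appears in the tests later in this patch:

    import base64
    import hashlib

    content = "eyJzYW1wbGUiOiAidGVzdCJ9"  # base64 for '{"sample": "test"}'
    content_bytes = base64.b64decode(content)

    size = len(content_bytes)  # 18 bytes, bound as $6
    digest = hashlib.sha256(content_bytes).digest()  # 32-byte raw digest, bound as $7

    print(size, digest.hex())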
diff --git a/agents-api/agents_api/queries/files/delete_file.py b/agents-api/agents_api/queries/files/delete_file.py
new file mode 100644
index 000000000..d37e6f3e8
--- /dev/null
+++ b/agents-api/agents_api/queries/files/delete_file.py
@@ -0,0 +1,118 @@
+"""
+This module contains the functionality for deleting files from the PostgreSQL database.
+It constructs and executes SQL queries to remove file records and associated data.
+"""
+
+from typing import Literal
+from uuid import UUID
+
+from beartype import beartype
+from sqlglot import parse_one
+import asyncpg
+from fastapi import HTTPException
+
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
+from ...metrics.counters import increase_counter
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+
+# Simple query to delete file (when no associations exist)
+delete_file_query = parse_one("""
+DELETE FROM files
+WHERE developer_id = $1
+AND file_id = $2
+AND NOT EXISTS (
+ SELECT 1
+ FROM user_files uf
+ WHERE uf.file_id = $2
+ LIMIT 1
+)
+AND NOT EXISTS (
+ SELECT 1
+ FROM agent_files af
+ WHERE af.file_id = $2
+ LIMIT 1
+)
+RETURNING file_id;
+""").sql(pretty=True)
+
+# Query to delete owner's association
+delete_user_assoc_query = parse_one("""
+DELETE FROM user_files
+WHERE developer_id = $1
+AND file_id = $2
+AND user_id = $3
+RETURNING file_id;
+""").sql(pretty=True)
+
+delete_agent_assoc_query = parse_one("""
+DELETE FROM agent_files
+WHERE developer_id = $1
+AND file_id = $2
+AND agent_id = $3
+RETURNING file_id;
+""").sql(pretty=True)
+
+
+# @rewrap_exceptions(
+# {
+# asyncpg.NoDataFoundError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="File not found",
+# ),
+# }
+# )
+@wrap_in_class(
+ ResourceDeletedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": d["file_id"],
+ "deleted_at": utcnow(),
+ "jobs": [],
+ },
+)
+@increase_counter("delete_file")
+@pg_query
+@beartype
+async def delete_file(
+ *,
+ file_id: UUID,
+ developer_id: UUID,
+ owner_id: UUID | None = None,
+ owner_type: Literal["user", "agent"] | None = None,
+) -> list[tuple[str, list] | tuple[str, list, str]]:
+ """
+ Deletes a file and/or its association using simple, efficient queries.
+
+ If owner details provided:
+ 1. Deletes the owner's association
+ 2. Checks for remaining associations
+ 3. Deletes file if no associations remain
+ If no owner details:
+ - Deletes file only if it has no associations
+
+ Args:
+ file_id (UUID): The UUID of the file to be deleted.
+ developer_id (UUID): The UUID of the developer owning the file.
+ owner_id (UUID | None): Optional owner ID to verify ownership
+ owner_type (str | None): Optional owner type to verify ownership
+
+ Returns:
+ list[tuple[str, list] | tuple[str, list, str]]: List of SQL queries, their parameters, and fetch type
+ """
+ queries = []
+
+ if owner_id and owner_type:
+ # Delete specific association
+ assoc_params = [developer_id, file_id, owner_id]
+ assoc_query = delete_user_assoc_query if owner_type == "user" else delete_agent_assoc_query
+ queries.append((assoc_query, assoc_params))
+
+ # If no associations, delete file
+ queries.append((delete_file_query, [developer_id, file_id]))
+ else:
+ # Try to delete file if it has no associations
+ queries.append((delete_file_query, [developer_id, file_id]))
+
+ return queries
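Note: because `delete_file` is wrapped by `@pg_query`, callers pass a `connection_pool` and the statements above run in order: the association delete first, then the guarded delete, which removes the file row only once both NOT EXISTS checks pass. A hypothetical invocation in the style of the tests in this series:

    # Inside an async test; `pool` comes from create_db_pool(dsn=dsn) and the
    # ids are hypothetical fixture values.
    await delete_file(
        file_id=file.id,
        developer_id=developer_id,
        owner_id=user.id,
        owner_type="user",
        connection_pool=pool,
    )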
diff --git a/agents-api/agents_api/queries/files/get_file.py b/agents-api/agents_api/queries/files/get_file.py
new file mode 100644
index 000000000..3143b8ff0
--- /dev/null
+++ b/agents-api/agents_api/queries/files/get_file.py
@@ -0,0 +1,69 @@
+"""
+This module contains the functionality for retrieving a single file from the PostgreSQL database.
+It constructs and executes SQL queries to fetch file details based on file ID and developer ID.
+"""
+
+from uuid import UUID
+
+from beartype import beartype
+from sqlglot import parse_one
+from fastapi import HTTPException
+import asyncpg
+
+from ...autogen.openapi_model import File
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+
+# Define the raw SQL query
+file_query = parse_one("""
+SELECT
+ file_id, -- Only select needed columns
+ developer_id,
+ name,
+ description,
+ mime_type,
+ size,
+ hash,
+ created_at,
+ updated_at
+FROM files
+WHERE developer_id = $1 -- Order matches composite index (developer_id, file_id)
+ AND file_id = $2 -- Using both parts of the index
+LIMIT 1; -- Early termination once found
+""").sql(pretty=True)
+
+@rewrap_exceptions(
+ {
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="File not found",
+ ),
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Developer not found",
+ ),
+ }
+)
+@wrap_in_class(File, one=True, transform=lambda d: {"id": d["file_id"], **d})
+@pg_query
+@beartype
+async def get_file(*, file_id: UUID, developer_id: UUID) -> tuple[str, list]:
+ """
+ Constructs the SQL query to retrieve a file's details.
+ Uses composite index on (developer_id, file_id) for efficient lookup.
+
+ Args:
+ file_id (UUID): The UUID of the file to retrieve.
+ developer_id (UUID): The UUID of the developer owning the file.
+
+ Returns:
+ tuple[str, list]: A tuple containing the SQL query and its parameters.
+
+ Raises:
+ HTTPException: If file or developer not found (404)
+ """
+ return (
+ file_query,
+ [developer_id, file_id], # Order matches index columns
+ )
\ No newline at end of file
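Note: `get_file` is the one query in this patch whose `@rewrap_exceptions` decorator is active rather than commented out. Its implementation is in `..utils` and not part of these patches; a minimal sketch consistent with the call sites, where each mapping value is a zero-argument exception factory such as the `partialclass` results:

    from functools import wraps

    def rewrap_exceptions(mapping):
        # Re-raise known database errors as the pre-configured HTTP errors in
        # `mapping` (sketch; the real helper may also match exception subclasses).
        def decorator(fn):
            @wraps(fn)
            async def wrapper(*args, **kwargs):
                try:
                    return await fn(*args, **kwargs)
                except tuple(mapping) as exc:
                    raise mapping[type(exc)]() from exc

            return wrapper

        return decorator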
diff --git a/agents-api/agents_api/queries/files/list_files.py b/agents-api/agents_api/queries/files/list_files.py
new file mode 100644
index 000000000..a01f74214
--- /dev/null
+++ b/agents-api/agents_api/queries/files/list_files.py
@@ -0,0 +1,161 @@
+"""
+This module contains the functionality for listing files from the PostgreSQL database.
+It constructs and executes SQL queries to fetch a list of files based on developer ID with pagination.
+"""
+
+from typing import Any, Literal
+from uuid import UUID
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import File
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+
+# Query to list all files for a developer (uses developer_id index)
+developer_files_query = parse_one("""
+SELECT
+ file_id,
+ developer_id,
+ name,
+ description,
+ mime_type,
+ size,
+ hash,
+ created_at,
+ updated_at
+FROM files
+WHERE developer_id = $1
+ORDER BY
+ CASE
+ WHEN $4 = 'created_at' AND $5 = 'asc' THEN created_at
+ WHEN $4 = 'created_at' AND $5 = 'desc' THEN created_at
+ WHEN $4 = 'updated_at' AND $5 = 'asc' THEN updated_at
+ WHEN $4 = 'updated_at' AND $5 = 'desc' THEN updated_at
+ END DESC NULLS LAST
+LIMIT $2
+OFFSET $3;
+""").sql(pretty=True)
+
+# Query to list files for a specific user (uses composite indexes)
+user_files_query = parse_one("""
+SELECT
+ f.file_id,
+ f.developer_id,
+ f.name,
+ f.description,
+ f.mime_type,
+ f.size,
+ f.hash,
+ f.created_at,
+ f.updated_at
+FROM user_files uf
+JOIN files f USING (developer_id, file_id)
+WHERE uf.developer_id = $1
+AND uf.user_id = $6
+ORDER BY
+ CASE
+ WHEN $4 = 'created_at' AND $5 = 'asc' THEN f.created_at
+ WHEN $4 = 'created_at' AND $5 = 'desc' THEN f.created_at
+ WHEN $4 = 'updated_at' AND $5 = 'asc' THEN f.updated_at
+ WHEN $4 = 'updated_at' AND $5 = 'desc' THEN f.updated_at
+ END DESC NULLS LAST
+LIMIT $2
+OFFSET $3;
+""").sql(pretty=True)
+
+# Query to list files for a specific agent (uses composite indexes)
+agent_files_query = parse_one("""
+SELECT
+ f.file_id,
+ f.developer_id,
+ f.name,
+ f.description,
+ f.mime_type,
+ f.size,
+ f.hash,
+ f.created_at,
+ f.updated_at
+FROM agent_files af
+JOIN files f USING (developer_id, file_id)
+WHERE af.developer_id = $1
+AND af.agent_id = $6
+ORDER BY
+ CASE
+ WHEN $4 = 'created_at' AND $5 = 'asc' THEN f.created_at
+ WHEN $4 = 'created_at' AND $5 = 'desc' THEN f.created_at
+ WHEN $4 = 'updated_at' AND $5 = 'asc' THEN f.updated_at
+ WHEN $4 = 'updated_at' AND $5 = 'desc' THEN f.updated_at
+ END DESC NULLS LAST
+LIMIT $2
+OFFSET $3;
+""").sql(pretty=True)
+
+@wrap_in_class(
+ File,
+ one=True,
+ transform=lambda d: {
+ **d,
+ "content": "DUMMY: NEED TO FETCH CONTENT FROM BLOB STORAGE",
+ },
+)
+@pg_query
+@beartype
+async def list_files(
+ *,
+ developer_id: UUID,
+ owner_id: UUID | None = None,
+ owner_type: Literal["user", "agent"] | None = None,
+ limit: int = 100,
+ offset: int = 0,
+ sort_by: Literal["created_at", "updated_at"] = "created_at",
+ direction: Literal["asc", "desc"] = "desc",
+) -> tuple[str, list]:
+ """
+ Lists files with optimized queries for two cases:
+ 1. Owner specified: Returns files associated with that owner
+ 2. No owner: Returns all files for the developer
+
+ Args:
+ developer_id: UUID of the developer
+ owner_id: Optional UUID of the owner (user or agent)
+ owner_type: Optional type of owner ("user" or "agent")
+ limit: Maximum number of records to return (1-100)
+ offset: Number of records to skip
+ sort_by: Field to sort by
+ direction: Sort direction ('asc' or 'desc')
+
+ Returns:
+ Tuple of (query, params)
+
+ Raises:
+ HTTPException: If parameters are invalid
+ """
+ # Validate parameters
+ if direction.lower() not in ["asc", "desc"]:
+ raise HTTPException(status_code=400, detail="Invalid sort direction")
+
+ if limit > 100 or limit < 1:
+ raise HTTPException(status_code=400, detail="Limit must be between 1 and 100")
+
+ if offset < 0:
+ raise HTTPException(status_code=400, detail="Offset must be non-negative")
+
+ # Base parameters used in all queries
+ params = [
+ developer_id,
+ limit,
+ offset,
+ sort_by,
+ direction,
+ ]
+
+ # Choose appropriate query based on owner details
+ if owner_id and owner_type:
+ params.append(owner_id) # Add owner_id as $6
+ query = user_files_query if owner_type == "user" else agent_files_query
+ else:
+ query = developer_files_query
+
+ return (query, params)
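Note: call shapes implied by the signature above, in the style of the tests in this series (fixture values hypothetical; `connection_pool` is consumed by `@pg_query`):

    # All files for a developer, newest first:
    files = await list_files(
        developer_id=developer_id,
        limit=50,
        offset=0,
        sort_by="created_at",
        direction="desc",
        connection_pool=pool,
    )

    # Only files owned by a given agent; owner_id is appended as $6:
    agent_files = await list_files(
        developer_id=developer_id,
        owner_type="agent",
        owner_id=agent.id,
        connection_pool=pool,
    )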
diff --git a/agents-api/tests/test_files_queries.py b/agents-api/tests/test_files_queries.py
index 367fcccd4..5565d4059 100644
--- a/agents-api/tests/test_files_queries.py
+++ b/agents-api/tests/test_files_queries.py
@@ -1,22 +1,36 @@
# # Tests for file queries
-# from ward import test
-
-# from agents_api.autogen.openapi_model import CreateFileRequest
-# from agents_api.queries.files.create_file import create_file
-# from agents_api.queries.files.delete_file import delete_file
-# from agents_api.queries.files.get_file import get_file
-# from tests.fixtures import (
-# cozo_client,
-# test_developer_id,
-# test_file,
-# )
-
-
-# @test("query: create file")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# create_file(
+from uuid_extensions import uuid7
+from ward import raises, test
+from fastapi import HTTPException
+from agents_api.autogen.openapi_model import CreateFileRequest
+from agents_api.queries.files.create_file import create_file
+from agents_api.queries.files.delete_file import delete_file
+from agents_api.queries.files.get_file import get_file
+from tests.fixtures import pg_dsn, test_agent, test_developer_id
+from agents_api.clients.pg import create_db_pool
+
+
+@test("query: create file")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ pool = await create_db_pool(dsn=dsn)
+ await create_file(
+ developer_id=developer_id,
+ data=CreateFileRequest(
+ name="Hello",
+ description="World",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ ),
+ connection_pool=pool,
+ )
+
+
+# @test("query: get file")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id):
+# pool = await create_db_pool(dsn=dsn)
+# file = create_file(
# developer_id=developer_id,
# data=CreateFileRequest(
# name="Hello",
@@ -24,21 +38,20 @@
# mime_type="text/plain",
# content="eyJzYW1wbGUiOiAidGVzdCJ9",
# ),
-# client=client,
+# connection_pool=pool,
# )
-
-# @test("query: get file")
-# def _(client=cozo_client, file=test_file, developer_id=test_developer_id):
-# get_file(
+# get_file_result = get_file(
# developer_id=developer_id,
# file_id=file.id,
-# client=client,
+# connection_pool=pool,
# )
+# assert file == get_file_result
# @test("query: delete file")
-# def _(client=cozo_client, developer_id=test_developer_id):
+# async def _(dsn=pg_dsn, developer_id=test_developer_id):
+# pool = await create_db_pool(dsn=dsn)
# file = create_file(
# developer_id=developer_id,
# data=CreateFileRequest(
@@ -47,11 +60,21 @@
# mime_type="text/plain",
# content="eyJzYW1wbGUiOiAidGVzdCJ9",
# ),
-# client=client,
+# connection_pool=pool,
# )
# delete_file(
# developer_id=developer_id,
# file_id=file.id,
-# client=client,
+# connection_pool=pool,
# )
+
+# with raises(HTTPException) as e:
+# get_file(
+# developer_id=developer_id,
+# file_id=file.id,
+# connection_pool=pool,
+# )
+
+# assert e.value.status_code == 404
+# assert e.value.detail == "The specified file does not exist"
\ No newline at end of file
From 57e453f51260f1458e1b0e2c0c86d8af16f3474a Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Thu, 19 Dec 2024 10:13:50 +0530
Subject: [PATCH 077/274] feat(memory-store,agents-api): Move is_leaf handling
to postgres
Signed-off-by: Diwank Singh Tomer
---
.../agents_api/queries/agents/create_agent.py | 2 --
.../queries/agents/create_or_update_agent.py | 2 --
.../agents_api/queries/agents/delete_agent.py | 2 --
.../agents_api/queries/agents/get_agent.py | 2 --
.../agents_api/queries/agents/list_agents.py | 3 +-
.../agents_api/queries/agents/patch_agent.py | 2 --
.../agents_api/queries/agents/update_agent.py | 2 --
.../queries/entries/create_entries.py | 6 +---
.../queries/entries/delete_entries.py | 4 +--
.../agents_api/queries/entries/get_history.py | 4 +--
.../queries/entries/list_entries.py | 3 +-
agents-api/tests/test_entry_queries.py | 2 +-
agents-api/tests/test_session_queries.py | 1 -
.../migrations/000016_entry_relations.up.sql | 34 +++++++++++--------
14 files changed, 25 insertions(+), 44 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 2d8df7978..76c96f46b 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -3,7 +3,6 @@
It includes functions to construct and execute SQL queries for inserting new agent records.
"""
-from typing import Any, TypeVar
from uuid import UUID
from beartype import beartype
@@ -15,7 +14,6 @@
from ..utils import (
generate_canonical_name,
pg_query,
- rewrap_exceptions,
wrap_in_class,
)
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index e96b30c77..ef3a0abe5 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -3,7 +3,6 @@
It constructs and executes SQL queries to insert a new agent or update an existing agent's details based on agent ID and developer ID.
"""
-from typing import Any, TypeVar
from uuid import UUID
from beartype import beartype
@@ -14,7 +13,6 @@
from ..utils import (
generate_canonical_name,
pg_query,
- rewrap_exceptions,
wrap_in_class,
)
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index 6738374db..3527f3611 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -3,7 +3,6 @@
It constructs and executes SQL queries to remove agent records and associated data.
"""
-from typing import Any, TypeVar
from uuid import UUID
from beartype import beartype
@@ -13,7 +12,6 @@
from ...common.utils.datetime import utcnow
from ..utils import (
pg_query,
- rewrap_exceptions,
wrap_in_class,
)
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index 916572db1..a731300fa 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -3,7 +3,6 @@
It constructs and executes SQL queries to fetch agent details based on agent ID and developer ID.
"""
-from typing import Any, TypeVar
from uuid import UUID
from beartype import beartype
@@ -12,7 +11,6 @@
from ...autogen.openapi_model import Agent
from ..utils import (
pg_query,
- rewrap_exceptions,
wrap_in_class,
)
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index ce12b32b3..87a0c942d 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -3,7 +3,7 @@
It constructs and executes SQL queries to fetch a list of agents based on developer ID with pagination.
"""
-from typing import Any, Literal, TypeVar
+from typing import Any, Literal
from uuid import UUID
from beartype import beartype
@@ -12,7 +12,6 @@
from ...autogen.openapi_model import Agent
from ..utils import (
pg_query,
- rewrap_exceptions,
wrap_in_class,
)
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 2325ab33f..69a5a6ca5 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -3,7 +3,6 @@
It constructs and executes SQL queries to update specific fields of an agent based on agent ID and developer ID.
"""
-from typing import Any, TypeVar
from uuid import UUID
from beartype import beartype
@@ -13,7 +12,6 @@
from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
- rewrap_exceptions,
wrap_in_class,
)
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index 79b520cb8..f28e28264 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -3,7 +3,6 @@
It constructs and executes SQL queries to replace an agent's details based on agent ID and developer ID.
"""
-from typing import Any, TypeVar
from uuid import UUID
from beartype import beartype
@@ -13,7 +12,6 @@
from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
- rewrap_exceptions,
wrap_in_class,
)
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index 72de8db90..fb61b7c7e 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -1,16 +1,14 @@
from typing import Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
-from fastapi import HTTPException
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateEntryRequest, Entry, Relation
from ...common.utils.datetime import utcnow
from ...common.utils.messages import content_to_json
from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, wrap_in_class
# Query for checking if the session exists
session_exists_query = """
@@ -47,7 +45,6 @@
head,
relation,
tail,
- is_leaf
) VALUES ($1, $2, $3, $4, $5)
RETURNING *;
"""
@@ -169,7 +166,6 @@ async def add_entry_relations(
item.get("head"), # $2
item.get("relation"), # $3
item.get("tail"), # $4
- item.get("is_leaf", False), # $5
]
)
diff --git a/agents-api/agents_api/queries/entries/delete_entries.py b/agents-api/agents_api/queries/entries/delete_entries.py
index 4539ae4df..628ef9011 100644
--- a/agents-api/agents_api/queries/entries/delete_entries.py
+++ b/agents-api/agents_api/queries/entries/delete_entries.py
@@ -1,15 +1,13 @@
from typing import Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, wrap_in_class
# Define the raw SQL query for deleting entries with a developer check
delete_entry_query = parse_one("""
diff --git a/agents-api/agents_api/queries/entries/get_history.py b/agents-api/agents_api/queries/entries/get_history.py
index 7ad940c0a..b0b767c08 100644
--- a/agents-api/agents_api/queries/entries/get_history.py
+++ b/agents-api/agents_api/queries/entries/get_history.py
@@ -1,12 +1,10 @@
from uuid import UUID
-import asyncpg
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
from ...autogen.openapi_model import History
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, wrap_in_class
# Define the raw SQL query for getting history with a developer check
history_query = parse_one("""
diff --git a/agents-api/agents_api/queries/entries/list_entries.py b/agents-api/agents_api/queries/entries/list_entries.py
index 4920e39c1..a6c355f53 100644
--- a/agents-api/agents_api/queries/entries/list_entries.py
+++ b/agents-api/agents_api/queries/entries/list_entries.py
@@ -1,13 +1,12 @@
from typing import Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
from fastapi import HTTPException
from ...autogen.openapi_model import Entry
from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, wrap_in_class
# Query for checking if the session exists
session_exists_query = """
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 60a387591..f5b9d8d56 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -10,7 +10,7 @@
from agents_api.autogen.openapi_model import CreateEntryRequest
from agents_api.clients.pg import create_db_pool
from agents_api.queries.entries import create_entries, list_entries
-from tests.fixtures import pg_dsn, test_developer, test_session # , test_session
+from tests.fixtures import pg_dsn, test_developer # , test_session
MODEL = "gpt-4o-mini"
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 8e512379f..4e04468bf 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -29,7 +29,6 @@
from tests.fixtures import (
pg_dsn,
test_agent,
- test_developer,
test_developer_id,
test_session,
test_user,
diff --git a/memory-store/migrations/000016_entry_relations.up.sql b/memory-store/migrations/000016_entry_relations.up.sql
index c61c7cd24..bcdb7fb72 100644
--- a/memory-store/migrations/000016_entry_relations.up.sql
+++ b/memory-store/migrations/000016_entry_relations.up.sql
@@ -31,25 +31,29 @@ CREATE INDEX idx_entry_relations_components ON entry_relations (session_id, head
CREATE INDEX idx_entry_relations_leaf ON entry_relations (session_id, relation, is_leaf);
-CREATE
-OR REPLACE FUNCTION enforce_leaf_nodes () RETURNS TRIGGER AS $$
+CREATE OR REPLACE FUNCTION auto_update_leaf_status() RETURNS TRIGGER AS $$
BEGIN
- IF NEW.is_leaf THEN
- -- Ensure no other relations point to this leaf node as a head
- IF EXISTS (
- SELECT 1 FROM entry_relations
- WHERE tail = NEW.head AND session_id = NEW.session_id
- ) THEN
- RAISE EXCEPTION 'Cannot assign relations to a leaf node.';
- END IF;
- END IF;
+ -- Set is_leaf = false for any existing rows that will now have this new relation as a child
+ UPDATE entry_relations
+ SET is_leaf = false
+ WHERE session_id = NEW.session_id
+ AND tail = NEW.head;
+
+ -- Set is_leaf for the new row based on whether it has any children
+ NEW.is_leaf := NOT EXISTS (
+ SELECT 1
+ FROM entry_relations
+ WHERE session_id = NEW.session_id
+ AND head = NEW.tail
+ );
+
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-CREATE TRIGGER trg_enforce_leaf_nodes BEFORE INSERT
-OR
-UPDATE ON entry_relations FOR EACH ROW
-EXECUTE FUNCTION enforce_leaf_nodes ();
+CREATE TRIGGER trg_auto_update_leaf_status
+BEFORE INSERT OR UPDATE ON entry_relations
+FOR EACH ROW
+EXECUTE FUNCTION auto_update_leaf_status();
COMMIT;
\ No newline at end of file
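Note: the rewritten trigger maintains `is_leaf` automatically instead of rejecting writes: each insert first clears `is_leaf` on any row whose tail is the new head, then computes the new row's own flag. A sketch of the observable effect (DSN and UUIDs hypothetical; assumes the referenced session and entries exist):

    import asyncio
    import uuid

    import asyncpg

    async def main() -> None:
        conn = await asyncpg.connect("postgresql://localhost/memory_store")  # hypothetical DSN
        sid, a, b, c = (uuid.uuid4() for _ in range(4))  # hypothetical ids

        insert_sql = """
            INSERT INTO entry_relations (session_id, head, relation, tail)
            VALUES ($1, $2, 'parent', $3)
        """

        await conn.execute(insert_sql, sid, a, b)
        # a -> b has no children yet, so the trigger stores is_leaf = true.

        await conn.execute(insert_sql, sid, b, c)
        # The trigger flips a -> b to is_leaf = false (its tail is now a head)
        # and stores b -> c with is_leaf = true.

        await conn.close()

    asyncio.run(main())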
From 47c3fc936349ebbc8b09850da14460d3fa6d2e2d Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Thu, 19 Dec 2024 04:44:24 +0000
Subject: [PATCH 078/274] refactor: Lint agents-api (CI)
---
.../agents_api/queries/files/__init__.py | 7 +-----
.../agents_api/queries/files/create_file.py | 13 ++++++-----
.../agents_api/queries/files/delete_file.py | 22 +++++++++++--------
.../agents_api/queries/files/get_file.py | 9 ++++----
.../agents_api/queries/files/list_files.py | 10 +++++----
agents-api/tests/test_files_queries.py | 7 +++---
6 files changed, 36 insertions(+), 32 deletions(-)
diff --git a/agents-api/agents_api/queries/files/__init__.py b/agents-api/agents_api/queries/files/__init__.py
index 1da09114a..99670a8fc 100644
--- a/agents-api/agents_api/queries/files/__init__.py
+++ b/agents-api/agents_api/queries/files/__init__.py
@@ -13,9 +13,4 @@
from .get_file import get_file
from .list_files import list_files
-__all__ = [
- "create_file",
- "delete_file",
- "get_file",
- "list_files"
-]
\ No newline at end of file
+__all__ = ["create_file", "delete_file", "get_file", "list_files"]
diff --git a/agents-api/agents_api/queries/files/create_file.py b/agents-api/agents_api/queries/files/create_file.py
index 77e065433..64527bc31 100644
--- a/agents-api/agents_api/queries/files/create_file.py
+++ b/agents-api/agents_api/queries/files/create_file.py
@@ -3,20 +3,20 @@
It includes functions to construct and execute SQL queries for inserting new file records.
"""
+import base64
+import hashlib
from typing import Any, Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
+from fastapi import HTTPException
from sqlglot import parse_one
from uuid_extensions import uuid7
-import asyncpg
-from fastapi import HTTPException
-import base64
-import hashlib
from ...autogen.openapi_model import CreateFileRequest, File
from ...metrics.counters import increase_counter
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Create file
file_query = parse_one("""
@@ -63,6 +63,7 @@
ON CONFLICT (developer_id, agent_id, file_id) DO NOTHING; -- Uses primary key index
""").sql(pretty=True)
+
# Add error handling decorator
# @rewrap_exceptions(
# {
@@ -147,4 +148,4 @@ async def create_file(
else: # agent
queries.append((agent_file_query, assoc_params))
- return queries
\ No newline at end of file
+ return queries
diff --git a/agents-api/agents_api/queries/files/delete_file.py b/agents-api/agents_api/queries/files/delete_file.py
index d37e6f3e8..99f57f5e0 100644
--- a/agents-api/agents_api/queries/files/delete_file.py
+++ b/agents-api/agents_api/queries/files/delete_file.py
@@ -6,15 +6,15 @@
from typing import Literal
from uuid import UUID
-from beartype import beartype
-from sqlglot import parse_one
import asyncpg
+from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Simple query to delete file (when no associations exist)
delete_file_query = parse_one("""
@@ -67,7 +67,7 @@
ResourceDeletedResponse,
one=True,
transform=lambda d: {
- "id": d["file_id"],
+ "id": d["file_id"],
"deleted_at": utcnow(),
"jobs": [],
},
@@ -76,15 +76,15 @@
@pg_query
@beartype
async def delete_file(
- *,
- file_id: UUID,
+ *,
+ file_id: UUID,
developer_id: UUID,
owner_id: UUID | None = None,
owner_type: Literal["user", "agent"] | None = None,
) -> list[tuple[str, list] | tuple[str, list, str]]:
"""
Deletes a file and/or its association using simple, efficient queries.
-
+
If owner details provided:
1. Deletes the owner's association
2. Checks for remaining associations
@@ -106,9 +106,13 @@ async def delete_file(
if owner_id and owner_type:
# Delete specific association
assoc_params = [developer_id, file_id, owner_id]
- assoc_query = delete_user_assoc_query if owner_type == "user" else delete_agent_assoc_query
+ assoc_query = (
+ delete_user_assoc_query
+ if owner_type == "user"
+ else delete_agent_assoc_query
+ )
queries.append((assoc_query, assoc_params))
-
+
# If no associations, delete file
queries.append((delete_file_query, [developer_id, file_id]))
else:
diff --git a/agents-api/agents_api/queries/files/get_file.py b/agents-api/agents_api/queries/files/get_file.py
index 3143b8ff0..8f04f8029 100644
--- a/agents-api/agents_api/queries/files/get_file.py
+++ b/agents-api/agents_api/queries/files/get_file.py
@@ -5,13 +5,13 @@
from uuid import UUID
+import asyncpg
from beartype import beartype
-from sqlglot import parse_one
from fastapi import HTTPException
-import asyncpg
+from sqlglot import parse_one
from ...autogen.openapi_model import File
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query
file_query = parse_one("""
@@ -31,6 +31,7 @@
LIMIT 1; -- Early termination once found
""").sql(pretty=True)
+
@rewrap_exceptions(
{
asyncpg.NoDataFoundError: partialclass(
@@ -66,4 +67,4 @@ async def get_file(*, file_id: UUID, developer_id: UUID) -> tuple[str, list]:
return (
file_query,
[developer_id, file_id], # Order matches index columns
- )
\ No newline at end of file
+ )
diff --git a/agents-api/agents_api/queries/files/list_files.py b/agents-api/agents_api/queries/files/list_files.py
index a01f74214..e6f65d88d 100644
--- a/agents-api/agents_api/queries/files/list_files.py
+++ b/agents-api/agents_api/queries/files/list_files.py
@@ -5,13 +5,14 @@
from typing import Any, Literal
from uuid import UUID
+
import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
from ...autogen.openapi_model import File
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Query to list all files for a developer (uses developer_id index)
developer_files_query = parse_one("""
@@ -92,8 +93,9 @@
OFFSET $3;
""").sql(pretty=True)
+
@wrap_in_class(
- File,
+ File,
one=True,
transform=lambda d: {
**d,
@@ -135,10 +137,10 @@ async def list_files(
# Validate parameters
if direction.lower() not in ["asc", "desc"]:
raise HTTPException(status_code=400, detail="Invalid sort direction")
-
+
if limit > 100 or limit < 1:
raise HTTPException(status_code=400, detail="Limit must be between 1 and 100")
-
+
if offset < 0:
raise HTTPException(status_code=400, detail="Offset must be non-negative")
diff --git a/agents-api/tests/test_files_queries.py b/agents-api/tests/test_files_queries.py
index 5565d4059..02ad888f5 100644
--- a/agents-api/tests/test_files_queries.py
+++ b/agents-api/tests/test_files_queries.py
@@ -1,15 +1,16 @@
# # Tests for file queries
+from fastapi import HTTPException
from uuid_extensions import uuid7
from ward import raises, test
-from fastapi import HTTPException
+
from agents_api.autogen.openapi_model import CreateFileRequest
+from agents_api.clients.pg import create_db_pool
from agents_api.queries.files.create_file import create_file
from agents_api.queries.files.delete_file import delete_file
from agents_api.queries.files.get_file import get_file
from tests.fixtures import pg_dsn, test_agent, test_developer_id
-from agents_api.clients.pg import create_db_pool
@test("query: create file")
@@ -77,4 +78,4 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
# )
# assert e.value.status_code == 404
-# assert e.value.detail == "The specified file does not exist"
\ No newline at end of file
+# assert e.value.detail == "The specified file does not exist"
From cc2a5bf8aeda56016b647148a7155f4361f8f51f Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Thu, 19 Dec 2024 01:55:25 -0500
Subject: [PATCH 079/274] chore: bug fixes for file queries + added tests
---
.../agents_api/queries/agents/delete_agent.py | 39 +-
.../agents_api/queries/files/create_file.py | 58 +-
.../agents_api/queries/files/delete_file.py | 113 ++--
.../agents_api/queries/files/get_file.py | 81 +--
.../agents_api/queries/files/list_files.py | 83 +--
.../agents_api/queries/users/delete_user.py | 35 +-
agents-api/agents_api/queries/utils.py | 5 -
agents-api/tests/fixtures.py | 20 +-
agents-api/tests/test_agent_queries.py | 15 +-
agents-api/tests/test_entry_queries.py | 318 +++++------
agents-api/tests/test_files_queries.py | 282 ++++++++--
agents-api/tests/test_session_queries.py | 522 +++++++++---------
12 files changed, 868 insertions(+), 703 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index 6738374db..a957ab2c5 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -19,19 +19,39 @@
# Define the raw SQL query
agent_query = parse_one("""
-WITH deleted_docs AS (
+WITH deleted_file_owners AS (
+ DELETE FROM file_owners
+ WHERE developer_id = $1
+ AND owner_type = 'agent'
+ AND owner_id = $2
+),
+deleted_doc_owners AS (
+ DELETE FROM doc_owners
+ WHERE developer_id = $1
+ AND owner_type = 'agent'
+ AND owner_id = $2
+),
+deleted_files AS (
+ DELETE FROM files
+ WHERE developer_id = $1
+ AND file_id IN (
+ SELECT file_id FROM file_owners
+ WHERE developer_id = $1
+ AND owner_type = 'agent'
+ AND owner_id = $2
+ )
+),
+deleted_docs AS (
DELETE FROM docs
WHERE developer_id = $1
AND doc_id IN (
- SELECT ad.doc_id
- FROM agent_docs ad
- WHERE ad.agent_id = $2
- AND ad.developer_id = $1
+ SELECT doc_id FROM doc_owners
+ WHERE developer_id = $1
+ AND owner_type = 'agent'
+ AND owner_id = $2
)
-), deleted_agent_docs AS (
- DELETE FROM agent_docs
- WHERE agent_id = $2 AND developer_id = $1
-), deleted_tools AS (
+),
+deleted_tools AS (
DELETE FROM tools
WHERE agent_id = $2 AND developer_id = $1
)
@@ -40,7 +60,6 @@
RETURNING developer_id, agent_id;
""").sql(pretty=True)
-
# @rewrap_exceptions(
# {
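
Editor's note: the rewritten cascade relies on a PostgreSQL property of data-modifying CTEs — every sub-statement in a single WITH query runs against the same snapshot, so the subselect on file_owners inside deleted_files still sees the rows that deleted_file_owners removes in that same statement. A stripped-down sketch of the pattern (table names from the query above, parameters abbreviated):

    # Both DELETEs execute against one snapshot: the inner SELECT sees
    # file_owners as it was before removed_owners ran.
    cascade_sketch = """
    WITH removed_owners AS (
        DELETE FROM file_owners
        WHERE developer_id = $1 AND owner_id = $2
    )
    DELETE FROM files
    WHERE developer_id = $1
      AND file_id IN (
        SELECT file_id FROM file_owners
        WHERE developer_id = $1 AND owner_id = $2
      );
    """
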
diff --git a/agents-api/agents_api/queries/files/create_file.py b/agents-api/agents_api/queries/files/create_file.py
index 64527bc31..8438978e6 100644
--- a/agents-api/agents_api/queries/files/create_file.py
+++ b/agents-api/agents_api/queries/files/create_file.py
@@ -27,7 +27,7 @@
description,
mime_type,
size,
- hash,
+ hash
)
VALUES (
$1, -- developer_id
@@ -36,34 +36,28 @@
$4, -- description
$5, -- mime_type
$6, -- size
- $7, -- hash
+ $7 -- hash
)
RETURNING *;
""").sql(pretty=True)
-# Create user file association
-user_file_query = parse_one("""
-INSERT INTO user_files (
- developer_id,
- user_id,
- file_id
-)
-VALUES ($1, $2, $3)
-ON CONFLICT (developer_id, user_id, file_id) DO NOTHING; -- Uses primary key index
-""").sql(pretty=True)
-
-# Create agent file association
-agent_file_query = parse_one("""
-INSERT INTO agent_files (
- developer_id,
- agent_id,
- file_id
+# Single ownership query: insert the file_owners record (user or agent) and return the owned file row
+file_owner_query = parse_one("""
+WITH inserted_owner AS (
+ INSERT INTO file_owners (
+ developer_id,
+ file_id,
+ owner_type,
+ owner_id
+ )
+ VALUES ($1, $2, $3, $4)
+ RETURNING file_id
)
-VALUES ($1, $2, $3)
-ON CONFLICT (developer_id, agent_id, file_id) DO NOTHING; -- Uses primary key index
+SELECT f.*
+FROM inserted_owner io
+JOIN files f ON f.file_id = io.file_id;
""").sql(pretty=True)
-
# Add error handling decorator
# @rewrap_exceptions(
# {
@@ -90,6 +84,7 @@
transform=lambda d: {
**d,
"id": d["file_id"],
+ "hash": d["hash"].hex(),
"content": "DUMMY: NEED TO FETCH CONTENT FROM BLOB STORAGE",
},
)
@@ -121,8 +116,8 @@ async def create_file(
# Calculate size and hash
content_bytes = base64.b64decode(data.content)
- data.size = len(content_bytes)
- data.hash = hashlib.sha256(content_bytes).digest()
+ size = len(content_bytes)
+ hash_bytes = hashlib.sha256(content_bytes).digest()
# Base file parameters
file_params = [
@@ -131,21 +126,18 @@ async def create_file(
data.name,
data.description,
data.mime_type,
- data.size,
- data.hash,
+ size,
+ hash_bytes,
]
queries = []
- # Create the file
+ # Create the file first
queries.append((file_query, file_params))
- # Create the association only if both owner_type and owner_id are provided
+ # Then create the association if owner info provided
if owner_type and owner_id:
- assoc_params = [developer_id, owner_id, file_id]
- if owner_type == "user":
- queries.append((user_file_query, assoc_params))
- else: # agent
- queries.append((agent_file_query, assoc_params))
+ assoc_params = [developer_id, file_id, owner_type, owner_id]
+ queries.append((file_owner_query, assoc_params))
return queries
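
Editor's note: the size and hash now come from the decoded payload rather than being written back onto the request model. A sketch of that step using the test fixture's content string (standard library only):

    import base64
    import hashlib

    content_bytes = base64.b64decode("eyJzYW1wbGUiOiAidGVzdCJ9")
    print(content_bytes)                   # b'{"sample": "test"}'
    size = len(content_bytes)              # 18
    hash_bytes = hashlib.sha256(content_bytes).digest()
    print(hash_bytes.hex())                # hex form that the transform returns
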
diff --git a/agents-api/agents_api/queries/files/delete_file.py b/agents-api/agents_api/queries/files/delete_file.py
index 99f57f5e0..31cb43404 100644
--- a/agents-api/agents_api/queries/files/delete_file.py
+++ b/agents-api/agents_api/queries/files/delete_file.py
@@ -16,53 +16,40 @@
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-# Simple query to delete file (when no associations exist)
+# Delete file query with ownership check
delete_file_query = parse_one("""
+WITH deleted_owners AS (
+ DELETE FROM file_owners
+ WHERE developer_id = $1
+ AND file_id = $2
+ AND (
+ ($3::text IS NULL AND $4::uuid IS NULL) OR
+ (owner_type = $3 AND owner_id = $4)
+ )
+)
DELETE FROM files
WHERE developer_id = $1
AND file_id = $2
-AND NOT EXISTS (
- SELECT 1
- FROM user_files uf
- WHERE uf.file_id = $2
- LIMIT 1
-)
-AND NOT EXISTS (
- SELECT 1
- FROM agent_files af
- WHERE af.file_id = $2
- LIMIT 1
-)
-RETURNING file_id;
-""").sql(pretty=True)
-
-# Query to delete owner's association
-delete_user_assoc_query = parse_one("""
-DELETE FROM user_files
-WHERE developer_id = $1
-AND file_id = $2
-AND user_id = $3
-RETURNING file_id;
-""").sql(pretty=True)
-
-delete_agent_assoc_query = parse_one("""
-DELETE FROM agent_files
-WHERE developer_id = $1
-AND file_id = $2
-AND agent_id = $3
+AND ($3::text IS NULL OR EXISTS (
+ SELECT 1 FROM file_owners
+ WHERE developer_id = $1
+ AND file_id = $2
+ AND owner_type = $3
+ AND owner_id = $4
+))
RETURNING file_id;
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# asyncpg.NoDataFoundError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="File not found",
-# ),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="File not found",
+ ),
+ }
+)
@wrap_in_class(
ResourceDeletedResponse,
one=True,
@@ -77,46 +64,24 @@
@beartype
async def delete_file(
*,
- file_id: UUID,
developer_id: UUID,
- owner_id: UUID | None = None,
+ file_id: UUID,
owner_type: Literal["user", "agent"] | None = None,
-) -> list[tuple[str, list] | tuple[str, list, str]]:
+ owner_id: UUID | None = None,
+) -> tuple[str, list]:
"""
- Deletes a file and/or its association using simple, efficient queries.
-
- If owner details provided:
- 1. Deletes the owner's association
- 2. Checks for remaining associations
- 3. Deletes file if no associations remain
- If no owner details:
- - Deletes file only if it has no associations
+ Deletes a file and its ownership records.
Args:
- file_id (UUID): The UUID of the file to be deleted.
- developer_id (UUID): The UUID of the developer owning the file.
- owner_id (UUID | None): Optional owner ID to verify ownership
- owner_type (str | None): Optional owner type to verify ownership
+ developer_id: The developer's UUID
+ file_id: The file's UUID
+ owner_type: Optional type of owner ("user" or "agent")
+ owner_id: Optional UUID of the owner
Returns:
- list[tuple[str, list] | tuple[str, list, str]]: List of SQL queries, their parameters, and fetch type
+ tuple[str, list]: SQL query and parameters
"""
- queries = []
-
- if owner_id and owner_type:
- # Delete specific association
- assoc_params = [developer_id, file_id, owner_id]
- assoc_query = (
- delete_user_assoc_query
- if owner_type == "user"
- else delete_agent_assoc_query
- )
- queries.append((assoc_query, assoc_params))
-
- # If no associations, delete file
- queries.append((delete_file_query, [developer_id, file_id]))
- else:
- # Try to delete file if it has no associations
- queries.append((delete_file_query, [developer_id, file_id]))
-
- return queries
+ return (
+ delete_file_query,
+ [developer_id, file_id, owner_type, owner_id],
+ )
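
Editor's note: the single query supports two calling modes through the NULL-or-match filter — when both owner parameters are NULL the file is deleted unconditionally, and when both are set the delete only succeeds for the matching owner. A usage sketch (dev_id, f_id, user_id, and pool are assumed bindings; connection_pool is injected by the pg_query wrapper, as the tests below demonstrate):

    # Unscoped delete: owner params default to None and bind as NULLs.
    await delete_file(developer_id=dev_id, file_id=f_id, connection_pool=pool)

    # Owner-scoped delete: removes the file only if this user owns it.
    await delete_file(
        developer_id=dev_id,
        file_id=f_id,
        owner_type="user",
        owner_id=user_id,
        connection_pool=pool,
    )
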
diff --git a/agents-api/agents_api/queries/files/get_file.py b/agents-api/agents_api/queries/files/get_file.py
index 8f04f8029..ace417d5d 100644
--- a/agents-api/agents_api/queries/files/get_file.py
+++ b/agents-api/agents_api/queries/files/get_file.py
@@ -4,6 +4,7 @@
"""
from uuid import UUID
+from typing import Literal
import asyncpg
from beartype import beartype
@@ -15,56 +16,66 @@
# Define the raw SQL query
file_query = parse_one("""
-SELECT
- file_id, -- Only select needed columns
- developer_id,
- name,
- description,
- mime_type,
- size,
- hash,
- created_at,
- updated_at
-FROM files
-WHERE developer_id = $1 -- Order matches composite index (developer_id, file_id)
- AND file_id = $2 -- Using both parts of the index
-LIMIT 1; -- Early termination once found
+SELECT f.*
+FROM files f
+LEFT JOIN file_owners fo ON f.developer_id = fo.developer_id AND f.file_id = fo.file_id
+WHERE f.developer_id = $1
+AND f.file_id = $2
+AND (
+ ($3::text IS NULL AND $4::uuid IS NULL) OR
+ (fo.owner_type = $3 AND fo.owner_id = $4)
+)
+LIMIT 1;
""").sql(pretty=True)
-@rewrap_exceptions(
- {
- asyncpg.NoDataFoundError: partialclass(
- HTTPException,
- status_code=404,
- detail="File not found",
- ),
- asyncpg.ForeignKeyViolationError: partialclass(
- HTTPException,
- status_code=404,
- detail="Developer not found",
- ),
+# @rewrap_exceptions(
+# {
+# asyncpg.NoDataFoundError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="File not found",
+# ),
+# asyncpg.ForeignKeyViolationError: partialclass(
+# HTTPException,
+# status_code=404,
+# detail="Developer not found",
+# ),
+# }
+# )
+@wrap_in_class(
+ File,
+ one=True,
+ transform=lambda d: {
+ "id": d["file_id"],
+ **d,
+ "hash": d["hash"].hex(),
+ "content": "DUMMY: NEED TO FETCH CONTENT FROM BLOB STORAGE",
}
)
-@wrap_in_class(File, one=True, transform=lambda d: {"id": d["file_id"], **d})
@pg_query
@beartype
-async def get_file(*, file_id: UUID, developer_id: UUID) -> tuple[str, list]:
+async def get_file(
+ *,
+ file_id: UUID,
+ developer_id: UUID,
+ owner_type: Literal["user", "agent"] | None = None,
+ owner_id: UUID | None = None,
+) -> tuple[str, list]:
"""
Constructs the SQL query to retrieve a file's details.
Uses composite index on (developer_id, file_id) for efficient lookup.
Args:
- file_id (UUID): The UUID of the file to retrieve.
- developer_id (UUID): The UUID of the developer owning the file.
+ file_id: The UUID of the file to retrieve
+ developer_id: The UUID of the developer owning the file
+ owner_type: Optional type of owner ("user" or "agent")
+ owner_id: Optional UUID of the owner
Returns:
- tuple[str, list]: A tuple containing the SQL query and its parameters.
-
- Raises:
- HTTPException: If file or developer not found (404)
+ tuple[str, list]: SQL query and parameters
"""
return (
file_query,
- [developer_id, file_id], # Order matches index columns
+ [developer_id, file_id, owner_type, owner_id],
)
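
Editor's note: the transform relies on dict-literal override order — later keys win, so the "hash" and "content" entries placed after **d replace the raw row values, while "id" survives because the row dict only carries "file_id". A two-line check:

    d = {"file_id": "abc", "hash": b"\x01\x02"}
    out = {"id": d["file_id"], **d, "hash": d["hash"].hex()}
    assert out == {"id": "abc", "file_id": "abc", "hash": "0102"}
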
diff --git a/agents-api/agents_api/queries/files/list_files.py b/agents-api/agents_api/queries/files/list_files.py
index e6f65d88d..2bc42f842 100644
--- a/agents-api/agents_api/queries/files/list_files.py
+++ b/agents-api/agents_api/queries/files/list_files.py
@@ -16,18 +16,10 @@
# Query to list all files for a developer (uses developer_id index)
developer_files_query = parse_one("""
-SELECT
- file_id,
- developer_id,
- name,
- description,
- mime_type,
- size,
- hash,
- created_at,
- updated_at
-FROM files
-WHERE developer_id = $1
+SELECT f.*
+FROM files f
+LEFT JOIN file_owners fo ON f.developer_id = fo.developer_id AND f.file_id = fo.file_id
+WHERE f.developer_id = $1
ORDER BY
CASE
WHEN $4 = 'created_at' AND $5 = 'asc' THEN created_at
@@ -39,55 +31,20 @@
OFFSET $3;
""").sql(pretty=True)
-# Query to list files for a specific user (uses composite indexes)
-user_files_query = parse_one("""
-SELECT
- f.file_id,
- f.developer_id,
- f.name,
- f.description,
- f.mime_type,
- f.size,
- f.hash,
- f.created_at,
- f.updated_at
-FROM user_files uf
-JOIN files f USING (developer_id, file_id)
-WHERE uf.developer_id = $1
-AND uf.user_id = $6
+# Query to list files for a specific owner (uses composite indexes)
+owner_files_query = parse_one("""
+SELECT f.*
+FROM files f
+JOIN file_owners fo ON f.developer_id = fo.developer_id AND f.file_id = fo.file_id
+WHERE fo.developer_id = $1
+AND fo.owner_id = $6
+AND fo.owner_type = $7
ORDER BY
CASE
- WHEN $4 = 'created_at' AND $5 = 'asc' THEN f.created_at
- WHEN $4 = 'created_at' AND $5 = 'desc' THEN f.created_at
- WHEN $4 = 'updated_at' AND $5 = 'asc' THEN f.updated_at
- WHEN $4 = 'updated_at' AND $5 = 'desc' THEN f.updated_at
- END DESC NULLS LAST
-LIMIT $2
-OFFSET $3;
-""").sql(pretty=True)
-
-# Query to list files for a specific agent (uses composite indexes)
-agent_files_query = parse_one("""
-SELECT
- f.file_id,
- f.developer_id,
- f.name,
- f.description,
- f.mime_type,
- f.size,
- f.hash,
- f.created_at,
- f.updated_at
-FROM agent_files af
-JOIN files f USING (developer_id, file_id)
-WHERE af.developer_id = $1
-AND af.agent_id = $6
-ORDER BY
- CASE
- WHEN $4 = 'created_at' AND $5 = 'asc' THEN f.created_at
- WHEN $4 = 'created_at' AND $5 = 'desc' THEN f.created_at
- WHEN $4 = 'updated_at' AND $5 = 'asc' THEN f.updated_at
- WHEN $4 = 'updated_at' AND $5 = 'desc' THEN f.updated_at
+ WHEN $4 = 'created_at' AND $5 = 'asc' THEN created_at
+ WHEN $4 = 'created_at' AND $5 = 'desc' THEN created_at
+ WHEN $4 = 'updated_at' AND $5 = 'asc' THEN updated_at
+ WHEN $4 = 'updated_at' AND $5 = 'desc' THEN updated_at
END DESC NULLS LAST
LIMIT $2
OFFSET $3;
@@ -96,9 +53,11 @@
@wrap_in_class(
File,
- one=True,
+ one=False,
transform=lambda d: {
**d,
+ "id": d["file_id"],
+ "hash": d["hash"].hex(),
"content": "DUMMY: NEED TO FETCH CONTENT FROM BLOB STORAGE",
},
)
@@ -155,8 +114,8 @@ async def list_files(
# Choose appropriate query based on owner details
if owner_id and owner_type:
- params.append(owner_id) # Add owner_id as $6
- query = user_files_query if owner_type == "user" else agent_files_query
+ params.extend([owner_id, owner_type]) # Add owner_id as $6 and owner_type as $7
+ query = owner_files_query # Use single query with owner_type parameter
else:
query = developer_files_query
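
Editor's note: one caveat in both listing queries — the 'asc' and 'desc' branches of the CASE return the same column, and the whole expression is ordered DESC NULLS LAST, so $5 never actually flips the direction. Assuming the intent is for $5 to control direction, the conventional shape is one CASE per direction:

    order_by_sketch = """
    ORDER BY
        CASE WHEN $4 = 'created_at' AND $5 = 'asc'  THEN created_at END ASC NULLS LAST,
        CASE WHEN $4 = 'created_at' AND $5 = 'desc' THEN created_at END DESC NULLS LAST,
        CASE WHEN $4 = 'updated_at' AND $5 = 'asc'  THEN updated_at END ASC NULLS LAST,
        CASE WHEN $4 = 'updated_at' AND $5 = 'desc' THEN updated_at END DESC NULLS LAST
    """
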
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
index 86bcc0b26..ad5befd73 100644
--- a/agents-api/agents_api/queries/users/delete_user.py
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -11,14 +11,37 @@
# Define the raw SQL query outside the function
delete_query = parse_one("""
-WITH deleted_data AS (
- DELETE FROM user_files -- user_files
- WHERE developer_id = $1 -- developer_id
- AND user_id = $2 -- user_id
+WITH deleted_file_owners AS (
+ DELETE FROM file_owners
+ WHERE developer_id = $1
+ AND owner_type = 'user'
+ AND owner_id = $2
+),
+deleted_doc_owners AS (
+ DELETE FROM doc_owners
+ WHERE developer_id = $1
+ AND owner_type = 'user'
+ AND owner_id = $2
+),
+deleted_files AS (
+ DELETE FROM files
+ WHERE developer_id = $1
+ AND file_id IN (
+ SELECT file_id FROM file_owners
+ WHERE developer_id = $1
+ AND owner_type = 'user'
+ AND owner_id = $2
+ )
),
deleted_docs AS (
- DELETE FROM user_docs
- WHERE developer_id = $1 AND user_id = $2
+ DELETE FROM docs
+ WHERE developer_id = $1
+ AND doc_id IN (
+ SELECT doc_id FROM doc_owners
+ WHERE developer_id = $1
+ AND owner_type = 'user'
+ AND owner_id = $2
+ )
)
DELETE FROM users
WHERE developer_id = $1 AND user_id = $2
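
Editor's note: this cascade is the same shape as the one in delete_agent.py with owner_type = 'user' substituted. A hypothetical helper (not in the patch) that would factor out the shared prefix:

    def owner_cascade_ctes(owner_type: str) -> str:
        """Build the shared owner-cleanup CTEs for 'user' or 'agent'."""
        assert owner_type in ("user", "agent")
        return f"""
    WITH deleted_file_owners AS (
        DELETE FROM file_owners
        WHERE developer_id = $1 AND owner_type = '{owner_type}' AND owner_id = $2
    ),
    deleted_files AS (
        DELETE FROM files
        WHERE developer_id = $1
          AND file_id IN (
            SELECT file_id FROM file_owners
            WHERE developer_id = $1 AND owner_type = '{owner_type}' AND owner_id = $2
          )
    )"""
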
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 73113580d..e9cca6e95 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -170,11 +170,6 @@ async def wrapper(
query, *args, timeout=timeout
)
- print("%" * 100)
- print(results)
- print(*args)
- print("%" * 100)
-
if method_name == "fetchrow" and (
len(results) == 0 or results.get("bool") is None
):
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 9153785a4..2cad999e8 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -11,6 +11,7 @@
CreateAgentRequest,
CreateSessionRequest,
CreateUserRequest,
+ CreateFileRequest,
)
from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
@@ -25,7 +26,7 @@
# from agents_api.queries.execution.create_execution import create_execution
# from agents_api.queries.execution.create_execution_transition import create_execution_transition
# from agents_api.queries.execution.create_temporal_lookup import create_temporal_lookup
-# from agents_api.queries.files.create_file import create_file
+from agents_api.queries.files.create_file import create_file
# from agents_api.queries.files.delete_file import delete_file
from agents_api.queries.sessions.create_session import create_session
@@ -132,6 +133,23 @@ async def test_user(dsn=pg_dsn, developer=test_developer):
return user
+@fixture(scope="test")
+async def test_file(dsn=pg_dsn, developer=test_developer, user=test_user):
+ pool = await create_db_pool(dsn=dsn)
+ file = await create_file(
+ developer_id=developer.id,
+ data=CreateFileRequest(
+ name="Hello",
+ description="World",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ ),
+ connection_pool=pool,
+ )
+
+ return file
+
+
@fixture(scope="test")
async def random_email():
return f"{"".join([random.choice(string.ascii_lowercase) for _ in range(10)])}@mail.com"
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index b6cb7aedc..9192773ab 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -143,12 +143,21 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
@test("query: delete agent sql")
-async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that an agent can be successfully deleted."""
pool = await create_db_pool(dsn=dsn)
+ create_result = await create_agent(
+ developer_id=developer_id,
+ data=CreateAgentRequest(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ ),
+ connection_pool=pool,
+ )
delete_result = await delete_agent(
- agent_id=agent.id, developer_id=developer_id, connection_pool=pool
+ agent_id=create_result.id, developer_id=developer_id, connection_pool=pool
)
assert delete_result is not None
@@ -157,6 +166,6 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
with raises(Exception):
await get_agent(
developer_id=developer_id,
- agent_id=agent.id,
+ agent_id=create_result.id,
connection_pool=pool,
)
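
Editor's note: the point of the change above is test isolation — deleting the shared test_agent fixture would break later tests that still reference it, so the delete test now creates a throwaway agent of its own. Condensed, the pattern is:

    # Self-contained delete test: create, delete, then verify it is gone.
    created = await create_agent(
        developer_id=developer_id,
        data=CreateAgentRequest(name="t", about="t", model="gpt-4o-mini"),
        connection_pool=pool,
    )
    await delete_agent(
        agent_id=created.id, developer_id=developer_id, connection_pool=pool
    )
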
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 60a387591..eab6bb718 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -1,177 +1,177 @@
-"""
-This module contains tests for entry queries against the CozoDB database.
-It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
-"""
-
-from fastapi import HTTPException
-from uuid_extensions import uuid7
-from ward import raises, test
-
-from agents_api.autogen.openapi_model import CreateEntryRequest
-from agents_api.clients.pg import create_db_pool
-from agents_api.queries.entries import create_entries, list_entries
-from tests.fixtures import pg_dsn, test_developer, test_session # , test_session
-
-MODEL = "gpt-4o-mini"
-
-
-@test("query: create entry no session")
-async def _(dsn=pg_dsn, developer=test_developer):
- """Test the addition of a new entry to the database."""
-
- pool = await create_db_pool(dsn=dsn)
- test_entry = CreateEntryRequest.from_model_input(
- model=MODEL,
- role="user",
- source="internal",
- content="test entry content",
- )
-
- with raises(HTTPException) as exc_info:
- await create_entries(
- developer_id=developer.id,
- session_id=uuid7(),
- data=[test_entry],
- connection_pool=pool,
- )
- assert exc_info.raised.status_code == 404
-
-
-@test("query: list entries no session")
-async def _(dsn=pg_dsn, developer=test_developer):
- """Test the retrieval of entries from the database."""
-
- pool = await create_db_pool(dsn=dsn)
-
- with raises(HTTPException) as exc_info:
- await list_entries(
- developer_id=developer.id,
- session_id=uuid7(),
- connection_pool=pool,
- )
- assert exc_info.raised.status_code == 404
-
-
-# @test("query: get entries")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
-# """Test the retrieval of entries from the database."""
+# """
+# This module contains tests for entry queries against the PostgreSQL database.
+# It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
+# """
-# pool = await create_db_pool(dsn=dsn)
-# test_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# source="api_request",
-# content="test entry content",
-# )
+# from fastapi import HTTPException
+# from uuid_extensions import uuid7
+# from ward import raises, test
-# internal_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# content="test entry content",
-# source="internal",
-# )
-
-# await create_entries(
-# developer_id=TEST_DEVELOPER_ID,
-# session_id=SESSION_ID,
-# data=[test_entry, internal_entry],
-# connection_pool=pool,
-# )
-
-# result = await list_entries(
-# developer_id=TEST_DEVELOPER_ID,
-# session_id=SESSION_ID,
-# connection_pool=pool,
-# )
+# from agents_api.autogen.openapi_model import CreateEntryRequest
+# from agents_api.clients.pg import create_db_pool
+# from agents_api.queries.entries import create_entries, list_entries
+# from tests.fixtures import pg_dsn, test_developer, test_session # , test_session
+# MODEL = "gpt-4o-mini"
-# # Assert that only one entry is retrieved, matching the session_id.
-# assert len(result) == 1
-# assert isinstance(result[0], Entry)
-# assert result is not None
-
-# @test("query: get history")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
-# """Test the retrieval of entry history from the database."""
+# @test("query: create entry no session")
+# async def _(dsn=pg_dsn, developer=test_developer):
+# """Test the addition of a new entry to the database."""
# pool = await create_db_pool(dsn=dsn)
# test_entry = CreateEntryRequest.from_model_input(
# model=MODEL,
# role="user",
-# source="api_request",
-# content="test entry content",
-# )
-
-# internal_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# content="test entry content",
# source="internal",
+# content="test entry content",
# )
-# await create_entries(
-# developer_id=developer_id,
-# session_id=SESSION_ID,
-# data=[test_entry, internal_entry],
-# connection_pool=pool,
-# )
-
-# result = await get_history(
-# developer_id=developer_id,
-# session_id=SESSION_ID,
-# connection_pool=pool,
-# )
-
-# # Assert that entries are retrieved and have valid IDs.
-# assert result is not None
-# assert isinstance(result, History)
-# assert len(result.entries) > 0
-# assert result.entries[0].id
+# with raises(HTTPException) as exc_info:
+# await create_entries(
+# developer_id=developer.id,
+# session_id=uuid7(),
+# data=[test_entry],
+# connection_pool=pool,
+# )
+# assert exc_info.raised.status_code == 404
-# @test("query: delete entries")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
-# """Test the deletion of entries from the database."""
+# @test("query: list entries no session")
+# async def _(dsn=pg_dsn, developer=test_developer):
+# """Test the retrieval of entries from the database."""
# pool = await create_db_pool(dsn=dsn)
-# test_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# source="api_request",
-# content="test entry content",
-# )
-
-# internal_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# content="internal entry content",
-# source="internal",
-# )
-
-# created_entries = await create_entries(
-# developer_id=developer_id,
-# session_id=SESSION_ID,
-# data=[test_entry, internal_entry],
-# connection_pool=pool,
-# )
-# entry_ids = [entry.id for entry in created_entries]
-
-# await delete_entries(
-# developer_id=developer_id,
-# session_id=SESSION_ID,
-# entry_ids=[UUID("123e4567-e89b-12d3-a456-426614174002")],
-# connection_pool=pool,
-# )
-
-# result = await list_entries(
-# developer_id=developer_id,
-# session_id=SESSION_ID,
-# connection_pool=pool,
-# )
-
-# Assert that no entries are retrieved after deletion.
-# assert all(id not in [entry.id for entry in result] for id in entry_ids)
-# assert len(result) == 0
-# assert result is not None
+# with raises(HTTPException) as exc_info:
+# await list_entries(
+# developer_id=developer.id,
+# session_id=uuid7(),
+# connection_pool=pool,
+# )
+# assert exc_info.raised.status_code == 404
+
+
+# # @test("query: get entries")
+# # async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
+# # """Test the retrieval of entries from the database."""
+
+# # pool = await create_db_pool(dsn=dsn)
+# # test_entry = CreateEntryRequest.from_model_input(
+# # model=MODEL,
+# # role="user",
+# # source="api_request",
+# # content="test entry content",
+# # )
+
+# # internal_entry = CreateEntryRequest.from_model_input(
+# # model=MODEL,
+# # role="user",
+# # content="test entry content",
+# # source="internal",
+# # )
+
+# # await create_entries(
+# # developer_id=TEST_DEVELOPER_ID,
+# # session_id=SESSION_ID,
+# # data=[test_entry, internal_entry],
+# # connection_pool=pool,
+# # )
+
+# # result = await list_entries(
+# # developer_id=TEST_DEVELOPER_ID,
+# # session_id=SESSION_ID,
+# # connection_pool=pool,
+# # )
+
+
+# # # Assert that only one entry is retrieved, matching the session_id.
+# # assert len(result) == 1
+# # assert isinstance(result[0], Entry)
+# # assert result is not None
+
+
+# # @test("query: get history")
+# # async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
+# # """Test the retrieval of entry history from the database."""
+
+# # pool = await create_db_pool(dsn=dsn)
+# # test_entry = CreateEntryRequest.from_model_input(
+# # model=MODEL,
+# # role="user",
+# # source="api_request",
+# # content="test entry content",
+# # )
+
+# # internal_entry = CreateEntryRequest.from_model_input(
+# # model=MODEL,
+# # role="user",
+# # content="test entry content",
+# # source="internal",
+# # )
+
+# # await create_entries(
+# # developer_id=developer_id,
+# # session_id=SESSION_ID,
+# # data=[test_entry, internal_entry],
+# # connection_pool=pool,
+# # )
+
+# # result = await get_history(
+# # developer_id=developer_id,
+# # session_id=SESSION_ID,
+# # connection_pool=pool,
+# # )
+
+# # # Assert that entries are retrieved and have valid IDs.
+# # assert result is not None
+# # assert isinstance(result, History)
+# # assert len(result.entries) > 0
+# # assert result.entries[0].id
+
+
+# # @test("query: delete entries")
+# # async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
+# # """Test the deletion of entries from the database."""
+
+# # pool = await create_db_pool(dsn=dsn)
+# # test_entry = CreateEntryRequest.from_model_input(
+# # model=MODEL,
+# # role="user",
+# # source="api_request",
+# # content="test entry content",
+# # )
+
+# # internal_entry = CreateEntryRequest.from_model_input(
+# # model=MODEL,
+# # role="user",
+# # content="internal entry content",
+# # source="internal",
+# # )
+
+# # created_entries = await create_entries(
+# # developer_id=developer_id,
+# # session_id=SESSION_ID,
+# # data=[test_entry, internal_entry],
+# # connection_pool=pool,
+# # )
+
+# # entry_ids = [entry.id for entry in created_entries]
+
+# # await delete_entries(
+# # developer_id=developer_id,
+# # session_id=SESSION_ID,
+# # entry_ids=[UUID("123e4567-e89b-12d3-a456-426614174002")],
+# # connection_pool=pool,
+# # )
+
+# # result = await list_entries(
+# # developer_id=developer_id,
+# # session_id=SESSION_ID,
+# # connection_pool=pool,
+# # )
+
+# # Assert that no entries are retrieved after deletion.
+# # assert all(id not in [entry.id for entry in result] for id in entry_ids)
+# # assert len(result) == 0
+# # assert result is not None
diff --git a/agents-api/tests/test_files_queries.py b/agents-api/tests/test_files_queries.py
index 02ad888f5..dd21be82b 100644
--- a/agents-api/tests/test_files_queries.py
+++ b/agents-api/tests/test_files_queries.py
@@ -10,14 +10,15 @@
from agents_api.queries.files.create_file import create_file
from agents_api.queries.files.delete_file import delete_file
from agents_api.queries.files.get_file import get_file
-from tests.fixtures import pg_dsn, test_agent, test_developer_id
+from agents_api.queries.files.list_files import list_files
+from tests.fixtures import pg_dsn, test_developer, test_file, test_agent, test_user
@test("query: create file")
-async def _(dsn=pg_dsn, developer_id=test_developer_id):
+async def _(dsn=pg_dsn, developer=test_developer):
pool = await create_db_pool(dsn=dsn)
await create_file(
- developer_id=developer_id,
+ developer_id=developer.id,
data=CreateFileRequest(
name="Hello",
description="World",
@@ -28,54 +29,227 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
)
-# @test("query: get file")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id):
-# pool = await create_db_pool(dsn=dsn)
-# file = create_file(
-# developer_id=developer_id,
-# data=CreateFileRequest(
-# name="Hello",
-# description="World",
-# mime_type="text/plain",
-# content="eyJzYW1wbGUiOiAidGVzdCJ9",
-# ),
-# connection_pool=pool,
-# )
-
-# get_file_result = get_file(
-# developer_id=developer_id,
-# file_id=file.id,
-# connection_pool=pool,
-# )
-
-# assert file == get_file_result
-
-# @test("query: delete file")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id):
-# pool = await create_db_pool(dsn=dsn)
-# file = create_file(
-# developer_id=developer_id,
-# data=CreateFileRequest(
-# name="Hello",
-# description="World",
-# mime_type="text/plain",
-# content="eyJzYW1wbGUiOiAidGVzdCJ9",
-# ),
-# connection_pool=pool,
-# )
-
-# delete_file(
-# developer_id=developer_id,
-# file_id=file.id,
-# connection_pool=pool,
-# )
-
-# with raises(HTTPException) as e:
-# get_file(
-# developer_id=developer_id,
-# file_id=file.id,
-# connection_pool=pool,
-# )
-
-# assert e.value.status_code == 404
-# assert e.value.detail == "The specified file does not exist"
+@test("query: create user file")
+async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
+ pool = await create_db_pool(dsn=dsn)
+ file = await create_file(
+ developer_id=developer.id,
+ data=CreateFileRequest(
+ name="User File",
+ description="Test user file",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ ),
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+ assert file.name == "User File"
+
+ # Verify file appears in user's files
+ files = await list_files(
+ developer_id=developer.id,
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+ assert any(f.id == file.id for f in files)
+
+
+@test("query: create agent file")
+async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
+ pool = await create_db_pool(dsn=dsn)
+
+ file = await create_file(
+ developer_id=developer.id,
+ data=CreateFileRequest(
+ name="Agent File",
+ description="Test agent file",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ ),
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+ assert file.name == "Agent File"
+
+ # Verify file appears in agent's files
+ files = await list_files(
+ developer_id=developer.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+ assert any(f.id == file.id for f in files)
+
+
+@test("model: get file")
+async def _(dsn=pg_dsn, file=test_file, developer=test_developer):
+ pool = await create_db_pool(dsn=dsn)
+ file_test = await get_file(
+ developer_id=developer.id,
+ file_id=file.id,
+ connection_pool=pool,
+ )
+ assert file_test.id == file.id
+ assert file_test.name == "Hello"
+ assert file_test.description == "World"
+ assert file_test.mime_type == "text/plain"
+ assert file_test.hash == file.hash
+
+
+@test("query: list files")
+async def _(dsn=pg_dsn, developer=test_developer, file=test_file):
+ pool = await create_db_pool(dsn=dsn)
+ files = await list_files(
+ developer_id=developer.id,
+ connection_pool=pool,
+ )
+ assert len(files) >= 1
+ assert any(f.id == file.id for f in files)
+
+
+@test("query: list user files")
+async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a file owned by the user
+ file = await create_file(
+ developer_id=developer.id,
+ data=CreateFileRequest(
+ name="User List Test",
+ description="Test file for user listing",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ ),
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+
+ # List user's files
+ files = await list_files(
+ developer_id=developer.id,
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+ assert len(files) >= 1
+ assert any(f.id == file.id for f in files)
+
+
+@test("query: list agent files")
+async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a file owned by the agent
+ file = await create_file(
+ developer_id=developer.id,
+ data=CreateFileRequest(
+ name="Agent List Test",
+ description="Test file for agent listing",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ ),
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+
+ # List agent's files
+ files = await list_files(
+ developer_id=developer.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+ assert len(files) >= 1
+ assert any(f.id == file.id for f in files)
+
+
+@test("query: delete user file")
+async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a file owned by the user
+ file = await create_file(
+ developer_id=developer.id,
+ data=CreateFileRequest(
+ name="User Delete Test",
+ description="Test file for user deletion",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ ),
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+
+ # Delete the file
+ await delete_file(
+ developer_id=developer.id,
+ file_id=file.id,
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+
+ # Verify file is no longer in user's files
+ files = await list_files(
+ developer_id=developer.id,
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+ assert not any(f.id == file.id for f in files)
+
+
+@test("query: delete agent file")
+async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a file owned by the agent
+ file = await create_file(
+ developer_id=developer.id,
+ data=CreateFileRequest(
+ name="Agent Delete Test",
+ description="Test file for agent deletion",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ ),
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+
+ # Delete the file
+ await delete_file(
+ developer_id=developer.id,
+ file_id=file.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+
+ # Verify file is no longer in agent's files
+ files = await list_files(
+ developer_id=developer.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+ assert not any(f.id == file.id for f in files)
+
+
+@test("query: delete file")
+async def _(dsn=pg_dsn, developer=test_developer, file=test_file):
+ pool = await create_db_pool(dsn=dsn)
+
+ await delete_file(
+ developer_id=developer.id,
+ file_id=file.id,
+ connection_pool=pool,
+ )
+
+
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 8e512379f..199382775 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -1,261 +1,261 @@
-"""
-This module contains tests for SQL query generation functions in the sessions module.
-Tests verify the SQL queries without actually executing them against a database.
-"""
-
-from uuid_extensions import uuid7
-from ward import raises, test
-
-from agents_api.autogen.openapi_model import (
- CreateOrUpdateSessionRequest,
- CreateSessionRequest,
- PatchSessionRequest,
- ResourceDeletedResponse,
- ResourceUpdatedResponse,
- Session,
- UpdateSessionRequest,
-)
-from agents_api.clients.pg import create_db_pool
-from agents_api.queries.sessions import (
- count_sessions,
- create_or_update_session,
- create_session,
- delete_session,
- get_session,
- list_sessions,
- patch_session,
- update_session,
-)
-from tests.fixtures import (
- pg_dsn,
- test_agent,
- test_developer,
- test_developer_id,
- test_session,
- test_user,
-)
-
-
-@test("query: create session sql")
-async def _(
- dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user
-):
- """Test that a session can be successfully created."""
-
- pool = await create_db_pool(dsn=dsn)
- session_id = uuid7()
- data = CreateSessionRequest(
- users=[user.id],
- agents=[agent.id],
- situation="test session",
- system_template="test system template",
- )
- result = await create_session(
- developer_id=developer_id,
- session_id=session_id,
- data=data,
- connection_pool=pool,
- )
-
- assert result is not None
- assert isinstance(result, Session), f"Result is not a Session, {result}"
- assert result.id == session_id
- assert result.developer_id == developer_id
- assert result.situation == "test session"
- assert set(result.users) == {user.id}
- assert set(result.agents) == {agent.id}
-
-
-@test("query: create or update session sql")
-async def _(
- dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user
-):
- """Test that a session can be successfully created or updated."""
-
- pool = await create_db_pool(dsn=dsn)
- session_id = uuid7()
- data = CreateOrUpdateSessionRequest(
- users=[user.id],
- agents=[agent.id],
- situation="test session",
- )
- result = await create_or_update_session(
- developer_id=developer_id,
- session_id=session_id,
- data=data,
- connection_pool=pool,
- )
-
- assert result is not None
- assert isinstance(result, Session)
- assert result.id == session_id
- assert result.developer_id == developer_id
- assert result.situation == "test session"
- assert set(result.users) == {user.id}
- assert set(result.agents) == {agent.id}
-
-
-@test("query: get session exists")
-async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
- """Test retrieving an existing session."""
-
- pool = await create_db_pool(dsn=dsn)
- result = await get_session(
- developer_id=developer_id,
- session_id=session.id,
- connection_pool=pool,
- )
-
- assert result is not None
- assert isinstance(result, Session)
- assert result.id == session.id
- assert result.developer_id == developer_id
-
-
-@test("query: get session does not exist")
-async def _(dsn=pg_dsn, developer_id=test_developer_id):
- """Test retrieving a non-existent session."""
-
- session_id = uuid7()
- pool = await create_db_pool(dsn=dsn)
- with raises(Exception):
- await get_session(
- session_id=session_id,
- developer_id=developer_id,
- connection_pool=pool,
- )
-
-
-@test("query: list sessions")
-async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
- """Test listing sessions with default pagination."""
-
- pool = await create_db_pool(dsn=dsn)
- result, _ = await list_sessions(
- developer_id=developer_id,
- limit=10,
- offset=0,
- connection_pool=pool,
- )
-
- assert isinstance(result, list)
- assert len(result) >= 1
- assert any(s.id == session.id for s in result)
-
-
-@test("query: list sessions with filters")
-async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
- """Test listing sessions with specific filters."""
-
- pool = await create_db_pool(dsn=dsn)
- result, _ = await list_sessions(
- developer_id=developer_id,
- limit=10,
- offset=0,
- filters={"situation": "test session"},
- connection_pool=pool,
- )
-
- assert isinstance(result, list)
- assert len(result) >= 1
- assert all(s.situation == "test session" for s in result)
-
-
-@test("query: count sessions")
-async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
- """Test counting the number of sessions for a developer."""
-
- pool = await create_db_pool(dsn=dsn)
- count = await count_sessions(
- developer_id=developer_id,
- connection_pool=pool,
- )
-
- assert isinstance(count, int)
- assert count >= 1
-
-
-@test("query: update session sql")
-async def _(
- dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent
-):
- """Test that an existing session's information can be successfully updated."""
-
- pool = await create_db_pool(dsn=dsn)
- data = UpdateSessionRequest(
- agents=[agent.id],
- situation="updated session",
- )
- result = await update_session(
- session_id=session.id,
- developer_id=developer_id,
- data=data,
- connection_pool=pool,
- )
-
- assert result is not None
- assert isinstance(result, ResourceUpdatedResponse)
- assert result.updated_at > session.created_at
-
- updated_session = await get_session(
- developer_id=developer_id,
- session_id=session.id,
- connection_pool=pool,
- )
- assert updated_session.situation == "updated session"
- assert set(updated_session.agents) == {agent.id}
-
-
-@test("query: patch session sql")
-async def _(
- dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent
-):
- """Test that a session can be successfully patched."""
-
- pool = await create_db_pool(dsn=dsn)
- data = PatchSessionRequest(
- agents=[agent.id],
- situation="patched session",
- metadata={"test": "metadata"},
- )
- result = await patch_session(
- developer_id=developer_id,
- session_id=session.id,
- data=data,
- connection_pool=pool,
- )
-
- assert result is not None
- assert isinstance(result, ResourceUpdatedResponse)
- assert result.updated_at > session.created_at
-
- patched_session = await get_session(
- developer_id=developer_id,
- session_id=session.id,
- connection_pool=pool,
- )
- assert patched_session.situation == "patched session"
- assert set(patched_session.agents) == {agent.id}
- assert patched_session.metadata == {"test": "metadata"}
-
-
-@test("query: delete session sql")
-async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
- """Test that a session can be successfully deleted."""
-
- pool = await create_db_pool(dsn=dsn)
- delete_result = await delete_session(
- developer_id=developer_id,
- session_id=session.id,
- connection_pool=pool,
- )
-
- assert delete_result is not None
- assert isinstance(delete_result, ResourceDeletedResponse)
-
- with raises(Exception):
- await get_session(
- developer_id=developer_id,
- session_id=session.id,
- connection_pool=pool,
- )
+# """
+# This module contains tests for SQL query generation functions in the sessions module.
+# The tests run these queries against a live PostgreSQL pool and verify the results.
+# """
+
+# from uuid_extensions import uuid7
+# from ward import raises, test
+
+# from agents_api.autogen.openapi_model import (
+# CreateOrUpdateSessionRequest,
+# CreateSessionRequest,
+# PatchSessionRequest,
+# ResourceDeletedResponse,
+# ResourceUpdatedResponse,
+# Session,
+# UpdateSessionRequest,
+# )
+# from agents_api.clients.pg import create_db_pool
+# from agents_api.queries.sessions import (
+# count_sessions,
+# create_or_update_session,
+# create_session,
+# delete_session,
+# get_session,
+# list_sessions,
+# patch_session,
+# update_session,
+# )
+# from tests.fixtures import (
+# pg_dsn,
+# test_agent,
+# test_developer,
+# test_developer_id,
+# test_session,
+# test_user,
+# )
+
+
+# @test("query: create session sql")
+# async def _(
+# dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user
+# ):
+# """Test that a session can be successfully created."""
+
+# pool = await create_db_pool(dsn=dsn)
+# session_id = uuid7()
+# data = CreateSessionRequest(
+# users=[user.id],
+# agents=[agent.id],
+# situation="test session",
+# system_template="test system template",
+# )
+# result = await create_session(
+# developer_id=developer_id,
+# session_id=session_id,
+# data=data,
+# connection_pool=pool,
+# )
+
+# assert result is not None
+# assert isinstance(result, Session), f"Result is not a Session, {result}"
+# assert result.id == session_id
+# assert result.developer_id == developer_id
+# assert result.situation == "test session"
+# assert set(result.users) == {user.id}
+# assert set(result.agents) == {agent.id}
+
+
+# @test("query: create or update session sql")
+# async def _(
+# dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user
+# ):
+# """Test that a session can be successfully created or updated."""
+
+# pool = await create_db_pool(dsn=dsn)
+# session_id = uuid7()
+# data = CreateOrUpdateSessionRequest(
+# users=[user.id],
+# agents=[agent.id],
+# situation="test session",
+# )
+# result = await create_or_update_session(
+# developer_id=developer_id,
+# session_id=session_id,
+# data=data,
+# connection_pool=pool,
+# )
+
+# assert result is not None
+# assert isinstance(result, Session)
+# assert result.id == session_id
+# assert result.developer_id == developer_id
+# assert result.situation == "test session"
+# assert set(result.users) == {user.id}
+# assert set(result.agents) == {agent.id}
+
+
+# @test("query: get session exists")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+# """Test retrieving an existing session."""
+
+# pool = await create_db_pool(dsn=dsn)
+# result = await get_session(
+# developer_id=developer_id,
+# session_id=session.id,
+# connection_pool=pool,
+# )
+
+# assert result is not None
+# assert isinstance(result, Session)
+# assert result.id == session.id
+# assert result.developer_id == developer_id
+
+
+# @test("query: get session does not exist")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id):
+# """Test retrieving a non-existent session."""
+
+# session_id = uuid7()
+# pool = await create_db_pool(dsn=dsn)
+# with raises(Exception):
+# await get_session(
+# session_id=session_id,
+# developer_id=developer_id,
+# connection_pool=pool,
+# )
+
+
+# @test("query: list sessions")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+# """Test listing sessions with default pagination."""
+
+# pool = await create_db_pool(dsn=dsn)
+# result, _ = await list_sessions(
+# developer_id=developer_id,
+# limit=10,
+# offset=0,
+# connection_pool=pool,
+# )
+
+# assert isinstance(result, list)
+# assert len(result) >= 1
+# assert any(s.id == session.id for s in result)
+
+
+# @test("query: list sessions with filters")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+# """Test listing sessions with specific filters."""
+
+# pool = await create_db_pool(dsn=dsn)
+# result, _ = await list_sessions(
+# developer_id=developer_id,
+# limit=10,
+# offset=0,
+# filters={"situation": "test session"},
+# connection_pool=pool,
+# )
+
+# assert isinstance(result, list)
+# assert len(result) >= 1
+# assert all(s.situation == "test session" for s in result)
+
+
+# @test("query: count sessions")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+# """Test counting the number of sessions for a developer."""
+
+# pool = await create_db_pool(dsn=dsn)
+# count = await count_sessions(
+# developer_id=developer_id,
+# connection_pool=pool,
+# )
+
+# assert isinstance(count, int)
+# assert count >= 1
+
+
+# @test("query: update session sql")
+# async def _(
+# dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent
+# ):
+# """Test that an existing session's information can be successfully updated."""
+
+# pool = await create_db_pool(dsn=dsn)
+# data = UpdateSessionRequest(
+# agents=[agent.id],
+# situation="updated session",
+# )
+# result = await update_session(
+# session_id=session.id,
+# developer_id=developer_id,
+# data=data,
+# connection_pool=pool,
+# )
+
+# assert result is not None
+# assert isinstance(result, ResourceUpdatedResponse)
+# assert result.updated_at > session.created_at
+
+# updated_session = await get_session(
+# developer_id=developer_id,
+# session_id=session.id,
+# connection_pool=pool,
+# )
+# assert updated_session.situation == "updated session"
+# assert set(updated_session.agents) == {agent.id}
+
+
+# @test("query: patch session sql")
+# async def _(
+# dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent
+# ):
+# """Test that a session can be successfully patched."""
+
+# pool = await create_db_pool(dsn=dsn)
+# data = PatchSessionRequest(
+# agents=[agent.id],
+# situation="patched session",
+# metadata={"test": "metadata"},
+# )
+# result = await patch_session(
+# developer_id=developer_id,
+# session_id=session.id,
+# data=data,
+# connection_pool=pool,
+# )
+
+# assert result is not None
+# assert isinstance(result, ResourceUpdatedResponse)
+# assert result.updated_at > session.created_at
+
+# patched_session = await get_session(
+# developer_id=developer_id,
+# session_id=session.id,
+# connection_pool=pool,
+# )
+# assert patched_session.situation == "patched session"
+# assert set(patched_session.agents) == {agent.id}
+# assert patched_session.metadata == {"test": "metadata"}
+
+
+# @test("query: delete session sql")
+# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+# """Test that a session can be successfully deleted."""
+
+# pool = await create_db_pool(dsn=dsn)
+# delete_result = await delete_session(
+# developer_id=developer_id,
+# session_id=session.id,
+# connection_pool=pool,
+# )
+
+# assert delete_result is not None
+# assert isinstance(delete_result, ResourceDeletedResponse)
+
+# with raises(Exception):
+# await get_session(
+# developer_id=developer_id,
+# session_id=session.id,
+# connection_pool=pool,
+# )
From f974fa0f38bba27c8faafaf50f2a6f1476efd334 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Thu, 19 Dec 2024 06:56:33 +0000
Subject: [PATCH 080/274] refactor: Lint agents-api (CI)
---
.../agents_api/queries/agents/delete_agent.py | 1 +
.../agents_api/queries/files/create_file.py | 1 +
.../agents_api/queries/files/get_file.py | 12 ++++----
agents-api/tests/fixtures.py | 3 +-
agents-api/tests/test_files_queries.py | 30 +++++++++----------
5 files changed, 24 insertions(+), 23 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index a957ab2c5..d47711345 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -60,6 +60,7 @@
RETURNING developer_id, agent_id;
""").sql(pretty=True)
+
# @rewrap_exceptions(
# {
diff --git a/agents-api/agents_api/queries/files/create_file.py b/agents-api/agents_api/queries/files/create_file.py
index 8438978e6..48251fa5e 100644
--- a/agents-api/agents_api/queries/files/create_file.py
+++ b/agents-api/agents_api/queries/files/create_file.py
@@ -58,6 +58,7 @@
JOIN files f ON f.file_id = io.file_id;
""").sql(pretty=True)
+
# Add error handling decorator
# @rewrap_exceptions(
# {
diff --git a/agents-api/agents_api/queries/files/get_file.py b/agents-api/agents_api/queries/files/get_file.py
index ace417d5d..4d5dca4c0 100644
--- a/agents-api/agents_api/queries/files/get_file.py
+++ b/agents-api/agents_api/queries/files/get_file.py
@@ -3,8 +3,8 @@
It constructs and executes SQL queries to fetch file details based on file ID and developer ID.
"""
-from uuid import UUID
from typing import Literal
+from uuid import UUID
import asyncpg
from beartype import beartype
@@ -44,20 +44,20 @@
# }
# )
@wrap_in_class(
- File,
- one=True,
+ File,
+ one=True,
transform=lambda d: {
"id": d["file_id"],
**d,
"hash": d["hash"].hex(),
"content": "DUMMY: NEED TO FETCH CONTENT FROM BLOB STORAGE",
- }
+ },
)
@pg_query
@beartype
async def get_file(
- *,
- file_id: UUID,
+ *,
+ file_id: UUID,
developer_id: UUID,
owner_type: Literal["user", "agent"] | None = None,
owner_id: UUID | None = None,
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 2cad999e8..0c904b383 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -9,9 +9,9 @@
from agents_api.autogen.openapi_model import (
CreateAgentRequest,
+ CreateFileRequest,
CreateSessionRequest,
CreateUserRequest,
- CreateFileRequest,
)
from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
@@ -27,6 +27,7 @@
# from agents_api.queries.execution.create_execution_transition import create_execution_transition
# from agents_api.queries.execution.create_temporal_lookup import create_temporal_lookup
from agents_api.queries.files.create_file import create_file
+
# from agents_api.queries.files.delete_file import delete_file
from agents_api.queries.sessions.create_session import create_session
diff --git a/agents-api/tests/test_files_queries.py b/agents-api/tests/test_files_queries.py
index dd21be82b..92b52d733 100644
--- a/agents-api/tests/test_files_queries.py
+++ b/agents-api/tests/test_files_queries.py
@@ -11,7 +11,7 @@
from agents_api.queries.files.delete_file import delete_file
from agents_api.queries.files.get_file import get_file
from agents_api.queries.files.list_files import list_files
-from tests.fixtures import pg_dsn, test_developer, test_file, test_agent, test_user
+from tests.fixtures import pg_dsn, test_agent, test_developer, test_file, test_user
@test("query: create file")
@@ -45,7 +45,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
connection_pool=pool,
)
assert file.name == "User File"
-
+
# Verify file appears in user's files
files = await list_files(
developer_id=developer.id,
@@ -59,7 +59,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
@test("query: create agent file")
async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
pool = await create_db_pool(dsn=dsn)
-
+
file = await create_file(
developer_id=developer.id,
data=CreateFileRequest(
@@ -73,7 +73,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
connection_pool=pool,
)
assert file.name == "Agent File"
-
+
# Verify file appears in agent's files
files = await list_files(
developer_id=developer.id,
@@ -113,7 +113,7 @@ async def _(dsn=pg_dsn, developer=test_developer, file=test_file):
@test("query: list user files")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
pool = await create_db_pool(dsn=dsn)
-
+
# Create a file owned by the user
file = await create_file(
developer_id=developer.id,
@@ -127,7 +127,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
owner_id=user.id,
connection_pool=pool,
)
-
+
# List user's files
files = await list_files(
developer_id=developer.id,
@@ -142,7 +142,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
@test("query: list agent files")
async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
pool = await create_db_pool(dsn=dsn)
-
+
# Create a file owned by the agent
file = await create_file(
developer_id=developer.id,
@@ -156,7 +156,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
owner_id=agent.id,
connection_pool=pool,
)
-
+
# List agent's files
files = await list_files(
developer_id=developer.id,
@@ -171,7 +171,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
@test("query: delete user file")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
pool = await create_db_pool(dsn=dsn)
-
+
# Create a file owned by the user
file = await create_file(
developer_id=developer.id,
@@ -185,7 +185,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
owner_id=user.id,
connection_pool=pool,
)
-
+
# Delete the file
await delete_file(
developer_id=developer.id,
@@ -194,7 +194,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
owner_id=user.id,
connection_pool=pool,
)
-
+
# Verify file is no longer in user's files
files = await list_files(
developer_id=developer.id,
@@ -208,7 +208,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
@test("query: delete agent file")
async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
pool = await create_db_pool(dsn=dsn)
-
+
# Create a file owned by the agent
file = await create_file(
developer_id=developer.id,
@@ -222,7 +222,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
owner_id=agent.id,
connection_pool=pool,
)
-
+
# Delete the file
await delete_file(
developer_id=developer.id,
@@ -231,7 +231,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
owner_id=agent.id,
connection_pool=pool,
)
-
+
# Verify file is no longer in agent's files
files = await list_files(
developer_id=developer.id,
@@ -251,5 +251,3 @@ async def _(dsn=pg_dsn, developer=test_developer, file=test_file):
file_id=file.id,
connection_pool=pool,
)
-
-
From bbdbb4b369649073fa2334b05e99d34eb44585f4 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Thu, 19 Dec 2024 12:03:30 +0300
Subject: [PATCH 081/274] fix(agents-api): fix sessions and agents queries /
tests
---
.../queries/entries/create_entries.py | 2 +-
.../sessions/create_or_update_session.py | 32 +++++------
.../queries/sessions/create_session.py | 23 +++++---
.../queries/sessions/patch_session.py | 51 +----------------
.../queries/sessions/update_session.py | 56 +++----------------
agents-api/agents_api/queries/utils.py | 17 +++---
agents-api/tests/fixtures.py | 10 ++--
agents-api/tests/test_agent_queries.py | 5 +-
agents-api/tests/test_session_queries.py | 49 +++++++---------
9 files changed, 78 insertions(+), 167 deletions(-)
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index fb61b7c7e..33dcda984 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -173,7 +173,7 @@ async def add_entry_relations(
(
session_exists_query,
[session_id, developer_id],
- "fetch",
+ "fetchrow",
),
(
entry_relation_query,
diff --git a/agents-api/agents_api/queries/sessions/create_or_update_session.py b/agents-api/agents_api/queries/sessions/create_or_update_session.py
index bc54bf31b..26a353e94 100644
--- a/agents-api/agents_api/queries/sessions/create_or_update_session.py
+++ b/agents-api/agents_api/queries/sessions/create_or_update_session.py
@@ -61,11 +61,7 @@
participant_type,
participant_id
)
-SELECT
- $1 as developer_id,
- $2 as session_id,
- unnest($3::participant_type[]) as participant_type,
- unnest($4::uuid[]) as participant_id;
+VALUES ($1, $2, $3, $4);
""").sql(pretty=True)
@@ -83,16 +79,23 @@
),
}
)
-@wrap_in_class(ResourceUpdatedResponse, one=True)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": d["session_id"],
+ "updated_at": d["updated_at"],
+ },
+)
@increase_counter("create_or_update_session")
-@pg_query
+@pg_query(return_index=0)
@beartype
async def create_or_update_session(
*,
developer_id: UUID,
session_id: UUID,
data: CreateOrUpdateSessionRequest,
-) -> list[tuple[str, list]]:
+) -> list[tuple[str, list] | tuple[str, list, str]]:
"""
Constructs SQL queries to create or update a session and its participant lookups.
@@ -139,14 +142,11 @@ async def create_or_update_session(
]
# Prepare lookup parameters
- lookup_params = [
- developer_id, # $1
- session_id, # $2
- participant_types, # $3
- participant_ids, # $4
- ]
+ lookup_params = []
+ for participant_type, participant_id in zip(participant_types, participant_ids):
+ lookup_params.append([developer_id, session_id, participant_type, participant_id])
return [
- (session_query, session_params),
- (lookup_query, lookup_params),
+ (session_query, session_params, "fetch"),
+ (lookup_query, lookup_params, "fetchmany"),
]
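
The hunk above replaces the unnest()-based bulk insert with a plain VALUES statement executed once per participant row ("fetchmany" is the executor method name the query runner in utils.py dispatches on). A minimal sketch of the parameter rows this produces, using hypothetical IDs:

    # Hypothetical inputs, for illustration only
    developer_id, session_id = "dev-uuid", "sess-uuid"
    participant_types = ["user", "agent"]
    participant_ids = ["user-uuid", "agent-uuid"]

    # One parameter row per participant, matching INSERT ... VALUES ($1, $2, $3, $4)
    lookup_params = [
        [developer_id, session_id, ptype, pid]
        for ptype, pid in zip(participant_types, participant_ids)
    ]

    assert lookup_params == [
        ["dev-uuid", "sess-uuid", "user", "user-uuid"],
        ["dev-uuid", "sess-uuid", "agent", "agent-uuid"],
    ]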
diff --git a/agents-api/agents_api/queries/sessions/create_session.py b/agents-api/agents_api/queries/sessions/create_session.py
index baa3f09d1..91badb281 100644
--- a/agents-api/agents_api/queries/sessions/create_session.py
+++ b/agents-api/agents_api/queries/sessions/create_session.py
@@ -1,12 +1,14 @@
from uuid import UUID
+from uuid_extensions import uuid7
import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-from ...autogen.openapi_model import CreateSessionRequest, Session
+from ...autogen.openapi_model import CreateSessionRequest, Session, ResourceCreatedResponse
from ...metrics.counters import increase_counter
+from ...common.utils.datetime import utcnow
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL queries
@@ -63,14 +65,21 @@
),
}
)
-@wrap_in_class(Session, transform=lambda d: {**d, "id": d["session_id"]})
+@wrap_in_class(
+ Session,
+ one=True,
+ transform=lambda d: {
+ **d,
+ "id": d["session_id"],
+ },
+)
@increase_counter("create_session")
-@pg_query
+@pg_query(return_index=0)
@beartype
async def create_session(
*,
developer_id: UUID,
- session_id: UUID,
+ session_id: UUID | None = None,
data: CreateSessionRequest,
) -> list[tuple[str, list] | tuple[str, list, str]]:
"""
@@ -87,6 +96,7 @@ async def create_session(
# Handle participants
users = data.users or ([data.user] if data.user else [])
agents = data.agents or ([data.agent] if data.agent else [])
+ session_id = session_id or uuid7()
if not agents:
raise HTTPException(
@@ -123,10 +133,7 @@ async def create_session(
for ptype, pid in zip(participant_types, participant_ids):
lookup_params.append([developer_id, session_id, ptype, pid])
- print("*" * 100)
- print(lookup_params)
- print("*" * 100)
return [
- (session_query, session_params),
+ (session_query, session_params, "fetch"),
(lookup_query, lookup_params, "fetchmany"),
]
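
With this change create_session treats session_id as optional and falls back to a time-ordered UUIDv7. A minimal sketch of that fallback, assuming the uuid_extensions package imported above:

    from uuid import UUID
    from uuid_extensions import uuid7

    def resolve_session_id(session_id: UUID | None = None) -> UUID:
        # Caller-supplied ids win; otherwise mint a time-ordered UUIDv7
        return session_id or uuid7()

    assert isinstance(resolve_session_id(), UUID)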
diff --git a/agents-api/agents_api/queries/sessions/patch_session.py b/agents-api/agents_api/queries/sessions/patch_session.py
index b14b94a8a..60d82468e 100644
--- a/agents-api/agents_api/queries/sessions/patch_session.py
+++ b/agents-api/agents_api/queries/sessions/patch_session.py
@@ -31,25 +31,6 @@
SELECT * FROM updated_session;
""").sql(pretty=True)
-lookup_query = parse_one("""
-WITH deleted_lookups AS (
- DELETE FROM session_lookup
- WHERE developer_id = $1 AND session_id = $2
-)
-INSERT INTO session_lookup (
- developer_id,
- session_id,
- participant_type,
- participant_id
-)
-SELECT
- $1 as developer_id,
- $2 as session_id,
- unnest($3::participant_type[]) as participant_type,
- unnest($4::uuid[]) as participant_id;
-""").sql(pretty=True)
-
-
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
@@ -64,7 +45,7 @@
),
}
)
-@wrap_in_class(ResourceUpdatedResponse, one=True)
+@wrap_in_class(ResourceUpdatedResponse, one=True, transform=lambda d: {"id": d["session_id"], "updated_at": d["updated_at"]},)
@increase_counter("patch_session")
@pg_query
@beartype
@@ -85,22 +66,6 @@ async def patch_session(
Returns:
list[tuple[str, list]]: List of SQL queries and their parameters
"""
- # Handle participants
- users = data.users or ([data.user] if data.user else [])
- agents = data.agents or ([data.agent] if data.agent else [])
-
- if data.agent and data.agents:
- raise HTTPException(
- status_code=400,
- detail="Only one of 'agent' or 'agents' should be provided",
- )
-
- # Prepare participant arrays for lookup query if participants are provided
- participant_types = []
- participant_ids = []
- if users or agents:
- participant_types = ["user"] * len(users) + ["agent"] * len(agents)
- participant_ids = [str(u) for u in users] + [str(a) for a in agents]
# Extract fields from data, using None for unset fields
session_params = [
@@ -116,16 +81,4 @@ async def patch_session(
data.recall_options or {}, # $10
]
- queries = [(session_query, session_params)]
-
- # Only add lookup query if participants are provided
- if participant_types:
- lookup_params = [
- developer_id, # $1
- session_id, # $2
- participant_types, # $3
- participant_ids, # $4
- ]
- queries.append((lookup_query, lookup_params))
-
- return queries
+ return [(session_query, session_params)]
diff --git a/agents-api/agents_api/queries/sessions/update_session.py b/agents-api/agents_api/queries/sessions/update_session.py
index 01e21e732..7c58d10e6 100644
--- a/agents-api/agents_api/queries/sessions/update_session.py
+++ b/agents-api/agents_api/queries/sessions/update_session.py
@@ -27,24 +27,6 @@
RETURNING *;
""").sql(pretty=True)
-lookup_query = parse_one("""
-WITH deleted_lookups AS (
- DELETE FROM session_lookup
- WHERE developer_id = $1 AND session_id = $2
-)
-INSERT INTO session_lookup (
- developer_id,
- session_id,
- participant_type,
- participant_id
-)
-SELECT
- $1 as developer_id,
- $2 as session_id,
- unnest($3::participant_type[]) as participant_type,
- unnest($4::uuid[]) as participant_id;
-""").sql(pretty=True)
-
@rewrap_exceptions(
{
@@ -60,7 +42,14 @@
),
}
)
-@wrap_in_class(ResourceUpdatedResponse, one=True)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": d["session_id"],
+ "updated_at": d["updated_at"],
+ },
+)
@increase_counter("update_session")
@pg_query
@beartype
@@ -81,26 +70,6 @@ async def update_session(
Returns:
list[tuple[str, list]]: List of SQL queries and their parameters
"""
- # Handle participants
- users = data.users or ([data.user] if data.user else [])
- agents = data.agents or ([data.agent] if data.agent else [])
-
- if not agents:
- raise HTTPException(
- status_code=400,
- detail="At least one agent must be provided",
- )
-
- if data.agent and data.agents:
- raise HTTPException(
- status_code=400,
- detail="Only one of 'agent' or 'agents' should be provided",
- )
-
- # Prepare participant arrays for lookup query
- participant_types = ["user"] * len(users) + ["agent"] * len(agents)
- participant_ids = [str(u) for u in users] + [str(a) for a in agents]
-
# Prepare session parameters
session_params = [
developer_id, # $1
@@ -115,15 +84,6 @@ async def update_session(
data.recall_options or {}, # $10
]
- # Prepare lookup parameters
- lookup_params = [
- developer_id, # $1
- session_id, # $2
- participant_types, # $3
- participant_ids, # $4
- ]
-
return [
(session_query, session_params),
- (lookup_query, lookup_params),
]
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 73113580d..4126c91dc 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -123,6 +123,7 @@ def pg_query(
debug: bool | None = None,
only_on_error: bool = False,
timeit: bool = False,
+ return_index: int = -1,
) -> Callable[..., Callable[P, list[Record]]] | Callable[P, list[Record]]:
def pg_query_dec(
func: Callable[P, PGQueryArgs | list[PGQueryArgs]],
@@ -159,6 +160,8 @@ async def wrapper(
async with pool.acquire() as conn:
async with conn.transaction():
start = timeit and time.perf_counter()
+ all_results = []
+
for method_name, payload in batch:
method = getattr(conn, method_name)
@@ -169,11 +172,7 @@ async def wrapper(
results: list[Record] = await method(
query, *args, timeout=timeout
)
-
- print("%" * 100)
- print(results)
- print(*args)
- print("%" * 100)
+ all_results.append(results)
if method_name == "fetchrow" and (
len(results) == 0 or results.get("bool") is None
@@ -204,9 +203,11 @@ async def wrapper(
raise
- not only_on_error and debug and pprint(results)
-
- return results
+ # Return results from specified index
+ results_to_return = all_results[return_index] if all_results else []
+ not only_on_error and debug and pprint(results_to_return)
+
+ return results_to_return
# Set the wrapped function as an attribute of the wrapper,
# forwards the __wrapped__ attribute if it exists.
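
The new return_index knob lets a multi-statement @pg_query function choose which statement's rows it returns; create_session and create_or_update_session pass return_index=0 to hand back the session row rather than the lookup inserts. The selection logic reduces to:

    # Illustration only: results are collected per statement, then indexed
    all_results = [
        [{"session_id": "sess-uuid", "updated_at": "2024-12-19"}],  # statement 0
        [],                                                         # statement 1
    ]
    return_index = 0  # the default of -1 keeps the old last-statement behavior
    results_to_return = all_results[return_index] if all_results else []
    assert results_to_return[0]["session_id"] == "sess-uuid"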
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 9153785a4..49c2e7094 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -96,7 +96,7 @@ def patch_embed_acompletion():
yield embed, acompletion
-@fixture(scope="global")
+@fixture(scope="test")
async def test_agent(dsn=pg_dsn, developer=test_developer):
pool = await create_db_pool(dsn=dsn)
@@ -105,18 +105,16 @@ async def test_agent(dsn=pg_dsn, developer=test_developer):
data=CreateAgentRequest(
model="gpt-4o-mini",
name="test agent",
- canonical_name=f"test_agent_{str(int(time.time()))}",
about="test agent about",
metadata={"test": "test"},
),
connection_pool=pool,
)
- yield agent
- await pool.close()
+ return agent
-@fixture(scope="global")
+@fixture(scope="test")
async def test_user(dsn=pg_dsn, developer=test_developer):
pool = await create_db_pool(dsn=dsn)
@@ -153,7 +151,7 @@ async def test_new_developer(dsn=pg_dsn, email=random_email):
return developer
-@fixture(scope="global")
+@fixture(scope="test")
async def test_session(
dsn=pg_dsn,
developer_id=test_developer_id,
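
Moving these fixtures from scope="global" to scope="test" makes ward rebuild them for every test, which is why the canonical_name timestamp hack and the manual pool teardown can go. A minimal ward sketch of the difference, assuming ward's string scope names:

    from ward import fixture, test

    @fixture(scope="test")  # rebuilt per test; "global" shares one instance per run
    async def fresh_agent():
        return {"name": "test agent"}

    @test("every test receives its own fixture instance")
    async def _(agent=fresh_agent):
        assert agent["name"] == "test agent"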
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index b6cb7aedc..594047a82 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -41,7 +41,8 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
)
-@test("query: create agent with instructions sql")
+
+@test("query: create or update agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that an agent can be successfully created or updated."""
@@ -60,6 +61,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
)
+
@test("query: update agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that an existing agent's information can be successfully updated."""
@@ -81,7 +83,6 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
assert result is not None
assert isinstance(result, ResourceUpdatedResponse)
-
@test("query: get agent not exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that retrieving a non-existent agent raises an exception."""
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 4e04468bf..ec2e511d4 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -7,13 +7,15 @@
from ward import raises, test
from agents_api.autogen.openapi_model import (
+ Session,
CreateOrUpdateSessionRequest,
CreateSessionRequest,
+ UpdateSessionRequest,
PatchSessionRequest,
ResourceDeletedResponse,
ResourceUpdatedResponse,
- Session,
- UpdateSessionRequest,
+ ResourceDeletedResponse,
+ ResourceCreatedResponse,
)
from agents_api.clients.pg import create_db_pool
from agents_api.queries.sessions import (
@@ -46,7 +48,6 @@ async def _(
data = CreateSessionRequest(
users=[user.id],
agents=[agent.id],
- situation="test session",
system_template="test system template",
)
result = await create_session(
@@ -59,10 +60,6 @@ async def _(
assert result is not None
assert isinstance(result, Session), f"Result is not a Session, {result}"
assert result.id == session_id
- assert result.developer_id == developer_id
- assert result.situation == "test session"
- assert set(result.users) == {user.id}
- assert set(result.agents) == {agent.id}
@test("query: create or update session sql")
@@ -76,7 +73,7 @@ async def _(
data = CreateOrUpdateSessionRequest(
users=[user.id],
agents=[agent.id],
- situation="test session",
+ system_template="test system template",
)
result = await create_or_update_session(
developer_id=developer_id,
@@ -86,12 +83,9 @@ async def _(
)
assert result is not None
- assert isinstance(result, Session)
+ assert isinstance(result, ResourceUpdatedResponse)
assert result.id == session_id
- assert result.developer_id == developer_id
- assert result.situation == "test session"
- assert set(result.users) == {user.id}
- assert set(result.agents) == {agent.id}
+ assert result.updated_at is not None
@test("query: get session exists")
@@ -108,7 +102,6 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
assert result is not None
assert isinstance(result, Session)
assert result.id == session.id
- assert result.developer_id == developer_id
@test("query: get session does not exist")
@@ -130,7 +123,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
"""Test listing sessions with default pagination."""
pool = await create_db_pool(dsn=dsn)
- result, _ = await list_sessions(
+ result = await list_sessions(
developer_id=developer_id,
limit=10,
offset=0,
@@ -147,17 +140,18 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
"""Test listing sessions with specific filters."""
pool = await create_db_pool(dsn=dsn)
- result, _ = await list_sessions(
+ result = await list_sessions(
developer_id=developer_id,
limit=10,
offset=0,
- filters={"situation": "test session"},
connection_pool=pool,
)
assert isinstance(result, list)
assert len(result) >= 1
- assert all(s.situation == "test session" for s in result)
+ assert all(
+ s.situation == session.situation for s in result
+ ), f"Result is not a list of sessions, {result}, {session.situation}"
@test("query: count sessions")
@@ -170,20 +164,21 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
connection_pool=pool,
)
- assert isinstance(count, int)
- assert count >= 1
+ assert isinstance(count, dict)
+ assert count["count"] >= 1
@test("query: update session sql")
async def _(
- dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent
+ dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent, user=test_user
):
"""Test that an existing session's information can be successfully updated."""
pool = await create_db_pool(dsn=dsn)
data = UpdateSessionRequest(
- agents=[agent.id],
- situation="updated session",
+ token_budget=1000,
+ forward_tool_calls=True,
+ system_template="updated system template",
)
result = await update_session(
session_id=session.id,
@@ -201,8 +196,7 @@ async def _(
session_id=session.id,
connection_pool=pool,
)
- assert updated_session.situation == "updated session"
- assert set(updated_session.agents) == {agent.id}
+ assert updated_session.forward_tool_calls is True
@test("query: patch session sql")
@@ -213,8 +207,6 @@ async def _(
pool = await create_db_pool(dsn=dsn)
data = PatchSessionRequest(
- agents=[agent.id],
- situation="patched session",
metadata={"test": "metadata"},
)
result = await patch_session(
@@ -233,8 +225,7 @@ async def _(
session_id=session.id,
connection_pool=pool,
)
- assert patched_session.situation == "patched session"
- assert set(patched_session.agents) == {agent.id}
+ assert patched_session.situation == session.situation
assert patched_session.metadata == {"test": "metadata"}
From 8361e7d33e272d193bcd83f15248741751dfde85 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Thu, 19 Dec 2024 09:06:04 +0000
Subject: [PATCH 082/274] refactor: Lint agents-api (CI)
---
.../queries/sessions/create_or_update_session.py | 4 +++-
.../agents_api/queries/sessions/create_session.py | 10 +++++++---
.../agents_api/queries/sessions/patch_session.py | 7 ++++++-
agents-api/agents_api/queries/utils.py | 4 ++--
agents-api/tests/test_agent_queries.py | 3 +--
agents-api/tests/test_session_queries.py | 13 ++++++++-----
6 files changed, 27 insertions(+), 14 deletions(-)
diff --git a/agents-api/agents_api/queries/sessions/create_or_update_session.py b/agents-api/agents_api/queries/sessions/create_or_update_session.py
index 26a353e94..3c4dbf66e 100644
--- a/agents-api/agents_api/queries/sessions/create_or_update_session.py
+++ b/agents-api/agents_api/queries/sessions/create_or_update_session.py
@@ -144,7 +144,9 @@ async def create_or_update_session(
# Prepare lookup parameters
lookup_params = []
for participant_type, participant_id in zip(participant_types, participant_ids):
- lookup_params.append([developer_id, session_id, participant_type, participant_id])
+ lookup_params.append(
+ [developer_id, session_id, participant_type, participant_id]
+ )
return [
(session_query, session_params, "fetch"),
diff --git a/agents-api/agents_api/queries/sessions/create_session.py b/agents-api/agents_api/queries/sessions/create_session.py
index 91badb281..63fbdc940 100644
--- a/agents-api/agents_api/queries/sessions/create_session.py
+++ b/agents-api/agents_api/queries/sessions/create_session.py
@@ -1,14 +1,18 @@
from uuid import UUID
-from uuid_extensions import uuid7
import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
+from uuid_extensions import uuid7
-from ...autogen.openapi_model import CreateSessionRequest, Session, ResourceCreatedResponse
-from ...metrics.counters import increase_counter
+from ...autogen.openapi_model import (
+ CreateSessionRequest,
+ ResourceCreatedResponse,
+ Session,
+)
from ...common.utils.datetime import utcnow
+from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL queries
diff --git a/agents-api/agents_api/queries/sessions/patch_session.py b/agents-api/agents_api/queries/sessions/patch_session.py
index 60d82468e..7d526ae1a 100644
--- a/agents-api/agents_api/queries/sessions/patch_session.py
+++ b/agents-api/agents_api/queries/sessions/patch_session.py
@@ -31,6 +31,7 @@
SELECT * FROM updated_session;
""").sql(pretty=True)
+
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
@@ -45,7 +46,11 @@
),
}
)
-@wrap_in_class(ResourceUpdatedResponse, one=True, transform=lambda d: {"id": d["session_id"], "updated_at": d["updated_at"]},)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {"id": d["session_id"], "updated_at": d["updated_at"]},
+)
@increase_counter("patch_session")
@pg_query
@beartype
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 4126c91dc..0c20ca59e 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -161,7 +161,7 @@ async def wrapper(
async with conn.transaction():
start = timeit and time.perf_counter()
all_results = []
-
+
for method_name, payload in batch:
method = getattr(conn, method_name)
@@ -206,7 +206,7 @@ async def wrapper(
# Return results from specified index
results_to_return = all_results[return_index] if all_results else []
not only_on_error and debug and pprint(results_to_return)
-
+
return results_to_return
# Set the wrapped function as an attribute of the wrapper,
diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py
index 594047a82..85d10f6ea 100644
--- a/agents-api/tests/test_agent_queries.py
+++ b/agents-api/tests/test_agent_queries.py
@@ -41,7 +41,6 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
)
-
@test("query: create or update agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that an agent can be successfully created or updated."""
@@ -61,7 +60,6 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id):
)
-
@test("query: update agent sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that an existing agent's information can be successfully updated."""
@@ -83,6 +81,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
assert result is not None
assert isinstance(result, ResourceUpdatedResponse)
+
@test("query: get agent not exists sql")
async def _(dsn=pg_dsn, developer_id=test_developer_id):
"""Test that retrieving a non-existent agent raises an exception."""
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index ec2e511d4..5f2190e2b 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -7,15 +7,14 @@
from ward import raises, test
from agents_api.autogen.openapi_model import (
- Session,
CreateOrUpdateSessionRequest,
CreateSessionRequest,
- UpdateSessionRequest,
PatchSessionRequest,
+ ResourceCreatedResponse,
ResourceDeletedResponse,
ResourceUpdatedResponse,
- ResourceDeletedResponse,
- ResourceCreatedResponse,
+ Session,
+ UpdateSessionRequest,
)
from agents_api.clients.pg import create_db_pool
from agents_api.queries.sessions import (
@@ -170,7 +169,11 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
@test("query: update session sql")
async def _(
- dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent, user=test_user
+ dsn=pg_dsn,
+ developer_id=test_developer_id,
+ session=test_session,
+ agent=test_agent,
+ user=test_user,
):
"""Test that an existing session's information can be successfully updated."""
From e158f3adbd41aaeb996cd3a62c0401ca1aa21eaa Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Thu, 19 Dec 2024 19:45:43 +0530
Subject: [PATCH 083/274] feat(agents-api): Remove auto_blob_store in favor of

 interceptor-based system
Signed-off-by: Diwank Singh Tomer
---
.../agents_api/activities/embed_docs.py | 2 -
.../activities/excecute_api_call.py | 2 -
.../activities/execute_integration.py | 2 -
.../agents_api/activities/execute_system.py | 7 +-
.../activities/sync_items_remote.py | 12 +-
.../activities/task_steps/base_evaluate.py | 2 -
.../activities/task_steps/cozo_query_step.py | 2 -
.../activities/task_steps/evaluate_step.py | 2 -
.../activities/task_steps/for_each_step.py | 2 -
.../activities/task_steps/get_value_step.py | 5 +-
.../activities/task_steps/if_else_step.py | 2 -
.../activities/task_steps/log_step.py | 2 -
.../activities/task_steps/map_reduce_step.py | 2 -
.../activities/task_steps/prompt_step.py | 2 -
.../task_steps/raise_complete_async.py | 2 -
.../activities/task_steps/return_step.py | 2 -
.../activities/task_steps/set_value_step.py | 5 +-
.../activities/task_steps/switch_step.py | 2 -
.../activities/task_steps/tool_call_step.py | 2 -
.../activities/task_steps/transition_step.py | 6 -
.../task_steps/wait_for_input_step.py | 2 -
.../activities/task_steps/yield_step.py | 2 -
agents-api/agents_api/activities/utils.py | 1 -
.../agents_api/autogen/openapi_model.py | 3 +-
agents-api/agents_api/clients/async_s3.py | 1 +
agents-api/agents_api/clients/temporal.py | 9 +-
agents-api/agents_api/common/interceptors.py | 189 +++++++++------
.../agents_api/common/protocol/remote.py | 97 ++------
.../agents_api/common/protocol/sessions.py | 2 +-
.../agents_api/common/protocol/tasks.py | 23 +-
.../agents_api/common/storage_handler.py | 226 ------------------
agents-api/agents_api/env.py | 4 +-
.../routers/healthz/check_health.py | 19 ++
.../workflows/task_execution/__init__.py | 12 +-
.../workflows/task_execution/helpers.py | 7 -
35 files changed, 181 insertions(+), 481 deletions(-)
delete mode 100644 agents-api/agents_api/common/storage_handler.py
create mode 100644 agents-api/agents_api/routers/healthz/check_health.py
diff --git a/agents-api/agents_api/activities/embed_docs.py b/agents-api/agents_api/activities/embed_docs.py
index c6c7663c3..a9a7cae44 100644
--- a/agents-api/agents_api/activities/embed_docs.py
+++ b/agents-api/agents_api/activities/embed_docs.py
@@ -7,13 +7,11 @@
from temporalio import activity
from ..clients import cozo, litellm
-from ..common.storage_handler import auto_blob_store
from ..env import testing
from ..models.docs.embed_snippets import embed_snippets as embed_snippets_query
from .types import EmbedDocsPayload
-@auto_blob_store(deep=True)
@beartype
async def embed_docs(
payload: EmbedDocsPayload, cozo_client=None, max_batch_size: int = 100
diff --git a/agents-api/agents_api/activities/excecute_api_call.py b/agents-api/agents_api/activities/excecute_api_call.py
index 09a33aaa8..2167aaead 100644
--- a/agents-api/agents_api/activities/excecute_api_call.py
+++ b/agents-api/agents_api/activities/excecute_api_call.py
@@ -6,7 +6,6 @@
from temporalio import activity
from ..autogen.openapi_model import ApiCallDef
-from ..common.storage_handler import auto_blob_store
from ..env import testing
@@ -20,7 +19,6 @@ class RequestArgs(TypedDict):
headers: Optional[dict[str, str]]
-@auto_blob_store(deep=True)
@beartype
async def execute_api_call(
api_call: ApiCallDef,
diff --git a/agents-api/agents_api/activities/execute_integration.py b/agents-api/agents_api/activities/execute_integration.py
index 3316ad6f5..d058553c4 100644
--- a/agents-api/agents_api/activities/execute_integration.py
+++ b/agents-api/agents_api/activities/execute_integration.py
@@ -7,12 +7,10 @@
from ..clients import integrations
from ..common.exceptions.tools import IntegrationExecutionException
from ..common.protocol.tasks import ExecutionInput, StepContext
-from ..common.storage_handler import auto_blob_store
from ..env import testing
from ..models.tools import get_tool_args_from_metadata
-@auto_blob_store(deep=True)
@beartype
async def execute_integration(
context: StepContext,
diff --git a/agents-api/agents_api/activities/execute_system.py b/agents-api/agents_api/activities/execute_system.py
index 590849080..647327a8a 100644
--- a/agents-api/agents_api/activities/execute_system.py
+++ b/agents-api/agents_api/activities/execute_system.py
@@ -19,16 +19,14 @@
VectorDocSearchRequest,
)
from ..common.protocol.tasks import ExecutionInput, StepContext
-from ..common.storage_handler import auto_blob_store, load_from_blob_store_if_remote
from ..env import testing
-from ..queries.developer import get_developer
+from ..queries.developers import get_developer
from .utils import get_handler
# For running synchronous code in the background
process_pool_executor = ProcessPoolExecutor()
-@auto_blob_store(deep=True)
@beartype
async def execute_system(
context: StepContext,
@@ -37,9 +35,6 @@ async def execute_system(
"""Execute a system call with the appropriate handler and transformed arguments."""
arguments: dict[str, Any] = system.arguments or {}
- if set(arguments.keys()) == {"bucket", "key"}:
- arguments = await load_from_blob_store_if_remote(arguments)
-
if not isinstance(context.execution_input, ExecutionInput):
raise TypeError("Expected ExecutionInput type for context.execution_input")
diff --git a/agents-api/agents_api/activities/sync_items_remote.py b/agents-api/agents_api/activities/sync_items_remote.py
index d71a5c566..14751c2b6 100644
--- a/agents-api/agents_api/activities/sync_items_remote.py
+++ b/agents-api/agents_api/activities/sync_items_remote.py
@@ -9,20 +9,16 @@
@beartype
async def save_inputs_remote_fn(inputs: list[Any]) -> list[Any | RemoteObject]:
- from ..common.storage_handler import store_in_blob_store_if_large
+ from ..common.interceptors import offload_if_large
- return await asyncio.gather(
- *[store_in_blob_store_if_large(input) for input in inputs]
- )
+ return await asyncio.gather(*[offload_if_large(input) for input in inputs])
@beartype
async def load_inputs_remote_fn(inputs: list[Any | RemoteObject]) -> list[Any]:
- from ..common.storage_handler import load_from_blob_store_if_remote
+ from ..common.interceptors import load_if_remote
- return await asyncio.gather(
- *[load_from_blob_store_if_remote(input) for input in inputs]
- )
+ return await asyncio.gather(*[load_if_remote(input) for input in inputs])
save_inputs_remote = activity.defn(name="save_inputs_remote")(save_inputs_remote_fn)
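
Both activities keep the same shape after the rename: each input element is offloaded or rehydrated independently, in parallel, rather than as one opaque blob. The pattern, with a stand-in for the interceptor helper:

    import asyncio

    async def offload_if_large(x):
        # Stand-in: the real helper in common/interceptors.py uploads
        # oversized values to the blob store and returns a RemoteObject
        return x

    async def save_inputs(inputs: list) -> list:
        # Each element is offloaded concurrently
        return await asyncio.gather(*[offload_if_large(i) for i in inputs])

    asyncio.run(save_inputs([1, "two", {"three": 3}]))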
diff --git a/agents-api/agents_api/activities/task_steps/base_evaluate.py b/agents-api/agents_api/activities/task_steps/base_evaluate.py
index d87b961d3..3bb04e390 100644
--- a/agents-api/agents_api/activities/task_steps/base_evaluate.py
+++ b/agents-api/agents_api/activities/task_steps/base_evaluate.py
@@ -13,7 +13,6 @@
from temporalio import activity # noqa: E402
from thefuzz import fuzz # noqa: E402
-from ...common.storage_handler import auto_blob_store # noqa: E402
from ...env import testing # noqa: E402
from ..utils import get_evaluator # noqa: E402
@@ -63,7 +62,6 @@ def _recursive_evaluate(expr, evaluator: SimpleEval):
raise ValueError(f"Invalid expression: {expr}")
-@auto_blob_store(deep=True)
@beartype
async def base_evaluate(
exprs: Any,
diff --git a/agents-api/agents_api/activities/task_steps/cozo_query_step.py b/agents-api/agents_api/activities/task_steps/cozo_query_step.py
index 16e9a53d8..8d28d83c9 100644
--- a/agents-api/agents_api/activities/task_steps/cozo_query_step.py
+++ b/agents-api/agents_api/activities/task_steps/cozo_query_step.py
@@ -4,11 +4,9 @@
from temporalio import activity
from ... import models
-from ...common.storage_handler import auto_blob_store
from ...env import testing
-@auto_blob_store(deep=True)
@beartype
async def cozo_query_step(
query_name: str,
diff --git a/agents-api/agents_api/activities/task_steps/evaluate_step.py b/agents-api/agents_api/activities/task_steps/evaluate_step.py
index 904ec3b9d..08fa6cd55 100644
--- a/agents-api/agents_api/activities/task_steps/evaluate_step.py
+++ b/agents-api/agents_api/activities/task_steps/evaluate_step.py
@@ -5,11 +5,9 @@
from ...activities.utils import simple_eval_dict
from ...common.protocol.tasks import StepContext, StepOutcome
-from ...common.storage_handler import auto_blob_store
from ...env import testing
-@auto_blob_store(deep=True)
@beartype
async def evaluate_step(
context: StepContext,
diff --git a/agents-api/agents_api/activities/task_steps/for_each_step.py b/agents-api/agents_api/activities/task_steps/for_each_step.py
index f51c1ef76..ca84eb75d 100644
--- a/agents-api/agents_api/activities/task_steps/for_each_step.py
+++ b/agents-api/agents_api/activities/task_steps/for_each_step.py
@@ -6,12 +6,10 @@
StepContext,
StepOutcome,
)
-from ...common.storage_handler import auto_blob_store
from ...env import testing
from .base_evaluate import base_evaluate
-@auto_blob_store(deep=True)
@beartype
async def for_each_step(context: StepContext) -> StepOutcome:
try:
diff --git a/agents-api/agents_api/activities/task_steps/get_value_step.py b/agents-api/agents_api/activities/task_steps/get_value_step.py
index ca38bc4fe..feeb71bbf 100644
--- a/agents-api/agents_api/activities/task_steps/get_value_step.py
+++ b/agents-api/agents_api/activities/task_steps/get_value_step.py
@@ -2,13 +2,12 @@
from temporalio import activity
from ...common.protocol.tasks import StepContext, StepOutcome
-from ...common.storage_handler import auto_blob_store
from ...env import testing
-
# TODO: We should use this step to query the parent workflow and get the value from the workflow context
# SCRUM-1
-@auto_blob_store(deep=True)
+
+
@beartype
async def get_value_step(
context: StepContext,
diff --git a/agents-api/agents_api/activities/task_steps/if_else_step.py b/agents-api/agents_api/activities/task_steps/if_else_step.py
index cf3764199..ec4368640 100644
--- a/agents-api/agents_api/activities/task_steps/if_else_step.py
+++ b/agents-api/agents_api/activities/task_steps/if_else_step.py
@@ -6,12 +6,10 @@
StepContext,
StepOutcome,
)
-from ...common.storage_handler import auto_blob_store
from ...env import testing
from .base_evaluate import base_evaluate
-@auto_blob_store(deep=True)
@beartype
async def if_else_step(context: StepContext) -> StepOutcome:
# NOTE: This activity is only for logging, so we just evaluate the expression
diff --git a/agents-api/agents_api/activities/task_steps/log_step.py b/agents-api/agents_api/activities/task_steps/log_step.py
index 28fea2dae..f54018683 100644
--- a/agents-api/agents_api/activities/task_steps/log_step.py
+++ b/agents-api/agents_api/activities/task_steps/log_step.py
@@ -6,12 +6,10 @@
StepContext,
StepOutcome,
)
-from ...common.storage_handler import auto_blob_store
from ...common.utils.template import render_template
from ...env import testing
-@auto_blob_store(deep=True)
@beartype
async def log_step(context: StepContext) -> StepOutcome:
# NOTE: This activity is only for logging, so we just evaluate the expression
diff --git a/agents-api/agents_api/activities/task_steps/map_reduce_step.py b/agents-api/agents_api/activities/task_steps/map_reduce_step.py
index 872988bb4..c39bace20 100644
--- a/agents-api/agents_api/activities/task_steps/map_reduce_step.py
+++ b/agents-api/agents_api/activities/task_steps/map_reduce_step.py
@@ -8,12 +8,10 @@
StepContext,
StepOutcome,
)
-from ...common.storage_handler import auto_blob_store
from ...env import testing
from .base_evaluate import base_evaluate
-@auto_blob_store(deep=True)
@beartype
async def map_reduce_step(context: StepContext) -> StepOutcome:
try:
diff --git a/agents-api/agents_api/activities/task_steps/prompt_step.py b/agents-api/agents_api/activities/task_steps/prompt_step.py
index cf8b169d5..47560cadd 100644
--- a/agents-api/agents_api/activities/task_steps/prompt_step.py
+++ b/agents-api/agents_api/activities/task_steps/prompt_step.py
@@ -8,7 +8,6 @@
litellm, # We don't directly import `acompletion` so we can mock it
)
from ...common.protocol.tasks import ExecutionInput, StepContext, StepOutcome
-from ...common.storage_handler import auto_blob_store
from ...common.utils.template import render_template
from ...env import debug
from .base_evaluate import base_evaluate
@@ -62,7 +61,6 @@ def format_tool(tool: Tool) -> dict:
@activity.defn
-@auto_blob_store(deep=True)
@beartype
async def prompt_step(context: StepContext) -> StepOutcome:
# Get context data
diff --git a/agents-api/agents_api/activities/task_steps/raise_complete_async.py b/agents-api/agents_api/activities/task_steps/raise_complete_async.py
index 640d6ae4e..bbf27c500 100644
--- a/agents-api/agents_api/activities/task_steps/raise_complete_async.py
+++ b/agents-api/agents_api/activities/task_steps/raise_complete_async.py
@@ -6,12 +6,10 @@
from ...autogen.openapi_model import CreateTransitionRequest
from ...common.protocol.tasks import StepContext
-from ...common.storage_handler import auto_blob_store
from .transition_step import original_transition_step
@activity.defn
-@auto_blob_store(deep=True)
@beartype
async def raise_complete_async(context: StepContext, output: Any) -> None:
activity_info = activity.info()
diff --git a/agents-api/agents_api/activities/task_steps/return_step.py b/agents-api/agents_api/activities/task_steps/return_step.py
index 08ac20de4..f15354536 100644
--- a/agents-api/agents_api/activities/task_steps/return_step.py
+++ b/agents-api/agents_api/activities/task_steps/return_step.py
@@ -6,12 +6,10 @@
StepContext,
StepOutcome,
)
-from ...common.storage_handler import auto_blob_store
from ...env import testing
from .base_evaluate import base_evaluate
-@auto_blob_store(deep=True)
@beartype
async def return_step(context: StepContext) -> StepOutcome:
try:
diff --git a/agents-api/agents_api/activities/task_steps/set_value_step.py b/agents-api/agents_api/activities/task_steps/set_value_step.py
index 1c97b6551..96db5d0d1 100644
--- a/agents-api/agents_api/activities/task_steps/set_value_step.py
+++ b/agents-api/agents_api/activities/task_steps/set_value_step.py
@@ -5,13 +5,12 @@
from ...activities.utils import simple_eval_dict
from ...common.protocol.tasks import StepContext, StepOutcome
-from ...common.storage_handler import auto_blob_store
from ...env import testing
-
# TODO: We should use this step to signal to the parent workflow and set the value on the workflow context
# SCRUM-2
-@auto_blob_store(deep=True)
+
+
@beartype
async def set_value_step(
context: StepContext,
diff --git a/agents-api/agents_api/activities/task_steps/switch_step.py b/agents-api/agents_api/activities/task_steps/switch_step.py
index 6a95e98d2..100d8020a 100644
--- a/agents-api/agents_api/activities/task_steps/switch_step.py
+++ b/agents-api/agents_api/activities/task_steps/switch_step.py
@@ -6,12 +6,10 @@
StepContext,
StepOutcome,
)
-from ...common.storage_handler import auto_blob_store
from ...env import testing
from ..utils import get_evaluator
-@auto_blob_store(deep=True)
@beartype
async def switch_step(context: StepContext) -> StepOutcome:
try:
diff --git a/agents-api/agents_api/activities/task_steps/tool_call_step.py b/agents-api/agents_api/activities/task_steps/tool_call_step.py
index 5725a75d1..a2d7fd7c2 100644
--- a/agents-api/agents_api/activities/task_steps/tool_call_step.py
+++ b/agents-api/agents_api/activities/task_steps/tool_call_step.py
@@ -11,7 +11,6 @@
StepContext,
StepOutcome,
)
-from ...common.storage_handler import auto_blob_store
# FIXME: This shouldn't be here.
@@ -47,7 +46,6 @@ def construct_tool_call(
@activity.defn
-@auto_blob_store(deep=True)
@beartype
async def tool_call_step(context: StepContext) -> StepOutcome:
assert isinstance(context.current_step, ToolCallStep)
diff --git a/agents-api/agents_api/activities/task_steps/transition_step.py b/agents-api/agents_api/activities/task_steps/transition_step.py
index 44046a5e7..11c7befb5 100644
--- a/agents-api/agents_api/activities/task_steps/transition_step.py
+++ b/agents-api/agents_api/activities/task_steps/transition_step.py
@@ -8,7 +8,6 @@
from ...autogen.openapi_model import CreateTransitionRequest, Transition
from ...clients.temporal import get_workflow_handle
from ...common.protocol.tasks import ExecutionInput, StepContext
-from ...common.storage_handler import load_from_blob_store_if_remote
from ...env import (
temporal_activity_after_retry_timeout,
testing,
@@ -48,11 +47,6 @@ async def transition_step(
TaskExecutionWorkflow.set_last_error, LastErrorInput(last_error=None)
)
- # Load output from blob store if it is a remote object
- transition_info.output = await load_from_blob_store_if_remote(
- transition_info.output
- )
-
if not isinstance(context.execution_input, ExecutionInput):
raise TypeError("Expected ExecutionInput type for context.execution_input")
diff --git a/agents-api/agents_api/activities/task_steps/wait_for_input_step.py b/agents-api/agents_api/activities/task_steps/wait_for_input_step.py
index ad6eeb63e..a3cb00f67 100644
--- a/agents-api/agents_api/activities/task_steps/wait_for_input_step.py
+++ b/agents-api/agents_api/activities/task_steps/wait_for_input_step.py
@@ -3,12 +3,10 @@
from ...autogen.openapi_model import WaitForInputStep
from ...common.protocol.tasks import StepContext, StepOutcome
-from ...common.storage_handler import auto_blob_store
from ...env import testing
from .base_evaluate import base_evaluate
-@auto_blob_store(deep=True)
@beartype
async def wait_for_input_step(context: StepContext) -> StepOutcome:
try:
diff --git a/agents-api/agents_api/activities/task_steps/yield_step.py b/agents-api/agents_api/activities/task_steps/yield_step.py
index 199008703..18e5383cc 100644
--- a/agents-api/agents_api/activities/task_steps/yield_step.py
+++ b/agents-api/agents_api/activities/task_steps/yield_step.py
@@ -5,12 +5,10 @@
from ...autogen.openapi_model import TransitionTarget, YieldStep
from ...common.protocol.tasks import ExecutionInput, StepContext, StepOutcome
-from ...common.storage_handler import auto_blob_store
from ...env import testing
from .base_evaluate import base_evaluate
-@auto_blob_store(deep=True)
@beartype
async def yield_step(context: StepContext) -> StepOutcome:
try:
diff --git a/agents-api/agents_api/activities/utils.py b/agents-api/agents_api/activities/utils.py
index d9ad1840c..cedc01695 100644
--- a/agents-api/agents_api/activities/utils.py
+++ b/agents-api/agents_api/activities/utils.py
@@ -304,7 +304,6 @@ def get_handler(system: SystemDef) -> Callable:
from ..models.docs.delete_doc import delete_doc as delete_doc_query
from ..models.docs.list_docs import list_docs as list_docs_query
from ..models.session.create_session import create_session as create_session_query
- from ..models.session.delete_session import delete_session as delete_session_query
from ..models.session.get_session import get_session as get_session_query
from ..models.session.list_sessions import list_sessions as list_sessions_query
from ..models.session.update_session import update_session as update_session_query
diff --git a/agents-api/agents_api/autogen/openapi_model.py b/agents-api/agents_api/autogen/openapi_model.py
index af73e8015..d809e0a35 100644
--- a/agents-api/agents_api/autogen/openapi_model.py
+++ b/agents-api/agents_api/autogen/openapi_model.py
@@ -14,7 +14,6 @@
model_validator,
)
-from ..common.storage_handler import RemoteObject
from ..common.utils.datetime import utcnow
from .Agents import *
from .Chat import *
@@ -358,7 +357,7 @@ def validate_subworkflows(self):
class SystemDef(SystemDef):
- arguments: dict[str, Any] | None | RemoteObject = None
+ arguments: dict[str, Any] | None = None
class CreateTransitionRequest(Transition):
diff --git a/agents-api/agents_api/clients/async_s3.py b/agents-api/agents_api/clients/async_s3.py
index 0cd5235ee..b6ba76d8b 100644
--- a/agents-api/agents_api/clients/async_s3.py
+++ b/agents-api/agents_api/clients/async_s3.py
@@ -16,6 +16,7 @@
)
+@alru_cache(maxsize=1024)
async def list_buckets() -> list[str]:
session = get_session()
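
The new decorator memoizes the coroutine's result, so repeated bucket listings skip the S3 round trip. A minimal sketch, assuming the async-lru package that provides alru_cache:

    import asyncio
    from async_lru import alru_cache

    CALLS = 0

    @alru_cache(maxsize=1024)
    async def expensive() -> int:
        global CALLS
        CALLS += 1
        await asyncio.sleep(0)  # stand-in for the S3 round trip
        return 42

    async def main() -> None:
        assert await expensive() == await expensive()
        assert CALLS == 1  # the second await was served from the cache

    asyncio.run(main())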
diff --git a/agents-api/agents_api/clients/temporal.py b/agents-api/agents_api/clients/temporal.py
index da2d7f6fa..cd2178d95 100644
--- a/agents-api/agents_api/clients/temporal.py
+++ b/agents-api/agents_api/clients/temporal.py
@@ -1,3 +1,4 @@
+import asyncio
from datetime import timedelta
from uuid import UUID
@@ -12,9 +13,9 @@
from temporalio.runtime import PrometheusConfig, Runtime, TelemetryConfig
from ..autogen.openapi_model import TransitionTarget
+from ..common.interceptors import offload_if_large
from ..common.protocol.tasks import ExecutionInput
from ..common.retry_policies import DEFAULT_RETRY_POLICY
-from ..common.storage_handler import store_in_blob_store_if_large
from ..env import (
temporal_client_cert,
temporal_metrics_bind_host,
@@ -96,8 +97,10 @@ async def run_task_execution_workflow(
client = client or (await get_client())
execution_id = execution_input.execution.id
execution_id_key = SearchAttributeKey.for_keyword("CustomStringField")
- execution_input.arguments = await store_in_blob_store_if_large(
- execution_input.arguments
+
+ old_args = execution_input.arguments
+ execution_input.arguments = await asyncio.gather(
+ *[offload_if_large(arg) for arg in old_args]
)
return await client.start_workflow(
diff --git a/agents-api/agents_api/common/interceptors.py b/agents-api/agents_api/common/interceptors.py
index 40600a818..bfd64c374 100644
--- a/agents-api/agents_api/common/interceptors.py
+++ b/agents-api/agents_api/common/interceptors.py
@@ -4,8 +4,12 @@
certain types of errors that are known to be non-retryable.
"""
-from typing import Optional, Type
+import asyncio
+import sys
+from functools import wraps
+from typing import Any, Awaitable, Callable, Optional, Sequence, Type
+from temporalio import workflow
from temporalio.activity import _CompleteAsyncError as CompleteAsyncError
from temporalio.exceptions import ApplicationError, FailureError, TemporalError
from temporalio.service import RPCError
@@ -23,7 +27,97 @@
ReadOnlyContextError,
)
-from .exceptions.tasks import is_retryable_error
+with workflow.unsafe.imports_passed_through():
+ from ..env import blob_store_cutoff_kb, use_blob_store_for_temporal
+ from .exceptions.tasks import is_retryable_error
+ from .protocol.remote import RemoteObject
+
+# Common exceptions that should be re-raised without modification
+PASSTHROUGH_EXCEPTIONS = (
+ ContinueAsNewError,
+ ReadOnlyContextError,
+ NondeterminismError,
+ RPCError,
+ CompleteAsyncError,
+ TemporalError,
+ FailureError,
+ ApplicationError,
+)
+
+
+def is_too_large(result: Any) -> bool:
+ return sys.getsizeof(result) > blob_store_cutoff_kb * 1024
+
+
+async def load_if_remote[T](arg: T | RemoteObject[T]) -> T:
+ if use_blob_store_for_temporal and isinstance(arg, RemoteObject):
+ return await arg.load()
+
+ return arg
+
+
+async def offload_if_large[T](result: T) -> T:
+ if use_blob_store_for_temporal and is_too_large(result):
+ return await RemoteObject.from_value(result)
+
+ return result
+
+
+def offload_to_blob_store[S, T](
+ func: Callable[[S, ExecuteActivityInput | ExecuteWorkflowInput], Awaitable[T]],
+) -> Callable[
+ [S, ExecuteActivityInput | ExecuteWorkflowInput], Awaitable[T | RemoteObject[T]]
+]:
+ @wraps(func)
+ async def wrapper(
+ self,
+ input: ExecuteActivityInput | ExecuteWorkflowInput,
+ ) -> T | RemoteObject[T]:
+ # Load all remote arguments from the blob store
+ args: Sequence[Any] = input.args
+
+ if use_blob_store_for_temporal:
+ input.args = await asyncio.gather(*[load_if_remote(arg) for arg in args])
+
+ # Execute the function
+ result = await func(self, input)
+
+ # Save the result to the blob store if necessary
+ return await offload_if_large(result)
+
+ return wrapper
+
+
+async def handle_execution_with_errors[I, T](
+ execution_fn: Callable[[I], Awaitable[T]],
+ input: I,
+) -> T:
+ """
+ Common error handling logic for both activities and workflows.
+
+ Args:
+ execution_fn: Async function to execute with error handling
+ input: Input to the execution function
+
+ Returns:
+ The result of the execution function
+
+ Raises:
+ ApplicationError: For non-retryable errors
+ Any other exception: For retryable errors
+ """
+ try:
+ return await execution_fn(input)
+ except PASSTHROUGH_EXCEPTIONS:
+ raise
+ except BaseException as e:
+ if not is_retryable_error(e):
+ raise ApplicationError(
+ str(e),
+ type=type(e).__name__,
+ non_retryable=True,
+ )
+ raise
class CustomActivityInterceptor(ActivityInboundInterceptor):
@@ -35,95 +129,45 @@ class CustomActivityInterceptor(ActivityInboundInterceptor):
as non-retryable errors.
"""
- async def execute_activity(self, input: ExecuteActivityInput):
+ @offload_to_blob_store
+ async def execute_activity(self, input: ExecuteActivityInput) -> Any:
"""
- 🎠 The Activity Whisperer: Handles activity execution with style and grace
-
- This is like a safety net for your activities - catching errors and deciding
- their fate with the wisdom of a fortune cookie.
+ Handles activity execution by intercepting errors and determining their retry behavior.
"""
- try:
- return await super().execute_activity(input)
- except (
- ContinueAsNewError, # When you need a fresh start
- ReadOnlyContextError, # When someone tries to write in a museum
- NondeterminismError, # When chaos theory kicks in
- RPCError, # When computers can't talk to each other
- CompleteAsyncError, # When async goes wrong
- TemporalError, # When time itself rebels
- FailureError, # When failure is not an option, but happens anyway
- ApplicationError, # When the app says "nope"
- ):
- raise
- except BaseException as e:
- if not is_retryable_error(e):
- # If it's not retryable, we wrap it in a nice bow (ApplicationError)
- # and mark it as non-retryable to prevent further attempts
- raise ApplicationError(
- str(e),
- type=type(e).__name__,
- non_retryable=True,
- )
- # For retryable errors, we'll let Temporal retry with backoff
- # Default retry policy ensures at least 2 retries
- raise
+ return await handle_execution_with_errors(
+ super().execute_activity,
+ input,
+ )
class CustomWorkflowInterceptor(WorkflowInboundInterceptor):
"""
- 🎪 The Workflow Circus Ringmaster
+ Custom interceptor for Temporal workflows.
- This interceptor is like a circus ringmaster - keeping all the workflow acts
- running smoothly and catching any lions (errors) that escape their cages.
+ Handles workflow execution errors and determines their retry behavior.
"""
- async def execute_workflow(self, input: ExecuteWorkflowInput):
+ @offload_to_blob_store
+ async def execute_workflow(self, input: ExecuteWorkflowInput) -> Any:
"""
- 🎪 The Main Event: Workflow Execution Extravaganza!
-
- Watch as we gracefully handle errors like a trapeze artist catching their partner!
+ Executes workflows and handles error cases appropriately.
"""
- try:
- return await super().execute_workflow(input)
- except (
- ContinueAsNewError, # The show must go on!
- ReadOnlyContextError, # No touching, please!
- NondeterminismError, # When butterflies cause hurricanes
- RPCError, # Lost in translation
- CompleteAsyncError, # Async said "bye" too soon
- TemporalError, # Time is relative, errors are absolute
- FailureError, # Task failed successfully
- ApplicationError, # App.exe has stopped working
- ):
- raise
- except BaseException as e:
- if not is_retryable_error(e):
- # Pack the error in a nice box with a "do not retry" sticker
- raise ApplicationError(
- str(e),
- type=type(e).__name__,
- non_retryable=True,
- )
- # Let it retry - everyone deserves a second (or third) chance!
- raise
+ return await handle_execution_with_errors(
+ super().execute_workflow,
+ input,
+ )
class CustomInterceptor(Interceptor):
"""
- 🎠 The Grand Interceptor: Master of Ceremonies
-
- This is like the backstage manager of a theater - making sure both the
- activity actors and workflow directors have their interceptor costumes on.
+ Main interceptor class that provides both activity and workflow interceptors.
"""
def intercept_activity(
self, next: ActivityInboundInterceptor
) -> ActivityInboundInterceptor:
"""
- 🎬 Activity Interceptor Factory: Where the magic begins!
-
- Creating custom activity interceptors faster than a caffeinated barista
- makes espresso shots.
+ Creates and returns a custom activity interceptor.
"""
return CustomActivityInterceptor(super().intercept_activity(next))
@@ -131,9 +175,6 @@ def workflow_interceptor_class(
self, input: WorkflowInterceptorClassInput
) -> Optional[Type[WorkflowInboundInterceptor]]:
"""
- 🎪 Workflow Interceptor Class Selector
-
- Like a matchmaker for workflows and their interceptors - a match made in
- exception handling heaven!
+ Returns the custom workflow interceptor class.
"""
return CustomWorkflowInterceptor
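
The diff does not show where CustomInterceptor is registered; with the standard temporalio Worker API it would be wired in roughly like this (address and queue name are hypothetical):

    from temporalio import activity
    from temporalio.client import Client
    from temporalio.worker import Worker

    @activity.defn
    async def noop() -> None:  # placeholder so the worker has something to run
        return None

    async def start_worker() -> None:
        client = await Client.connect("localhost:7233")  # assumed address
        worker = Worker(
            client,
            task_queue="agents-api-task-queue",  # hypothetical queue name
            activities=[noop],
            interceptors=[CustomInterceptor()],  # activates both interceptors
        )
        await worker.run()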
diff --git a/agents-api/agents_api/common/protocol/remote.py b/agents-api/agents_api/common/protocol/remote.py
index ce2a2a63a..86add1949 100644
--- a/agents-api/agents_api/common/protocol/remote.py
+++ b/agents-api/agents_api/common/protocol/remote.py
@@ -1,91 +1,34 @@
from dataclasses import dataclass
-from typing import Any
+from typing import Generic, Self, Type, TypeVar, cast
-from temporalio import activity, workflow
+from temporalio import workflow
with workflow.unsafe.imports_passed_through():
- from pydantic import BaseModel
-
+ from ...clients import async_s3
from ...env import blob_store_bucket
+ from ...worker.codec import deserialize, serialize
-@dataclass
-class RemoteObject:
- key: str
- bucket: str = blob_store_bucket
-
-
-class BaseRemoteModel(BaseModel):
- _remote_cache: dict[str, Any]
-
- class Config:
- arbitrary_types_allowed = True
-
- def __init__(self, **data: Any):
- super().__init__(**data)
- self._remote_cache = {}
-
- async def load_item(self, item: Any | RemoteObject) -> Any:
- if not activity.in_activity():
- return item
-
- from ..storage_handler import load_from_blob_store_if_remote
-
- return await load_from_blob_store_if_remote(item)
+T = TypeVar("T")
- async def save_item(self, item: Any) -> Any:
- if not activity.in_activity():
- return item
- from ..storage_handler import store_in_blob_store_if_large
-
- return await store_in_blob_store_if_large(item)
-
- async def get_attribute(self, name: str) -> Any:
- if name.startswith("_"):
- return super().__getattribute__(name)
-
- try:
- value = super().__getattribute__(name)
- except AttributeError:
- raise AttributeError(
- f"'{type(self).__name__}' object has no attribute '{name}'"
- )
-
- if isinstance(value, RemoteObject):
- cache = super().__getattribute__("_remote_cache")
- if name in cache:
- return cache[name]
-
- loaded_data = await self.load_item(value)
- cache[name] = loaded_data
- return loaded_data
-
- return value
-
- async def set_attribute(self, name: str, value: Any) -> None:
- if name.startswith("_"):
- super().__setattr__(name, value)
- return
+@dataclass
+class RemoteObject(Generic[T]):
+ _type: Type[T]
+ key: str
+ bucket: str
- stored_value = await self.save_item(value)
- super().__setattr__(name, stored_value)
+ @classmethod
+ async def from_value(cls, x: T) -> Self:
+ await async_s3.setup()
- if isinstance(stored_value, RemoteObject):
- cache = self.__dict__.get("_remote_cache", {})
- cache.pop(name, None)
+ serialized = serialize(x)
- async def load_all(self) -> None:
- for name in self.model_fields_set:
- await self.get_attribute(name)
+ key = await async_s3.add_object_with_hash(serialized)
+ return RemoteObject[T](key=key, bucket=blob_store_bucket, _type=type(x))
- async def unload_attribute(self, name: str) -> None:
- if name in self._remote_cache:
- data = self._remote_cache.pop(name)
- remote_obj = await self.save_item(data)
- super().__setattr__(name, remote_obj)
+ async def load(self) -> T:
+ await async_s3.setup()
- async def unload_all(self) -> "BaseRemoteModel":
- for name in list(self._remote_cache.keys()):
- await self.unload_attribute(name)
- return self
+ fetched = await async_s3.get_object(self.key)
+ return cast(self._type, deserialize(fetched))
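
The slimmed-down RemoteObject is now an explicit, typed handle rather than a transparent attribute cache. The intended round trip, assuming an S3 client configured via async_s3.setup as above:

    async def round_trip(payload: dict) -> dict:
        # Offload: serialize and upload, keeping only a (type, key, bucket) handle
        ref = await RemoteObject.from_value(payload)
        # `ref` is what crosses the Temporal data plane instead of the payload
        return await ref.load()  # fetch and deserialize on the consuming side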
diff --git a/agents-api/agents_api/common/protocol/sessions.py b/agents-api/agents_api/common/protocol/sessions.py
index 121afe702..3b04178e1 100644
--- a/agents-api/agents_api/common/protocol/sessions.py
+++ b/agents-api/agents_api/common/protocol/sessions.py
@@ -103,7 +103,7 @@ def get_active_tools(self) -> list[Tool]:
return active_toolset.tools
- def get_chat_environment(self) -> dict[str, dict | list[dict]]:
+ def get_chat_environment(self) -> dict[str, dict | list[dict] | None]:
"""
Get the chat environment from the session data.
"""
diff --git a/agents-api/agents_api/common/protocol/tasks.py b/agents-api/agents_api/common/protocol/tasks.py
index 430a62f36..f3bb81d07 100644
--- a/agents-api/agents_api/common/protocol/tasks.py
+++ b/agents-api/agents_api/common/protocol/tasks.py
@@ -1,9 +1,8 @@
-import asyncio
from typing import Annotated, Any, Literal
from uuid import UUID
from beartype import beartype
-from temporalio import activity, workflow
+from temporalio import workflow
from temporalio.exceptions import ApplicationError
with workflow.unsafe.imports_passed_through():
@@ -33,8 +32,6 @@
Workflow,
WorkflowStep,
)
- from ...common.storage_handler import load_from_blob_store_if_remote
- from .remote import BaseRemoteModel, RemoteObject
# TODO: Maybe we should use a library for this
@@ -146,16 +143,16 @@ class ExecutionInput(BaseModel):
task: TaskSpecDef
agent: Agent
agent_tools: list[Tool | CreateToolRequest]
- arguments: dict[str, Any] | RemoteObject
+ arguments: dict[str, Any]
# Not used at the moment
user: User | None = None
session: Session | None = None
-class StepContext(BaseRemoteModel):
- execution_input: ExecutionInput | RemoteObject
- inputs: list[Any] | RemoteObject
+class StepContext(BaseModel):
+ execution_input: ExecutionInput
+ inputs: list[Any]
cursor: TransitionTarget
@computed_field
@@ -242,17 +239,9 @@ def model_dump(self, *args, **kwargs) -> dict[str, Any]:
return dump | execution_input
- async def prepare_for_step(
- self, *args, include_remote: bool = True, **kwargs
- ) -> dict[str, Any]:
+ async def prepare_for_step(self, *args, **kwargs) -> dict[str, Any]:
current_input = self.current_input
inputs = self.inputs
- if activity.in_activity() and include_remote:
- await self.load_all()
- inputs = await asyncio.gather(
- *[load_from_blob_store_if_remote(input) for input in inputs]
- )
- current_input = await load_from_blob_store_if_remote(current_input)
# Merge execution inputs into the dump dict
dump = self.model_dump(*args, **kwargs)
diff --git a/agents-api/agents_api/common/storage_handler.py b/agents-api/agents_api/common/storage_handler.py
deleted file mode 100644
index 42beef270..000000000
--- a/agents-api/agents_api/common/storage_handler.py
+++ /dev/null
@@ -1,226 +0,0 @@
-import asyncio
-import sys
-from datetime import timedelta
-from functools import wraps
-from typing import Any, Callable
-
-from pydantic import BaseModel
-from temporalio import workflow
-
-from ..activities.sync_items_remote import load_inputs_remote
-from ..clients import async_s3
-from ..common.protocol.remote import BaseRemoteModel, RemoteObject
-from ..common.retry_policies import DEFAULT_RETRY_POLICY
-from ..env import (
- blob_store_cutoff_kb,
- debug,
- temporal_heartbeat_timeout,
- temporal_schedule_to_close_timeout,
- testing,
- use_blob_store_for_temporal,
-)
-from ..worker.codec import deserialize, serialize
-
-
-async def store_in_blob_store_if_large(x: Any) -> RemoteObject | Any:
- if not use_blob_store_for_temporal:
- return x
-
- await async_s3.setup()
-
- serialized = serialize(x)
- data_size = sys.getsizeof(serialized)
-
- if data_size > blob_store_cutoff_kb * 1024:
- key = await async_s3.add_object_with_hash(serialized)
- return RemoteObject(key=key)
-
- return x
-
-
-async def load_from_blob_store_if_remote(x: Any | RemoteObject) -> Any:
- if not use_blob_store_for_temporal:
- return x
-
- await async_s3.setup()
-
- if isinstance(x, RemoteObject):
- fetched = await async_s3.get_object(x.key)
- return deserialize(fetched)
-
- elif isinstance(x, dict) and set(x.keys()) == {"bucket", "key"}:
- fetched = await async_s3.get_object(x["key"])
- return deserialize(fetched)
-
- return x
-
-
-# Decorator that automatically does two things:
-# 1. store in blob store if the output of a function is large
-# 2. load from blob store if the input is a RemoteObject
-
-
-def auto_blob_store(f: Callable | None = None, *, deep: bool = False) -> Callable:
- def auto_blob_store_decorator(f: Callable) -> Callable:
- async def load_args(
- args: list | tuple, kwargs: dict[str, Any]
- ) -> tuple[list | tuple, dict[str, Any]]:
- new_args = await asyncio.gather(
- *[load_from_blob_store_if_remote(arg) for arg in args]
- )
- kwargs_keys, kwargs_values = list(zip(*kwargs.items())) or ([], [])
- new_kwargs = await asyncio.gather(
- *[load_from_blob_store_if_remote(v) for v in kwargs_values]
- )
- new_kwargs = dict(zip(kwargs_keys, new_kwargs))
-
- if deep:
- args = new_args
- kwargs = new_kwargs
-
- new_args = []
-
- for arg in args:
- if isinstance(arg, list):
- new_args.append(
- await asyncio.gather(
- *[load_from_blob_store_if_remote(item) for item in arg]
- )
- )
- elif isinstance(arg, dict):
- keys, values = list(zip(*arg.items())) or ([], [])
- values = await asyncio.gather(
- *[load_from_blob_store_if_remote(value) for value in values]
- )
- new_args.append(dict(zip(keys, values)))
-
- elif isinstance(arg, BaseRemoteModel):
- new_args.append(await arg.unload_all())
-
- elif isinstance(arg, BaseModel):
- for field in arg.model_fields.keys():
- if isinstance(getattr(arg, field), RemoteObject):
- setattr(
- arg,
- field,
- await load_from_blob_store_if_remote(
- getattr(arg, field)
- ),
- )
- elif isinstance(getattr(arg, field), list):
- setattr(
- arg,
- field,
- await asyncio.gather(
- *[
- load_from_blob_store_if_remote(item)
- for item in getattr(arg, field)
- ]
- ),
- )
- elif isinstance(getattr(arg, field), BaseRemoteModel):
- setattr(
- arg,
- field,
- await getattr(arg, field).unload_all(),
- )
-
- new_args.append(arg)
-
- else:
- new_args.append(arg)
-
- new_kwargs = {}
-
- for k, v in kwargs.items():
- if isinstance(v, list):
- new_kwargs[k] = await asyncio.gather(
- *[load_from_blob_store_if_remote(item) for item in v]
- )
-
- elif isinstance(v, dict):
- keys, values = list(zip(*v.items())) or ([], [])
- values = await asyncio.gather(
- *[load_from_blob_store_if_remote(value) for value in values]
- )
- new_kwargs[k] = dict(zip(keys, values))
-
- elif isinstance(v, BaseRemoteModel):
- new_kwargs[k] = await v.unload_all()
-
- elif isinstance(v, BaseModel):
- for field in v.model_fields.keys():
- if isinstance(getattr(v, field), RemoteObject):
- setattr(
- v,
- field,
- await load_from_blob_store_if_remote(
- getattr(v, field)
- ),
- )
- elif isinstance(getattr(v, field), list):
- setattr(
- v,
- field,
- await asyncio.gather(
- *[
- load_from_blob_store_if_remote(item)
- for item in getattr(v, field)
- ]
- ),
- )
- elif isinstance(getattr(v, field), BaseRemoteModel):
- setattr(
- v,
- field,
- await getattr(v, field).unload_all(),
- )
- new_kwargs[k] = v
-
- else:
- new_kwargs[k] = v
-
- return new_args, new_kwargs
-
- async def unload_return_value(x: Any | BaseRemoteModel) -> Any:
- if isinstance(x, BaseRemoteModel):
- await x.unload_all()
-
- return await store_in_blob_store_if_large(x)
-
- @wraps(f)
- async def async_wrapper(*args, **kwargs) -> Any:
- new_args, new_kwargs = await load_args(args, kwargs)
- output = await f(*new_args, **new_kwargs)
-
- return await unload_return_value(output)
-
- return async_wrapper if use_blob_store_for_temporal else f
-
- return auto_blob_store_decorator(f) if f else auto_blob_store_decorator
-
-
-def auto_blob_store_workflow(f: Callable) -> Callable:
- @wraps(f)
- async def wrapper(*args, **kwargs) -> Any:
- keys = kwargs.keys()
- values = [kwargs[k] for k in keys]
-
- loaded = await workflow.execute_activity(
- load_inputs_remote,
- args=[[*args, *values]],
- schedule_to_close_timeout=timedelta(
- seconds=60 if debug or testing else temporal_schedule_to_close_timeout
- ),
- retry_policy=DEFAULT_RETRY_POLICY,
- heartbeat_timeout=timedelta(seconds=temporal_heartbeat_timeout),
- )
-
- loaded_args = loaded[: len(args)]
- loaded_kwargs = dict(zip(keys, loaded[len(args) :]))
-
- result = await f(*loaded_args, **loaded_kwargs)
-
- return result
-
- return wrapper if use_blob_store_for_temporal else f
diff --git a/agents-api/agents_api/env.py b/agents-api/agents_api/env.py
index 8b9fd4dae..7baa24653 100644
--- a/agents-api/agents_api/env.py
+++ b/agents-api/agents_api/env.py
@@ -36,8 +36,8 @@
# Blob Store
# ----------
-use_blob_store_for_temporal: bool = (
- env.bool("USE_BLOB_STORE_FOR_TEMPORAL", default=False) if not testing else False
+use_blob_store_for_temporal: bool = testing or env.bool(
+ "USE_BLOB_STORE_FOR_TEMPORAL", default=False
)
blob_store_bucket: str = env.str("BLOB_STORE_BUCKET", default="agents-api")
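Note that this is a behavioral change, not just a cleanup: the old expression pinned the flag to False while testing, whereas `testing or env.bool(...)` pins it to True, so the blob-store path now always runs under tests. A tiny sketch of the difference (values are hypothetical):

    testing, env_flag = True, False

    old = env_flag if not testing else False  # False: blob store off in tests
    new = testing or env_flag                 # True:  blob store on in tests
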
diff --git a/agents-api/agents_api/routers/healthz/check_health.py b/agents-api/agents_api/routers/healthz/check_health.py
new file mode 100644
index 000000000..5a466ba39
--- /dev/null
+++ b/agents-api/agents_api/routers/healthz/check_health.py
@@ -0,0 +1,19 @@
+import logging
+from uuid import UUID
+
+from ...models.agent.list_agents import list_agents as list_agents_query
+from .router import router
+
+
+@router.get("/healthz", tags=["healthz"])
+async def check_health() -> dict:
+ try:
+ # Check if the database is reachable
+ list_agents_query(
+ developer_id=UUID("00000000-0000-0000-0000-000000000000"),
+ )
+ except Exception as e:
+ logging.error("An error occurred while checking health: %s", str(e))
+ return {"status": "error", "message": "An internal error has occurred."}
+
+ return {"status": "ok"}
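A quick way to exercise the new endpoint is FastAPI's test client; a sketch, assuming the application object lives in agents_api.web (adjust the import to wherever the app is constructed):

    from fastapi.testclient import TestClient

    from agents_api.web import app  # assumption: app location

    client = TestClient(app)
    response = client.get("/healthz")

    assert response.status_code == 200
    # {"status": "ok"} when the probe query succeeds, otherwise
    # {"status": "error", "message": "..."} with details logged server-side.
    print(response.json())
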
diff --git a/agents-api/agents_api/workflows/task_execution/__init__.py b/agents-api/agents_api/workflows/task_execution/__init__.py
index 6ea9239df..a76c13975 100644
--- a/agents-api/agents_api/workflows/task_execution/__init__.py
+++ b/agents-api/agents_api/workflows/task_execution/__init__.py
@@ -15,7 +15,7 @@
from ...activities.excecute_api_call import execute_api_call
from ...activities.execute_integration import execute_integration
from ...activities.execute_system import execute_system
- from ...activities.sync_items_remote import load_inputs_remote, save_inputs_remote
+ from ...activities.sync_items_remote import save_inputs_remote
from ...autogen.openapi_model import (
ApiCallDef,
BaseIntegrationDef,
@@ -214,16 +214,6 @@ async def run(
# 3. Then, based on the outcome and step type, decide what to do next
workflow.logger.info(f"Processing outcome for step {context.cursor.step}")
- [outcome] = await workflow.execute_activity(
- load_inputs_remote,
- args=[[outcome]],
- schedule_to_close_timeout=timedelta(
- seconds=60 if debug or testing else temporal_schedule_to_close_timeout
- ),
- retry_policy=DEFAULT_RETRY_POLICY,
- heartbeat_timeout=timedelta(seconds=temporal_heartbeat_timeout),
- )
-
# Init state
state = None
diff --git a/agents-api/agents_api/workflows/task_execution/helpers.py b/agents-api/agents_api/workflows/task_execution/helpers.py
index 1d68322f5..b2df640a7 100644
--- a/agents-api/agents_api/workflows/task_execution/helpers.py
+++ b/agents-api/agents_api/workflows/task_execution/helpers.py
@@ -19,11 +19,9 @@
ExecutionInput,
StepContext,
)
- from ...common.storage_handler import auto_blob_store_workflow
from ...env import task_max_parallelism, temporal_heartbeat_timeout
-@auto_blob_store_workflow
async def continue_as_child(
execution_input: ExecutionInput,
start: TransitionTarget,
@@ -50,7 +48,6 @@ async def continue_as_child(
)
-@auto_blob_store_workflow
async def execute_switch_branch(
*,
context: StepContext,
@@ -84,7 +81,6 @@ async def execute_switch_branch(
)
-@auto_blob_store_workflow
async def execute_if_else_branch(
*,
context: StepContext,
@@ -123,7 +119,6 @@ async def execute_if_else_branch(
)
-@auto_blob_store_workflow
async def execute_foreach_step(
*,
context: StepContext,
@@ -161,7 +156,6 @@ async def execute_foreach_step(
return results
-@auto_blob_store_workflow
async def execute_map_reduce_step(
*,
context: StepContext,
@@ -209,7 +203,6 @@ async def execute_map_reduce_step(
return result
-@auto_blob_store_workflow
async def execute_map_reduce_step_parallel(
*,
context: StepContext,
From ca5f4e24a2cedcab3d3bad10b70996b3edd54a27 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Thu, 19 Dec 2024 19:50:21 +0530
Subject: [PATCH 084/274] fix(agents-api): Minor fixes
Signed-off-by: Diwank Singh Tomer
---
agents-api/agents_api/activities/utils.py | 1 +
agents-api/agents_api/queries/sessions/create_session.py | 2 --
agents-api/tests/fixtures.py | 1 -
agents-api/tests/test_session_queries.py | 1 -
4 files changed, 1 insertion(+), 4 deletions(-)
diff --git a/agents-api/agents_api/activities/utils.py b/agents-api/agents_api/activities/utils.py
index cedc01695..d9ad1840c 100644
--- a/agents-api/agents_api/activities/utils.py
+++ b/agents-api/agents_api/activities/utils.py
@@ -304,6 +304,7 @@ def get_handler(system: SystemDef) -> Callable:
from ..models.docs.delete_doc import delete_doc as delete_doc_query
from ..models.docs.list_docs import list_docs as list_docs_query
from ..models.session.create_session import create_session as create_session_query
+ from ..models.session.delete_session import delete_session as delete_session_query
from ..models.session.get_session import get_session as get_session_query
from ..models.session.list_sessions import list_sessions as list_sessions_query
from ..models.session.update_session import update_session as update_session_query
diff --git a/agents-api/agents_api/queries/sessions/create_session.py b/agents-api/agents_api/queries/sessions/create_session.py
index 63fbdc940..058462cf8 100644
--- a/agents-api/agents_api/queries/sessions/create_session.py
+++ b/agents-api/agents_api/queries/sessions/create_session.py
@@ -8,10 +8,8 @@
from ...autogen.openapi_model import (
CreateSessionRequest,
- ResourceCreatedResponse,
Session,
)
-from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 49c2e7094..e1d286c9c 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,6 +1,5 @@
import random
import string
-import time
from uuid import UUID
from fastapi.testclient import TestClient
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 5f2190e2b..7926a391f 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -10,7 +10,6 @@
CreateOrUpdateSessionRequest,
CreateSessionRequest,
PatchSessionRequest,
- ResourceCreatedResponse,
ResourceDeletedResponse,
ResourceUpdatedResponse,
Session,
From e5394fcf4ca5415778a69b99cec9d2de760b17b7 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Thu, 19 Dec 2024 17:28:51 +0300
Subject: [PATCH 085/274] feat(agents-api): add entries tests
---
.../queries/entries/create_entries.py | 107 ++++----
.../queries/entries/delete_entries.py | 90 +++---
.../agents_api/queries/entries/get_history.py | 103 ++++---
.../queries/entries/list_entries.py | 55 ++--
agents-api/agents_api/queries/utils.py | 6 +-
agents-api/tests/test_entry_queries.py | 257 +++++++++---------
memory-store/migrations/000015_entries.up.sql | 11 +-
7 files changed, 350 insertions(+), 279 deletions(-)
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index 33dcda984..8d3bdb1eb 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -2,13 +2,17 @@
from uuid import UUID
from beartype import beartype
+from fastapi import HTTPException
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateEntryRequest, Entry, Relation
from ...common.utils.datetime import utcnow
from ...common.utils.messages import content_to_json
from ...metrics.counters import increase_counter
-from ..utils import pg_query, wrap_in_class
+from ..utils import partialclass, pg_query, wrap_in_class, rewrap_exceptions
+import asyncpg
+from litellm.utils import _select_tokenizer as select_tokenizer
+
# Query for checking if the session exists
session_exists_query = """
@@ -22,7 +26,7 @@
entry_query = """
INSERT INTO entries (
session_id,
- entry_id,
+ entry_id,
source,
role,
event_type,
@@ -32,9 +36,10 @@
tool_calls,
model,
token_count,
+ tokenizer,
created_at,
timestamp
-) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
+) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
RETURNING *;
"""
@@ -50,34 +55,34 @@
"""
-# @rewrap_exceptions(
-# {
-# asyncpg.ForeignKeyViolationError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="Session not found",
-# ),
-# asyncpg.UniqueViolationError: partialclass(
-# HTTPException,
-# status_code=409,
-# detail="Entry already exists",
-# ),
-# asyncpg.NotNullViolationError: partialclass(
-# HTTPException,
-# status_code=400,
-# detail="Not null violation",
-# ),
-# asyncpg.NoDataFoundError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="Session not found",
-# ),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="Entry already exists",
+ ),
+ asyncpg.NotNullViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Not null violation",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ }
+)
@wrap_in_class(
Entry,
transform=lambda d: {
- "id": UUID(d.pop("entry_id")),
+ "id": d.pop("entry_id"),
**d,
},
)
@@ -89,7 +94,7 @@ async def create_entries(
developer_id: UUID,
session_id: UUID,
data: list[CreateEntryRequest],
-) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
+) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
# Convert the data to a list of dictionaries
data_dicts = [item.model_dump(mode="json") for item in data]
@@ -100,7 +105,7 @@ async def create_entries(
params.append(
[
session_id, # $1
- item.pop("id", None) or str(uuid7()), # $2
+ item.pop("id", None) or uuid7(), # $2
item.get("source"), # $3
item.get("role"), # $4
item.get("event_type") or "message.create", # $5
@@ -110,8 +115,9 @@ async def create_entries(
content_to_json(item.get("tool_calls") or {}), # $9
item.get("model"), # $10
item.get("token_count"), # $11
- item.get("created_at") or utcnow(), # $12
- utcnow(), # $13
+ select_tokenizer(item.get("model"))["type"], # $12
+ item.get("created_at") or utcnow(), # $13
+ utcnow().timestamp(), # $14
]
)
@@ -119,7 +125,7 @@ async def create_entries(
(
session_exists_query,
[session_id, developer_id],
- "fetch",
+ "fetchrow",
),
(
entry_query,
@@ -129,20 +135,25 @@ async def create_entries(
]
-# @rewrap_exceptions(
-# {
-# asyncpg.ForeignKeyViolationError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="Session not found",
-# ),
-# asyncpg.UniqueViolationError: partialclass(
-# HTTPException,
-# status_code=409,
-# detail="Entry already exists",
-# ),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="Entry already exists",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ }
+)
@wrap_in_class(Relation)
@increase_counter("add_entry_relations")
@pg_query
@@ -152,7 +163,7 @@ async def add_entry_relations(
developer_id: UUID,
session_id: UUID,
data: list[Relation],
-) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
+) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
# Convert the data to a list of dictionaries
data_dicts = [item.model_dump(mode="json") for item in data]
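The new `$12` parameter records which tokenizer family produced `token_count`; litellm's `_select_tokenizer` returns a small dict and only its `"type"` field is persisted. A sketch of what lands in the column (the exact strings are litellm implementation details):

    from litellm.utils import _select_tokenizer as select_tokenizer

    tokenizer = select_tokenizer("gpt-4o-mini")
    # For OpenAI-style models this typically resolves to the tiktoken
    # backend, e.g. {"type": "openai_tokenizer", ...}; only "type" is stored.
    print(tokenizer["type"])
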
diff --git a/agents-api/agents_api/queries/entries/delete_entries.py b/agents-api/agents_api/queries/entries/delete_entries.py
index 628ef9011..be08eae42 100644
--- a/agents-api/agents_api/queries/entries/delete_entries.py
+++ b/agents-api/agents_api/queries/entries/delete_entries.py
@@ -1,13 +1,15 @@
from typing import Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
+from fastapi import HTTPException
from sqlglot import parse_one
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
-from ..utils import pg_query, wrap_in_class
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for deleting entries with a developer check
delete_entry_query = parse_one("""
@@ -55,20 +57,25 @@
"""
-# @rewrap_exceptions(
-# {
-# asyncpg.ForeignKeyViolationError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified session or developer does not exist.",
-# ),
-# asyncpg.UniqueViolationError: partialclass(
-# HTTPException,
-# status_code=409,
-# detail="The specified session has already been deleted.",
-# ),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified session or developer does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="The specified session has already been deleted.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ }
+)
@wrap_in_class(
ResourceDeletedResponse,
one=True,
@@ -85,29 +92,34 @@ async def delete_entries_for_session(
*,
developer_id: UUID,
session_id: UUID,
-) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
+) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
"""Delete all entries for a given session."""
return [
- (session_exists_query, [session_id, developer_id], "fetch"),
+ (session_exists_query, [session_id, developer_id], "fetchrow"),
(delete_entry_relations_query, [session_id], "fetchmany"),
(delete_entry_query, [session_id, developer_id], "fetchmany"),
]
-# @rewrap_exceptions(
-# {
-# asyncpg.ForeignKeyViolationError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified entries, session, or developer does not exist.",
-# ),
-# asyncpg.UniqueViolationError: partialclass(
-# HTTPException,
-# status_code=409,
-# detail="One or more specified entries have already been deleted.",
-# ),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified entries, session, or developer does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="One or more specified entries have already been deleted.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ }
+)
@wrap_in_class(
ResourceDeletedResponse,
transform=lambda d: {
@@ -121,10 +133,18 @@ async def delete_entries_for_session(
@beartype
async def delete_entries(
*, developer_id: UUID, session_id: UUID, entry_ids: list[UUID]
-) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
+) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
"""Delete specific entries by their IDs."""
return [
- (session_exists_query, [session_id, developer_id], "fetch"),
- (delete_entry_relations_by_ids_query, [session_id, entry_ids], "fetchmany"),
- (delete_entry_by_ids_query, [entry_ids, developer_id, session_id], "fetchmany"),
+ (
+ session_exists_query,
+ [session_id, developer_id],
+ "fetchrow",
+ ),
+ (delete_entry_relations_by_ids_query, [session_id, entry_ids], "fetch"),
+ (
+ delete_entry_by_ids_query,
+ [entry_ids, developer_id, session_id],
+ "fetch",
+ ),
]
diff --git a/agents-api/agents_api/queries/entries/get_history.py b/agents-api/agents_api/queries/entries/get_history.py
index b0b767c08..afa940cce 100644
--- a/agents-api/agents_api/queries/entries/get_history.py
+++ b/agents-api/agents_api/queries/entries/get_history.py
@@ -1,61 +1,92 @@
from uuid import UUID
+import json
+from typing import Tuple, List, Any
+import asyncpg
from beartype import beartype
+from fastapi import HTTPException
from sqlglot import parse_one
from ...autogen.openapi_model import History
-from ..utils import pg_query, wrap_in_class
+from ..utils import (
+ partialclass,
+ pg_query,
+ rewrap_exceptions,
+ wrap_in_class,
+)
+
+from ...common.utils.datetime import utcnow
-# Define the raw SQL query for getting history with a developer check
+# Define the raw SQL query for getting history with a developer check and relations
history_query = parse_one("""
+WITH entries AS (
+ SELECT
+ e.entry_id AS id,
+ e.session_id,
+ e.role,
+ e.name,
+ e.content,
+ e.source,
+ e.token_count,
+ e.created_at,
+ e.timestamp,
+ e.tool_calls,
+ e.tool_call_id,
+ e.tokenizer
+ FROM entries e
+ JOIN developers d ON d.developer_id = $3
+ WHERE e.session_id = $1
+ AND e.source = ANY($2)
+),
+relations AS (
+ SELECT
+ er.head,
+ er.relation,
+ er.tail
+ FROM entry_relations er
+ WHERE er.session_id = $1
+)
SELECT
- e.entry_id as id, -- entry_id
- e.session_id, -- session_id
- e.role, -- role
- e.name, -- name
- e.content, -- content
- e.source, -- source
- e.token_count, -- token_count
- e.created_at, -- created_at
- e.timestamp, -- timestamp
- e.tool_calls, -- tool_calls
- e.tool_call_id -- tool_call_id
-FROM entries e
-JOIN developers d ON d.developer_id = $3
-WHERE e.session_id = $1
-AND e.source = ANY($2)
-ORDER BY e.created_at;
+ (SELECT json_agg(e) FROM entries e) AS entries,
+ (SELECT json_agg(r) FROM relations r) AS relations,
+    $1::uuid AS session_id
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# asyncpg.ForeignKeyViolationError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="Session not found",
-# ),
-# asyncpg.UniqueViolationError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="Session not found",
-# ),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="Entry already exists",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ }
+)
@wrap_in_class(
History,
one=True,
transform=lambda d: {
- **d,
+ "entries": json.loads(d.get("entries") or "[]"),
"relations": [
{
"head": r["head"],
"relation": r["relation"],
"tail": r["tail"],
}
- for r in d.pop("relations")
+            for r in json.loads(d.get("relations") or "[]")
],
- "entries": d.pop("entries"),
+ "session_id": d.get("session_id"),
+ "created_at": utcnow(),
},
)
@pg_query
@@ -65,7 +96,7 @@ async def get_history(
developer_id: UUID,
session_id: UUID,
allowed_sources: list[str] = ["api_request", "api_response"],
-) -> tuple[str, list]:
+) -> tuple[str, list] | tuple[str, list, str]:
return (
history_query,
[session_id, allowed_sources, developer_id],
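The rewritten query collapses history into a single row of JSON-aggregated columns, so the transform above decodes defensively: `json_agg` yields NULL over an empty set, and without a JSON codec on the pool the non-NULL values arrive as text. A sketch of the row shape the transform expects (illustrative values):

    import json

    row = {"entries": None, "relations": None, "session_id": "..."}

    entries = json.loads(row.get("entries") or "[]")    # [] on NULL
    relations = json.loads(row.get("relations") or "[]")
    print(entries, relations)  # [] []
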
diff --git a/agents-api/agents_api/queries/entries/list_entries.py b/agents-api/agents_api/queries/entries/list_entries.py
index a6c355f53..89f432734 100644
--- a/agents-api/agents_api/queries/entries/list_entries.py
+++ b/agents-api/agents_api/queries/entries/list_entries.py
@@ -1,12 +1,13 @@
from typing import Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
from ...autogen.openapi_model import Entry
from ...metrics.counters import increase_counter
-from ..utils import pg_query, wrap_in_class
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Query for checking if the session exists
session_exists_query = """
@@ -34,7 +35,8 @@
e.event_type,
e.tool_call_id,
e.tool_calls,
- e.model
+ e.model,
+ e.tokenizer
FROM entries e
JOIN developers d ON d.developer_id = $5
LEFT JOIN entry_relations er ON er.head = e.entry_id AND er.session_id = e.session_id
@@ -47,30 +49,30 @@
"""
-# @rewrap_exceptions(
-# {
-# asyncpg.ForeignKeyViolationError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="Session not found",
-# ),
-# asyncpg.UniqueViolationError: partialclass(
-# HTTPException,
-# status_code=409,
-# detail="Entry already exists",
-# ),
-# asyncpg.NotNullViolationError: partialclass(
-# HTTPException,
-# status_code=400,
-# detail="Entry is required",
-# ),
-# asyncpg.NoDataFoundError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="Session not found",
-# ),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="Entry already exists",
+ ),
+ asyncpg.NotNullViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Entry is required",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Session not found",
+ ),
+ }
+)
@wrap_in_class(Entry)
@increase_counter("list_entries")
@pg_query
@@ -114,5 +116,6 @@ async def list_entries(
(
query,
entry_params,
+ "fetch",
),
]
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 0c20ca59e..bb1451678 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -175,9 +175,9 @@ async def wrapper(
all_results.append(results)
if method_name == "fetchrow" and (
- len(results) == 0 or results.get("bool") is None
+ len(results) == 0 or results.get("bool", True) is None
):
- raise asyncpg.NoDataFoundError
+ raise asyncpg.NoDataFoundError("No data found")
end = timeit and time.perf_counter()
@@ -231,7 +231,7 @@ def _return_data(rec: list[Record]):
nonlocal transform
transform = transform or (lambda x: x)
-
+
if one:
assert len(data) == 1, "Expected one result, got none"
obj: ModelT = cls(**transform(data[0]))
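The `fetchrow` guard is also subtly relaxed: with True as the default for a missing key, only an empty row or a `bool` column that is explicitly NULL raises NoDataFoundError, while ordinary rows that simply lack a `bool` column now pass through. In isolation (plain dicts standing in for asyncpg Records):

    def no_data(result) -> bool:
        # Mirrors the guard above: empty row, or a `bool` column
        # that is explicitly NULL.
        return len(result) == 0 or result.get("bool", True) is None

    assert no_data({})                        # no row matched
    assert no_data({"bool": None})            # probe column came back NULL
    assert not no_data({"entry_id": "abc"})   # normal row, no `bool` column
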
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index f5b9d8d56..703aa484f 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -3,14 +3,19 @@
It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
"""
+from uuid import UUID
from fastapi import HTTPException
from uuid_extensions import uuid7
from ward import raises, test
-from agents_api.autogen.openapi_model import CreateEntryRequest
+from agents_api.autogen.openapi_model import (
+ CreateEntryRequest,
+ Entry,
+ History,
+)
from agents_api.clients.pg import create_db_pool
-from agents_api.queries.entries import create_entries, list_entries
-from tests.fixtures import pg_dsn, test_developer # , test_session
+from agents_api.queries.entries import create_entries, list_entries, get_history, delete_entries
+from tests.fixtures import pg_dsn, test_developer, test_developer_id, test_session
MODEL = "gpt-4o-mini"
@@ -52,126 +57,126 @@ async def _(dsn=pg_dsn, developer=test_developer):
assert exc_info.raised.status_code == 404
-# @test("query: get entries")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
-# """Test the retrieval of entries from the database."""
-
-# pool = await create_db_pool(dsn=dsn)
-# test_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# source="api_request",
-# content="test entry content",
-# )
-
-# internal_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# content="test entry content",
-# source="internal",
-# )
-
-# await create_entries(
-# developer_id=TEST_DEVELOPER_ID,
-# session_id=SESSION_ID,
-# data=[test_entry, internal_entry],
-# connection_pool=pool,
-# )
-
-# result = await list_entries(
-# developer_id=TEST_DEVELOPER_ID,
-# session_id=SESSION_ID,
-# connection_pool=pool,
-# )
-
-
-# # Assert that only one entry is retrieved, matching the session_id.
-# assert len(result) == 1
-# assert isinstance(result[0], Entry)
-# assert result is not None
-
-
-# @test("query: get history")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
-# """Test the retrieval of entry history from the database."""
-
-# pool = await create_db_pool(dsn=dsn)
-# test_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# source="api_request",
-# content="test entry content",
-# )
-
-# internal_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# content="test entry content",
-# source="internal",
-# )
-
-# await create_entries(
-# developer_id=developer_id,
-# session_id=SESSION_ID,
-# data=[test_entry, internal_entry],
-# connection_pool=pool,
-# )
-
-# result = await get_history(
-# developer_id=developer_id,
-# session_id=SESSION_ID,
-# connection_pool=pool,
-# )
-
-# # Assert that entries are retrieved and have valid IDs.
-# assert result is not None
-# assert isinstance(result, History)
-# assert len(result.entries) > 0
-# assert result.entries[0].id
-
-
-# @test("query: delete entries")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id): # , session=test_session
-# """Test the deletion of entries from the database."""
-
-# pool = await create_db_pool(dsn=dsn)
-# test_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# source="api_request",
-# content="test entry content",
-# )
-
-# internal_entry = CreateEntryRequest.from_model_input(
-# model=MODEL,
-# role="user",
-# content="internal entry content",
-# source="internal",
-# )
-
-# created_entries = await create_entries(
-# developer_id=developer_id,
-# session_id=SESSION_ID,
-# data=[test_entry, internal_entry],
-# connection_pool=pool,
-# )
-
-# entry_ids = [entry.id for entry in created_entries]
-
-# await delete_entries(
-# developer_id=developer_id,
-# session_id=SESSION_ID,
-# entry_ids=[UUID("123e4567-e89b-12d3-a456-426614174002")],
-# connection_pool=pool,
-# )
-
-# result = await list_entries(
-# developer_id=developer_id,
-# session_id=SESSION_ID,
-# connection_pool=pool,
-# )
-
-# Assert that no entries are retrieved after deletion.
-# assert all(id not in [entry.id for entry in result] for id in entry_ids)
-# assert len(result) == 0
-# assert result is not None
+@test("query: get entries")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test the retrieval of entries from the database."""
+
+ pool = await create_db_pool(dsn=dsn)
+ test_entry = CreateEntryRequest.from_model_input(
+ model=MODEL,
+ role="user",
+ source="api_request",
+ content="test entry content",
+ )
+
+ internal_entry = CreateEntryRequest.from_model_input(
+ model=MODEL,
+ role="user",
+ content="test entry content",
+ source="internal",
+ )
+
+ await create_entries(
+ developer_id=developer_id,
+ session_id=session.id,
+ data=[test_entry, internal_entry],
+ connection_pool=pool,
+ )
+
+ result = await list_entries(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
+
+
+ # Assert that only one entry is retrieved, matching the session_id.
+ assert len(result) == 1
+ assert isinstance(result[0], Entry)
+ assert result is not None
+
+
+@test("query: get history")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test the retrieval of entry history from the database."""
+
+ pool = await create_db_pool(dsn=dsn)
+ test_entry = CreateEntryRequest.from_model_input(
+ model=MODEL,
+ role="user",
+ source="api_request",
+ content="test entry content",
+ )
+
+ internal_entry = CreateEntryRequest.from_model_input(
+ model=MODEL,
+ role="user",
+ content="test entry content",
+ source="internal",
+ )
+
+ await create_entries(
+ developer_id=developer_id,
+ session_id=session.id,
+ data=[test_entry, internal_entry],
+ connection_pool=pool,
+ )
+
+ result = await get_history(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
+
+ # Assert that entries are retrieved and have valid IDs.
+ assert result is not None
+ assert isinstance(result, History)
+ assert len(result.entries) > 0
+ assert result.entries[0].id
+
+
+@test("query: delete entries")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test the deletion of entries from the database."""
+
+ pool = await create_db_pool(dsn=dsn)
+ test_entry = CreateEntryRequest.from_model_input(
+ model=MODEL,
+ role="user",
+ source="api_request",
+ content="test entry content",
+ )
+
+ internal_entry = CreateEntryRequest.from_model_input(
+ model=MODEL,
+ role="user",
+ content="internal entry content",
+ source="internal",
+ )
+
+ created_entries = await create_entries(
+ developer_id=developer_id,
+ session_id=session.id,
+ data=[test_entry, internal_entry],
+ connection_pool=pool,
+ )
+
+ entry_ids = [entry.id for entry in created_entries]
+
+ await delete_entries(
+ developer_id=developer_id,
+ session_id=session.id,
+ entry_ids=entry_ids,
+ connection_pool=pool,
+ )
+
+ result = await list_entries(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
+
+ # Assert that no entries are retrieved after deletion.
+ assert all(id not in [entry.id for entry in result] for id in entry_ids)
+ assert len(result) == 0
+ assert result is not None
diff --git a/memory-store/migrations/000015_entries.up.sql b/memory-store/migrations/000015_entries.up.sql
index c104091a2..73723a8bc 100644
--- a/memory-store/migrations/000015_entries.up.sql
+++ b/memory-store/migrations/000015_entries.up.sql
@@ -16,8 +16,9 @@ CREATE TABLE IF NOT EXISTS entries (
tool_calls JSONB[] NOT NULL DEFAULT '{}',
model TEXT NOT NULL,
token_count INTEGER DEFAULT NULL,
+ tokenizer TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- timestamp TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ timestamp DOUBLE PRECISION NOT NULL,
CONSTRAINT pk_entries PRIMARY KEY (session_id, entry_id, created_at)
);
@@ -58,10 +59,10 @@ END $$;
CREATE
OR REPLACE FUNCTION optimized_update_token_count_after () RETURNS TRIGGER AS $$
DECLARE
- token_count INTEGER;
+ calc_token_count INTEGER;
BEGIN
-- Compute token_count outside the UPDATE statement for clarity and potential optimization
- token_count := cardinality(
+ calc_token_count := cardinality(
ai.openai_tokenize(
'gpt-4o', -- FIXME: Use `NEW.model`
array_to_string(NEW.content::TEXT[], ' ')
@@ -69,9 +70,9 @@ BEGIN
);
-- Perform the update only if token_count differs
- IF token_count <> NEW.token_count THEN
+ IF calc_token_count <> NEW.token_count THEN
UPDATE entries
- SET token_count = token_count
+ SET token_count = calc_token_count
WHERE entry_id = NEW.entry_id;
END IF;
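The rename above is a correctness fix, not cosmetics: inside a PL/pgSQL function body, a variable named token_count shadows the entries.token_count column, so the reference on the right-hand side of `SET token_count = token_count` is ambiguous and PostgreSQL rejects it at runtime under the default plpgsql.variable_conflict = error setting. Renaming the variable to calc_token_count makes the UPDATE unambiguously write the computed value into the column.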
From 619f973290bf055a0fe0920645e24712231ecbc6 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Thu, 19 Dec 2024 14:30:16 +0000
Subject: [PATCH 086/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/entries/create_entries.py | 7 +++----
agents-api/agents_api/queries/entries/delete_entries.py | 2 +-
agents-api/agents_api/queries/entries/get_history.py | 7 +++----
agents-api/agents_api/queries/utils.py | 2 +-
agents-api/tests/test_entry_queries.py | 9 +++++++--
5 files changed, 15 insertions(+), 12 deletions(-)
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index 8d3bdb1eb..95973ad0b 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -1,18 +1,17 @@
from typing import Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
+from litellm.utils import _select_tokenizer as select_tokenizer
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateEntryRequest, Entry, Relation
from ...common.utils.datetime import utcnow
from ...common.utils.messages import content_to_json
from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, wrap_in_class, rewrap_exceptions
-import asyncpg
-from litellm.utils import _select_tokenizer as select_tokenizer
-
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Query for checking if the session exists
session_exists_query = """
diff --git a/agents-api/agents_api/queries/entries/delete_entries.py b/agents-api/agents_api/queries/entries/delete_entries.py
index be08eae42..47b7379a4 100644
--- a/agents-api/agents_api/queries/entries/delete_entries.py
+++ b/agents-api/agents_api/queries/entries/delete_entries.py
@@ -144,7 +144,7 @@ async def delete_entries(
(delete_entry_relations_by_ids_query, [session_id, entry_ids], "fetch"),
(
delete_entry_by_ids_query,
- [entry_ids, developer_id, session_id],
+ [entry_ids, developer_id, session_id],
"fetch",
),
]
diff --git a/agents-api/agents_api/queries/entries/get_history.py b/agents-api/agents_api/queries/entries/get_history.py
index afa940cce..e6967a6cc 100644
--- a/agents-api/agents_api/queries/entries/get_history.py
+++ b/agents-api/agents_api/queries/entries/get_history.py
@@ -1,6 +1,6 @@
-from uuid import UUID
import json
-from typing import Tuple, List, Any
+from typing import Any, List, Tuple
+from uuid import UUID
import asyncpg
from beartype import beartype
@@ -8,6 +8,7 @@
from sqlglot import parse_one
from ...autogen.openapi_model import History
+from ...common.utils.datetime import utcnow
from ..utils import (
partialclass,
pg_query,
@@ -15,8 +16,6 @@
wrap_in_class,
)
-from ...common.utils.datetime import utcnow
-
# Define the raw SQL query for getting history with a developer check and relations
history_query = parse_one("""
WITH entries AS (
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index bb1451678..0d139cb91 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -231,7 +231,7 @@ def _return_data(rec: list[Record]):
nonlocal transform
transform = transform or (lambda x: x)
-
+
if one:
assert len(data) == 1, "Expected one result, got none"
obj: ModelT = cls(**transform(data[0]))
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 703aa484f..706185c7b 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -4,6 +4,7 @@
"""
from uuid import UUID
+
from fastapi import HTTPException
from uuid_extensions import uuid7
from ward import raises, test
@@ -14,7 +15,12 @@
History,
)
from agents_api.clients.pg import create_db_pool
-from agents_api.queries.entries import create_entries, list_entries, get_history, delete_entries
+from agents_api.queries.entries import (
+ create_entries,
+ delete_entries,
+ get_history,
+ list_entries,
+)
from tests.fixtures import pg_dsn, test_developer, test_developer_id, test_session
MODEL = "gpt-4o-mini"
@@ -89,7 +95,6 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
connection_pool=pool,
)
-
# Assert that only one entry is retrieved, matching the session_id.
assert len(result) == 1
assert isinstance(result[0], Entry)
From d3b222e4ccf46fc2d9bba79aacbe7d2a037e2abf Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Fri, 20 Dec 2024 04:02:43 +0530
Subject: [PATCH 087/274] wip(agents-api,memory-store): Tasks queries
Signed-off-by: Diwank Singh Tomer
---
.../agents_api/queries/tasks/__init__.py | 29 +++
.../queries/tasks/create_or_update_task.py | 169 ++++++++++++++++++
.../agents_api/queries/tasks/create_task.py | 151 ++++++++++++++++
.../migrations/000002_developers.up.sql | 19 +-
memory-store/migrations/000004_agents.up.sql | 15 +-
memory-store/migrations/000005_files.up.sql | 4 +-
memory-store/migrations/000006_docs.up.sql | 7 +-
memory-store/migrations/000008_tools.up.sql | 21 ++-
.../migrations/000009_sessions.up.sql | 14 +-
memory-store/migrations/000010_tasks.up.sql | 31 ++--
.../migrations/000011_executions.up.sql | 3 +-
.../migrations/000012_transitions.up.sql | 7 +-
.../migrations/000014_temporal_lookup.up.sql | 2 +-
.../migrations/000015_entries.down.sql | 5 +-
memory-store/migrations/000015_entries.up.sql | 47 +++--
.../migrations/000016_entry_relations.up.sql | 2 +-
16 files changed, 461 insertions(+), 65 deletions(-)
create mode 100644 agents-api/agents_api/queries/tasks/__init__.py
create mode 100644 agents-api/agents_api/queries/tasks/create_or_update_task.py
create mode 100644 agents-api/agents_api/queries/tasks/create_task.py
diff --git a/agents-api/agents_api/queries/tasks/__init__.py b/agents-api/agents_api/queries/tasks/__init__.py
new file mode 100644
index 000000000..d2f8b3c35
--- /dev/null
+++ b/agents-api/agents_api/queries/tasks/__init__.py
@@ -0,0 +1,29 @@
+"""
+The `task` module within the `queries` package provides SQL query functions for managing tasks
+in the TimescaleDB database. This includes operations for:
+
+- Creating new tasks
+- Updating existing tasks
+- Retrieving task details
+- Listing tasks with filtering and pagination
+- Deleting tasks
+"""
+
+from .create_or_update_task import create_or_update_task
+from .create_task import create_task
+
+# from .delete_task import delete_task
+# from .get_task import get_task
+# from .list_tasks import list_tasks
+# from .patch_task import patch_task
+# from .update_task import update_task
+
+__all__ = [
+ "create_or_update_task",
+ "create_task",
+ # "delete_task",
+ # "get_task",
+ # "list_tasks",
+ # "patch_task",
+ # "update_task",
+]
diff --git a/agents-api/agents_api/queries/tasks/create_or_update_task.py b/agents-api/agents_api/queries/tasks/create_or_update_task.py
new file mode 100644
index 000000000..a302a38e1
--- /dev/null
+++ b/agents-api/agents_api/queries/tasks/create_or_update_task.py
@@ -0,0 +1,169 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+from uuid_extensions import uuid7
+
+from ...autogen.openapi_model import CreateOrUpdateTaskRequest, ResourceUpdatedResponse
+from ...common.protocol.tasks import task_to_spec
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query for creating or updating a task
+tools_query = parse_one("""
+WITH current_version AS (
+ SELECT COALESCE(MAX("version"), 0) + 1 as next_version
+ FROM tasks
+ WHERE developer_id = $1
+ AND task_id = $3
+)
+INSERT INTO tools (
+ task_version,
+ developer_id,
+ agent_id,
+ task_id,
+ tool_id,
+ type,
+ name,
+ description,
+ spec
+)
+SELECT
+ next_version, -- task_version
+ $1, -- developer_id
+ $2, -- agent_id
+ $3, -- task_id
+ $4, -- tool_id
+ $5, -- type
+ $6, -- name
+ $7, -- description
+ $8 -- spec
+FROM current_version
+""").sql(pretty=True)
+
+task_query = parse_one("""
+WITH current_version AS (
+ SELECT COALESCE(MAX("version"), 0) + 1 as next_version
+ FROM tasks
+ WHERE developer_id = $1
+ AND task_id = $4
+)
+INSERT INTO tasks (
+ "version",
+ developer_id,
+ canonical_name,
+ agent_id,
+ task_id,
+ name,
+ description,
+ input_schema,
+ spec,
+ metadata
+)
+SELECT
+ next_version, -- version
+ $1, -- developer_id
+ $2, -- canonical_name
+ $3, -- agent_id
+ $4, -- task_id
+ $5, -- name
+ $6, -- description
+ $7::jsonb, -- input_schema
+ $8::jsonb, -- spec
+ $9::jsonb -- metadata
+FROM current_version
+RETURNING *, (SELECT next_version FROM current_version) as next_version
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or agent does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A task with this ID already exists for this agent.",
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": d["task_id"],
+ "jobs": [],
+ "updated_at": d["updated_at"].timestamp(),
+ **d,
+ },
+)
+@increase_counter("create_or_update_task")
+@pg_query
+@beartype
+async def create_or_update_task(
+ *,
+ developer_id: UUID,
+ agent_id: UUID,
+ task_id: UUID,
+ data: CreateOrUpdateTaskRequest,
+) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
+ """
+ Constructs an SQL query to create or update a task.
+
+ Args:
+ developer_id (UUID): The UUID of the developer.
+ agent_id (UUID): The UUID of the agent.
+ task_id (UUID): The UUID of the task.
+ data (CreateOrUpdateTaskRequest): The task data to insert or update.
+
+ Returns:
+ list[tuple[str, list, Literal["fetch", "fetchmany"]]]: List of SQL queries and parameters.
+
+ Raises:
+ HTTPException: If developer/agent doesn't exist (404) or on unique constraint violation (409)
+ """
+ task_data = task_to_spec(data).model_dump(exclude_none=True, mode="json")
+
+ # Generate canonical name from task name if not provided
+ canonical_name = data.canonical_name or task_data["name"].lower().replace(" ", "_")
+
+ # Version will be determined by the CTE
+ task_params = [
+ developer_id, # $1
+ canonical_name, # $2
+ agent_id, # $3
+ task_id, # $4
+ task_data["name"], # $5
+ task_data.get("description"), # $6
+ data.input_schema or {}, # $7
+ task_data["spec"], # $8
+ data.metadata or {}, # $9
+ ]
+
+ queries = [(task_query, task_params, "fetch")]
+
+ tool_params = [
+ [
+ developer_id,
+ agent_id,
+ task_id,
+ uuid7(), # tool_id
+ tool.type,
+ tool.name,
+ tool.description,
+ getattr(tool, tool.type), # spec
+ ]
+ for tool in data.tools or []
+ ]
+
+ # Add tools query if there are tools
+ if tool_params:
+ queries.append((tools_query, tool_params, "fetchmany"))
+
+ return queries
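Because next_version is recomputed from MAX("version") inside the CTE at execution time, each upsert of the same (developer_id, task_id) pair inserts a fresh task row rather than updating in place. A hedged usage sketch (the IDs, the request fields, and the connection_pool kwarg injected by the pg_query decorator are all illustrative):

    from uuid import uuid4

    from agents_api.autogen.openapi_model import CreateOrUpdateTaskRequest
    from agents_api.clients.pg import create_db_pool
    from agents_api.queries.tasks import create_or_update_task

    async def upsert_twice(dsn: str) -> None:
        pool = await create_db_pool(dsn=dsn)
        ids = dict(developer_id=uuid4(), agent_id=uuid4(), task_id=uuid4())
        req = CreateOrUpdateTaskRequest(name="demo", main=[{"evaluate": {"x": "1"}}])

        # The first call inserts version 1, the second version 2; both
        # return a ResourceUpdatedResponse keyed on the task id.
        for _ in range(2):
            updated = await create_or_update_task(**ids, data=req, connection_pool=pool)
            print(updated.id)
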
diff --git a/agents-api/agents_api/queries/tasks/create_task.py b/agents-api/agents_api/queries/tasks/create_task.py
new file mode 100644
index 000000000..2587e63ff
--- /dev/null
+++ b/agents-api/agents_api/queries/tasks/create_task.py
@@ -0,0 +1,151 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+from uuid_extensions import uuid7
+
+from ...autogen.openapi_model import CreateTaskRequest, ResourceUpdatedResponse
+from ...common.protocol.tasks import task_to_spec
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query for creating or updating a task
+tools_query = parse_one("""
+INSERT INTO tools (
+ task_version,
+ developer_id,
+ agent_id,
+ task_id,
+ tool_id,
+ type,
+ name,
+ description,
+ spec
+)
+VALUES (
+ 1, -- task_version
+ $1, -- developer_id
+ $2, -- agent_id
+ $3, -- task_id
+ $4, -- tool_id
+ $5, -- type
+ $6, -- name
+ $7, -- description
+ $8 -- spec
+)
+""").sql(pretty=True)
+
+task_query = parse_one("""
+INSERT INTO tasks (
+ "version",
+ developer_id,
+ agent_id,
+ task_id,
+ name,
+ description,
+ input_schema,
+ spec,
+ metadata
+)
+VALUES (
+ 1, -- version
+ $1, -- developer_id
+ $2, -- agent_id
+ $3, -- task_id
+ $4, -- name
+ $5, -- description
+ $6::jsonb, -- input_schema
+ $7::jsonb, -- spec
+ $8::jsonb -- metadata
+)
+RETURNING *
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or agent does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A task with this ID already exists for this agent.",
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": d["task_id"],
+ "jobs": [],
+ # "updated_at": d["updated_at"].timestamp(),
+ **d,
+ },
+)
+@increase_counter("create_task")
+@pg_query
+@beartype
+async def create_task(
+ *, developer_id: UUID, agent_id: UUID, task_id: UUID, data: CreateTaskRequest
+) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
+ """
+ Constructs an SQL query to create or update a task.
+
+ Args:
+ developer_id (UUID): The UUID of the developer.
+ agent_id (UUID): The UUID of the agent.
+ task_id (UUID): The UUID of the task.
+ data (CreateTaskRequest): The task data to insert or update.
+
+ Returns:
+ tuple[str, list]: SQL query and parameters.
+
+ Raises:
+ HTTPException: If developer/agent doesn't exist (404) or on unique constraint violation (409)
+ """
+ task_data = task_to_spec(data).model_dump(exclude_none=True, mode="json")
+
+ params = [
+ developer_id, # $1
+ agent_id, # $2
+ task_id, # $3
+ data.name, # $4
+ data.description, # $5
+ data.input_schema or {}, # $6
+ task_data["spec"], # $7
+ data.metadata or {}, # $8
+ ]
+
+ tool_params = [
+ [
+ developer_id,
+ agent_id,
+ task_id,
+ uuid7(), # tool_id
+ tool.type,
+ tool.name,
+ tool.description,
+ getattr(tool, tool.type), # spec
+ ]
+ for tool in data.tools or []
+ ]
+
+ return [
+ (
+ task_query,
+ params,
+ "fetch",
+ ),
+ (
+ tools_query,
+ tool_params,
+ "fetchmany",
+ ),
+ ]
diff --git a/memory-store/migrations/000002_developers.up.sql b/memory-store/migrations/000002_developers.up.sql
index 9ca9dca69..e18e42248 100644
--- a/memory-store/migrations/000002_developers.up.sql
+++ b/memory-store/migrations/000002_developers.up.sql
@@ -12,11 +12,21 @@ CREATE TABLE IF NOT EXISTS developers (
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_developers PRIMARY KEY (developer_id),
- CONSTRAINT uq_developers_email UNIQUE (email)
+ CONSTRAINT uq_developers_email UNIQUE (email),
+ CONSTRAINT ct_settings_is_object CHECK (jsonb_typeof(settings) = 'object')
);
-- Create sorted index on developer_id (optimized for UUID v7)
-CREATE INDEX IF NOT EXISTS idx_developers_id_sorted ON developers (developer_id DESC);
+CREATE INDEX IF NOT EXISTS idx_developers_id_sorted ON developers (developer_id DESC) INCLUDE (
+ email,
+ active,
+ tags,
+ settings,
+ created_at,
+ updated_at
+)
+WHERE
+ active = TRUE;
-- Create index on email
CREATE INDEX IF NOT EXISTS idx_developers_email ON developers (email);
@@ -24,11 +34,6 @@ CREATE INDEX IF NOT EXISTS idx_developers_email ON developers (email);
-- Create GIN index for tags array
CREATE INDEX IF NOT EXISTS idx_developers_tags ON developers USING GIN (tags);
--- Create partial index for active developers
-CREATE INDEX IF NOT EXISTS idx_developers_active ON developers (developer_id)
-WHERE
- active = TRUE;
-
-- Create trigger to automatically update updated_at
DO $$
BEGIN
diff --git a/memory-store/migrations/000004_agents.up.sql b/memory-store/migrations/000004_agents.up.sql
index 32e066f71..1254cba5f 100644
--- a/memory-store/migrations/000004_agents.up.sql
+++ b/memory-store/migrations/000004_agents.up.sql
@@ -1,16 +1,5 @@
BEGIN;
--- Drop existing objects if they exist
-DROP TRIGGER IF EXISTS trg_agents_updated_at ON agents;
-
-DROP INDEX IF EXISTS idx_agents_metadata;
-
-DROP INDEX IF EXISTS idx_agents_developer;
-
-DROP INDEX IF EXISTS idx_agents_id_sorted;
-
-DROP TABLE IF EXISTS agents;
-
-- Create agents table
CREATE TABLE IF NOT EXISTS agents (
developer_id UUID NOT NULL,
@@ -35,7 +24,9 @@ CREATE TABLE IF NOT EXISTS agents (
default_settings JSONB NOT NULL DEFAULT '{}'::JSONB,
CONSTRAINT pk_agents PRIMARY KEY (developer_id, agent_id),
CONSTRAINT uq_agents_canonical_name_unique UNIQUE (developer_id, canonical_name), -- per developer
- CONSTRAINT ct_agents_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$')
+ CONSTRAINT ct_agents_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$'),
+ CONSTRAINT ct_agents_metadata_is_object CHECK (jsonb_typeof(metadata) = 'object'),
+ CONSTRAINT ct_agents_default_settings_is_object CHECK (jsonb_typeof(default_settings) = 'object')
);
-- Create sorted index on agent_id (optimized for UUID v7)
diff --git a/memory-store/migrations/000005_files.up.sql b/memory-store/migrations/000005_files.up.sql
index ef4c22b3d..28c2500b5 100644
--- a/memory-store/migrations/000005_files.up.sql
+++ b/memory-store/migrations/000005_files.up.sql
@@ -63,7 +63,7 @@ CREATE TABLE IF NOT EXISTS user_files (
file_id UUID NOT NULL,
CONSTRAINT pk_user_files PRIMARY KEY (developer_id, user_id, file_id),
CONSTRAINT fk_user_files_user FOREIGN KEY (developer_id, user_id) REFERENCES users (developer_id, user_id),
- CONSTRAINT fk_user_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id)
+ CONSTRAINT fk_user_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id) ON DELETE CASCADE
);
-- Create index if it doesn't exist
@@ -76,7 +76,7 @@ CREATE TABLE IF NOT EXISTS agent_files (
file_id UUID NOT NULL,
CONSTRAINT pk_agent_files PRIMARY KEY (developer_id, agent_id, file_id),
CONSTRAINT fk_agent_files_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id),
- CONSTRAINT fk_agent_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id)
+ CONSTRAINT fk_agent_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id) ON DELETE CASCADE
);
-- Create index if it doesn't exist
diff --git a/memory-store/migrations/000006_docs.up.sql b/memory-store/migrations/000006_docs.up.sql
index 5b532bbef..ce440b32d 100644
--- a/memory-store/migrations/000006_docs.up.sql
+++ b/memory-store/migrations/000006_docs.up.sql
@@ -29,7 +29,8 @@ CREATE TABLE IF NOT EXISTS docs (
CONSTRAINT ct_docs_embedding_dimensions_positive CHECK (embedding_dimensions > 0),
CONSTRAINT ct_docs_valid_modality CHECK (modality IN ('text', 'image', 'mixed')),
CONSTRAINT ct_docs_index_positive CHECK (index >= 0),
- CONSTRAINT ct_docs_valid_language CHECK (is_valid_language (language))
+ CONSTRAINT ct_docs_valid_language CHECK (is_valid_language (language)),
+ CONSTRAINT ct_metadata_is_object CHECK (jsonb_typeof(metadata) = 'object')
);
-- Create sorted index on doc_id if not exists
@@ -70,7 +71,7 @@ CREATE TABLE IF NOT EXISTS user_docs (
doc_id UUID NOT NULL,
CONSTRAINT pk_user_docs PRIMARY KEY (developer_id, user_id, doc_id),
CONSTRAINT fk_user_docs_user FOREIGN KEY (developer_id, user_id) REFERENCES users (developer_id, user_id),
- CONSTRAINT fk_user_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id)
+ CONSTRAINT fk_user_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id) ON DELETE CASCADE
);
-- Create the agent_docs table
@@ -80,7 +81,7 @@ CREATE TABLE IF NOT EXISTS agent_docs (
doc_id UUID NOT NULL,
CONSTRAINT pk_agent_docs PRIMARY KEY (developer_id, agent_id, doc_id),
CONSTRAINT fk_agent_docs_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id),
- CONSTRAINT fk_agent_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id)
+ CONSTRAINT fk_agent_docs_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id) ON DELETE CASCADE
);
-- Create indexes if not exists
diff --git a/memory-store/migrations/000008_tools.up.sql b/memory-store/migrations/000008_tools.up.sql
index 159ef3688..93e852de2 100644
--- a/memory-store/migrations/000008_tools.up.sql
+++ b/memory-store/migrations/000008_tools.up.sql
@@ -22,7 +22,8 @@ CREATE TABLE IF NOT EXISTS tools (
spec JSONB NOT NULL,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- CONSTRAINT pk_tools PRIMARY KEY (developer_id, agent_id, tool_id, type, name)
+ CONSTRAINT pk_tools PRIMARY KEY (developer_id, agent_id, tool_id, type, name),
+ CONSTRAINT ct_spec_is_object CHECK (jsonb_typeof(spec) = 'object')
);
-- Create sorted index on tool_id if it doesn't exist
@@ -41,12 +42,28 @@ DO $$ BEGIN
ALTER TABLE tools
ADD CONSTRAINT fk_tools_agent
FOREIGN KEY (developer_id, agent_id)
- REFERENCES agents(developer_id, agent_id);
+ REFERENCES agents(developer_id, agent_id) ON DELETE CASCADE;
END IF;
END $$;
CREATE INDEX IF NOT EXISTS idx_tools_developer_agent ON tools (developer_id, agent_id);
+-- Add foreign key constraint referencing tasks(task_id)
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1
+ FROM pg_constraint
+ WHERE conname = 'fk_tools_task'
+ ) THEN
+ ALTER TABLE tools
+ ADD CONSTRAINT fk_tools_task
+ FOREIGN KEY (developer_id, task_id)
+ REFERENCES tasks(developer_id, task_id) ON DELETE CASCADE;
+ END IF;
+END
+$$;
+
-- Drop trigger if exists and recreate
DROP TRIGGER IF EXISTS trg_tools_updated_at ON tools;
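The DO $$ guard keeps the migration re-runnable: fk_tools_task is added only when pg_constraint has no row with that name, so a second run is a no-op instead of an error. The same existence check can be reproduced from Python, e.g. as a quick assertion in a migration test (DSN hypothetical):

```python
import asyncio

import asyncpg


async def constraint_exists(dsn: str, name: str) -> bool:
    # Mirrors the guard in the migration: look the constraint up by name
    conn = await asyncpg.connect(dsn)
    try:
        row = await conn.fetchrow(
            "SELECT 1 FROM pg_constraint WHERE conname = $1", name
        )
        return row is not None
    finally:
        await conn.close()


async def main() -> None:
    dsn = "postgresql://postgres@localhost/memory_store"  # hypothetical
    assert await constraint_exists(dsn, "fk_tools_task")


asyncio.run(main())
```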
diff --git a/memory-store/migrations/000009_sessions.up.sql b/memory-store/migrations/000009_sessions.up.sql
index 75b5fde9a..b014017e0 100644
--- a/memory-store/migrations/000009_sessions.up.sql
+++ b/memory-store/migrations/000009_sessions.up.sql
@@ -16,21 +16,21 @@ CREATE TABLE IF NOT EXISTS sessions (
recall_options JSONB NOT NULL DEFAULT '{}'::JSONB,
CONSTRAINT pk_sessions PRIMARY KEY (developer_id, session_id),
CONSTRAINT uq_sessions_session_id UNIQUE (session_id),
- CONSTRAINT chk_sessions_token_budget_positive CHECK (
+ CONSTRAINT ct_sessions_token_budget_positive CHECK (
token_budget IS NULL
OR token_budget > 0
),
- CONSTRAINT chk_sessions_context_overflow_valid CHECK (
+ CONSTRAINT ct_sessions_context_overflow_valid CHECK (
context_overflow IS NULL
OR context_overflow IN ('truncate', 'adaptive')
),
- CONSTRAINT chk_sessions_system_template_not_empty CHECK (length(trim(system_template)) > 0),
- CONSTRAINT chk_sessions_situation_not_empty CHECK (
+ CONSTRAINT ct_sessions_system_template_not_empty CHECK (length(trim(system_template)) > 0),
+ CONSTRAINT ct_sessions_situation_not_empty CHECK (
situation IS NULL
OR length(trim(situation)) > 0
),
- CONSTRAINT chk_sessions_metadata_valid CHECK (jsonb_typeof(metadata) = 'object'),
- CONSTRAINT chk_sessions_recall_options_valid CHECK (jsonb_typeof(recall_options) = 'object')
+ CONSTRAINT ct_sessions_metadata_is_object CHECK (jsonb_typeof(metadata) = 'object'),
+ CONSTRAINT ct_sessions_recall_options_is_object CHECK (jsonb_typeof(recall_options) = 'object')
);
-- Create indexes if they don't exist
@@ -84,7 +84,7 @@ CREATE TABLE IF NOT EXISTS session_lookup (
participant_type,
participant_id
),
- FOREIGN KEY (developer_id, session_id) REFERENCES sessions (developer_id, session_id)
+ FOREIGN KEY (developer_id, session_id) REFERENCES sessions (developer_id, session_id) ON DELETE CASCADE
);
-- Create indexes if they don't exist
diff --git a/memory-store/migrations/000010_tasks.up.sql b/memory-store/migrations/000010_tasks.up.sql
index ad27d5bdc..d5a0119d8 100644
--- a/memory-store/migrations/000010_tasks.up.sql
+++ b/memory-store/migrations/000010_tasks.up.sql
@@ -31,11 +31,11 @@ CREATE TABLE IF NOT EXISTS tasks (
metadata JSONB DEFAULT '{}'::JSONB,
CONSTRAINT pk_tasks PRIMARY KEY (developer_id, task_id, "version"),
CONSTRAINT uq_tasks_canonical_name_unique UNIQUE (developer_id, canonical_name),
- CONSTRAINT fk_tasks_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id),
+ CONSTRAINT fk_tasks_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id) ON DELETE CASCADE,
CONSTRAINT ct_tasks_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$'),
- CONSTRAINT chk_tasks_metadata_valid CHECK (jsonb_typeof(metadata) = 'object'),
- CONSTRAINT chk_tasks_input_schema_valid CHECK (jsonb_typeof(input_schema) = 'object'),
- CONSTRAINT chk_tasks_version_positive CHECK ("version" > 0)
+ CONSTRAINT ct_tasks_metadata_is_object CHECK (jsonb_typeof(metadata) = 'object'),
+ CONSTRAINT ct_tasks_input_schema_is_object CHECK (jsonb_typeof(input_schema) = 'object'),
+ CONSTRAINT ct_tasks_version_positive CHECK ("version" > 0)
);
-- Create sorted index on task_id if it doesn't exist
@@ -98,20 +98,19 @@ COMMENT ON TABLE tasks IS 'Stores tasks associated with AI agents for developers
CREATE TABLE IF NOT EXISTS workflows (
developer_id UUID NOT NULL,
task_id UUID NOT NULL,
- version INTEGER NOT NULL,
- name TEXT NOT NULL CONSTRAINT chk_workflows_name_length CHECK (
- length(name) >= 1 AND length(name) <= 255
- ),
- step_idx INTEGER NOT NULL CONSTRAINT chk_workflows_step_idx_positive CHECK (step_idx >= 0),
- step_type TEXT NOT NULL CONSTRAINT chk_workflows_step_type_length CHECK (
- length(step_type) >= 1 AND length(step_type) <= 255
+ "version" INTEGER NOT NULL,
+ name TEXT NOT NULL CONSTRAINT ct_workflows_name_length CHECK (
+ length(name) >= 1
+ AND length(name) <= 255
),
- step_definition JSONB NOT NULL CONSTRAINT chk_workflows_step_definition_valid CHECK (
- jsonb_typeof(step_definition) = 'object'
+ step_idx INTEGER NOT NULL CONSTRAINT ct_workflows_step_idx_positive CHECK (step_idx >= 0),
+ step_type TEXT NOT NULL CONSTRAINT ct_workflows_step_type_length CHECK (
+ length(step_type) >= 1
+ AND length(step_type) <= 255
),
- CONSTRAINT pk_workflows PRIMARY KEY (developer_id, task_id, version, step_idx),
- CONSTRAINT fk_workflows_tasks FOREIGN KEY (developer_id, task_id, version)
- REFERENCES tasks (developer_id, task_id, version) ON DELETE CASCADE
+ step_definition JSONB NOT NULL CONSTRAINT ct_workflows_step_definition_valid CHECK (jsonb_typeof(step_definition) = 'object'),
+ CONSTRAINT pk_workflows PRIMARY KEY (developer_id, task_id, "version", name, step_idx),
+ CONSTRAINT fk_workflows_tasks FOREIGN KEY (developer_id, task_id, "version") REFERENCES tasks (developer_id, task_id, "version") ON DELETE CASCADE
);
-- Create index for 'workflows' table if it doesn't exist
diff --git a/memory-store/migrations/000011_executions.up.sql b/memory-store/migrations/000011_executions.up.sql
index 976ead369..5184601b2 100644
--- a/memory-store/migrations/000011_executions.up.sql
+++ b/memory-store/migrations/000011_executions.up.sql
@@ -16,7 +16,8 @@ CREATE TABLE IF NOT EXISTS executions (
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_executions PRIMARY KEY (execution_id),
CONSTRAINT fk_executions_developer FOREIGN KEY (developer_id) REFERENCES developers (developer_id),
- CONSTRAINT fk_executions_task FOREIGN KEY (developer_id, task_id, task_version) REFERENCES tasks (developer_id, task_id, "version")
+ CONSTRAINT fk_executions_task FOREIGN KEY (developer_id, task_id, task_version) REFERENCES tasks (developer_id, task_id, "version"),
+ CONSTRAINT ct_metadata_is_object CHECK (jsonb_typeof(metadata) = 'object')
);
-- Create sorted index on execution_id (optimized for UUID v7)
diff --git a/memory-store/migrations/000012_transitions.up.sql b/memory-store/migrations/000012_transitions.up.sql
index 7bbcf2ad5..5c07172f9 100644
--- a/memory-store/migrations/000012_transitions.up.sql
+++ b/memory-store/migrations/000012_transitions.up.sql
@@ -49,7 +49,9 @@ CREATE TABLE IF NOT EXISTS transitions (
output JSONB,
task_token TEXT DEFAULT NULL,
metadata JSONB DEFAULT '{}'::JSONB,
- CONSTRAINT pk_transitions PRIMARY KEY (created_at, execution_id, transition_id)
+ CONSTRAINT pk_transitions PRIMARY KEY (created_at, execution_id, transition_id),
+ CONSTRAINT ct_step_definition_is_object CHECK (jsonb_typeof(step_definition) = 'object'),
+ CONSTRAINT ct_metadata_is_object CHECK (jsonb_typeof(metadata) = 'object')
);
-- Convert to hypertable if not already
@@ -104,7 +106,8 @@ BEGIN
ALTER TABLE transitions
ADD CONSTRAINT fk_transitions_execution
FOREIGN KEY (execution_id)
- REFERENCES executions(execution_id);
+ REFERENCES executions(execution_id)
+ ON DELETE CASCADE;
END IF;
END $$;
diff --git a/memory-store/migrations/000014_temporal_lookup.up.sql b/memory-store/migrations/000014_temporal_lookup.up.sql
index 724ee1340..59c19a781 100644
--- a/memory-store/migrations/000014_temporal_lookup.up.sql
+++ b/memory-store/migrations/000014_temporal_lookup.up.sql
@@ -9,7 +9,7 @@ CREATE TABLE IF NOT EXISTS temporal_executions_lookup (
result_run_id TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_temporal_executions_lookup PRIMARY KEY (execution_id, id),
- CONSTRAINT fk_temporal_executions_lookup_execution FOREIGN KEY (execution_id) REFERENCES executions (execution_id)
+ CONSTRAINT fk_temporal_executions_lookup_execution FOREIGN KEY (execution_id) REFERENCES executions (execution_id) ON DELETE CASCADE
);
-- Create sorted index on execution_id (optimized for UUID v7)
diff --git a/memory-store/migrations/000015_entries.down.sql b/memory-store/migrations/000015_entries.down.sql
index d8afbb826..fdfd6c8dd 100644
--- a/memory-store/migrations/000015_entries.down.sql
+++ b/memory-store/migrations/000015_entries.down.sql
@@ -14,7 +14,10 @@ DROP INDEX IF EXISTS idx_entries_by_session;
-- Drop the hypertable (this will also drop the table)
DROP TABLE IF EXISTS entries;
+-- Drop the function
+DROP FUNCTION IF EXISTS all_jsonb_elements_are_objects;
+
-- Drop the enum type
DROP TYPE IF EXISTS chat_role;
-COMMIT;
+COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000015_entries.up.sql b/memory-store/migrations/000015_entries.up.sql
index c104091a2..0f0518939 100644
--- a/memory-store/migrations/000015_entries.up.sql
+++ b/memory-store/migrations/000015_entries.up.sql
@@ -1,9 +1,33 @@
BEGIN;
-- Create chat_role enum
-CREATE TYPE chat_role AS ENUM('user', 'assistant', 'tool', 'system', 'developer');
+CREATE TYPE chat_role AS ENUM(
+ 'user',
+ 'assistant',
+ 'tool',
+ 'system',
+ 'developer'
+);
+
+-- Create a custom function that checks that every JSONB element
+-- in the array is an 'object' (an empty array passes vacuously).
+CREATE
+OR REPLACE FUNCTION all_jsonb_elements_are_objects (content jsonb[]) RETURNS boolean AS $$
+DECLARE
+ elem jsonb;
+BEGIN
+ -- Check each element in the `content` array
+ FOREACH elem IN ARRAY content
+ LOOP
+ IF jsonb_typeof(elem) <> 'object' THEN
+ RETURN false;
+ END IF;
+ END LOOP;
+
+ RETURN true;
+END;
+$$ LANGUAGE plpgsql IMMUTABLE;
--- Create entries table
CREATE TABLE IF NOT EXISTS entries (
session_id UUID NOT NULL,
entry_id UUID NOT NULL,
@@ -13,12 +37,14 @@ CREATE TABLE IF NOT EXISTS entries (
name TEXT,
content JSONB[] NOT NULL,
tool_call_id TEXT DEFAULT NULL,
- tool_calls JSONB[] NOT NULL DEFAULT '{}',
+ tool_calls JSONB[] NOT NULL DEFAULT '{}'::JSONB[],
model TEXT NOT NULL,
token_count INTEGER DEFAULT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
timestamp TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- CONSTRAINT pk_entries PRIMARY KEY (session_id, entry_id, created_at)
+ CONSTRAINT pk_entries PRIMARY KEY (session_id, entry_id, created_at),
+ CONSTRAINT ct_content_is_array_of_objects CHECK (all_jsonb_elements_are_objects (content)),
+ CONSTRAINT ct_tool_calls_is_array_of_objects CHECK (all_jsonb_elements_are_objects (tool_calls))
);
-- Convert to hypertable if not already
@@ -48,7 +74,7 @@ BEGIN
ALTER TABLE entries
ADD CONSTRAINT fk_entries_session
FOREIGN KEY (session_id)
- REFERENCES sessions(session_id);
+ REFERENCES sessions(session_id) ON DELETE CASCADE;
END IF;
END $$;
@@ -86,8 +112,8 @@ UPDATE ON entries FOR EACH ROW
EXECUTE FUNCTION optimized_update_token_count_after ();
-- Add trigger to update parent session's updated_at
-CREATE OR REPLACE FUNCTION update_session_updated_at()
-RETURNS TRIGGER AS $$
+CREATE
+OR REPLACE FUNCTION update_session_updated_at () RETURNS TRIGGER AS $$
BEGIN
UPDATE sessions
SET updated_at = CURRENT_TIMESTAMP
@@ -97,8 +123,9 @@ END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER trg_update_session_updated_at
-AFTER INSERT OR UPDATE ON entries
-FOR EACH ROW
-EXECUTE FUNCTION update_session_updated_at();
+AFTER INSERT
+OR
+UPDATE ON entries FOR EACH ROW
+EXECUTE FUNCTION update_session_updated_at ();
COMMIT;
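The ct_content_is_array_of_objects and ct_tool_calls_is_array_of_objects constraints accept a row only when every element of the JSONB[] column is a JSON object; an empty array passes vacuously, since the FOREACH loop never rejects anything. A pure-Python sketch of the same predicate, handy for validating payloads before they hit the database (illustrative only; the real check is the PL/pgSQL function above):

```python
import json
from typing import Any


def all_jsonb_elements_are_objects(content: list[str]) -> bool:
    # Mirrors the PL/pgSQL function: every array element must decode
    # to a JSON object; an empty array vacuously passes.
    for elem in content:
        value: Any = json.loads(elem)
        if not isinstance(value, dict):
            return False
    return True


assert all_jsonb_elements_are_objects(['{"text": "hi"}', '{"type": "image"}'])
assert not all_jsonb_elements_are_objects(['"just a string"', "[1, 2, 3]"])
assert all_jsonb_elements_are_objects([])  # vacuous truth, as in the SQL version
```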
diff --git a/memory-store/migrations/000016_entry_relations.up.sql b/memory-store/migrations/000016_entry_relations.up.sql
index bcdb7fb72..6e9af3f2a 100644
--- a/memory-store/migrations/000016_entry_relations.up.sql
+++ b/memory-store/migrations/000016_entry_relations.up.sql
@@ -22,7 +22,7 @@ BEGIN
ALTER TABLE entry_relations
ADD CONSTRAINT fk_entry_relations_session
FOREIGN KEY (session_id)
- REFERENCES sessions(session_id);
+ REFERENCES sessions(session_id) ON DELETE CASCADE;
END IF;
END $$;
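Taken together, the ON DELETE CASCADE additions in this patch make parent deletions self-cleaning: removing a session now drops its entries, entry_relations, and session_lookup rows, and removing an agent drops its tasks, tools, and file/doc associations. A hedged sketch of what that means operationally, assuming the migrated schema (helper name and DSN are hypothetical):

```python
from uuid import UUID

import asyncpg


async def delete_session_cascading(dsn: str, session_id: UUID) -> None:
    # Before these constraints, this DELETE failed with a foreign-key
    # violation while entries still referenced the session; with
    # ON DELETE CASCADE the dependent rows are removed atomically.
    conn = await asyncpg.connect(dsn)
    try:
        await conn.execute(
            "DELETE FROM sessions WHERE session_id = $1", session_id
        )
        remaining = await conn.fetchval(
            "SELECT count(*) FROM entries WHERE session_id = $1", session_id
        )
        assert remaining == 0  # cascaded away with the session
    finally:
        await conn.close()
```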
From c88e8d76fe558189194afdb1c1c7fecc592f22af Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Thu, 19 Dec 2024 19:10:22 -0500
Subject: [PATCH 088/274] chore: fix conflicts
---
agents-api/tests/test_entry_queries.py | 9 +-
agents-api/tests/test_session_queries.py | 154 +++++++++++------------
2 files changed, 81 insertions(+), 82 deletions(-)
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 03972cdee..e8286e8bc 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -5,9 +5,9 @@
from uuid import UUID
-# from fastapi import HTTPException
-# from uuid_extensions import uuid7
-# from ward import raises, test
+from fastapi import HTTPException
+from uuid_extensions import uuid7
+from ward import raises, test
from agents_api.autogen.openapi_model import (
CreateEntryRequest,
@@ -23,8 +23,7 @@
)
from tests.fixtures import pg_dsn, test_developer, test_developer_id, test_session
-# MODEL = "gpt-4o-mini"
-
+MODEL = "gpt-4o-mini"
@test("query: create entry no session")
async def _(dsn=pg_dsn, developer=test_developer):
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 73b232f1f..171e56aa8 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -3,8 +3,8 @@
# Tests verify the SQL queries without actually executing them against a database.
# """
-# from uuid_extensions import uuid7
-# from ward import raises, test
+from uuid_extensions import uuid7
+from ward import raises, test
from agents_api.autogen.openapi_model import (
CreateOrUpdateSessionRequest,
@@ -36,11 +36,11 @@
)
-# @test("query: create session sql")
-# async def _(
-# dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user
-# ):
-# """Test that a session can be successfully created."""
+@test("query: create session sql")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user
+):
+ """Test that a session can be successfully created."""
pool = await create_db_pool(dsn=dsn)
session_id = uuid7()
@@ -61,11 +61,11 @@
assert result.id == session_id
-# @test("query: create or update session sql")
-# async def _(
-# dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user
-# ):
-# """Test that a session can be successfully created or updated."""
+@test("query: create or update session sql")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, user=test_user
+):
+ """Test that a session can be successfully created or updated."""
pool = await create_db_pool(dsn=dsn)
session_id = uuid7()
@@ -87,39 +87,39 @@
assert result.updated_at is not None
-# @test("query: get session exists")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
-# """Test retrieving an existing session."""
+@test("query: get session exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test retrieving an existing session."""
-# pool = await create_db_pool(dsn=dsn)
-# result = await get_session(
-# developer_id=developer_id,
-# session_id=session.id,
-# connection_pool=pool,
-# )
+ pool = await create_db_pool(dsn=dsn)
+ result = await get_session(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
assert result is not None
assert isinstance(result, Session)
assert result.id == session.id
-# @test("query: get session does not exist")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id):
-# """Test retrieving a non-existent session."""
+@test("query: get session does not exist")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test retrieving a non-existent session."""
-# session_id = uuid7()
-# pool = await create_db_pool(dsn=dsn)
-# with raises(Exception):
-# await get_session(
-# session_id=session_id,
-# developer_id=developer_id,
-# connection_pool=pool,
-# )
+ session_id = uuid7()
+ pool = await create_db_pool(dsn=dsn)
+ with raises(Exception):
+ await get_session(
+ session_id=session_id,
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
-# @test("query: list sessions")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
-# """Test listing sessions with default pagination."""
+@test("query: list sessions")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test listing sessions with default pagination."""
pool = await create_db_pool(dsn=dsn)
result = await list_sessions(
@@ -129,14 +129,14 @@
connection_pool=pool,
)
-# assert isinstance(result, list)
-# assert len(result) >= 1
-# assert any(s.id == session.id for s in result)
+ assert isinstance(result, list)
+ assert len(result) >= 1
+ assert any(s.id == session.id for s in result)
-# @test("query: list sessions with filters")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
-# """Test listing sessions with specific filters."""
+@test("query: list sessions with filters")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test listing sessions with specific filters."""
pool = await create_db_pool(dsn=dsn)
result = await list_sessions(
@@ -153,15 +153,15 @@
), f"Result is not a list of sessions, {result}, {session.situation}"
-# @test("query: count sessions")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
-# """Test counting the number of sessions for a developer."""
+@test("query: count sessions")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test counting the number of sessions for a developer."""
-# pool = await create_db_pool(dsn=dsn)
-# count = await count_sessions(
-# developer_id=developer_id,
-# connection_pool=pool,
-# )
+ pool = await create_db_pool(dsn=dsn)
+ count = await count_sessions(
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
assert isinstance(count, dict)
assert count["count"] >= 1
@@ -190,9 +190,9 @@ async def _(
connection_pool=pool,
)
-# assert result is not None
-# assert isinstance(result, ResourceUpdatedResponse)
-# assert result.updated_at > session.created_at
+ assert result is not None
+ assert isinstance(result, ResourceUpdatedResponse)
+ assert result.updated_at > session.created_at
updated_session = await get_session(
developer_id=developer_id,
@@ -202,11 +202,11 @@ async def _(
assert updated_session.forward_tool_calls is True
-# @test("query: patch session sql")
-# async def _(
-# dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent
-# ):
-# """Test that a session can be successfully patched."""
+@test("query: patch session sql")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, session=test_session, agent=test_agent
+):
+ """Test that a session can be successfully patched."""
pool = await create_db_pool(dsn=dsn)
data = PatchSessionRequest(
@@ -219,9 +219,9 @@ async def _(
connection_pool=pool,
)
-# assert result is not None
-# assert isinstance(result, ResourceUpdatedResponse)
-# assert result.updated_at > session.created_at
+ assert result is not None
+ assert isinstance(result, ResourceUpdatedResponse)
+ assert result.updated_at > session.created_at
patched_session = await get_session(
developer_id=developer_id,
@@ -232,23 +232,23 @@ async def _(
assert patched_session.metadata == {"test": "metadata"}
-# @test("query: delete session sql")
-# async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
-# """Test that a session can be successfully deleted."""
+@test("query: delete session sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
+ """Test that a session can be successfully deleted."""
-# pool = await create_db_pool(dsn=dsn)
-# delete_result = await delete_session(
-# developer_id=developer_id,
-# session_id=session.id,
-# connection_pool=pool,
-# )
+ pool = await create_db_pool(dsn=dsn)
+ delete_result = await delete_session(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
-# assert delete_result is not None
-# assert isinstance(delete_result, ResourceDeletedResponse)
+ assert delete_result is not None
+ assert isinstance(delete_result, ResourceDeletedResponse)
-# with raises(Exception):
-# await get_session(
-# developer_id=developer_id,
-# session_id=session.id,
-# connection_pool=pool,
-# )
+ with raises(Exception):
+ await get_session(
+ developer_id=developer_id,
+ session_id=session.id,
+ connection_pool=pool,
+ )
From 41739ee94dbcfed66dd873db50d628a4810f6a25 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Fri, 20 Dec 2024 00:11:16 +0000
Subject: [PATCH 089/274] refactor: Lint agents-api (CI)
---
agents-api/tests/test_entry_queries.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index e8286e8bc..706185c7b 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -25,6 +25,7 @@
MODEL = "gpt-4o-mini"
+
@test("query: create entry no session")
async def _(dsn=pg_dsn, developer=test_developer):
"""Test the addition of a new entry to the database."""
From 6c77490b60286343809faa91be80339bee6b6fc1 Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Thu, 19 Dec 2024 20:24:28 -0500
Subject: [PATCH 090/274] wip(agents-api): Doc queries
---
.../agents_api/queries/docs/__init__.py | 25 +++
.../agents_api/queries/docs/create_doc.py | 135 +++++++++++++++
.../agents_api/queries/docs/delete_doc.py | 77 +++++++++
.../agents_api/queries/docs/embed_snippets.py | 0
agents-api/agents_api/queries/docs/get_doc.py | 52 ++++++
.../agents_api/queries/docs/list_docs.py | 91 ++++++++++
agents-api/agents_api/queries/docs/mmr.py | 109 ++++++++++++
.../queries/docs/search_docs_by_embedding.py | 70 ++++++++
.../queries/docs/search_docs_by_text.py | 65 +++++++
.../queries/docs/search_docs_hybrid.py | 159 ++++++++++++++++++
10 files changed, 783 insertions(+)
create mode 100644 agents-api/agents_api/queries/docs/__init__.py
create mode 100644 agents-api/agents_api/queries/docs/create_doc.py
create mode 100644 agents-api/agents_api/queries/docs/delete_doc.py
create mode 100644 agents-api/agents_api/queries/docs/embed_snippets.py
create mode 100644 agents-api/agents_api/queries/docs/get_doc.py
create mode 100644 agents-api/agents_api/queries/docs/list_docs.py
create mode 100644 agents-api/agents_api/queries/docs/mmr.py
create mode 100644 agents-api/agents_api/queries/docs/search_docs_by_embedding.py
create mode 100644 agents-api/agents_api/queries/docs/search_docs_by_text.py
create mode 100644 agents-api/agents_api/queries/docs/search_docs_hybrid.py
diff --git a/agents-api/agents_api/queries/docs/__init__.py b/agents-api/agents_api/queries/docs/__init__.py
new file mode 100644
index 000000000..0ba3db0d4
--- /dev/null
+++ b/agents-api/agents_api/queries/docs/__init__.py
@@ -0,0 +1,25 @@
+"""
+Module: agents_api/queries/docs
+
+This module manages document-related operations for agents and users. It is a core part of the document management system, enabling document creation, listing, deletion, and embedding of snippets for search and retrieval.
+
+Main functionalities include:
+- Creating new documents and associating them with agents or users.
+- Listing documents based on various criteria, including ownership and metadata filters.
+- Deleting documents by their unique identifiers.
+- Embedding document snippets for retrieval purposes.
+
+The module works with other parts of the application, such as the agents and users modules, to provide document search, retrieval, and management features.
+
+This docstring is meant to give new developers and contributors enough context to understand the module's role without diving into the code first.
+"""
+
+# ruff: noqa: F401, F403, F405
+
+from .create_doc import create_doc
+from .delete_doc import delete_doc
+from .embed_snippets import embed_snippets
+from .get_doc import get_doc
+from .list_docs import list_docs
+from .search_docs_by_embedding import search_docs_by_embedding
+from .search_docs_by_text import search_docs_by_text
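Callers are expected to go through these re-exports rather than the submodules. A usage sketch of the intended import surface (the connection_pool keyword mirrors how other pg_query-based helpers are invoked in the tests; the pool and IDs here are hypothetical):

```python
from uuid import UUID

from agents_api.queries.docs import get_doc, list_docs


async def demo(pool, developer_id: UUID, doc_id: UUID) -> None:
    # List the developer's most recent docs
    docs = await list_docs(
        developer_id=developer_id,
        limit=10,
        connection_pool=pool,
    )
    for doc in docs:
        print(doc.id, doc.title)

    # Fetch one doc directly
    doc = await get_doc(
        developer_id=developer_id,
        doc_id=doc_id,
        connection_pool=pool,
    )
    print(doc.content)
```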
diff --git a/agents-api/agents_api/queries/docs/create_doc.py b/agents-api/agents_api/queries/docs/create_doc.py
new file mode 100644
index 000000000..57be43bdf
--- /dev/null
+++ b/agents-api/agents_api/queries/docs/create_doc.py
@@ -0,0 +1,135 @@
+"""
+Timescale-based creation of docs.
+
+Mirrors the structure of create_file.py, but uses the docs/doc_owners tables.
+"""
+
+import base64
+import hashlib
+from typing import Any, Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+from uuid_extensions import uuid7
+
+from ...autogen.openapi_model import CreateDocRequest, Doc
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Base INSERT for docs
+doc_query = parse_one("""
+INSERT INTO docs (
+ developer_id,
+ doc_id,
+ title,
+ content,
+ index,
+ modality,
+ embedding_model,
+ embedding_dimensions,
+ language,
+ metadata
+)
+VALUES (
+ $1, -- developer_id
+ $2, -- doc_id
+ $3, -- title
+ $4, -- content
+ $5, -- index
+ $6, -- modality
+ $7, -- embedding_model
+ $8, -- embedding_dimensions
+ $9, -- language
+ $10 -- metadata (JSONB)
+)
+RETURNING *;
+""").sql(pretty=True)
+
+# Owner association query for doc_owners
+doc_owner_query = parse_one("""
+WITH inserted_owner AS (
+ INSERT INTO doc_owners (
+ developer_id,
+ doc_id,
+ owner_type,
+ owner_id
+ )
+ VALUES ($1, $2, $3, $4)
+ RETURNING doc_id
+)
+SELECT d.*
+FROM inserted_owner io
+JOIN docs d ON d.doc_id = io.doc_id;
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A document with this ID already exists for this developer",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified owner does not exist",
+ ),
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Developer or doc owner not found",
+ ),
+ }
+)
+@wrap_in_class(
+ Doc,
+ one=True,
+ transform=lambda d: {
+ **d,
+ "id": d["doc_id"],
+ # You could optionally return a computed hash or partial content if desired
+ },
+)
+@increase_counter("create_doc")
+@pg_query
+@beartype
+async def create_doc(
+ *,
+ developer_id: UUID,
+ doc_id: UUID | None = None,
+ data: CreateDocRequest,
+ owner_type: Literal["user", "agent", "org"] | None = None,
+ owner_id: UUID | None = None,
+) -> list[tuple[str, list]]:
+ """
+ Insert a new doc record into Timescale and optionally associate it with an owner.
+ """
+ # Generate a UUID if not provided
+ doc_id = doc_id or uuid7()
+
+ # Create the doc record
+ doc_params = [
+ developer_id,
+ doc_id,
+ data.title,
+ data.content,
+ data.index or 0, # fallback if no snippet index
+ data.modality or "text",
+ data.embedding_model or "none",
+ data.embedding_dimensions or 0,
+ data.language or "english",
+ data.metadata or {},
+ ]
+
+ queries = [(doc_query, doc_params)]
+
+ # If an owner is specified, associate it:
+ if owner_type and owner_id:
+ owner_params = [developer_id, doc_id, owner_type, owner_id]
+ queries.append((doc_owner_query, owner_params))
+
+ return queries
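Because create_doc returns a list of (query, params) tuples, the pg_query decorator can run the doc INSERT and the optional owner association together. A call sketch (pool plumbing follows the patterns used elsewhere; anything beyond the fields shown above is hypothetical):

```python
from uuid import UUID

from agents_api.autogen.openapi_model import CreateDocRequest
from agents_api.queries.docs import create_doc


async def demo(pool, developer_id: UUID, agent_id: UUID) -> None:
    doc = await create_doc(
        developer_id=developer_id,
        data=CreateDocRequest(
            title="Onboarding notes",
            content="Welcome to the team!",
            # Must be a JSON object, per ct_metadata_is_object on docs
            metadata={"source": "handbook"},
        ),
        owner_type="agent",  # also associates the doc via doc_owners
        owner_id=agent_id,
        connection_pool=pool,
    )
    print(doc.id)
```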
diff --git a/agents-api/agents_api/queries/docs/delete_doc.py b/agents-api/agents_api/queries/docs/delete_doc.py
new file mode 100644
index 000000000..d1e02faf1
--- /dev/null
+++ b/agents-api/agents_api/queries/docs/delete_doc.py
@@ -0,0 +1,77 @@
+"""
+Timescale-based deletion of a doc record.
+"""
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Delete doc query + ownership check
+delete_doc_query = parse_one("""
+WITH deleted_owners AS (
+ DELETE FROM doc_owners
+ WHERE developer_id = $1
+ AND doc_id = $2
+ AND (
+ ($3::text IS NULL AND $4::uuid IS NULL)
+ OR (owner_type = $3 AND owner_id = $4)
+ )
+)
+DELETE FROM docs
+WHERE developer_id = $1
+ AND doc_id = $2
+ AND (
+ $3::text IS NULL OR EXISTS (
+ SELECT 1 FROM doc_owners
+ WHERE developer_id = $1
+ AND doc_id = $2
+ AND owner_type = $3
+ AND owner_id = $4
+ )
+ )
+RETURNING doc_id;
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Doc not found",
+ )
+ }
+)
+@wrap_in_class(
+ ResourceDeletedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": d["doc_id"],
+ "deleted_at": utcnow(),
+ "jobs": [],
+ },
+)
+@pg_query
+@beartype
+async def delete_doc(
+ *,
+ developer_id: UUID,
+ doc_id: UUID,
+ owner_type: Literal["user", "agent", "org"] | None = None,
+ owner_id: UUID | None = None,
+) -> tuple[str, list]:
+ """
+ Deletes a doc (and associated doc_owners) for the given developer and doc_id.
+ If owner_type/owner_id is specified, only remove doc if that matches.
+ """
+ return (
+ delete_doc_query,
+ [developer_id, doc_id, owner_type, owner_id],
+ )
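The single statement removes the ownership rows in the CTE and then the doc itself, but the outer DELETE only fires when no owner filter was given or the filter matches an actual owner. A sketch of the two call modes (names hypothetical):

```python
from uuid import UUID

from agents_api.queries.docs import delete_doc


async def demo(pool, developer_id: UUID, doc_id: UUID, user_id: UUID) -> None:
    # Unconditional delete: both owner params stay NULL, so the
    # ($3::text IS NULL AND $4::uuid IS NULL) branch applies.
    await delete_doc(developer_id=developer_id, doc_id=doc_id, connection_pool=pool)

    # Scoped delete: removes the doc only if this user actually owns it.
    await delete_doc(
        developer_id=developer_id,
        doc_id=doc_id,
        owner_type="user",
        owner_id=user_id,
        connection_pool=pool,
    )
```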
diff --git a/agents-api/agents_api/queries/docs/embed_snippets.py b/agents-api/agents_api/queries/docs/embed_snippets.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents-api/agents_api/queries/docs/get_doc.py b/agents-api/agents_api/queries/docs/get_doc.py
new file mode 100644
index 000000000..a0345f5e3
--- /dev/null
+++ b/agents-api/agents_api/queries/docs/get_doc.py
@@ -0,0 +1,52 @@
+"""
+Timescale-based retrieval of a single doc record.
+"""
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import Doc
+from ..utils import pg_query, wrap_in_class
+
+doc_query = parse_one("""
+SELECT d.*
+FROM docs d
+LEFT JOIN doc_owners do ON d.developer_id = do.developer_id AND d.doc_id = do.doc_id
+WHERE d.developer_id = $1
+ AND d.doc_id = $2
+ AND (
+ ($3::text IS NULL AND $4::uuid IS NULL)
+ OR (do.owner_type = $3 AND do.owner_id = $4)
+ )
+LIMIT 1;
+""").sql(pretty=True)
+
+
+@wrap_in_class(
+ Doc,
+ one=True,
+ transform=lambda d: {
+ **d,
+ "id": d["doc_id"],
+ },
+)
+@pg_query
+@beartype
+async def get_doc(
+ *,
+ developer_id: UUID,
+ doc_id: UUID,
+ owner_type: Literal["user", "agent", "org"] | None = None,
+ owner_id: UUID | None = None
+) -> tuple[str, list]:
+ """
+ Fetch a single doc, optionally constrained to a given owner.
+ """
+ return (
+ doc_query,
+ [developer_id, doc_id, owner_type, owner_id],
+ )
diff --git a/agents-api/agents_api/queries/docs/list_docs.py b/agents-api/agents_api/queries/docs/list_docs.py
new file mode 100644
index 000000000..b145a1cbc
--- /dev/null
+++ b/agents-api/agents_api/queries/docs/list_docs.py
@@ -0,0 +1,91 @@
+"""
+Timescale-based listing of docs with optional owner filter and pagination.
+"""
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import Doc
+from ..utils import pg_query, wrap_in_class
+
+# Basic listing for all docs by developer
+developer_docs_query = parse_one("""
+SELECT d.*
+FROM docs d
+LEFT JOIN doc_owners do ON d.developer_id = do.developer_id AND d.doc_id = do.doc_id
+WHERE d.developer_id = $1
+ORDER BY
+    CASE WHEN $5 = 'asc' AND $4 = 'created_at' THEN d.created_at END ASC NULLS LAST,
+    CASE WHEN $5 = 'asc' AND $4 = 'updated_at' THEN d.updated_at END ASC NULLS LAST,
+    CASE WHEN $5 = 'desc' AND $4 = 'created_at' THEN d.created_at END DESC NULLS LAST,
+    CASE WHEN $5 = 'desc' AND $4 = 'updated_at' THEN d.updated_at END DESC NULLS LAST
+LIMIT $2
+OFFSET $3;
+""").sql(pretty=True)
+
+# Listing for docs associated with a specific owner
+owner_docs_query = parse_one("""
+SELECT d.*
+FROM docs d
+JOIN doc_owners do ON d.developer_id = do.developer_id AND d.doc_id = do.doc_id
+WHERE do.developer_id = $1
+ AND do.owner_id = $6
+ AND do.owner_type = $7
+ORDER BY
+    CASE WHEN $5 = 'asc' AND $4 = 'created_at' THEN d.created_at END ASC NULLS LAST,
+    CASE WHEN $5 = 'asc' AND $4 = 'updated_at' THEN d.updated_at END ASC NULLS LAST,
+    CASE WHEN $5 = 'desc' AND $4 = 'created_at' THEN d.created_at END DESC NULLS LAST,
+    CASE WHEN $5 = 'desc' AND $4 = 'updated_at' THEN d.updated_at END DESC NULLS LAST
+LIMIT $2
+OFFSET $3;
+""").sql(pretty=True)
+
+
+@wrap_in_class(
+ Doc,
+ one=False,
+ transform=lambda d: {
+ **d,
+ "id": d["doc_id"],
+ },
+)
+@pg_query
+@beartype
+async def list_docs(
+ *,
+ developer_id: UUID,
+ owner_id: UUID | None = None,
+ owner_type: Literal["user", "agent", "org"] | None = None,
+ limit: int = 100,
+ offset: int = 0,
+ sort_by: Literal["created_at", "updated_at"] = "created_at",
+ direction: Literal["asc", "desc"] = "desc",
+) -> tuple[str, list]:
+ """
+ Lists docs with optional owner filtering, pagination, and sorting.
+ """
+ if direction.lower() not in ["asc", "desc"]:
+ raise HTTPException(status_code=400, detail="Invalid sort direction")
+
+ if limit > 100 or limit < 1:
+ raise HTTPException(status_code=400, detail="Limit must be between 1 and 100")
+
+ if offset < 0:
+ raise HTTPException(status_code=400, detail="Offset must be >= 0")
+
+ params = [developer_id, limit, offset, sort_by, direction]
+ if owner_id and owner_type:
+ params.extend([owner_id, owner_type])
+ query = owner_docs_query
+ else:
+ query = developer_docs_query
+
+ return (query, params)
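The guards raise HTTP 400 before any SQL is built, so invalid pagination never reaches the database. A sketch of the failure path (pool hypothetical):

```python
from fastapi import HTTPException

from agents_api.queries.docs import list_docs


async def demo(pool, developer_id) -> None:
    try:
        await list_docs(developer_id=developer_id, limit=500, connection_pool=pool)
    except HTTPException as exc:
        print(exc.status_code, exc.detail)  # 400, "Limit must be between 1 and 100"
```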
diff --git a/agents-api/agents_api/queries/docs/mmr.py b/agents-api/agents_api/queries/docs/mmr.py
new file mode 100644
index 000000000..d214e8c04
--- /dev/null
+++ b/agents-api/agents_api/queries/docs/mmr.py
@@ -0,0 +1,109 @@
+from __future__ import annotations
+
+import logging
+from typing import Union
+
+import numpy as np
+
+Matrix = Union[list[list[float]], list[np.ndarray], np.ndarray]
+
+logger = logging.getLogger(__name__)
+
+
+def _cosine_similarity(x: Matrix, y: Matrix) -> np.ndarray:
+ """Row-wise cosine similarity between two equal-width matrices.
+
+ Args:
+ x: A matrix of shape (n, m).
+ y: A matrix of shape (k, m).
+
+ Returns:
+ A matrix of shape (n, k) where each element (i, j) is the cosine similarity
+ between the ith row of X and the jth row of Y.
+
+ Raises:
+ ValueError: If the number of columns in X and Y are not the same.
+ ImportError: If numpy is not installed.
+ """
+
+ if len(x) == 0 or len(y) == 0:
+ return np.array([])
+
+ x = [xx for xx in x if xx is not None]
+ y = [yy for yy in y if yy is not None]
+
+ x = np.array(x)
+ y = np.array(y)
+ if x.shape[1] != y.shape[1]:
+ msg = (
+ f"Number of columns in X and Y must be the same. X has shape {x.shape} "
+ f"and Y has shape {y.shape}."
+ )
+ raise ValueError(msg)
+ try:
+ import simsimd as simd # type: ignore
+
+ x = np.array(x, dtype=np.float32)
+ y = np.array(y, dtype=np.float32)
+ z = 1 - np.array(simd.cdist(x, y, metric="cosine"))
+ return z
+ except ImportError:
+ logger.debug(
+ "Unable to import simsimd, defaulting to NumPy implementation. If you want "
+ "to use simsimd please install with `pip install simsimd`."
+ )
+ x_norm = np.linalg.norm(x, axis=1)
+ y_norm = np.linalg.norm(y, axis=1)
+    # Ignore divide-by-zero runtime warnings; NaN/inf results are zeroed out below.
+ with np.errstate(divide="ignore", invalid="ignore"):
+ similarity = np.dot(x, y.T) / np.outer(x_norm, y_norm)
+ similarity[np.isnan(similarity) | np.isinf(similarity)] = 0.0
+ return similarity
+
+
+def maximal_marginal_relevance(
+ query_embedding: np.ndarray,
+ embedding_list: list,
+ lambda_mult: float = 0.5,
+ k: int = 4,
+) -> list[int]:
+ """Calculate maximal marginal relevance.
+
+ Args:
+ query_embedding: The query embedding.
+ embedding_list: A list of embeddings.
+ lambda_mult: The lambda parameter for MMR. Default is 0.5.
+ k: The number of embeddings to return. Default is 4.
+
+ Returns:
+ A list of indices of the embeddings to return.
+
+ Raises:
+ ImportError: If numpy is not installed.
+ """
+
+ if min(k, len(embedding_list)) <= 0:
+ return []
+ if query_embedding.ndim == 1:
+ query_embedding = np.expand_dims(query_embedding, axis=0)
+ similarity_to_query = _cosine_similarity(query_embedding, embedding_list)[0]
+ most_similar = int(np.argmax(similarity_to_query))
+ idxs = [most_similar]
+ selected = np.array([embedding_list[most_similar]])
+ while len(idxs) < min(k, len(embedding_list)):
+ best_score = -np.inf
+ idx_to_add = -1
+ similarity_to_selected = _cosine_similarity(embedding_list, selected)
+ for i, query_score in enumerate(similarity_to_query):
+ if i in idxs:
+ continue
+ redundant_score = max(similarity_to_selected[i])
+ equation_score = (
+ lambda_mult * query_score - (1 - lambda_mult) * redundant_score
+ )
+ if equation_score > best_score:
+ best_score = equation_score
+ idx_to_add = i
+ idxs.append(idx_to_add)
+ selected = np.append(selected, [embedding_list[idx_to_add]], axis=0)
+ return idxs
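maximal_marginal_relevance trades query relevance off against redundancy: lambda_mult=1.0 reduces to pure similarity ranking, while smaller values penalize picking near-duplicates of what has already been selected. A runnable toy example:

```python
import numpy as np

from agents_api.queries.docs.mmr import maximal_marginal_relevance

query = np.array([1.0, 0.0])
candidates = [
    np.array([0.99, 0.10]),  # very relevant
    np.array([0.98, 0.12]),  # near-duplicate of the first
    np.array([0.60, 0.80]),  # less relevant but diverse
]

# Pure relevance: the two near-duplicates are picked first.
print(maximal_marginal_relevance(query, candidates, lambda_mult=1.0, k=2))  # [0, 1]
# Balanced: the diverse candidate displaces the duplicate.
print(maximal_marginal_relevance(query, candidates, lambda_mult=0.3, k=2))  # [0, 2]
```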
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
new file mode 100644
index 000000000..c62188b61
--- /dev/null
+++ b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
@@ -0,0 +1,70 @@
+"""
+Timescale-based doc embedding search using the `embedding` column.
+"""
+
+import asyncpg
+from typing import Literal, List
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import Doc
+from ..utils import pg_query, wrap_in_class
+
+# If you're doing approximate ANN (DiskANN) or IVF, you might use a special function or hint.
+# For a basic vector distance search, you can do something like:
+search_docs_by_embedding_query = parse_one("""
+SELECT d.*,
+ (d.embedding <-> $3) AS distance
+FROM docs d
+LEFT JOIN doc_owners do
+ ON d.developer_id = do.developer_id
+ AND d.doc_id = do.doc_id
+WHERE d.developer_id = $1
+ AND (
+ ($4::text IS NULL AND $5::uuid IS NULL)
+ OR (do.owner_type = $4 AND do.owner_id = $5)
+ )
+ AND d.embedding IS NOT NULL
+ORDER BY d.embedding <-> $3
+LIMIT $2;
+""").sql(pretty=True)
+
+@wrap_in_class(
+ Doc,
+ one=False,
+ transform=lambda rec: {
+ **rec,
+ "id": rec["doc_id"],
+ },
+)
+@pg_query
+@beartype
+async def search_docs_by_embedding(
+ *,
+ developer_id: UUID,
+ query_embedding: List[float],
+ k: int = 10,
+ owner_type: Literal["user", "agent", "org"] | None = None,
+ owner_id: UUID | None = None,
+) -> tuple[str, list]:
+ """
+ Vector-based doc search:
+ - developer_id is required
+ - query_embedding: the vector to query
+ - k: number of results to return
+ - owner_type/owner_id: optional doc ownership filter
+ """
+ if k < 1:
+ raise HTTPException(status_code=400, detail="k must be >= 1")
+
+    # Reject empty embeddings; dimension checks are left to the database
+ if not query_embedding:
+ raise HTTPException(status_code=400, detail="Empty embedding provided")
+
+ return (
+ search_docs_by_embedding_query,
+ [developer_id, k, query_embedding, owner_type, owner_id],
+ )
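The `<->` operator sorts by vector distance, so the nearest embeddings come back first. Passing a plain Python list for $3 assumes a pgvector codec is registered on the connection, so treat this call sketch as a hedged illustration:

```python
from agents_api.queries.docs import search_docs_by_embedding


async def demo(pool, developer_id) -> None:
    query_embedding = [0.1] * 1024  # hypothetical model dimensionality
    results = await search_docs_by_embedding(
        developer_id=developer_id,
        query_embedding=query_embedding,
        k=5,
        connection_pool=pool,
    )
    for doc in results:
        print(doc.id, doc.distance)  # smaller distance = closer match
```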
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_text.py b/agents-api/agents_api/queries/docs/search_docs_by_text.py
new file mode 100644
index 000000000..c9a5a93e2
--- /dev/null
+++ b/agents-api/agents_api/queries/docs/search_docs_by_text.py
@@ -0,0 +1,65 @@
+"""
+Timescale-based doc text search using the `search_tsv` column.
+"""
+
+import asyncpg
+from typing import Literal
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import Doc
+from ..utils import pg_query, wrap_in_class
+
+search_docs_text_query = parse_one("""
+SELECT d.*,
+ ts_rank_cd(d.search_tsv, websearch_to_tsquery($3)) AS rank
+FROM docs d
+LEFT JOIN doc_owners do
+ ON d.developer_id = do.developer_id
+ AND d.doc_id = do.doc_id
+WHERE d.developer_id = $1
+ AND (
+ ($4::text IS NULL AND $5::uuid IS NULL)
+ OR (do.owner_type = $4 AND do.owner_id = $5)
+ )
+ AND d.search_tsv @@ websearch_to_tsquery($3)
+ORDER BY rank DESC
+LIMIT $2;
+""").sql(pretty=True)
+
+
+@wrap_in_class(
+ Doc,
+ one=False,
+ transform=lambda rec: {
+ **rec,
+ "id": rec["doc_id"],
+ },
+)
+@pg_query
+@beartype
+async def search_docs_by_text(
+ *,
+ developer_id: UUID,
+ query: str,
+ k: int = 10,
+ owner_type: Literal["user", "agent", "org"] | None = None,
+ owner_id: UUID | None = None,
+) -> tuple[str, list]:
+ """
+ Full-text search on docs using the search_tsv column.
+ - developer_id: required
+ - query: the text to look for
+ - k: max results
+ - owner_type / owner_id: optional doc ownership filter
+ """
+ if k < 1:
+ raise HTTPException(status_code=400, detail="k must be >= 1")
+
+ return (
+ search_docs_text_query,
+ [developer_id, k, query, owner_type, owner_id],
+ )
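websearch_to_tsquery accepts a loose, user-facing syntax: bare words are ANDed, quoted phrases must match adjacently, and a leading `-` negates a term. A call sketch (pool and IDs hypothetical):

```python
from agents_api.queries.docs import search_docs_by_text


async def demo(pool, developer_id) -> None:
    results = await search_docs_by_text(
        developer_id=developer_id,
        query='"user onboarding" -deprecated',  # phrase match, term excluded
        k=5,
        connection_pool=pool,
    )
    for doc in results:
        print(doc.id, doc.title)  # ranked by ts_rank_cd, best first
```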
diff --git a/agents-api/agents_api/queries/docs/search_docs_hybrid.py b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
new file mode 100644
index 000000000..9e8d84dc7
--- /dev/null
+++ b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
@@ -0,0 +1,159 @@
+"""
+Hybrid doc search that merges text search and embedding search results
+via a simple distribution-based score fusion or direct weighting in Python.
+"""
+
+from typing import Literal, List
+from uuid import UUID
+
+from beartype import beartype
+from fastapi import HTTPException
+
+from ...autogen.openapi_model import Doc
+from ..utils import run_concurrently
+from .search_docs_by_text import search_docs_by_text
+from .search_docs_by_embedding import search_docs_by_embedding
+
+def dbsf_normalize(scores: List[float]) -> List[float]:
+ """
+ Example distribution-based normalization: clamp each score
+ from (mean - 3*stddev) to (mean + 3*stddev) and scale to 0..1
+ """
+ import statistics
+ if len(scores) < 2:
+ return scores
+ m = statistics.mean(scores)
+ sd = statistics.pstdev(scores) # population std
+ if sd == 0:
+ return scores
+ upper = m + 3*sd
+ lower = m - 3*sd
+ def clamp_scale(v):
+ c = min(upper, max(lower, v))
+ return (c - lower) / (upper - lower)
+ return [clamp_scale(s) for s in scores]
+
+@beartype
+def fuse_results(
+ text_docs: List[Doc], embedding_docs: List[Doc], alpha: float
+) -> List[Doc]:
+ """
+ Merges text search results (descending by text rank) with
+ embedding results (descending by closeness or inverse distance).
+ alpha ~ how much to weigh the embedding score
+ """
+    # Each doc carries a "distance" from its search: embedding search stores
+    # raw vector distance (lower is better) and text search stores a rank or
+    # negative distance. Unify both into doc_id -> score maps where bigger
+    # means better, e.g. text_score = -distance.
+ text_scores = {}
+ embed_scores = {}
+ for doc in text_docs:
+        # Negate so that a smaller stored distance yields a larger score
+ text_scores[doc.id] = float(-doc.distance if doc.distance else 0)
+
+ for doc in embedding_docs:
+ # Lower distance => better, so we do embed_score = -distance
+ embed_scores[doc.id] = float(-doc.distance if doc.distance else 0)
+
+ # Normalize them
+ text_vals = list(text_scores.values())
+ embed_vals = list(embed_scores.values())
+ text_vals_norm = dbsf_normalize(text_vals)
+ embed_vals_norm = dbsf_normalize(embed_vals)
+
+ # Map them back
+ t_keys = list(text_scores.keys())
+ for i, key in enumerate(t_keys):
+ text_scores[key] = text_vals_norm[i]
+ e_keys = list(embed_scores.keys())
+ for i, key in enumerate(e_keys):
+ embed_scores[key] = embed_vals_norm[i]
+
+ # Gather all doc IDs
+ all_ids = set(text_scores.keys()) | set(embed_scores.keys())
+
+ # Weighted sum => combined
+ out = []
+ for doc_id in all_ids:
+ # text and embed might be missing doc_id => 0
+ t_score = text_scores.get(doc_id, 0)
+ e_score = embed_scores.get(doc_id, 0)
+ combined = alpha * e_score + (1 - alpha) * t_score
+ # We'll store final "distance" as -(combined) so bigger combined => smaller distance
+ out.append((doc_id, combined))
+
+ # Sort descending by combined
+ out.sort(key=lambda x: x[1], reverse=True)
+
+ # Convert to doc objects. We can pick from text_docs or embedding_docs or whichever is found.
+ # If present in both, we can merge fields. For simplicity, just pick from text_docs then fallback embedding_docs.
+
+ # Create a quick ID->doc map
+ text_map = {d.id: d for d in text_docs}
+ embed_map = {d.id: d for d in embedding_docs}
+
+ final_docs = []
+ for doc_id, score in out:
+ doc = text_map.get(doc_id) or embed_map.get(doc_id)
+        doc = doc.model_copy()  # Pydantic v2; use .copy() on v1 models
+ doc.distance = float(-score) # so a higher combined => smaller distance
+ final_docs.append(doc)
+ return final_docs
+
+
+@beartype
+async def search_docs_hybrid(
+ developer_id: UUID,
+ text_query: str = "",
+    embedding: List[float] | None = None,
+ k: int = 10,
+ alpha: float = 0.5,
+ owner_type: Literal["user", "agent", "org"] | None = None,
+ owner_id: UUID | None = None,
+) -> List[Doc]:
+ """
+ Hybrid text-and-embedding doc search. We get top-K from each approach,
+ then fuse them client-side. Adjust concurrency or approach as you like.
+ """
+ # We'll dispatch two queries in parallel
+ # (One full-text, one embedding-based) each limited to K
+    async def _empty() -> List[Doc]:
+        # Placeholder coroutine so asyncio.gather below only ever
+        # receives awaitables (a bare list would raise TypeError)
+        return []
+
+    tasks = []
+ if text_query.strip():
+ tasks.append(
+ search_docs_by_text(
+ developer_id=developer_id,
+ query=text_query,
+ k=k,
+ owner_type=owner_type,
+ owner_id=owner_id,
+ )
+ )
+ else:
+        tasks.append(_empty())  # no text results if query is empty
+
+ if embedding and any(embedding):
+ tasks.append(
+ search_docs_by_embedding(
+ developer_id=developer_id,
+ query_embedding=embedding,
+ k=k,
+ owner_type=owner_type,
+ owner_id=owner_id,
+ )
+ )
+ else:
+        tasks.append(_empty())  # no embedding results without a vector
+
+ # Run concurrently (or sequentially, if you prefer)
+ # If you have a 'run_concurrently' from your old code, you can do:
+ # text_results, embed_results = await run_concurrently([task1, task2])
+ # Otherwise just do them in parallel with e.g. asyncio.gather:
+ from asyncio import gather
+ text_results, embed_results = await gather(*tasks)
+
+ # fuse them
+ fused = fuse_results(text_results, embed_results, alpha)
+ # Then pick top K overall
+ return fused[:k]
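The fusion step is plain Python, so it can be checked in isolation: dbsf_normalize rescales each score list into 0..1 inside a 3-sigma window, and alpha then weights the normalized embedding scores against the text scores. A runnable toy check of the normalization, with the function reproduced standalone (no database needed):

```python
import statistics


def dbsf_normalize(scores: list[float]) -> list[float]:
    # Same logic as above, reproduced for a self-contained demo
    if len(scores) < 2:
        return scores
    m = statistics.mean(scores)
    sd = statistics.pstdev(scores)
    if sd == 0:
        return scores
    upper, lower = m + 3 * sd, m - 3 * sd
    return [(min(upper, max(lower, s)) - lower) / (upper - lower) for s in scores]


print(dbsf_normalize([0.2, 0.4, 0.9, 12.0]))
# All values land in [0, 1] and keep their relative order; anything
# beyond the 3-sigma window would be clamped to the boundary.
```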
From b427e38576eacd709e536cf24d0f65c0ba1a56f0 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Fri, 20 Dec 2024 01:26:00 +0000
Subject: [PATCH 091/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/docs/delete_doc.py | 1 +
agents-api/agents_api/queries/docs/get_doc.py | 3 ++-
agents-api/agents_api/queries/docs/list_docs.py | 1 +
.../queries/docs/search_docs_by_embedding.py | 5 +++--
.../agents_api/queries/docs/search_docs_by_text.py | 2 +-
.../agents_api/queries/docs/search_docs_hybrid.py | 14 ++++++++++----
6 files changed, 18 insertions(+), 8 deletions(-)
diff --git a/agents-api/agents_api/queries/docs/delete_doc.py b/agents-api/agents_api/queries/docs/delete_doc.py
index d1e02faf1..9d2075600 100644
--- a/agents-api/agents_api/queries/docs/delete_doc.py
+++ b/agents-api/agents_api/queries/docs/delete_doc.py
@@ -1,6 +1,7 @@
"""
Timescale-based deletion of a doc record.
"""
+
from typing import Literal
from uuid import UUID
diff --git a/agents-api/agents_api/queries/docs/get_doc.py b/agents-api/agents_api/queries/docs/get_doc.py
index a0345f5e3..35d692c84 100644
--- a/agents-api/agents_api/queries/docs/get_doc.py
+++ b/agents-api/agents_api/queries/docs/get_doc.py
@@ -1,6 +1,7 @@
"""
Timescale-based retrieval of a single doc record.
"""
+
from typing import Literal
from uuid import UUID
@@ -41,7 +42,7 @@ async def get_doc(
developer_id: UUID,
doc_id: UUID,
owner_type: Literal["user", "agent", "org"] | None = None,
- owner_id: UUID | None = None
+ owner_id: UUID | None = None,
) -> tuple[str, list]:
"""
Fetch a single doc, optionally constrained to a given owner.
diff --git a/agents-api/agents_api/queries/docs/list_docs.py b/agents-api/agents_api/queries/docs/list_docs.py
index b145a1cbc..678c1a5e6 100644
--- a/agents-api/agents_api/queries/docs/list_docs.py
+++ b/agents-api/agents_api/queries/docs/list_docs.py
@@ -1,6 +1,7 @@
"""
Timescale-based listing of docs with optional owner filter and pagination.
"""
+
from typing import Literal
from uuid import UUID
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
index c62188b61..af89cc1b8 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
@@ -2,10 +2,10 @@
Timescale-based doc embedding search using the `embedding` column.
"""
-import asyncpg
-from typing import Literal, List
+from typing import List, Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
@@ -32,6 +32,7 @@
LIMIT $2;
""").sql(pretty=True)
+
@wrap_in_class(
Doc,
one=False,
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_text.py b/agents-api/agents_api/queries/docs/search_docs_by_text.py
index c9a5a93e2..eed74e54b 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_text.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_text.py
@@ -2,10 +2,10 @@
Timescale-based doc text search using the `search_tsv` column.
"""
-import asyncpg
from typing import Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
diff --git a/agents-api/agents_api/queries/docs/search_docs_hybrid.py b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
index 9e8d84dc7..ae107419d 100644
--- a/agents-api/agents_api/queries/docs/search_docs_hybrid.py
+++ b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
@@ -3,7 +3,7 @@
via a simple distribution-based score fusion or direct weighting in Python.
"""
-from typing import Literal, List
+from typing import List, Literal
from uuid import UUID
from beartype import beartype
@@ -11,8 +11,9 @@
from ...autogen.openapi_model import Doc
from ..utils import run_concurrently
-from .search_docs_by_text import search_docs_by_text
from .search_docs_by_embedding import search_docs_by_embedding
+from .search_docs_by_text import search_docs_by_text
+
def dbsf_normalize(scores: List[float]) -> List[float]:
"""
@@ -20,19 +21,23 @@ def dbsf_normalize(scores: List[float]) -> List[float]:
from (mean - 3*stddev) to (mean + 3*stddev) and scale to 0..1
"""
import statistics
+
if len(scores) < 2:
return scores
m = statistics.mean(scores)
sd = statistics.pstdev(scores) # population std
if sd == 0:
return scores
- upper = m + 3*sd
- lower = m - 3*sd
+ upper = m + 3 * sd
+ lower = m - 3 * sd
+
def clamp_scale(v):
c = min(upper, max(lower, v))
return (c - lower) / (upper - lower)
+
return [clamp_scale(s) for s in scores]
+
@beartype
def fuse_results(
text_docs: List[Doc], embedding_docs: List[Doc], alpha: float
@@ -151,6 +156,7 @@ async def search_docs_hybrid(
# text_results, embed_results = await run_concurrently([task1, task2])
# Otherwise just do them in parallel with e.g. asyncio.gather:
from asyncio import gather
+
text_results, embed_results = await gather(*tasks)
# fuse them
From 48439d459af9ede3817b59515dee432de99d5f3f Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Wed, 18 Dec 2024 15:39:35 +0300
Subject: [PATCH 092/274] chore: Move ti queries directory
---
.../models/chat/get_cached_response.py | 15 ---------------
.../models/chat/set_cached_response.py | 19 -------------------
.../{models => queries}/chat/__init__.py | 2 --
.../chat/gather_messages.py | 0
.../chat/prepare_chat_context.py | 15 +++++++--------
5 files changed, 7 insertions(+), 44 deletions(-)
delete mode 100644 agents-api/agents_api/models/chat/get_cached_response.py
delete mode 100644 agents-api/agents_api/models/chat/set_cached_response.py
rename agents-api/agents_api/{models => queries}/chat/__init__.py (92%)
rename agents-api/agents_api/{models => queries}/chat/gather_messages.py (100%)
rename agents-api/agents_api/{models => queries}/chat/prepare_chat_context.py (92%)
diff --git a/agents-api/agents_api/models/chat/get_cached_response.py b/agents-api/agents_api/models/chat/get_cached_response.py
deleted file mode 100644
index 368c88567..000000000
--- a/agents-api/agents_api/models/chat/get_cached_response.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from beartype import beartype
-
-from ..utils import cozo_query
-
-
-@cozo_query
-@beartype
-def get_cached_response(key: str) -> tuple[str, dict]:
- query = """
- input[key] <- [[$key]]
- ?[key, value] := input[key], *session_cache{key, value}
- :limit 1
- """
-
- return (query, {"key": key})
diff --git a/agents-api/agents_api/models/chat/set_cached_response.py b/agents-api/agents_api/models/chat/set_cached_response.py
deleted file mode 100644
index 8625f3f1b..000000000
--- a/agents-api/agents_api/models/chat/set_cached_response.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from beartype import beartype
-
-from ..utils import cozo_query
-
-
-@cozo_query
-@beartype
-def set_cached_response(key: str, value: dict) -> tuple[str, dict]:
- query = """
- ?[key, value] <- [[$key, $value]]
-
- :insert session_cache {
- key => value
- }
-
- :returning
- """
-
- return (query, {"key": key, "value": value})
diff --git a/agents-api/agents_api/models/chat/__init__.py b/agents-api/agents_api/queries/chat/__init__.py
similarity index 92%
rename from agents-api/agents_api/models/chat/__init__.py
rename to agents-api/agents_api/queries/chat/__init__.py
index 428b72572..2c05b4f8b 100644
--- a/agents-api/agents_api/models/chat/__init__.py
+++ b/agents-api/agents_api/queries/chat/__init__.py
@@ -17,6 +17,4 @@
# ruff: noqa: F401, F403, F405
from .gather_messages import gather_messages
-from .get_cached_response import get_cached_response
from .prepare_chat_context import prepare_chat_context
-from .set_cached_response import set_cached_response
diff --git a/agents-api/agents_api/models/chat/gather_messages.py b/agents-api/agents_api/queries/chat/gather_messages.py
similarity index 100%
rename from agents-api/agents_api/models/chat/gather_messages.py
rename to agents-api/agents_api/queries/chat/gather_messages.py
diff --git a/agents-api/agents_api/models/chat/prepare_chat_context.py b/agents-api/agents_api/queries/chat/prepare_chat_context.py
similarity index 92%
rename from agents-api/agents_api/models/chat/prepare_chat_context.py
rename to agents-api/agents_api/queries/chat/prepare_chat_context.py
index f77686d7a..4731618f8 100644
--- a/agents-api/agents_api/models/chat/prepare_chat_context.py
+++ b/agents-api/agents_api/queries/chat/prepare_chat_context.py
@@ -3,7 +3,6 @@
from beartype import beartype
from fastapi import HTTPException
-from pycozo.client import QueryException
from pydantic import ValidationError
from ...common.protocol.sessions import ChatContext, make_session
@@ -22,13 +21,13 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+# TODO: implement this part
+# @rewrap_exceptions(
+# {
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
ChatContext,
one=True,
From 7b6c502eb1de5580c93dd9e38f59ab3bf5512878 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Thu, 19 Dec 2024 15:34:37 +0300
Subject: [PATCH 093/274] feat: Add prepare chat context query
---
.../queries/chat/gather_messages.py | 12 +-
.../queries/chat/prepare_chat_context.py | 225 ++++++++++--------
2 files changed, 129 insertions(+), 108 deletions(-)
diff --git a/agents-api/agents_api/queries/chat/gather_messages.py b/agents-api/agents_api/queries/chat/gather_messages.py
index 28dc6607f..34a7c564f 100644
--- a/agents-api/agents_api/queries/chat/gather_messages.py
+++ b/agents-api/agents_api/queries/chat/gather_messages.py
@@ -3,18 +3,17 @@
from beartype import beartype
from fastapi import HTTPException
-from pycozo.client import QueryException
from pydantic import ValidationError
from ...autogen.openapi_model import ChatInput, DocReference, History
from ...clients import litellm
from ...common.protocol.developers import Developer
from ...common.protocol.sessions import ChatContext
-from ..docs.search_docs_by_embedding import search_docs_by_embedding
-from ..docs.search_docs_by_text import search_docs_by_text
-from ..docs.search_docs_hybrid import search_docs_hybrid
-from ..entry.get_history import get_history
-from ..session.get_session import get_session
+# from ..docs.search_docs_by_embedding import search_docs_by_embedding
+# from ..docs.search_docs_by_text import search_docs_by_text
+# from ..docs.search_docs_hybrid import search_docs_hybrid
+# from ..entry.get_history import get_history
+from ..sessions.get_session import get_session
from ..utils import (
partialclass,
rewrap_exceptions,
@@ -25,7 +24,6 @@
@rewrap_exceptions(
{
- QueryException: partialclass(HTTPException, status_code=400),
ValidationError: partialclass(HTTPException, status_code=400),
TypeError: partialclass(HTTPException, status_code=400),
}
diff --git a/agents-api/agents_api/queries/chat/prepare_chat_context.py b/agents-api/agents_api/queries/chat/prepare_chat_context.py
index 4731618f8..23926ea4c 100644
--- a/agents-api/agents_api/queries/chat/prepare_chat_context.py
+++ b/agents-api/agents_api/queries/chat/prepare_chat_context.py
@@ -2,18 +2,10 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
-from pydantic import ValidationError
from ...common.protocol.sessions import ChatContext, make_session
-from ..session.prepare_session_data import prepare_session_data
from ..utils import (
- cozo_query,
- fix_uuid_if_present,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
@@ -21,17 +13,107 @@
T = TypeVar("T")
-# TODO: implement this part
-# @rewrap_exceptions(
-# {
-# ValidationError: partialclass(HTTPException, status_code=400),
-# TypeError: partialclass(HTTPException, status_code=400),
-# }
-# )
-@wrap_in_class(
- ChatContext,
- one=True,
- transform=lambda d: {
+query = """
+SELECT * FROM
+(
+ SELECT jsonb_agg(u) AS users FROM (
+ SELECT
+ session_lookup.participant_id,
+ users.user_id AS id,
+ users.developer_id,
+ users.name,
+ users.about,
+ users.created_at,
+ users.updated_at,
+ users.metadata
+ FROM session_lookup
+ INNER JOIN users ON session_lookup.participant_id = users.user_id
+ WHERE
+ session_lookup.developer_id = $1 AND
+ session_id = $2 AND
+ session_lookup.participant_type = 'user'
+ ) u
+) AS users,
+(
+ SELECT jsonb_agg(a) AS agents FROM (
+ SELECT
+ session_lookup.participant_id,
+ agents.agent_id AS id,
+ agents.developer_id,
+ agents.canonical_name,
+ agents.name,
+ agents.about,
+ agents.instructions,
+ agents.model,
+ agents.created_at,
+ agents.updated_at,
+ agents.metadata,
+ agents.default_settings
+ FROM session_lookup
+ INNER JOIN agents ON session_lookup.participant_id = agents.agent_id
+ WHERE
+ session_lookup.developer_id = $1 AND
+ session_id = $2 AND
+ session_lookup.participant_type = 'agent'
+ ) a
+) AS agents,
+(
+ SELECT to_jsonb(s) AS session FROM (
+ SELECT
+ sessions.session_id AS id,
+ sessions.developer_id,
+ sessions.situation,
+ sessions.system_template,
+ sessions.created_at,
+ sessions.metadata,
+ sessions.render_templates,
+ sessions.token_budget,
+ sessions.context_overflow,
+ sessions.forward_tool_calls,
+ sessions.recall_options
+ FROM sessions
+ WHERE
+ developer_id = $1 AND
+ session_id = $2
+ LIMIT 1
+ ) s
+) AS session,
+(
+ SELECT jsonb_agg(r) AS toolsets FROM (
+ SELECT
+ session_lookup.participant_id,
+ tools.tool_id as id,
+ tools.developer_id,
+ tools.agent_id,
+ tools.task_id,
+ tools.task_version,
+ tools.type,
+ tools.name,
+ tools.description,
+ tools.spec,
+ tools.updated_at,
+ tools.created_at
+ FROM session_lookup
+ INNER JOIN tools ON session_lookup.participant_id = tools.agent_id
+ WHERE
+ session_lookup.developer_id = $1 AND
+ session_id = $2 AND
+ session_lookup.participant_type = 'agent'
+ ) r
+) AS toolsets
+"""
+
+
+def _transform(d):
+ toolsets = {}
+ for tool in d["toolsets"]:
+ agent_id = tool["agent_id"]
+ if agent_id in toolsets:
+ toolsets[agent_id].append(tool)
+ else:
+ toolsets[agent_id] = [tool]
+
+ return {
**d,
"session": make_session(
agents=[a["id"] for a in d["agents"]],
@@ -40,103 +122,44 @@
),
"toolsets": [
{
- **ts,
+ "agent_id": agent_id,
"tools": [
{
tool["type"]: tool.pop("spec"),
**tool,
}
- for tool in map(fix_uuid_if_present, ts["tools"])
+ for tool in tools
],
}
- for ts in d["toolsets"]
+ for agent_id, tools in toolsets.items()
],
- },
+ }
+
+
+# TODO: implement this part
+# @rewrap_exceptions(
+# {
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
+@wrap_in_class(
+ ChatContext,
+ one=True,
+ transform=_transform,
)
-@cozo_query
+@pg_query
@beartype
-def prepare_chat_context(
+async def prepare_chat_context(
*,
developer_id: UUID,
session_id: UUID,
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
"""
Executes a complex query to retrieve memory context based on session ID.
"""
- [*_, session_data_query], sd_vars = prepare_session_data.__wrapped__(
- developer_id=developer_id, session_id=session_id
- )
-
- session_data_fields = ("session", "agents", "users")
-
- session_data_query += """
- :create _session_data_json {
- agents: [Json],
- users: [Json],
- session: Json,
- }
- """
-
- toolsets_query = """
- input[session_id] <- [[to_uuid($session_id)]]
-
- tools_by_agent[agent_id, collect(tool)] :=
- input[session_id],
- *session_lookup{
- session_id,
- participant_id: agent_id,
- participant_type: "agent",
- },
-
- *tools { agent_id, tool_id, name, type, spec, description, updated_at, created_at },
- tool = {
- "id": tool_id,
- "name": name,
- "type": type,
- "spec": spec,
- "description": description,
- "updated_at": updated_at,
- "created_at": created_at,
- }
-
- agent_toolsets[collect(toolset)] :=
- tools_by_agent[agent_id, tools],
- toolset = {
- "agent_id": agent_id,
- "tools": tools,
- }
-
- ?[toolsets] :=
- agent_toolsets[toolsets]
-
- :create _toolsets_json {
- toolsets: [Json],
- }
- """
-
- combine_query = f"""
- ?[{', '.join(session_data_fields)}, toolsets] :=
- *_session_data_json {{ {', '.join(session_data_fields)} }},
- *_toolsets_json {{ toolsets }}
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- session_data_query,
- toolsets_query,
- combine_query,
- ]
-
return (
- queries,
- {
- "session_id": str(session_id),
- **sd_vars,
- },
+ [query],
+ [developer_id, session_id],
)
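
Note on _transform above: the single SQL statement returns one row whose
toolsets column is a flat jsonb array of tool records, one element per
(agent, tool) pair, and the transform regroups them per agent. A
self-contained sketch with an invented row:

    row = {"toolsets": [
        {"agent_id": "a1", "type": "function", "spec": {"name": "search"}},
        {"agent_id": "a1", "type": "function", "spec": {"name": "summarize"}},
    ]}

    grouped = {}
    for tool in row["toolsets"]:
        grouped.setdefault(tool["agent_id"], []).append(tool)

    toolsets = [
        {"agent_id": agent_id,
         # Re-key each tool so its spec lands under its type, as in the patch.
         "tools": [{t["type"]: t.pop("spec"), **t} for t in tools]}
        for agent_id, tools in grouped.items()
    ]
    # -> [{"agent_id": "a1", "tools": [{"function": {"name": "search"}, ...}, ...]}]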
From ca12d656e2487ce107b5db10fab8427e6ac9ec3f Mon Sep 17 00:00:00 2001
From: whiterabbit1983
Date: Thu, 19 Dec 2024 12:38:09 +0000
Subject: [PATCH 094/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/chat/gather_messages.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/agents-api/agents_api/queries/chat/gather_messages.py b/agents-api/agents_api/queries/chat/gather_messages.py
index 34a7c564f..4fd574368 100644
--- a/agents-api/agents_api/queries/chat/gather_messages.py
+++ b/agents-api/agents_api/queries/chat/gather_messages.py
@@ -9,6 +9,7 @@
from ...clients import litellm
from ...common.protocol.developers import Developer
from ...common.protocol.sessions import ChatContext
+
# from ..docs.search_docs_by_embedding import search_docs_by_embedding
# from ..docs.search_docs_by_text import search_docs_by_text
# from ..docs.search_docs_hybrid import search_docs_hybrid
From 0aecd613642c3344520a102610f0bc1ddd3371f8 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Thu, 19 Dec 2024 15:49:10 +0300
Subject: [PATCH 095/274] feat: Add SQL validation
---
agents-api/agents_api/exceptions.py | 9 ++
.../queries/chat/prepare_chat_context.py | 90 ++++++++++---------
2 files changed, 56 insertions(+), 43 deletions(-)
diff --git a/agents-api/agents_api/exceptions.py b/agents-api/agents_api/exceptions.py
index 615958a87..f6fcc4741 100644
--- a/agents-api/agents_api/exceptions.py
+++ b/agents-api/agents_api/exceptions.py
@@ -49,3 +49,12 @@ class FailedEncodingSentinel:
"""Sentinel object returned when failed to encode payload."""
payload_data: bytes
+
+
+class QueriesBaseException(AgentsBaseException):
+ pass
+
+
+class InvalidSQLQuery(QueriesBaseException):
+ def __init__(self, query_name: str):
+ super().__init__(f"invalid query: {query_name}")
diff --git a/agents-api/agents_api/queries/chat/prepare_chat_context.py b/agents-api/agents_api/queries/chat/prepare_chat_context.py
index 23926ea4c..1d9bd52fb 100644
--- a/agents-api/agents_api/queries/chat/prepare_chat_context.py
+++ b/agents-api/agents_api/queries/chat/prepare_chat_context.py
@@ -1,9 +1,11 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
from ...common.protocol.sessions import ChatContext, make_session
+from ...exceptions import InvalidSQLQuery
from ..utils import (
pg_query,
wrap_in_class,
@@ -13,19 +15,19 @@
T = TypeVar("T")
-query = """
-SELECT * FROM
+sql_query = sqlvalidator.parse(
+ """SELECT * FROM
(
SELECT jsonb_agg(u) AS users FROM (
SELECT
session_lookup.participant_id,
users.user_id AS id,
- users.developer_id,
- users.name,
- users.about,
- users.created_at,
- users.updated_at,
- users.metadata
+ users.developer_id,
+ users.name,
+ users.about,
+ users.created_at,
+ users.updated_at,
+ users.metadata
FROM session_lookup
INNER JOIN users ON session_lookup.participant_id = users.user_id
WHERE
@@ -39,16 +41,16 @@
SELECT
session_lookup.participant_id,
agents.agent_id AS id,
- agents.developer_id,
- agents.canonical_name,
- agents.name,
- agents.about,
- agents.instructions,
- agents.model,
- agents.created_at,
- agents.updated_at,
- agents.metadata,
- agents.default_settings
+ agents.developer_id,
+ agents.canonical_name,
+ agents.name,
+ agents.about,
+ agents.instructions,
+ agents.model,
+ agents.created_at,
+ agents.updated_at,
+ agents.metadata,
+ agents.default_settings
FROM session_lookup
INNER JOIN agents ON session_lookup.participant_id = agents.agent_id
WHERE
@@ -58,24 +60,24 @@
) a
) AS agents,
(
- SELECT to_jsonb(s) AS session FROM (
+ SELECT to_jsonb(s) AS session FROM (
SELECT
sessions.session_id AS id,
- sessions.developer_id,
- sessions.situation,
- sessions.system_template,
- sessions.created_at,
- sessions.metadata,
- sessions.render_templates,
- sessions.token_budget,
- sessions.context_overflow,
- sessions.forward_tool_calls,
- sessions.recall_options
+ sessions.developer_id,
+ sessions.situation,
+ sessions.system_template,
+ sessions.created_at,
+ sessions.metadata,
+ sessions.render_templates,
+ sessions.token_budget,
+ sessions.context_overflow,
+ sessions.forward_tool_calls,
+ sessions.recall_options
FROM sessions
WHERE
developer_id = $1 AND
session_id = $2
- LIMIT 1
+ LIMIT 1
) s
) AS session,
(
@@ -83,16 +85,16 @@
SELECT
session_lookup.participant_id,
tools.tool_id as id,
- tools.developer_id,
- tools.agent_id,
- tools.task_id,
- tools.task_version,
- tools.type,
- tools.name,
- tools.description,
- tools.spec,
- tools.updated_at,
- tools.created_at
+ tools.developer_id,
+ tools.agent_id,
+ tools.task_id,
+ tools.task_version,
+ tools.type,
+ tools.name,
+ tools.description,
+ tools.spec,
+ tools.updated_at,
+ tools.created_at
FROM session_lookup
INNER JOIN tools ON session_lookup.participant_id = tools.agent_id
WHERE
@@ -100,8 +102,10 @@
session_id = $2 AND
session_lookup.participant_type = 'agent'
) r
-) AS toolsets
-"""
+) AS toolsets"""
+)
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("prepare_chat_context")
def _transform(d):
@@ -160,6 +164,6 @@ async def prepare_chat_context(
"""
return (
- [query],
+ [sql_query.format()],
[developer_id, session_id],
)
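
The pattern introduced in this patch, parsing each SQL string once at import
time and raising InvalidSQLQuery if it does not validate, means a malformed
query fails the process at startup rather than at request time. sqlvalidator
exposes exactly the three calls used here (parse, is_valid, format); the
pattern could be factored into a small helper, sketched below (the helper
name is invented):

    import sqlvalidator

    from agents_api.exceptions import InvalidSQLQuery

    def validated(query_name: str, sql: str) -> str:
        """Parse once at import time; fail fast on malformed SQL."""
        parsed = sqlvalidator.parse(sql)
        if not parsed.is_valid():
            raise InvalidSQLQuery(query_name)
        return parsed.format()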
From 0d288c43ab50c5a855680ef02d41d1147853a310 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 11:49:21 +0300
Subject: [PATCH 096/274] chore: Import other required queries
---
agents-api/agents_api/queries/chat/gather_messages.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/agents-api/agents_api/queries/chat/gather_messages.py b/agents-api/agents_api/queries/chat/gather_messages.py
index 4fd574368..94d5fe71a 100644
--- a/agents-api/agents_api/queries/chat/gather_messages.py
+++ b/agents-api/agents_api/queries/chat/gather_messages.py
@@ -10,10 +10,10 @@
from ...common.protocol.developers import Developer
from ...common.protocol.sessions import ChatContext
-# from ..docs.search_docs_by_embedding import search_docs_by_embedding
-# from ..docs.search_docs_by_text import search_docs_by_text
-# from ..docs.search_docs_hybrid import search_docs_hybrid
-# from ..entry.get_history import get_history
+from ..docs.search_docs_by_embedding import search_docs_by_embedding
+from ..docs.search_docs_by_text import search_docs_by_text
+from ..docs.search_docs_hybrid import search_docs_hybrid
+from ..entries.get_history import get_history
from ..sessions.get_session import get_session
from ..utils import (
partialclass,
From 473387bb2325cc6b0f0af96069732c3a2b46db7a Mon Sep 17 00:00:00 2001
From: whiterabbit1983
Date: Fri, 20 Dec 2024 08:50:13 +0000
Subject: [PATCH 097/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/chat/gather_messages.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/agents-api/agents_api/queries/chat/gather_messages.py b/agents-api/agents_api/queries/chat/gather_messages.py
index 94d5fe71a..cbf3bf209 100644
--- a/agents-api/agents_api/queries/chat/gather_messages.py
+++ b/agents-api/agents_api/queries/chat/gather_messages.py
@@ -9,7 +9,6 @@
from ...clients import litellm
from ...common.protocol.developers import Developer
from ...common.protocol.sessions import ChatContext
-
from ..docs.search_docs_by_embedding import search_docs_by_embedding
from ..docs.search_docs_by_text import search_docs_by_text
from ..docs.search_docs_hybrid import search_docs_hybrid
From 15659c57a0336ea9dff974b69f831ec5dddb5efc Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 12:37:19 +0300
Subject: [PATCH 098/274] chore: Move queries to another folder
---
.../{models => queries}/tools/__init__.py | 0
.../{models => queries}/tools/create_tools.py | 21 +++++++++----------
.../{models => queries}/tools/delete_tool.py | 0
.../{models => queries}/tools/get_tool.py | 0
.../tools/get_tool_args_from_metadata.py | 0
.../{models => queries}/tools/list_tools.py | 0
.../{models => queries}/tools/patch_tool.py | 0
.../{models => queries}/tools/update_tool.py | 0
8 files changed, 10 insertions(+), 11 deletions(-)
rename agents-api/agents_api/{models => queries}/tools/__init__.py (100%)
rename agents-api/agents_api/{models => queries}/tools/create_tools.py (89%)
rename agents-api/agents_api/{models => queries}/tools/delete_tool.py (100%)
rename agents-api/agents_api/{models => queries}/tools/get_tool.py (100%)
rename agents-api/agents_api/{models => queries}/tools/get_tool_args_from_metadata.py (100%)
rename agents-api/agents_api/{models => queries}/tools/list_tools.py (100%)
rename agents-api/agents_api/{models => queries}/tools/patch_tool.py (100%)
rename agents-api/agents_api/{models => queries}/tools/update_tool.py (100%)
diff --git a/agents-api/agents_api/models/tools/__init__.py b/agents-api/agents_api/queries/tools/__init__.py
similarity index 100%
rename from agents-api/agents_api/models/tools/__init__.py
rename to agents-api/agents_api/queries/tools/__init__.py
diff --git a/agents-api/agents_api/models/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
similarity index 89%
rename from agents-api/agents_api/models/tools/create_tools.py
rename to agents-api/agents_api/queries/tools/create_tools.py
index 578a1268d..0d2e0984c 100644
--- a/agents-api/agents_api/models/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -1,18 +1,18 @@
"""This module contains functions for creating tools in the CozoDB database."""
+import sqlvalidator
from typing import Any, TypeVar
from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
-from pycozo.client import QueryException
from pydantic import ValidationError
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateToolRequest, Tool
from ...metrics.counters import increase_counter
from ..utils import (
- cozo_query,
+ pg_query,
partialclass,
rewrap_exceptions,
verify_developer_id_query,
@@ -24,14 +24,13 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- AssertionError: partialclass(HTTPException, status_code=400),
- }
-)
+# @rewrap_exceptions(
+# {
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# AssertionError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
Tool,
transform=lambda d: {
@@ -41,7 +40,7 @@
},
_kind="inserted",
)
-@cozo_query
+@pg_query
@increase_counter("create_tools")
@beartype
def create_tools(
diff --git a/agents-api/agents_api/models/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
similarity index 100%
rename from agents-api/agents_api/models/tools/delete_tool.py
rename to agents-api/agents_api/queries/tools/delete_tool.py
diff --git a/agents-api/agents_api/models/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
similarity index 100%
rename from agents-api/agents_api/models/tools/get_tool.py
rename to agents-api/agents_api/queries/tools/get_tool.py
diff --git a/agents-api/agents_api/models/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
similarity index 100%
rename from agents-api/agents_api/models/tools/get_tool_args_from_metadata.py
rename to agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
diff --git a/agents-api/agents_api/models/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
similarity index 100%
rename from agents-api/agents_api/models/tools/list_tools.py
rename to agents-api/agents_api/queries/tools/list_tools.py
diff --git a/agents-api/agents_api/models/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
similarity index 100%
rename from agents-api/agents_api/models/tools/patch_tool.py
rename to agents-api/agents_api/queries/tools/patch_tool.py
diff --git a/agents-api/agents_api/models/tools/update_tool.py b/agents-api/agents_api/queries/tools/update_tool.py
similarity index 100%
rename from agents-api/agents_api/models/tools/update_tool.py
rename to agents-api/agents_api/queries/tools/update_tool.py
From 8a44cdee8ad093f4fcde41445781c4e585a49893 Mon Sep 17 00:00:00 2001
From: whiterabbit1983
Date: Fri, 20 Dec 2024 09:38:56 +0000
Subject: [PATCH 099/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/tools/create_tools.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index 0d2e0984c..a54fa6973 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -1,9 +1,9 @@
"""This module contains functions for creating tools in the CozoDB database."""
-import sqlvalidator
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
from fastapi import HTTPException
from pydantic import ValidationError
@@ -12,8 +12,8 @@
from ...autogen.openapi_model import CreateToolRequest, Tool
from ...metrics.counters import increase_counter
from ..utils import (
- pg_query,
partialclass,
+ pg_query,
rewrap_exceptions,
verify_developer_id_query,
verify_developer_owns_resource_query,
From 44122cad522f4fcbe00bc17d271ba9acfc373270 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 14:10:53 +0300
Subject: [PATCH 100/274] feat: Add create tools query
---
.../agents_api/queries/tools/create_tools.py | 103 +++++++-----------
1 file changed, 41 insertions(+), 62 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index a54fa6973..d50e98e80 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -5,18 +5,14 @@
import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pydantic import ValidationError
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateToolRequest, Tool
+from ...exceptions import InvalidSQLQuery
from ...metrics.counters import increase_counter
from ..utils import (
- partialclass,
pg_query,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ # rewrap_exceptions,
wrap_in_class,
)
@@ -24,6 +20,37 @@
T = TypeVar("T")
+sql_query = sqlvalidator.parse(
+ """INSERT INTO tools
+(
+ developer_id,
+ agent_id,
+ tool_id,
+ type,
+ name,
+ spec,
+ description
+)
+SELECT
+ $1,
+ $2,
+ $3,
+ $4,
+ $5,
+ $6,
+ $7
+WHERE NOT EXISTS (
+ SELECT null FROM tools
+ WHERE (agent_id, name) = ($2, $5)
+)
+RETURNING *
+"""
+)
+
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("create_tools")
+
+
# @rewrap_exceptions(
# {
# ValidationError: partialclass(HTTPException, status_code=400),
@@ -48,8 +75,8 @@ def create_tools(
developer_id: UUID,
agent_id: UUID,
data: list[CreateToolRequest],
- ignore_existing: bool = False,
-) -> tuple[list[str], dict]:
+ ignore_existing: bool = False, # TODO: what to do with this flag?
+) -> tuple[list[str], list]:
"""
Constructs a datalog query for inserting tool records into the 'agent_functions' relation in the CozoDB.
@@ -69,6 +96,7 @@ def create_tools(
tools_data = [
[
+ developer_id,
str(agent_id),
str(uuid7()),
tool.type,
@@ -79,57 +107,8 @@ def create_tools(
for tool in data
]
- ensure_tool_name_unique_query = """
- input[agent_id, tool_id, type, name, spec, description] <- $records
- ?[tool_id] :=
- input[agent_id, _, type, name, _, _],
- *tools{
- agent_id: to_uuid(agent_id),
- tool_id,
- type,
- name,
- spec,
- description,
- }
-
- :limit 1
- :assert none
- """
-
- # Datalog query for inserting new tool records into the 'tools' relation
- create_query = """
- input[agent_id, tool_id, type, name, spec, description] <- $records
-
- # Do not add duplicate
- ?[agent_id, tool_id, type, name, spec, description] :=
- input[agent_id, tool_id, type, name, spec, description],
- not *tools{
- agent_id: to_uuid(agent_id),
- type,
- name,
- }
-
- :insert tools {
- agent_id,
- tool_id,
- type,
- name,
- spec,
- description,
- }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- create_query,
- ]
-
- if not ignore_existing:
- queries.insert(
- -1,
- ensure_tool_name_unique_query,
- )
-
- return (queries, {"records": tools_data})
+ return (
+ sql_query.format(),
+ tools_data,
+ "fetchmany",
+ )
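
Note the three-element tuple returned here: unlike the two-element
(query, params) shape used elsewhere, create_tools hands pg_query a list of
parameter rows plus a "fetchmany" marker. pg_query's internals are not shown
in this series, so the following is only an assumed sketch of how such a
marker might be dispatched (asyncpg-style API assumed):

    # Hypothetical dispatch inside pg_query (illustrative, not the real code).
    async def run(conn, payload):
        match payload:
            case (query, param_rows, "fetchmany"):
                # One execution per parameter row, e.g. a batched INSERT.
                return await conn.executemany(query, param_rows)
            case (query, params):
                return await conn.fetch(query, *params)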
From b19a0010dd3276589ce829048700151cdbe402b4 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 14:27:45 +0300
Subject: [PATCH 101/274] feat: Add delete tool query
---
.../agents_api/queries/tools/delete_tool.py | 69 +++++++++----------
1 file changed, 33 insertions(+), 36 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index c79cdfd29..59f561cf1 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -1,19 +1,14 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
+from ...exceptions import InvalidSQLQuery
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
@@ -21,20 +16,34 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+sql_query = sqlvalidator.parse("""
+DELETE FROM
+ tools
+WHERE
+ developer_id = $1 AND
+ agent_id = $2 AND
+ tool_id = $3
+RETURNING *
+""")
+
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("delete_tool")
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
ResourceDeletedResponse,
one=True,
transform=lambda d: {"id": d["tool_id"], "deleted_at": utcnow(), "jobs": [], **d},
_kind="deleted",
)
-@cozo_query
+@pg_query
@beartype
def delete_tool(
*,
@@ -42,27 +51,15 @@ def delete_tool(
agent_id: UUID,
tool_id: UUID,
) -> tuple[list[str], dict]:
+ developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
- delete_query = """
- # Delete function
- ?[tool_id, agent_id] <- [[
- to_uuid($tool_id),
- to_uuid($agent_id),
- ]]
-
- :delete tools {
- tool_id,
+ return (
+ sql_query.format(),
+ [
+ developer_id,
agent_id,
- }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- delete_query,
- ]
-
- return (queries, {"tool_id": tool_id, "agent_id": agent_id})
+ tool_id,
+ ],
+ )
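
The str() coercion of the three UUIDs above is driver-dependent: asyncpg, for
instance, binds uuid.UUID values directly, so the conversion is presumably for
compatibility with whatever pg_query passes them through. The resulting
payload is simply:

    payload = (
        sql_query.format(),                                # the validated DELETE above
        [str(developer_id), str(agent_id), str(tool_id)],  # $1, $2, $3
    )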
From e7d3079f380fa954c3e18c866bc120c8b16a9a50 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 14:32:39 +0300
Subject: [PATCH 102/274] feat: Add get tool query
---
.../agents_api/queries/tools/get_tool.py | 76 ++++++++-----------
1 file changed, 30 insertions(+), 46 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
index 465fd2efe..3662725b8 100644
--- a/agents-api/agents_api/queries/tools/get_tool.py
+++ b/agents-api/agents_api/queries/tools/get_tool.py
@@ -1,32 +1,39 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ...autogen.openapi_model import Tool
+from ...exceptions import InvalidSQLQuery
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+sql_query = sqlvalidator.parse("""
+SELECT * FROM tools
+WHERE
+ developer_id = $1 AND
+ agent_id = $2 AND
+ tool_id = $3
+LIMIT 1
+""")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("get_tool")
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
Tool,
transform=lambda d: {
@@ -36,7 +43,7 @@
},
one=True,
)
-@cozo_query
+@pg_query
@beartype
def get_tool(
*,
@@ -44,38 +51,15 @@ def get_tool(
agent_id: UUID,
tool_id: UUID,
) -> tuple[list[str], dict]:
+ developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
- get_query = """
- input[agent_id, tool_id] <- [[to_uuid($agent_id), to_uuid($tool_id)]]
-
- ?[
+ return (
+ sql_query.format(),
+ [
+ developer_id,
agent_id,
tool_id,
- type,
- name,
- spec,
- updated_at,
- created_at,
- ] := input[agent_id, tool_id],
- *tools {
- agent_id,
- tool_id,
- name,
- type,
- spec,
- updated_at,
- created_at,
- }
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- get_query,
- ]
-
- return (queries, {"agent_id": agent_id, "tool_id": tool_id})
+ ],
+ )
From 83f58aca92fc715cfbafc5f9f2f19f95cbf2da1e Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 14:45:28 +0300
Subject: [PATCH 103/274] feat: Add list tools query
---
.../agents_api/queries/tools/list_tools.py | 92 ++++++++-----------
1 file changed, 37 insertions(+), 55 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
index 727bf8028..59fb1eff5 100644
--- a/agents-api/agents_api/queries/tools/list_tools.py
+++ b/agents-api/agents_api/queries/tools/list_tools.py
@@ -1,32 +1,43 @@
from typing import Any, Literal, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ...autogen.openapi_model import Tool
+from ...exceptions import InvalidSQLQuery
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+sql_query = sqlvalidator.parse("""
+SELECT * FROM tools
+WHERE
+ developer_id = $1 AND
+ agent_id = $2
+ORDER BY
+    CASE WHEN $5 = 'created_at' AND $6 = 'desc' THEN tools.created_at END DESC,
+    CASE WHEN $5 = 'created_at' AND $6 = 'asc' THEN tools.created_at END ASC,
+    CASE WHEN $5 = 'updated_at' AND $6 = 'desc' THEN tools.updated_at END DESC,
+    CASE WHEN $5 = 'updated_at' AND $6 = 'asc' THEN tools.updated_at END ASC
+LIMIT $3 OFFSET $4;
+""")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("get_tool")
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
Tool,
transform=lambda d: {
@@ -38,7 +49,7 @@
**d,
},
)
-@cozo_query
+@pg_query
@beartype
def list_tools(
*,
@@ -49,46 +60,17 @@ def list_tools(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
) -> tuple[list[str], dict]:
+ developer_id = str(developer_id)
agent_id = str(agent_id)
- sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
-
- list_query = f"""
- input[agent_id] <- [[to_uuid($agent_id)]]
-
- ?[
- agent_id,
- id,
- name,
- type,
- spec,
- description,
- updated_at,
- created_at,
- ] := input[agent_id],
- *tools {{
- agent_id,
- tool_id: id,
- name,
- type,
- spec,
- description,
- updated_at,
- created_at,
- }}
-
- :limit $limit
- :offset $offset
- :sort {sort}
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- list_query,
- ]
-
return (
- queries,
- {"agent_id": agent_id, "limit": limit, "offset": offset},
+ sql_query.format(),
+ [
+ developer_id,
+ agent_id,
+ limit,
+ offset,
+ sort_by,
+ direction,
+ ],
)
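
The ORDER BY block in this query is the standard trick for parameterised
sorting in Postgres: $5 and $6 carry the sort column and direction as plain
text, and exactly one CASE arm is non-NULL for any given pair, so no user
input is ever spliced into the SQL string. A hedged usage sketch (asyncpg
assumed; this fragment lives inside an async function):

    rows = await conn.fetch(
        sql_query.format(),
        developer_id, agent_id,   # $1, $2
        25, 0,                    # $3 LIMIT, $4 OFFSET
        "created_at", "desc",     # $5 sort column, $6 direction
    )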
From 59b24ac9bf2031daff49c42ecce5e03c880b1ee9 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 15:21:09 +0300
Subject: [PATCH 104/274] feat: Add patch tool query
---
.../agents_api/queries/tools/patch_tool.py | 94 +++++++++----------
1 file changed, 43 insertions(+), 51 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index bc49b8121..aa663dec0 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -1,20 +1,14 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ...autogen.openapi_model import PatchToolRequest, ResourceUpdatedResponse
-from ...common.utils.cozo import cozo_process_mutate_data
+from ...exceptions import InvalidSQLQuery
from ...metrics.counters import increase_counter
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
@@ -22,25 +16,46 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
+sql_query = sqlvalidator.parse("""
+WITH updated_tools AS (
+ UPDATE tools
+ SET
+ type = COALESCE($4, type),
+ name = COALESCE($5, name),
+ description = COALESCE($6, description),
+ spec = COALESCE($7, spec)
+ WHERE
+ developer_id = $1 AND
+ agent_id = $2 AND
+ tool_id = $3
+ RETURNING *
)
+SELECT * FROM updated_tools;
+""")
+
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("patch_tool")
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
transform=lambda d: {"id": d["tool_id"], "jobs": [], **d},
_kind="inserted",
)
-@cozo_query
+@pg_query
@increase_counter("patch_tool")
@beartype
def patch_tool(
*, developer_id: UUID, agent_id: UUID, tool_id: UUID, data: PatchToolRequest
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
"""
Execute the datalog query and return the results as a DataFrame
Updates the tool information for a given agent and tool ID in the 'cozodb' database.
@@ -54,6 +69,7 @@ def patch_tool(
ResourceUpdatedResponse: The updated tool data.
"""
+ developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
@@ -78,39 +94,15 @@ def patch_tool(
if tool_spec:
del patch_data[tool_type]
- tool_cols, tool_vals = cozo_process_mutate_data(
- {
- **patch_data,
- "agent_id": agent_id,
- "tool_id": tool_id,
- }
- )
-
- # Construct the datalog query for updating the tool information
- patch_query = f"""
- input[{tool_cols}] <- $input
-
- ?[{tool_cols}, spec, updated_at] :=
- *tools {{
- agent_id: to_uuid($agent_id),
- tool_id: to_uuid($tool_id),
- spec: old_spec,
- }},
- input[{tool_cols}],
- spec = concat(old_spec, $spec),
- updated_at = now()
-
- :update tools {{ {tool_cols}, spec, updated_at }}
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- patch_query,
- ]
-
return (
- queries,
- dict(input=tool_vals, spec=tool_spec, agent_id=agent_id, tool_id=tool_id),
+ sql_query.format(),
+ [
+ developer_id,
+ agent_id,
+ tool_id,
+ tool_type,
+ data.name,
+ data.description,
+ tool_spec,
+ ],
)
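
COALESCE($n, column) is what makes this PATCH semantics rather than PUT: any
parameter bound as NULL leaves the existing column value in place. For
example, updating only the description looks like (values invented):

    params = [
        developer_id, agent_id, tool_id,  # $1..$3 identify the row
        None,                # $4 type: unchanged via COALESCE($4, type)
        None,                # $5 name: unchanged
        "New description",   # $6 description: overwritten
        None,                # $7 spec: unchanged
    ]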
From 32dbbbaac376757ddc535d40eef64d3d64259c3f Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 15:21:21 +0300
Subject: [PATCH 105/274] fix: Fix return types
---
agents-api/agents_api/queries/tools/delete_tool.py | 2 +-
agents-api/agents_api/queries/tools/get_tool.py | 2 +-
agents-api/agents_api/queries/tools/list_tools.py | 4 ++--
3 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index 59f561cf1..17535e1e4 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -50,7 +50,7 @@ def delete_tool(
developer_id: UUID,
agent_id: UUID,
tool_id: UUID,
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
diff --git a/agents-api/agents_api/queries/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
index 3662725b8..af63be0c9 100644
--- a/agents-api/agents_api/queries/tools/get_tool.py
+++ b/agents-api/agents_api/queries/tools/get_tool.py
@@ -50,7 +50,7 @@ def get_tool(
developer_id: UUID,
agent_id: UUID,
tool_id: UUID,
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
diff --git a/agents-api/agents_api/queries/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
index 59fb1eff5..3dac84875 100644
--- a/agents-api/agents_api/queries/tools/list_tools.py
+++ b/agents-api/agents_api/queries/tools/list_tools.py
@@ -28,7 +28,7 @@
""")
if not sql_query.is_valid():
- raise InvalidSQLQuery("get_tool")
+ raise InvalidSQLQuery("list_tools")
# @rewrap_exceptions(
@@ -59,7 +59,7 @@ def list_tools(
offset: int = 0,
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
developer_id = str(developer_id)
agent_id = str(agent_id)
From 281e1a8f44c79cfd7081108a213b0a580446db26 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 15:31:48 +0300
Subject: [PATCH 106/274] feat: Add update tool query
---
.../agents_api/queries/tools/update_tool.py | 93 ++++++++-----------
1 file changed, 41 insertions(+), 52 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/update_tool.py b/agents-api/agents_api/queries/tools/update_tool.py
index ef700a5f6..356e28bbf 100644
--- a/agents-api/agents_api/queries/tools/update_tool.py
+++ b/agents-api/agents_api/queries/tools/update_tool.py
@@ -1,44 +1,55 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ...autogen.openapi_model import (
ResourceUpdatedResponse,
UpdateToolRequest,
)
-from ...common.utils.cozo import cozo_process_mutate_data
+from ...exceptions import InvalidSQLQuery
from ...metrics.counters import increase_counter
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+sql_query = sqlvalidator.parse("""
+UPDATE tools
+SET
+ type = $4,
+ name = $5,
+ description = $6,
+ spec = $7
+WHERE
+ developer_id = $1 AND
+ agent_id = $2 AND
+ tool_id = $3
+RETURNING *;
+""")
+
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("update_tool")
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
transform=lambda d: {"id": d["tool_id"], "jobs": [], **d},
_kind="inserted",
)
-@cozo_query
+@pg_query
@increase_counter("update_tool")
@beartype
def update_tool(
@@ -48,7 +59,8 @@ def update_tool(
tool_id: UUID,
data: UpdateToolRequest,
**kwargs,
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
+ developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
@@ -72,38 +84,15 @@ def update_tool(
update_data["spec"] = tool_spec
del update_data[tool_type]
- tool_cols, tool_vals = cozo_process_mutate_data(
- {
- **update_data,
- "agent_id": agent_id,
- "tool_id": tool_id,
- }
- )
-
- # Construct the datalog query for updating the tool information
- patch_query = f"""
- input[{tool_cols}] <- $input
-
- ?[{tool_cols}, created_at, updated_at] :=
- *tools {{
- agent_id: to_uuid($agent_id),
- tool_id: to_uuid($tool_id),
- created_at
- }},
- input[{tool_cols}],
- updated_at = now()
-
- :put tools {{ {tool_cols}, created_at, updated_at }}
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- patch_query,
- ]
-
return (
- queries,
- dict(input=tool_vals, spec=tool_spec, agent_id=agent_id, tool_id=tool_id),
+ sql_query.format(),
+ [
+ developer_id,
+ agent_id,
+ tool_id,
+ tool_type,
+ data.name,
+ data.description,
+ tool_spec,
+ ],
)
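
By contrast with patch_tool above, this query assigns $4..$7 unconditionally,
giving update_tool full-replacement (PUT) semantics: a field the caller omits
is written as NULL rather than preserved, so the complete tool definition must
always be supplied (spec shape below is invented):

    params = [
        developer_id, agent_id, tool_id,       # $1..$3
        "function",                            # $4 type (required)
        "search", "Searches the web",          # $5 name, $6 description (required)
        {"name": "search", "parameters": {}},  # $7 spec (required)
    ]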
From 93673b732512199a77df585c6568a42f657c65f4 Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Fri, 20 Dec 2024 14:43:12 -0500
Subject: [PATCH 107/274] fix: Fix CRD doc queries and add tests
---
agents-api/agents_api/autogen/Docs.py | 24 ++
.../agents_api/queries/docs/__init__.py | 13 +-
.../agents_api/queries/docs/create_doc.py | 40 +-
.../agents_api/queries/docs/delete_doc.py | 6 +-
agents-api/agents_api/queries/docs/get_doc.py | 15 +-
.../agents_api/queries/docs/list_docs.py | 81 ++--
.../queries/docs/search_docs_by_embedding.py | 1 -
.../queries/docs/search_docs_by_text.py | 3 +-
.../queries/docs/search_docs_hybrid.py | 2 -
.../agents_api/queries/entries/get_history.py | 1 -
.../agents_api/queries/files/get_file.py | 6 +-
.../agents_api/queries/files/list_files.py | 87 +---
.../queries/sessions/create_session.py | 2 -
agents-api/tests/fixtures.py | 21 +-
agents-api/tests/test_docs_queries.py | 406 +++++++++++-------
agents-api/tests/test_entry_queries.py | 1 -
agents-api/tests/test_files_queries.py | 4 +-
agents-api/tests/test_session_queries.py | 1 -
.../integrations/autogen/Docs.py | 24 ++
typespec/docs/models.tsp | 20 +
.../@typespec/openapi3/openapi-1.0.0.yaml | 22 +
21 files changed, 454 insertions(+), 326 deletions(-)
diff --git a/agents-api/agents_api/autogen/Docs.py b/agents-api/agents_api/autogen/Docs.py
index ffed27c1d..af5f60d6a 100644
--- a/agents-api/agents_api/autogen/Docs.py
+++ b/agents-api/agents_api/autogen/Docs.py
@@ -73,6 +73,30 @@ class Doc(BaseModel):
"""
Embeddings for the document
"""
+ modality: Annotated[str | None, Field(json_schema_extra={"readOnly": True})] = None
+ """
+ Modality of the document
+ """
+ language: Annotated[str | None, Field(json_schema_extra={"readOnly": True})] = None
+ """
+ Language of the document
+ """
+ index: Annotated[int | None, Field(json_schema_extra={"readOnly": True})] = None
+ """
+ Index of the document
+ """
+ embedding_model: Annotated[
+ str | None, Field(json_schema_extra={"readOnly": True})
+ ] = None
+ """
+ Embedding model to use for the document
+ """
+ embedding_dimensions: Annotated[
+ int | None, Field(json_schema_extra={"readOnly": True})
+ ] = None
+ """
+ Dimensions of the embedding model
+ """
class DocOwner(BaseModel):
diff --git a/agents-api/agents_api/queries/docs/__init__.py b/agents-api/agents_api/queries/docs/__init__.py
index 0ba3db0d4..f7c207bf2 100644
--- a/agents-api/agents_api/queries/docs/__init__.py
+++ b/agents-api/agents_api/queries/docs/__init__.py
@@ -18,8 +18,15 @@
from .create_doc import create_doc
from .delete_doc import delete_doc
-from .embed_snippets import embed_snippets
from .get_doc import get_doc
from .list_docs import list_docs
-from .search_docs_by_embedding import search_docs_by_embedding
-from .search_docs_by_text import search_docs_by_text
+# from .search_docs_by_embedding import search_docs_by_embedding
+# from .search_docs_by_text import search_docs_by_text
+
+__all__ = [
+ "create_doc",
+ "delete_doc",
+ "get_doc",
+ "list_docs",
+ # "search_docs_by_embct",
+]
diff --git a/agents-api/agents_api/queries/docs/create_doc.py b/agents-api/agents_api/queries/docs/create_doc.py
index 57be43bdf..4528e9fc5 100644
--- a/agents-api/agents_api/queries/docs/create_doc.py
+++ b/agents-api/agents_api/queries/docs/create_doc.py
@@ -1,12 +1,4 @@
-"""
-Timescale-based creation of docs.
-
-Mirrors the structure of create_file.py, but uses the docs/doc_owners tables.
-"""
-
-import base64
-import hashlib
-from typing import Any, Literal
+from typing import Literal
from uuid import UUID
import asyncpg
@@ -15,6 +7,9 @@
from sqlglot import parse_one
from uuid_extensions import uuid7
+import ast
+
+
from ...autogen.openapi_model import CreateDocRequest, Doc
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
@@ -91,7 +86,7 @@
transform=lambda d: {
**d,
"id": d["doc_id"],
- # You could optionally return a computed hash or partial content if desired
+ "content": ast.literal_eval(d["content"])[0] if len(ast.literal_eval(d["content"])) == 1 else ast.literal_eval(d["content"]),
},
)
@increase_counter("create_doc")
@@ -102,26 +97,35 @@ async def create_doc(
developer_id: UUID,
doc_id: UUID | None = None,
data: CreateDocRequest,
- owner_type: Literal["user", "agent", "org"] | None = None,
+ owner_type: Literal["user", "agent"] | None = None,
owner_id: UUID | None = None,
-) -> list[tuple[str, list]]:
+ modality: Literal["text", "image", "mixed"] | None = "text",
+ embedding_model: str | None = "voyage-3",
+ embedding_dimensions: int | None = 1024,
+ language: str | None = "english",
+ index: int | None = 0,
+) -> list[tuple[str, list] | tuple[str, list, str]]:
"""
Insert a new doc record into Timescale and optionally associate it with an owner.
"""
# Generate a UUID if not provided
doc_id = doc_id or uuid7()
+ # check if content is a string
+ if isinstance(data.content, str):
+ data.content = [data.content]
+
# Create the doc record
doc_params = [
developer_id,
doc_id,
data.title,
- data.content,
- data.index or 0, # fallback if no snippet index
- data.modality or "text",
- data.embedding_model or "none",
- data.embedding_dimensions or 0,
- data.language or "english",
+ str(data.content),
+ index,
+ modality,
+ embedding_model,
+ embedding_dimensions,
+ language,
data.metadata or {},
]
diff --git a/agents-api/agents_api/queries/docs/delete_doc.py b/agents-api/agents_api/queries/docs/delete_doc.py
index 9d2075600..adeb09bd8 100644
--- a/agents-api/agents_api/queries/docs/delete_doc.py
+++ b/agents-api/agents_api/queries/docs/delete_doc.py
@@ -1,7 +1,3 @@
-"""
-Timescale-based deletion of a doc record.
-"""
-
from typing import Literal
from uuid import UUID
@@ -65,7 +61,7 @@ async def delete_doc(
*,
developer_id: UUID,
doc_id: UUID,
- owner_type: Literal["user", "agent", "org"] | None = None,
+ owner_type: Literal["user", "agent"] | None = None,
owner_id: UUID | None = None,
) -> tuple[str, list]:
"""
diff --git a/agents-api/agents_api/queries/docs/get_doc.py b/agents-api/agents_api/queries/docs/get_doc.py
index 35d692c84..9155f500a 100644
--- a/agents-api/agents_api/queries/docs/get_doc.py
+++ b/agents-api/agents_api/queries/docs/get_doc.py
@@ -1,14 +1,9 @@
-"""
-Timescale-based retrieval of a single doc record.
-"""
-
from typing import Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
+import ast
from ...autogen.openapi_model import Doc
from ..utils import pg_query, wrap_in_class
@@ -16,12 +11,12 @@
doc_query = parse_one("""
SELECT d.*
FROM docs d
-LEFT JOIN doc_owners do ON d.developer_id = do.developer_id AND d.doc_id = do.doc_id
+LEFT JOIN doc_owners doc_own ON d.developer_id = doc_own.developer_id AND d.doc_id = doc_own.doc_id
WHERE d.developer_id = $1
AND d.doc_id = $2
AND (
($3::text IS NULL AND $4::uuid IS NULL)
- OR (do.owner_type = $3 AND do.owner_id = $4)
+ OR (doc_own.owner_type = $3 AND doc_own.owner_id = $4)
)
LIMIT 1;
""").sql(pretty=True)
@@ -33,6 +28,8 @@
transform=lambda d: {
**d,
"id": d["doc_id"],
+ "content": ast.literal_eval(d["content"])[0] if len(ast.literal_eval(d["content"])) == 1 else ast.literal_eval(d["content"]),
+ # "embeddings": d["embeddings"],
},
)
@pg_query
@@ -41,7 +38,7 @@ async def get_doc(
*,
developer_id: UUID,
doc_id: UUID,
- owner_type: Literal["user", "agent", "org"] | None = None,
+ owner_type: Literal["user", "agent"] | None = None,
owner_id: UUID | None = None,
) -> tuple[str, list]:
"""
diff --git a/agents-api/agents_api/queries/docs/list_docs.py b/agents-api/agents_api/queries/docs/list_docs.py
index 678c1a5e6..a4df08e73 100644
--- a/agents-api/agents_api/queries/docs/list_docs.py
+++ b/agents-api/agents_api/queries/docs/list_docs.py
@@ -1,52 +1,20 @@
-"""
-Timescale-based listing of docs with optional owner filter and pagination.
-"""
-
-from typing import Literal
+from typing import Any, Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
+import ast
from ...autogen.openapi_model import Doc
from ..utils import pg_query, wrap_in_class
-# Basic listing for all docs by developer
-developer_docs_query = parse_one("""
+# Base query for listing docs
+base_docs_query = parse_one("""
SELECT d.*
FROM docs d
-LEFT JOIN doc_owners do ON d.developer_id = do.developer_id AND d.doc_id = do.doc_id
+LEFT JOIN doc_owners doc_own ON d.developer_id = doc_own.developer_id AND d.doc_id = doc_own.doc_id
WHERE d.developer_id = $1
-ORDER BY
-CASE
- WHEN $4 = 'created_at' AND $5 = 'asc' THEN d.created_at
- WHEN $4 = 'created_at' AND $5 = 'desc' THEN d.created_at
- WHEN $4 = 'updated_at' AND $5 = 'asc' THEN d.updated_at
- WHEN $4 = 'updated_at' AND $5 = 'desc' THEN d.updated_at
-END DESC NULLS LAST
-LIMIT $2
-OFFSET $3;
-""").sql(pretty=True)
-
-# Listing for docs associated with a specific owner
-owner_docs_query = parse_one("""
-SELECT d.*
-FROM docs d
-JOIN doc_owners do ON d.developer_id = do.developer_id AND d.doc_id = do.doc_id
-WHERE do.developer_id = $1
- AND do.owner_id = $6
- AND do.owner_type = $7
-ORDER BY
-CASE
- WHEN $4 = 'created_at' AND $5 = 'asc' THEN d.created_at
- WHEN $4 = 'created_at' AND $5 = 'desc' THEN d.created_at
- WHEN $4 = 'updated_at' AND $5 = 'asc' THEN d.updated_at
- WHEN $4 = 'updated_at' AND $5 = 'desc' THEN d.updated_at
-END DESC NULLS LAST
-LIMIT $2
-OFFSET $3;
""").sql(pretty=True)
@@ -56,6 +24,8 @@
transform=lambda d: {
**d,
"id": d["doc_id"],
+ "content": ast.literal_eval(d["content"])[0] if len(ast.literal_eval(d["content"])) == 1 else ast.literal_eval(d["content"]),
+ # "embeddings": d["embeddings"],
},
)
@pg_query
@@ -64,11 +34,13 @@ async def list_docs(
*,
developer_id: UUID,
owner_id: UUID | None = None,
- owner_type: Literal["user", "agent", "org"] | None = None,
+ owner_type: Literal["user", "agent"] | None = None,
limit: int = 100,
offset: int = 0,
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
+ metadata_filter: dict[str, Any] = {},
+ include_without_embeddings: bool = False,
) -> tuple[str, list]:
"""
Lists docs with optional owner filtering, pagination, and sorting.
@@ -76,17 +48,36 @@ async def list_docs(
if direction.lower() not in ["asc", "desc"]:
raise HTTPException(status_code=400, detail="Invalid sort direction")
+ if sort_by not in ["created_at", "updated_at"]:
+ raise HTTPException(status_code=400, detail="Invalid sort field")
+
if limit > 100 or limit < 1:
raise HTTPException(status_code=400, detail="Limit must be between 1 and 100")
if offset < 0:
raise HTTPException(status_code=400, detail="Offset must be >= 0")
- params = [developer_id, limit, offset, sort_by, direction]
- if owner_id and owner_type:
- params.extend([owner_id, owner_type])
- query = owner_docs_query
- else:
- query = developer_docs_query
+ # Start with the base query
+ query = base_docs_query
+ params = [developer_id]
+
+ # Add owner filtering
+ if owner_type and owner_id:
+ query += " AND doc_own.owner_type = $2 AND doc_own.owner_id = $3"
+ params.extend([owner_type, owner_id])
+
+ # Add metadata filtering
+ if metadata_filter:
+ for key, value in metadata_filter.items():
+ query += f" AND d.metadata->>'{key}' = ${len(params) + 1}"
+ params.append(value)
+
+ # Include or exclude documents without embeddings
+ # if not include_without_embeddings:
+ # query += " AND d.embeddings IS NOT NULL"
+
+ # Add sorting and pagination
+ query += f" ORDER BY {sort_by} {direction} LIMIT ${len(params) + 1} OFFSET ${len(params) + 2}"
+ params.extend([limit, offset])
- return (query, params)
+ return query, params
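
One caveat with the metadata filter above: each dict key is spliced into the
SQL via an f-string, so only the value is parameterised; keys must never come
from untrusted input. (The mutable default metadata_filter={} is also a
classic Python foot-gun, though harmless here since the dict is never
mutated.) A fully parameterised alternative, sketched under the assumption
that metadata is a jsonb column:

    # Bind the key as a parameter too, instead of interpolating it.
    for key, value in metadata_filter.items():
        query += f" AND d.metadata->>${len(params) + 1} = ${len(params) + 2}"
        params.extend([key, value])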
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
index af89cc1b8..e3120bd36 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
@@ -5,7 +5,6 @@
from typing import List, Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_text.py b/agents-api/agents_api/queries/docs/search_docs_by_text.py
index eed74e54b..9f434d438 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_text.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_text.py
@@ -5,7 +5,6 @@
from typing import Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
@@ -22,7 +21,7 @@
AND d.doc_id = do.doc_id
WHERE d.developer_id = $1
AND (
- ($4::text IS NULL AND $5::uuid IS NULL)
+ ($4 IS NULL AND $5 IS NULL)
OR (do.owner_type = $4 AND do.owner_id = $5)
)
AND d.search_tsv @@ websearch_to_tsquery($3)
diff --git a/agents-api/agents_api/queries/docs/search_docs_hybrid.py b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
index ae107419d..a879e3b6b 100644
--- a/agents-api/agents_api/queries/docs/search_docs_hybrid.py
+++ b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
@@ -7,10 +7,8 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
from ...autogen.openapi_model import Doc
-from ..utils import run_concurrently
from .search_docs_by_embedding import search_docs_by_embedding
from .search_docs_by_text import search_docs_by_text
diff --git a/agents-api/agents_api/queries/entries/get_history.py b/agents-api/agents_api/queries/entries/get_history.py
index e6967a6cc..ffa0746c0 100644
--- a/agents-api/agents_api/queries/entries/get_history.py
+++ b/agents-api/agents_api/queries/entries/get_history.py
@@ -1,5 +1,4 @@
import json
-from typing import Any, List, Tuple
from uuid import UUID
import asyncpg
diff --git a/agents-api/agents_api/queries/files/get_file.py b/agents-api/agents_api/queries/files/get_file.py
index 4d5dca4c0..5ccb08d86 100644
--- a/agents-api/agents_api/queries/files/get_file.py
+++ b/agents-api/agents_api/queries/files/get_file.py
@@ -6,13 +6,11 @@
from typing import Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
from ...autogen.openapi_model import File
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, wrap_in_class
# Define the raw SQL query
file_query = parse_one("""
@@ -47,8 +45,8 @@
File,
one=True,
transform=lambda d: {
- "id": d["file_id"],
**d,
+ "id": d["file_id"],
"hash": d["hash"].hex(),
"content": "DUMMY: NEED TO FETCH CONTENT FROM BLOB STORAGE",
},
diff --git a/agents-api/agents_api/queries/files/list_files.py b/agents-api/agents_api/queries/files/list_files.py
index 2bc42f842..7c8b67887 100644
--- a/agents-api/agents_api/queries/files/list_files.py
+++ b/agents-api/agents_api/queries/files/list_files.py
@@ -3,51 +3,21 @@
It constructs and executes SQL queries to fetch a list of files based on developer ID with pagination.
"""
-from typing import Any, Literal
+from typing import Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-
from ...autogen.openapi_model import File
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, wrap_in_class
-# Query to list all files for a developer (uses developer_id index)
-developer_files_query = parse_one("""
+# Base query for listing files
+base_files_query = parse_one("""
SELECT f.*
FROM files f
LEFT JOIN file_owners fo ON f.developer_id = fo.developer_id AND f.file_id = fo.file_id
WHERE f.developer_id = $1
-ORDER BY
- CASE
- WHEN $4 = 'created_at' AND $5 = 'asc' THEN created_at
- WHEN $4 = 'created_at' AND $5 = 'desc' THEN created_at
- WHEN $4 = 'updated_at' AND $5 = 'asc' THEN updated_at
- WHEN $4 = 'updated_at' AND $5 = 'desc' THEN updated_at
- END DESC NULLS LAST
-LIMIT $2
-OFFSET $3;
-""").sql(pretty=True)
-
-# Query to list files for a specific owner (uses composite indexes)
-owner_files_query = parse_one("""
-SELECT f.*
-FROM files f
-JOIN file_owners fo ON f.developer_id = fo.developer_id AND f.file_id = fo.file_id
-WHERE fo.developer_id = $1
-AND fo.owner_id = $6
-AND fo.owner_type = $7
-ORDER BY
- CASE
- WHEN $4 = 'created_at' AND $5 = 'asc' THEN created_at
- WHEN $4 = 'created_at' AND $5 = 'desc' THEN created_at
- WHEN $4 = 'updated_at' AND $5 = 'asc' THEN updated_at
- WHEN $4 = 'updated_at' AND $5 = 'desc' THEN updated_at
- END DESC NULLS LAST
-LIMIT $2
-OFFSET $3;
""").sql(pretty=True)
@@ -74,49 +44,32 @@ async def list_files(
direction: Literal["asc", "desc"] = "desc",
) -> tuple[str, list]:
"""
- Lists files with optimized queries for two cases:
- 1. Owner specified: Returns files associated with that owner
- 2. No owner: Returns all files for the developer
-
- Args:
- developer_id: UUID of the developer
- owner_id: Optional UUID of the owner (user or agent)
- owner_type: Optional type of owner ("user" or "agent")
- limit: Maximum number of records to return (1-100)
- offset: Number of records to skip
- sort_by: Field to sort by
- direction: Sort direction ('asc' or 'desc')
-
- Returns:
- Tuple of (query, params)
-
- Raises:
- HTTPException: If parameters are invalid
+ Lists files with optional owner filtering, pagination, and sorting.
"""
# Validate parameters
if direction.lower() not in ["asc", "desc"]:
raise HTTPException(status_code=400, detail="Invalid sort direction")
+ if sort_by not in ["created_at", "updated_at"]:
+ raise HTTPException(status_code=400, detail="Invalid sort field")
+
if limit > 100 or limit < 1:
raise HTTPException(status_code=400, detail="Limit must be between 1 and 100")
if offset < 0:
raise HTTPException(status_code=400, detail="Offset must be non-negative")
- # Base parameters used in all queries
- params = [
- developer_id,
- limit,
- offset,
- sort_by,
- direction,
- ]
+ # Start with the base query
+ query = base_files_query
+ params = [developer_id]
+
+ # Add owner filtering
+ if owner_type and owner_id:
+ query += " AND fo.owner_type = $2 AND fo.owner_id = $3"
+ params.extend([owner_type, owner_id])
- # Choose appropriate query based on owner details
- if owner_id and owner_type:
- params.extend([owner_id, owner_type]) # Add owner_id as $6 and owner_type as $7
- query = owner_files_query # Use single query with owner_type parameter
- else:
- query = developer_files_query
+ # Add sorting and pagination
+ query += f" ORDER BY {sort_by} {direction} LIMIT ${len(params) + 1} OFFSET ${len(params) + 2}"
+ params.extend([limit, offset])
- return (query, params)
+ return query, params
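
Editor's note: list_files interpolates sort_by and direction straight into the ORDER BY clause. That is only safe because both values are first checked against closed whitelists (the two HTTPException guards above); any new sort field must be added to the whitelist, never passed through. An illustrative guard capturing that invariant (not from the repo):

    ALLOWED_SORT_FIELDS = {"created_at", "updated_at"}
    ALLOWED_DIRECTIONS = {"asc", "desc"}

    def order_clause(sort_by: str, direction: str) -> str:
        # Interpolation is acceptable only after membership in the whitelists.
        if sort_by not in ALLOWED_SORT_FIELDS or direction.lower() not in ALLOWED_DIRECTIONS:
            raise ValueError("sort_by/direction must come from the whitelists")
        return f" ORDER BY {sort_by} {direction}"
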
diff --git a/agents-api/agents_api/queries/sessions/create_session.py b/agents-api/agents_api/queries/sessions/create_session.py
index 63fbdc940..058462cf8 100644
--- a/agents-api/agents_api/queries/sessions/create_session.py
+++ b/agents-api/agents_api/queries/sessions/create_session.py
@@ -8,10 +8,8 @@
from ...autogen.openapi_model import (
CreateSessionRequest,
- ResourceCreatedResponse,
Session,
)
-from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 286fd10fb..6689137d7 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,6 +1,5 @@
import random
import string
-import time
from uuid import UUID
from fastapi.testclient import TestClient
@@ -12,6 +11,7 @@
CreateFileRequest,
CreateSessionRequest,
CreateUserRequest,
+ CreateDocRequest,
)
from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
@@ -21,7 +21,8 @@
# from agents_api.queries.agents.delete_agent import delete_agent
from agents_api.queries.developers.get_developer import get_developer
-# from agents_api.queries.docs.create_doc import create_doc
+from agents_api.queries.docs.create_doc import create_doc
+
# from agents_api.queries.docs.delete_doc import delete_doc
# from agents_api.queries.execution.create_execution import create_execution
# from agents_api.queries.execution.create_execution_transition import create_execution_transition
@@ -149,6 +150,22 @@ async def test_file(dsn=pg_dsn, developer=test_developer, user=test_user):
return file
+@fixture(scope="test")
+async def test_doc(dsn=pg_dsn, developer=test_developer):
+ pool = await create_db_pool(dsn=dsn)
+ doc = await create_doc(
+ developer_id=developer.id,
+ data=CreateDocRequest(
+ title="Hello",
+ content=["World"],
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
+ connection_pool=pool,
+ )
+ return doc
+
+
@fixture(scope="test")
async def random_email():
return f"{"".join([random.choice(string.ascii_lowercase) for _ in range(10)])}@mail.com"
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index f2ff2c786..d6af42e57 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -1,163 +1,249 @@
-# # Tests for entry queries
+from ward import test
-# import asyncio
+from agents_api.autogen.openapi_model import CreateDocRequest
+from agents_api.clients.pg import create_db_pool
+from agents_api.queries.docs.create_doc import create_doc
+from agents_api.queries.docs.delete_doc import delete_doc
+from agents_api.queries.docs.get_doc import get_doc
+from agents_api.queries.docs.list_docs import list_docs
-# from ward import test
-
-# from agents_api.autogen.openapi_model import CreateDocRequest
-# from agents_api.queries.docs.create_doc import create_doc
-# from agents_api.queries.docs.delete_doc import delete_doc
-# from agents_api.queries.docs.embed_snippets import embed_snippets
-# from agents_api.queries.docs.get_doc import get_doc
-# from agents_api.queries.docs.list_docs import list_docs
-# from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
+# If you wish to test text/embedding/hybrid search, import them:
# from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
-# from tests.fixtures import (
-# EMBEDDING_SIZE,
-# cozo_client,
-# test_agent,
-# test_developer_id,
-# test_doc,
-# test_user,
-# )
-
-
-# @test("query: create docs")
-# def _(
-# client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user
-# ):
-# create_doc(
-# developer_id=developer_id,
-# owner_type="agent",
-# owner_id=agent.id,
-# data=CreateDocRequest(title="Hello", content=["World"]),
-# client=client,
-# )
-
-# create_doc(
-# developer_id=developer_id,
-# owner_type="user",
-# owner_id=user.id,
-# data=CreateDocRequest(title="Hello", content=["World"]),
-# client=client,
-# )
-
-
-# @test("query: get docs")
-# def _(client=cozo_client, doc=test_doc, developer_id=test_developer_id):
-# get_doc(
-# developer_id=developer_id,
-# doc_id=doc.id,
-# client=client,
-# )
-
-
-# @test("query: delete doc")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# doc = create_doc(
-# developer_id=developer_id,
-# owner_type="agent",
-# owner_id=agent.id,
-# data=CreateDocRequest(title="Hello", content=["World"]),
-# client=client,
-# )
-
-# delete_doc(
-# developer_id=developer_id,
-# doc_id=doc.id,
-# owner_type="agent",
-# owner_id=agent.id,
-# client=client,
-# )
-
-
-# @test("query: list docs")
-# def _(
-# client=cozo_client, developer_id=test_developer_id, doc=test_doc, agent=test_agent
-# ):
-# result = list_docs(
-# developer_id=developer_id,
-# owner_type="agent",
-# owner_id=agent.id,
-# client=client,
-# include_without_embeddings=True,
-# )
-
-# assert len(result) >= 1
-
-
-# @test("query: search docs by text")
-# async def _(client=cozo_client, agent=test_agent, developer_id=test_developer_id):
-# create_doc(
-# developer_id=developer_id,
-# owner_type="agent",
-# owner_id=agent.id,
-# data=CreateDocRequest(
-# title="Hello", content=["The world is a funny little thing"]
-# ),
-# client=client,
-# )
-
-# await asyncio.sleep(1)
-
-# result = search_docs_by_text(
-# developer_id=developer_id,
-# owners=[("agent", agent.id)],
-# query="funny",
-# client=client,
-# )
-
-# assert len(result) >= 1
-# assert result[0].metadata is not None
-
-
-# @test("query: search docs by embedding")
-# async def _(client=cozo_client, agent=test_agent, developer_id=test_developer_id):
-# doc = create_doc(
-# developer_id=developer_id,
-# owner_type="agent",
-# owner_id=agent.id,
-# data=CreateDocRequest(title="Hello", content=["World"]),
-# client=client,
-# )
-
-# ### Add embedding to the snippet
-# embed_snippets(
-# developer_id=developer_id,
-# doc_id=doc.id,
-# snippet_indices=[0],
-# embeddings=[[1.0] * EMBEDDING_SIZE],
-# client=client,
-# )
-
-# await asyncio.sleep(1)
-
-# ### Search
-# query_embedding = [0.99] * EMBEDDING_SIZE
-
-# result = search_docs_by_embedding(
-# developer_id=developer_id,
-# owners=[("agent", agent.id)],
-# query_embedding=query_embedding,
-# client=client,
-# )
-
-# assert len(result) >= 1
-# assert result[0].metadata is not None
-
-
-# @test("query: embed snippets")
-# def _(client=cozo_client, developer_id=test_developer_id, doc=test_doc):
-# snippet_indices = [0]
-# embeddings = [[1.0] * EMBEDDING_SIZE]
-
-# result = embed_snippets(
-# developer_id=developer_id,
-# doc_id=doc.id,
-# snippet_indices=snippet_indices,
-# embeddings=embeddings,
-# client=client,
-# )
-
-# assert result is not None
-# assert result.id == doc.id
+# from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
+# from agents_api.queries.docs.search_docs_hybrid import search_docs_hybrid
+
+# You can rename or remove these imports to match your actual fixtures
+from tests.fixtures import pg_dsn, test_agent, test_developer, test_user, test_doc
+
+
+@test("query: create doc")
+async def _(dsn=pg_dsn, developer=test_developer):
+ pool = await create_db_pool(dsn=dsn)
+ doc = await create_doc(
+ developer_id=developer.id,
+ data=CreateDocRequest(
+ title="Hello Doc",
+ content="This is sample doc content",
+ embed_instruction="Embed the document",
+ metadata={"test": "test"},
+ ),
+ connection_pool=pool,
+ )
+
+ assert doc.title == "Hello Doc"
+ assert doc.content == "This is sample doc content"
+ assert doc.modality == "text"
+ assert doc.embedding_model == "voyage-3"
+ assert doc.embedding_dimensions == 1024
+ assert doc.language == "english"
+ assert doc.index == 0
+
+@test("query: create user doc")
+async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
+ pool = await create_db_pool(dsn=dsn)
+ doc = await create_doc(
+ developer_id=developer.id,
+ data=CreateDocRequest(
+ title="User Doc",
+ content="Docs for user testing",
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+ assert doc.title == "User Doc"
+
+ # Verify doc appears in user's docs
+ docs_list = await list_docs(
+ developer_id=developer.id,
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+ assert any(d.id == doc.id for d in docs_list)
+
+@test("query: create agent doc")
+async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
+ pool = await create_db_pool(dsn=dsn)
+ doc = await create_doc(
+ developer_id=developer.id,
+ data=CreateDocRequest(
+ title="Agent Doc",
+ content="Docs for agent testing",
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+ assert doc.title == "Agent Doc"
+
+ # Verify doc appears in agent's docs
+ docs_list = await list_docs(
+ developer_id=developer.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+ assert any(d.id == doc.id for d in docs_list)
+
+@test("model: get doc")
+async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
+ pool = await create_db_pool(dsn=dsn)
+ doc_test = await get_doc(
+ developer_id=developer.id,
+ doc_id=doc.id,
+ connection_pool=pool,
+ )
+ assert doc_test.id == doc.id
+ assert doc_test.title == doc.title
+
+@test("query: list docs")
+async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
+ pool = await create_db_pool(dsn=dsn)
+ docs_list = await list_docs(
+ developer_id=developer.id,
+ connection_pool=pool,
+ )
+ assert len(docs_list) >= 1
+ assert any(d.id == doc.id for d in docs_list)
+
+@test("query: list user docs")
+async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a doc owned by the user
+ doc_user = await create_doc(
+ developer_id=developer.id,
+ data=CreateDocRequest(
+ title="User List Test",
+ content="Some user doc content",
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+
+ # List user's docs
+ docs_list = await list_docs(
+ developer_id=developer.id,
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+ assert len(docs_list) >= 1
+ assert any(d.id == doc_user.id for d in docs_list)
+
+@test("query: list agent docs")
+async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a doc owned by the agent
+ doc_agent = await create_doc(
+ developer_id=developer.id,
+ data=CreateDocRequest(
+ title="Agent List Test",
+ content="Some agent doc content",
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+
+ # List agent's docs
+ docs_list = await list_docs(
+ developer_id=developer.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+ assert len(docs_list) >= 1
+ assert any(d.id == doc_agent.id for d in docs_list)
+
+@test("query: delete user doc")
+async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a doc owned by the user
+ doc_user = await create_doc(
+ developer_id=developer.id,
+ data=CreateDocRequest(
+ title="User Delete Test",
+ content="Doc for user deletion test",
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+
+ # Delete the doc
+ await delete_doc(
+ developer_id=developer.id,
+ doc_id=doc_user.id,
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+
+ # Verify doc is no longer in user's docs
+ docs_list = await list_docs(
+ developer_id=developer.id,
+ owner_type="user",
+ owner_id=user.id,
+ connection_pool=pool,
+ )
+ assert not any(d.id == doc_user.id for d in docs_list)
+
+@test("query: delete agent doc")
+async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a doc owned by the agent
+ doc_agent = await create_doc(
+ developer_id=developer.id,
+ data=CreateDocRequest(
+ title="Agent Delete Test",
+ content="Doc for agent deletion test",
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+
+ # Delete the doc
+ await delete_doc(
+ developer_id=developer.id,
+ doc_id=doc_agent.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+
+ # Verify doc is no longer in agent's docs
+ docs_list = await list_docs(
+ developer_id=developer.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ connection_pool=pool,
+ )
+ assert not any(d.id == doc_agent.id for d in docs_list)
+
+@test("query: delete doc")
+async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
+ pool = await create_db_pool(dsn=dsn)
+ await delete_doc(
+ developer_id=developer.id,
+ doc_id=doc.id,
+ connection_pool=pool,
+ )
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 706185c7b..2a9746ef1 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -3,7 +3,6 @@
It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
"""
-from uuid import UUID
from fastapi import HTTPException
from uuid_extensions import uuid7
diff --git a/agents-api/tests/test_files_queries.py b/agents-api/tests/test_files_queries.py
index 92b52d733..c83c7a6f6 100644
--- a/agents-api/tests/test_files_queries.py
+++ b/agents-api/tests/test_files_queries.py
@@ -1,9 +1,7 @@
# # Tests for entry queries
-from fastapi import HTTPException
-from uuid_extensions import uuid7
-from ward import raises, test
+from ward import test
from agents_api.autogen.openapi_model import CreateFileRequest
from agents_api.clients.pg import create_db_pool
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 171e56aa8..4673d6fc5 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -10,7 +10,6 @@
CreateOrUpdateSessionRequest,
CreateSessionRequest,
PatchSessionRequest,
- ResourceCreatedResponse,
ResourceDeletedResponse,
ResourceUpdatedResponse,
Session,
diff --git a/integrations-service/integrations/autogen/Docs.py b/integrations-service/integrations/autogen/Docs.py
index ffed27c1d..af5f60d6a 100644
--- a/integrations-service/integrations/autogen/Docs.py
+++ b/integrations-service/integrations/autogen/Docs.py
@@ -73,6 +73,30 @@ class Doc(BaseModel):
"""
Embeddings for the document
"""
+ modality: Annotated[str | None, Field(json_schema_extra={"readOnly": True})] = None
+ """
+ Modality of the document
+ """
+ language: Annotated[str | None, Field(json_schema_extra={"readOnly": True})] = None
+ """
+ Language of the document
+ """
+ index: Annotated[int | None, Field(json_schema_extra={"readOnly": True})] = None
+ """
+ Index of the document
+ """
+ embedding_model: Annotated[
+ str | None, Field(json_schema_extra={"readOnly": True})
+ ] = None
+ """
+ Embedding model to use for the document
+ """
+ embedding_dimensions: Annotated[
+ int | None, Field(json_schema_extra={"readOnly": True})
+ ] = None
+ """
+ Dimensions of the embedding model
+ """
class DocOwner(BaseModel):
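
Editor's note: the new Doc fields use Annotated with Field(json_schema_extra={"readOnly": True}), which marks them read-only in the emitted JSON schema but does not prevent assignment at runtime. A minimal sketch of that behavior (DocSketch is a stand-in, not the generated class):

    from typing import Annotated
    from pydantic import BaseModel, Field

    class DocSketch(BaseModel):
        modality: Annotated[str | None, Field(json_schema_extra={"readOnly": True})] = None

    # readOnly appears in the schema, yet the field is still settable in Python:
    print(DocSketch.model_json_schema()["properties"]["modality"])
    print(DocSketch(modality="text").modality)
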
diff --git a/typespec/docs/models.tsp b/typespec/docs/models.tsp
index 055fc2003..f4d16cbd5 100644
--- a/typespec/docs/models.tsp
+++ b/typespec/docs/models.tsp
@@ -27,6 +27,26 @@ model Doc {
/** Embeddings for the document */
@visibility("read")
embeddings?: float32[] | float32[][];
+
+ @visibility("read")
+ /** Modality of the document */
+ modality?: string;
+
+ @visibility("read")
+ /** Language of the document */
+ language?: string;
+
+ @visibility("read")
+ /** Index of the document */
+ index?: uint16;
+
+ @visibility("read")
+ /** Embedding model to use for the document */
+ embedding_model?: string;
+
+ @visibility("read")
+ /** Dimensions of the embedding model */
+ embedding_dimensions?: uint16;
}
/** Payload for creating a doc */
diff --git a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
index d4835a695..c19bc4ed2 100644
--- a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
+++ b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
@@ -2876,6 +2876,28 @@ components:
format: float
description: Embeddings for the document
readOnly: true
+ modality:
+ type: string
+ description: Modality of the document
+ readOnly: true
+ language:
+ type: string
+ description: Language of the document
+ readOnly: true
+ index:
+ type: integer
+ format: uint16
+ description: Index of the document
+ readOnly: true
+ embedding_model:
+ type: string
+ description: Embedding model to use for the document
+ readOnly: true
+ embedding_dimensions:
+ type: integer
+ format: uint16
+ description: Dimensions of the embedding model
+ readOnly: true
Docs.DocOwner:
type: object
required:
From 7b0be5c5ae15d7c8b2b6d34689b746278c79fdb4 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Fri, 20 Dec 2024 19:44:02 +0000
Subject: [PATCH 108/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/docs/__init__.py | 1 +
agents-api/agents_api/queries/docs/create_doc.py | 8 ++++----
agents-api/agents_api/queries/docs/get_doc.py | 6 ++++--
agents-api/agents_api/queries/docs/list_docs.py | 6 ++++--
agents-api/agents_api/queries/files/list_files.py | 1 +
agents-api/tests/fixtures.py | 3 +--
agents-api/tests/test_docs_queries.py | 14 +++++++++++---
agents-api/tests/test_entry_queries.py | 1 -
8 files changed, 26 insertions(+), 14 deletions(-)
diff --git a/agents-api/agents_api/queries/docs/__init__.py b/agents-api/agents_api/queries/docs/__init__.py
index f7c207bf2..75f9516a6 100644
--- a/agents-api/agents_api/queries/docs/__init__.py
+++ b/agents-api/agents_api/queries/docs/__init__.py
@@ -20,6 +20,7 @@
from .delete_doc import delete_doc
from .get_doc import get_doc
from .list_docs import list_docs
+
# from .search_docs_by_embedding import search_docs_by_embedding
# from .search_docs_by_text import search_docs_by_text
diff --git a/agents-api/agents_api/queries/docs/create_doc.py b/agents-api/agents_api/queries/docs/create_doc.py
index 4528e9fc5..bf789fad2 100644
--- a/agents-api/agents_api/queries/docs/create_doc.py
+++ b/agents-api/agents_api/queries/docs/create_doc.py
@@ -1,3 +1,4 @@
+import ast
from typing import Literal
from uuid import UUID
@@ -7,9 +8,6 @@
from sqlglot import parse_one
from uuid_extensions import uuid7
-import ast
-
-
from ...autogen.openapi_model import CreateDocRequest, Doc
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
@@ -86,7 +84,9 @@
transform=lambda d: {
**d,
"id": d["doc_id"],
- "content": ast.literal_eval(d["content"])[0] if len(ast.literal_eval(d["content"])) == 1 else ast.literal_eval(d["content"]),
+ "content": ast.literal_eval(d["content"])[0]
+ if len(ast.literal_eval(d["content"])) == 1
+ else ast.literal_eval(d["content"]),
},
)
@increase_counter("create_doc")
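
Editor's note: the transform reformatted here assumes the content column comes back from Postgres as the string repr of a Python list, and unwraps single-element lists to a bare string. A runnable sketch of that behavior with illustrative inputs:

    import ast

    def unwrap_content(raw: str):
        parsed = ast.literal_eval(raw)  # e.g. "['World']" -> ['World']
        return parsed[0] if len(parsed) == 1 else parsed

    assert unwrap_content("['World']") == "World"
    assert unwrap_content("['a', 'b']") == ["a", "b"]
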
diff --git a/agents-api/agents_api/queries/docs/get_doc.py b/agents-api/agents_api/queries/docs/get_doc.py
index 9155f500a..b46563dbb 100644
--- a/agents-api/agents_api/queries/docs/get_doc.py
+++ b/agents-api/agents_api/queries/docs/get_doc.py
@@ -1,9 +1,9 @@
+import ast
from typing import Literal
from uuid import UUID
from beartype import beartype
from sqlglot import parse_one
-import ast
from ...autogen.openapi_model import Doc
from ..utils import pg_query, wrap_in_class
@@ -28,7 +28,9 @@
transform=lambda d: {
**d,
"id": d["doc_id"],
- "content": ast.literal_eval(d["content"])[0] if len(ast.literal_eval(d["content"])) == 1 else ast.literal_eval(d["content"]),
+ "content": ast.literal_eval(d["content"])[0]
+ if len(ast.literal_eval(d["content"])) == 1
+ else ast.literal_eval(d["content"]),
# "embeddings": d["embeddings"],
},
)
diff --git a/agents-api/agents_api/queries/docs/list_docs.py b/agents-api/agents_api/queries/docs/list_docs.py
index a4df08e73..92cbacf7f 100644
--- a/agents-api/agents_api/queries/docs/list_docs.py
+++ b/agents-api/agents_api/queries/docs/list_docs.py
@@ -1,10 +1,10 @@
+import ast
from typing import Any, Literal
from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-import ast
from ...autogen.openapi_model import Doc
from ..utils import pg_query, wrap_in_class
@@ -24,7 +24,9 @@
transform=lambda d: {
**d,
"id": d["doc_id"],
- "content": ast.literal_eval(d["content"])[0] if len(ast.literal_eval(d["content"])) == 1 else ast.literal_eval(d["content"]),
+ "content": ast.literal_eval(d["content"])[0]
+ if len(ast.literal_eval(d["content"])) == 1
+ else ast.literal_eval(d["content"]),
# "embeddings": d["embeddings"],
},
)
diff --git a/agents-api/agents_api/queries/files/list_files.py b/agents-api/agents_api/queries/files/list_files.py
index 7c8b67887..2f36def4f 100644
--- a/agents-api/agents_api/queries/files/list_files.py
+++ b/agents-api/agents_api/queries/files/list_files.py
@@ -9,6 +9,7 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
+
from ...autogen.openapi_model import File
from ..utils import pg_query, wrap_in_class
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 6689137d7..2f7de580e 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -8,10 +8,10 @@
from agents_api.autogen.openapi_model import (
CreateAgentRequest,
+ CreateDocRequest,
CreateFileRequest,
CreateSessionRequest,
CreateUserRequest,
- CreateDocRequest,
)
from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
@@ -20,7 +20,6 @@
# from agents_api.queries.agents.delete_agent import delete_agent
from agents_api.queries.developers.get_developer import get_developer
-
from agents_api.queries.docs.create_doc import create_doc
# from agents_api.queries.docs.delete_doc import delete_doc
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index d6af42e57..1410c88c9 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -11,9 +11,8 @@
# from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
# from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
# from agents_api.queries.docs.search_docs_hybrid import search_docs_hybrid
-
# You can rename or remove these imports to match your actual fixtures
-from tests.fixtures import pg_dsn, test_agent, test_developer, test_user, test_doc
+from tests.fixtures import pg_dsn, test_agent, test_developer, test_doc, test_user
@test("query: create doc")
@@ -29,7 +28,7 @@ async def _(dsn=pg_dsn, developer=test_developer):
),
connection_pool=pool,
)
-
+
assert doc.title == "Hello Doc"
assert doc.content == "This is sample doc content"
assert doc.modality == "text"
@@ -38,6 +37,7 @@ async def _(dsn=pg_dsn, developer=test_developer):
assert doc.language == "english"
assert doc.index == 0
+
@test("query: create user doc")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
pool = await create_db_pool(dsn=dsn)
@@ -64,6 +64,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
)
assert any(d.id == doc.id for d in docs_list)
+
@test("query: create agent doc")
async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
pool = await create_db_pool(dsn=dsn)
@@ -90,6 +91,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
)
assert any(d.id == doc.id for d in docs_list)
+
@test("model: get doc")
async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
pool = await create_db_pool(dsn=dsn)
@@ -101,6 +103,7 @@ async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
assert doc_test.id == doc.id
assert doc_test.title == doc.title
+
@test("query: list docs")
async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
pool = await create_db_pool(dsn=dsn)
@@ -111,6 +114,7 @@ async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
assert len(docs_list) >= 1
assert any(d.id == doc.id for d in docs_list)
+
@test("query: list user docs")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
pool = await create_db_pool(dsn=dsn)
@@ -139,6 +143,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
assert len(docs_list) >= 1
assert any(d.id == doc_user.id for d in docs_list)
+
@test("query: list agent docs")
async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
pool = await create_db_pool(dsn=dsn)
@@ -167,6 +172,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
assert len(docs_list) >= 1
assert any(d.id == doc_agent.id for d in docs_list)
+
@test("query: delete user doc")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
pool = await create_db_pool(dsn=dsn)
@@ -203,6 +209,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
)
assert not any(d.id == doc_user.id for d in docs_list)
+
@test("query: delete agent doc")
async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
pool = await create_db_pool(dsn=dsn)
@@ -239,6 +246,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
)
assert not any(d.id == doc_agent.id for d in docs_list)
+
@test("query: delete doc")
async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
pool = await create_db_pool(dsn=dsn)
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 2a9746ef1..ae825ed92 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -3,7 +3,6 @@
It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
"""
-
from fastapi import HTTPException
from uuid_extensions import uuid7
from ward import raises, test
From dc0ec364e7a250db8811108953338ffcdc0baf1e Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Fri, 20 Dec 2024 15:25:52 -0500
Subject: [PATCH 109/274] wip: initial set of exceptions added
---
.../agents_api/queries/agents/create_agent.py | 58 +++++++++----------
.../queries/agents/create_or_update_agent.py | 37 +++++++++---
.../agents_api/queries/agents/delete_agent.py | 39 +++++++++----
.../agents_api/queries/agents/get_agent.py | 28 +++++----
.../agents_api/queries/agents/list_agents.py | 29 ++++++----
.../agents_api/queries/agents/patch_agent.py | 38 ++++++++----
.../agents_api/queries/agents/update_agent.py | 39 +++++++++----
.../queries/developers/create_developer.py | 4 +-
.../queries/developers/patch_developer.py | 4 +-
.../queries/developers/update_developer.py | 5 ++
.../agents_api/queries/files/create_file.py | 38 ++++++------
.../agents_api/queries/files/delete_file.py | 5 ++
.../agents_api/queries/files/get_file.py | 33 ++++++-----
.../agents_api/queries/files/list_files.py | 13 ++++-
.../sessions/create_or_update_session.py | 7 ++-
.../queries/sessions/create_session.py | 7 ++-
.../queries/sessions/delete_session.py | 2 +-
.../queries/sessions/get_session.py | 2 +-
.../queries/sessions/list_sessions.py | 18 +++---
.../queries/sessions/patch_session.py | 7 ++-
.../queries/sessions/update_session.py | 7 ++-
.../queries/users/create_or_update_user.py | 4 +-
.../agents_api/queries/users/create_user.py | 6 +-
.../agents_api/queries/users/delete_user.py | 2 +-
.../agents_api/queries/users/get_user.py | 5 --
.../agents_api/queries/users/list_users.py | 5 --
.../agents_api/queries/users/patch_user.py | 4 +-
.../agents_api/queries/users/update_user.py | 4 +-
28 files changed, 283 insertions(+), 167 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 76c96f46b..0b7a7d208 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -8,13 +8,16 @@
from beartype import beartype
from sqlglot import parse_one
from uuid_extensions import uuid7
-
+import asyncpg
+from fastapi import HTTPException
from ...autogen.openapi_model import Agent, CreateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
generate_canonical_name,
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass,
)
# Define the raw SQL query
@@ -45,35 +48,30 @@
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# psycopg_errors.ForeignKeyViolation: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified developer does not exist.",
-# ),
-# psycopg_errors.UniqueViolation: partialclass(
-# HTTPException,
-# status_code=409,
-# detail="An agent with this canonical name already exists for this developer.",
-# ),
-# psycopg_errors.CheckViolation: partialclass(
-# HTTPException,
-# status_code=400,
-# detail="The provided data violates one or more constraints. Please check the input values.",
-# ),
-# ValidationError: partialclass(
-# HTTPException,
-# status_code=400,
-# detail="Input validation failed. Please check the provided data.",
-# ),
-# TypeError: partialclass(
-# HTTPException,
-# status_code=400,
-# detail="A type mismatch occurred. Please review the input.",
-# ),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.exceptions.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ asyncpg.exceptions.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="An agent with this canonical name already exists for this developer.",
+ ),
+ asyncpg.exceptions.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="The provided data violates one or more constraints. Please check the input values.",
+ ),
+ asyncpg.exceptions.DataError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid data provided. Please check the input values.",
+ ),
+ }
+)
@wrap_in_class(
Agent,
one=True,
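
Editor's note: rewrap_exceptions and partialclass are imported from queries/utils.py, whose definitions are not part of this patch. A plausible sketch of what they do, assuming the decorated query function is async and that partialclass pre-binds constructor kwargs such as status_code and detail:

    import functools

    def partialclass(cls, **kwargs):
        # Pre-bind constructor arguments, e.g. on HTTPException.
        class Partial(cls):
            __init__ = functools.partialmethod(cls.__init__, **kwargs)
        return Partial

    def rewrap_exceptions(mapping):
        def decorator(fn):
            @functools.wraps(fn)
            async def wrapper(*args, **kwargs):
                try:
                    return await fn(*args, **kwargs)
                except BaseException as exc:
                    for exc_type, replacement in mapping.items():
                        if isinstance(exc, exc_type):
                            raise replacement() from exc
                    raise
            return wrapper
        return decorator
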
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index ef3a0abe5..fd70e5f8b 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -7,6 +7,8 @@
from beartype import beartype
from sqlglot import parse_one
+from fastapi import HTTPException
+import asyncpg
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
from ...metrics.counters import increase_counter
@@ -14,6 +16,8 @@
generate_canonical_name,
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass,
)
# Define the raw SQL query
@@ -44,15 +48,30 @@
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# psycopg_errors.ForeignKeyViolation: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified developer does not exist.",
-# )
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.exceptions.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ asyncpg.exceptions.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="An agent with this canonical name already exists for this developer.",
+ ),
+ asyncpg.exceptions.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="The provided data violates one or more constraints. Please check the input values.",
+ ),
+ asyncpg.exceptions.DataError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid data provided. Please check the input values.",
+ ),
+ }
+)
@wrap_in_class(
Agent,
one=True,
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index c0ca3919f..64b3e392e 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -7,12 +7,16 @@
from beartype import beartype
from sqlglot import parse_one
+from fastapi import HTTPException
+import asyncpg
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass,
)
# Define the raw SQL query
@@ -59,17 +63,30 @@
""").sql(pretty=True)
-# @rewrap_exceptions(
-# @rewrap_exceptions(
-# {
-# psycopg_errors.ForeignKeyViolation: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified developer does not exist.",
-# )
-# }
-# # TODO: Add more exceptions
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.exceptions.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ asyncpg.exceptions.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="An agent with this canonical name already exists for this developer.",
+ ),
+ asyncpg.exceptions.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="The provided data violates one or more constraints. Please check the input values.",
+ ),
+ asyncpg.exceptions.DataError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid data provided. Please check the input values.",
+ ),
+ }
+)
@wrap_in_class(
ResourceDeletedResponse,
one=True,
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index a731300fa..985937b0d 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -7,11 +7,15 @@
from beartype import beartype
from sqlglot import parse_one
+from fastapi import HTTPException
+import asyncpg
from ...autogen.openapi_model import Agent
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass,
)
# Define the raw SQL query
@@ -35,16 +39,20 @@
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# psycopg_errors.ForeignKeyViolation: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified developer does not exist.",
-# )
-# }
-# # TODO: Add more exceptions
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.exceptions.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ asyncpg.exceptions.DataError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid data provided. Please check the input values.",
+ ),
+ }
+)
@wrap_in_class(Agent, one=True, transform=lambda d: {"id": d["agent_id"], **d})
@pg_query
@beartype
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 87a0c942d..68ee3c73a 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -8,11 +8,13 @@
from beartype import beartype
from fastapi import HTTPException
-
+import asyncpg
from ...autogen.openapi_model import Agent
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass,
)
# Define the raw SQL query
@@ -39,17 +41,20 @@
LIMIT $2 OFFSET $3;
"""
-
-# @rewrap_exceptions(
-# {
-# psycopg_errors.ForeignKeyViolation: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified developer does not exist.",
-# )
-# }
-# # TODO: Add more exceptions
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.exceptions.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ asyncpg.exceptions.DataError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid data provided. Please check the input values.",
+ ),
+ }
+)
@wrap_in_class(Agent, transform=lambda d: {"id": d["agent_id"], **d})
@pg_query
@beartype
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index 69a5a6ca5..fef682858 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -7,12 +7,16 @@
from beartype import beartype
from sqlglot import parse_one
+from fastapi import HTTPException
+import asyncpg
from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass,
)
# Define the raw SQL query
@@ -44,16 +48,30 @@
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# psycopg_errors.ForeignKeyViolation: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified developer does not exist.",
-# )
-# }
-# # TODO: Add more exceptions
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.exceptions.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ asyncpg.exceptions.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="An agent with this canonical name already exists for this developer.",
+ ),
+ asyncpg.exceptions.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="The provided data violates one or more constraints. Please check the input values.",
+ ),
+ asyncpg.exceptions.DataError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid data provided. Please check the input values.",
+ ),
+ }
+)
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index f28e28264..5e33fdddd 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -7,12 +7,15 @@
from beartype import beartype
from sqlglot import parse_one
-
+from fastapi import HTTPException
+import asyncpg
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass,
)
# Define the raw SQL query
@@ -29,16 +32,30 @@
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# psycopg_errors.ForeignKeyViolation: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified developer does not exist.",
-# )
-# }
-# # TODO: Add more exceptions
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.exceptions.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ ),
+ asyncpg.exceptions.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="An agent with this canonical name already exists for this developer.",
+ ),
+ asyncpg.exceptions.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="The provided data violates one or more constraints. Please check the input values.",
+ ),
+ asyncpg.exceptions.DataError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid data provided. Please check the input values.",
+ ),
+ }
+)
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
diff --git a/agents-api/agents_api/queries/developers/create_developer.py b/agents-api/agents_api/queries/developers/create_developer.py
index bed6371c4..51011a63b 100644
--- a/agents-api/agents_api/queries/developers/create_developer.py
+++ b/agents-api/agents_api/queries/developers/create_developer.py
@@ -38,8 +38,8 @@
{
asyncpg.UniqueViolationError: partialclass(
HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
+ status_code=409,
+ detail="A developer with this email already exists.",
)
}
)
diff --git a/agents-api/agents_api/queries/developers/patch_developer.py b/agents-api/agents_api/queries/developers/patch_developer.py
index af2ddb1f8..e14c8bbd0 100644
--- a/agents-api/agents_api/queries/developers/patch_developer.py
+++ b/agents-api/agents_api/queries/developers/patch_developer.py
@@ -26,8 +26,8 @@
{
asyncpg.UniqueViolationError: partialclass(
HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
+ status_code=409,
+ detail="A developer with this email already exists.",
)
}
)
diff --git a/agents-api/agents_api/queries/developers/update_developer.py b/agents-api/agents_api/queries/developers/update_developer.py
index d41b333d5..659dcb111 100644
--- a/agents-api/agents_api/queries/developers/update_developer.py
+++ b/agents-api/agents_api/queries/developers/update_developer.py
@@ -28,6 +28,11 @@
HTTPException,
status_code=404,
detail="The specified developer does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A developer with this email already exists.",
)
}
)
diff --git a/agents-api/agents_api/queries/files/create_file.py b/agents-api/agents_api/queries/files/create_file.py
index 48251fa5e..f2e35a6f4 100644
--- a/agents-api/agents_api/queries/files/create_file.py
+++ b/agents-api/agents_api/queries/files/create_file.py
@@ -60,25 +60,25 @@
# Add error handling decorator
-# @rewrap_exceptions(
-# {
-# asyncpg.UniqueViolationError: partialclass(
-# HTTPException,
-# status_code=409,
-# detail="A file with this name already exists for this developer",
-# ),
-# asyncpg.NoDataFoundError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified owner does not exist",
-# ),
-# asyncpg.ForeignKeyViolationError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="The specified developer does not exist",
-# ),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A file with this name already exists for this developer",
+ ),
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or owner does not exist",
+ ),
+ asyncpg.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="File size must be positive and name must be between 1 and 255 characters",
+ ),
+ }
+)
@wrap_in_class(
File,
one=True,
diff --git a/agents-api/agents_api/queries/files/delete_file.py b/agents-api/agents_api/queries/files/delete_file.py
index 31cb43404..4cf0142ae 100644
--- a/agents-api/agents_api/queries/files/delete_file.py
+++ b/agents-api/agents_api/queries/files/delete_file.py
@@ -48,6 +48,11 @@
status_code=404,
detail="File not found",
),
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or owner does not exist",
+ ),
}
)
@wrap_in_class(
diff --git a/agents-api/agents_api/queries/files/get_file.py b/agents-api/agents_api/queries/files/get_file.py
index 5ccb08d86..882a93ab7 100644
--- a/agents-api/agents_api/queries/files/get_file.py
+++ b/agents-api/agents_api/queries/files/get_file.py
@@ -8,9 +8,12 @@
from beartype import beartype
from sqlglot import parse_one
+import asyncpg
+from fastapi import HTTPException
from ...autogen.openapi_model import File
-from ..utils import pg_query, wrap_in_class
+from ..utils import pg_query, wrap_in_class, rewrap_exceptions, partialclass
+
# Define the raw SQL query
file_query = parse_one("""
@@ -27,20 +30,20 @@
""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# asyncpg.NoDataFoundError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="File not found",
-# ),
-# asyncpg.ForeignKeyViolationError: partialclass(
-# HTTPException,
-# status_code=404,
-# detail="Developer not found",
-# ),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="File not found",
+ ),
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or owner does not exist",
+ ),
+ }
+)
@wrap_in_class(
File,
one=True,
diff --git a/agents-api/agents_api/queries/files/list_files.py b/agents-api/agents_api/queries/files/list_files.py
index 2f36def4f..7908bf37d 100644
--- a/agents-api/agents_api/queries/files/list_files.py
+++ b/agents-api/agents_api/queries/files/list_files.py
@@ -9,9 +9,10 @@
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
+import asyncpg
from ...autogen.openapi_model import File
-from ..utils import pg_query, wrap_in_class
+from ..utils import pg_query, wrap_in_class, rewrap_exceptions, partialclass
# Base query for listing files
base_files_query = parse_one("""
@@ -21,7 +22,15 @@
WHERE f.developer_id = $1
""").sql(pretty=True)
-
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or owner does not exist",
+ ),
+ }
+)
@wrap_in_class(
File,
one=False,
diff --git a/agents-api/agents_api/queries/sessions/create_or_update_session.py b/agents-api/agents_api/queries/sessions/create_or_update_session.py
index 3c4dbf66e..b6c280b01 100644
--- a/agents-api/agents_api/queries/sessions/create_or_update_session.py
+++ b/agents-api/agents_api/queries/sessions/create_or_update_session.py
@@ -70,13 +70,18 @@
asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
- detail="The specified developer or participant does not exist.",
+ detail="The specified developer or session does not exist.",
),
asyncpg.UniqueViolationError: partialclass(
HTTPException,
status_code=409,
detail="A session with this ID already exists.",
),
+ asyncpg.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid session data provided.",
+ ),
}
)
@wrap_in_class(
diff --git a/agents-api/agents_api/queries/sessions/create_session.py b/agents-api/agents_api/queries/sessions/create_session.py
index 058462cf8..0bb967ce5 100644
--- a/agents-api/agents_api/queries/sessions/create_session.py
+++ b/agents-api/agents_api/queries/sessions/create_session.py
@@ -58,13 +58,18 @@
asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
- detail="The specified developer or participant does not exist.",
+ detail="The specified developer or session does not exist.",
),
asyncpg.UniqueViolationError: partialclass(
HTTPException,
status_code=409,
detail="A session with this ID already exists.",
),
+ asyncpg.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid session data provided.",
+ ),
}
)
@wrap_in_class(
diff --git a/agents-api/agents_api/queries/sessions/delete_session.py b/agents-api/agents_api/queries/sessions/delete_session.py
index 2e3234fe2..ff5317f58 100644
--- a/agents-api/agents_api/queries/sessions/delete_session.py
+++ b/agents-api/agents_api/queries/sessions/delete_session.py
@@ -30,7 +30,7 @@
asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
- detail="The specified developer does not exist.",
+ detail="The specified developer or session does not exist.",
),
}
)
diff --git a/agents-api/agents_api/queries/sessions/get_session.py b/agents-api/agents_api/queries/sessions/get_session.py
index 1f704539e..cc12d0f88 100644
--- a/agents-api/agents_api/queries/sessions/get_session.py
+++ b/agents-api/agents_api/queries/sessions/get_session.py
@@ -51,7 +51,7 @@
asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
- detail="The specified developer does not exist.",
+ detail="The specified developer or session does not exist.",
),
asyncpg.NoDataFoundError: partialclass(
HTTPException, status_code=404, detail="Session not found"
diff --git a/agents-api/agents_api/queries/sessions/list_sessions.py b/agents-api/agents_api/queries/sessions/list_sessions.py
index 3aabaf32d..c113c0192 100644
--- a/agents-api/agents_api/queries/sessions/list_sessions.py
+++ b/agents-api/agents_api/queries/sessions/list_sessions.py
@@ -12,7 +12,7 @@
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query
-raw_query = """
+session_query = """
WITH session_participants AS (
SELECT
sl.session_id,
@@ -49,11 +49,6 @@
LIMIT $2 OFFSET $6;
"""
-# Parse and optimize the query
-# query = parse_one(raw_query).sql(pretty=True)
-query = raw_query
-
-
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
@@ -62,7 +57,14 @@
detail="The specified developer does not exist.",
),
asyncpg.NoDataFoundError: partialclass(
- HTTPException, status_code=404, detail="No sessions found"
+ HTTPException,
+ status_code=404,
+ detail="No sessions found",
+ ),
+ asyncpg.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid session data provided.",
),
}
)
@@ -94,7 +96,7 @@ async def list_sessions(
tuple[str, list]: SQL query and parameters
"""
return (
- query,
+ session_query,
[
developer_id, # $1
limit, # $2
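
Editor's note: most queries in this series are normalized at import time via parse_one(...).sql(pretty=True); list_sessions keeps the raw string (the parse_one step stays commented out) and this hunk simply renames it to session_query. A quick sketch of what that sqlglot normalization does elsewhere (output shape is approximate):

    from sqlglot import parse_one

    raw = "select s.*, p.name from sessions s join session_lookup p on p.session_id = s.session_id"
    print(parse_one(raw).sql(pretty=True))
    # Prints an uppercased, pretty-printed form resembling:
    # SELECT s.*, p.name
    # FROM sessions AS s
    # JOIN session_lookup AS p ON p.session_id = s.session_id
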
diff --git a/agents-api/agents_api/queries/sessions/patch_session.py b/agents-api/agents_api/queries/sessions/patch_session.py
index 7d526ae1a..d7533e124 100644
--- a/agents-api/agents_api/queries/sessions/patch_session.py
+++ b/agents-api/agents_api/queries/sessions/patch_session.py
@@ -37,13 +37,18 @@
asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
- detail="The specified developer or participant does not exist.",
+ detail="The specified developer or session does not exist.",
),
asyncpg.NoDataFoundError: partialclass(
HTTPException,
status_code=404,
detail="Session not found",
),
+ asyncpg.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid session data provided.",
+ ),
}
)
@wrap_in_class(
diff --git a/agents-api/agents_api/queries/sessions/update_session.py b/agents-api/agents_api/queries/sessions/update_session.py
index 7c58d10e6..e3f46c0af 100644
--- a/agents-api/agents_api/queries/sessions/update_session.py
+++ b/agents-api/agents_api/queries/sessions/update_session.py
@@ -33,13 +33,18 @@
asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
- detail="The specified developer or participant does not exist.",
+ detail="The specified developer or session does not exist.",
),
asyncpg.NoDataFoundError: partialclass(
HTTPException,
status_code=404,
detail="Session not found",
),
+ asyncpg.CheckViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid session data provided.",
+ ),
}
)
@wrap_in_class(
diff --git a/agents-api/agents_api/queries/users/create_or_update_user.py b/agents-api/agents_api/queries/users/create_or_update_user.py
index 965ae4ce4..0a2936a9b 100644
--- a/agents-api/agents_api/queries/users/create_or_update_user.py
+++ b/agents-api/agents_api/queries/users/create_or_update_user.py
@@ -40,10 +40,10 @@
status_code=404,
detail="The specified developer does not exist.",
),
- asyncpg.UniqueViolationError: partialclass( # Add handling for potential race conditions
+ asyncpg.UniqueViolationError: partialclass(
HTTPException,
status_code=409,
- detail="A user with this ID already exists.",
+ detail="A user with this ID already exists for the specified developer.",
),
}
)
diff --git a/agents-api/agents_api/queries/users/create_user.py b/agents-api/agents_api/queries/users/create_user.py
index 8f35a646c..e246c7255 100644
--- a/agents-api/agents_api/queries/users/create_user.py
+++ b/agents-api/agents_api/queries/users/create_user.py
@@ -37,10 +37,10 @@
status_code=404,
detail="The specified developer does not exist.",
),
- asyncpg.NullValueNoIndicatorParameterError: partialclass(
+ asyncpg.UniqueViolationError: partialclass(
HTTPException,
- status_code=404,
- detail="The specified developer does not exist.",
+ status_code=409,
+ detail="A user with this ID already exists for the specified developer.",
),
}
)
diff --git a/agents-api/agents_api/queries/users/delete_user.py b/agents-api/agents_api/queries/users/delete_user.py
index ad5befd73..6b8497980 100644
--- a/agents-api/agents_api/queries/users/delete_user.py
+++ b/agents-api/agents_api/queries/users/delete_user.py
@@ -56,7 +56,7 @@
status_code=404,
detail="The specified developer does not exist.",
),
- asyncpg.UniqueViolationError: partialclass(
+ asyncpg.DataError: partialclass(
HTTPException,
status_code=404,
detail="The specified user does not exist.",
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index 2b71f9192..07a840621 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -31,11 +31,6 @@
status_code=404,
detail="The specified developer does not exist.",
),
- asyncpg.UniqueViolationError: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified user does not exist.",
- ),
}
)
@wrap_in_class(User, one=True)
diff --git a/agents-api/agents_api/queries/users/list_users.py b/agents-api/agents_api/queries/users/list_users.py
index 0f0818135..75fd62b4b 100644
--- a/agents-api/agents_api/queries/users/list_users.py
+++ b/agents-api/agents_api/queries/users/list_users.py
@@ -42,11 +42,6 @@
status_code=404,
detail="The specified developer does not exist.",
),
- asyncpg.UniqueViolationError: partialclass(
- HTTPException,
- status_code=404,
- detail="The specified user does not exist.",
- ),
}
)
@wrap_in_class(User)
diff --git a/agents-api/agents_api/queries/users/patch_user.py b/agents-api/agents_api/queries/users/patch_user.py
index c55ee31b7..fb2d8bfad 100644
--- a/agents-api/agents_api/queries/users/patch_user.py
+++ b/agents-api/agents_api/queries/users/patch_user.py
@@ -47,8 +47,8 @@
),
asyncpg.UniqueViolationError: partialclass(
HTTPException,
- status_code=404,
- detail="The specified user does not exist.",
+ status_code=409,
+ detail="A user with this ID already exists for the specified developer.",
),
}
)
diff --git a/agents-api/agents_api/queries/users/update_user.py b/agents-api/agents_api/queries/users/update_user.py
index 91572e15d..975dc57c7 100644
--- a/agents-api/agents_api/queries/users/update_user.py
+++ b/agents-api/agents_api/queries/users/update_user.py
@@ -31,8 +31,8 @@
),
asyncpg.UniqueViolationError: partialclass(
HTTPException,
- status_code=404,
- detail="The specified user does not exist.",
+ status_code=409,
+ detail="A user with this ID already exists for the specified developer.",
),
}
)
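
`rewrap_exceptions` is likewise defined in `..utils`; judging from how these hunks use it, it wraps the decorated query so that a raised asyncpg error is translated into the mapped HTTPException class. A rough sketch under that assumption:

    import functools

    def rewrap_exceptions(mapping):
        # Hypothetical sketch: re-raise any mapped asyncpg exception as the
        # partialclass-built HTTPException configured for it in the mapping.
        # (Exact-type lookup shown here; the real helper may walk the MRO.)
        def decorator(fn):
            @functools.wraps(fn)
            async def wrapper(*args, **kwargs):
                try:
                    return await fn(*args, **kwargs)
                except tuple(mapping) as exc:
                    raise mapping[type(exc)]() from exc
            return wrapper
        return decorator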
From 32d67bc9a5e7f286fb9008a104329e61858aa002 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Fri, 20 Dec 2024 20:26:41 +0000
Subject: [PATCH 110/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/agents/create_agent.py | 9 +++++----
.../queries/agents/create_or_update_agent.py | 8 ++++----
agents-api/agents_api/queries/agents/delete_agent.py | 8 ++++----
agents-api/agents_api/queries/agents/get_agent.py | 8 ++++----
agents-api/agents_api/queries/agents/list_agents.py | 8 +++++---
agents-api/agents_api/queries/agents/patch_agent.py | 8 ++++----
agents-api/agents_api/queries/agents/update_agent.py | 9 +++++----
.../queries/developers/update_developer.py | 2 +-
agents-api/agents_api/queries/files/get_file.py | 12 ++++++++----
agents-api/agents_api/queries/files/list_files.py | 5 +++--
.../agents_api/queries/sessions/list_sessions.py | 1 +
11 files changed, 44 insertions(+), 34 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 0b7a7d208..5294cfa6d 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -5,19 +5,20 @@
from uuid import UUID
+import asyncpg
from beartype import beartype
+from fastapi import HTTPException
from sqlglot import parse_one
from uuid_extensions import uuid7
-import asyncpg
-from fastapi import HTTPException
+
from ...autogen.openapi_model import Agent, CreateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
generate_canonical_name,
+ partialclass,
pg_query,
- wrap_in_class,
rewrap_exceptions,
- partialclass,
+ wrap_in_class,
)
# Define the raw SQL query
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index fd70e5f8b..fcef53fd6 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -5,19 +5,19 @@
from uuid import UUID
+import asyncpg
from beartype import beartype
-from sqlglot import parse_one
from fastapi import HTTPException
-import asyncpg
+from sqlglot import parse_one
from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
generate_canonical_name,
+ partialclass,
pg_query,
- wrap_in_class,
rewrap_exceptions,
- partialclass,
+ wrap_in_class,
)
# Define the raw SQL query
diff --git a/agents-api/agents_api/queries/agents/delete_agent.py b/agents-api/agents_api/queries/agents/delete_agent.py
index 64b3e392e..2fd1f1406 100644
--- a/agents-api/agents_api/queries/agents/delete_agent.py
+++ b/agents-api/agents_api/queries/agents/delete_agent.py
@@ -5,18 +5,18 @@
from uuid import UUID
+import asyncpg
from beartype import beartype
-from sqlglot import parse_one
from fastapi import HTTPException
-import asyncpg
+from sqlglot import parse_one
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
from ..utils import (
+ partialclass,
pg_query,
- wrap_in_class,
rewrap_exceptions,
- partialclass,
+ wrap_in_class,
)
# Define the raw SQL query
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index 985937b0d..79fa1c4fc 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -5,17 +5,17 @@
from uuid import UUID
+import asyncpg
from beartype import beartype
-from sqlglot import parse_one
from fastapi import HTTPException
-import asyncpg
+from sqlglot import parse_one
from ...autogen.openapi_model import Agent
from ..utils import (
+ partialclass,
pg_query,
- wrap_in_class,
rewrap_exceptions,
- partialclass,
+ wrap_in_class,
)
# Define the raw SQL query
diff --git a/agents-api/agents_api/queries/agents/list_agents.py b/agents-api/agents_api/queries/agents/list_agents.py
index 68ee3c73a..11b9dc283 100644
--- a/agents-api/agents_api/queries/agents/list_agents.py
+++ b/agents-api/agents_api/queries/agents/list_agents.py
@@ -6,15 +6,16 @@
from typing import Any, Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
-import asyncpg
+
from ...autogen.openapi_model import Agent
from ..utils import (
+ partialclass,
pg_query,
- wrap_in_class,
rewrap_exceptions,
- partialclass,
+ wrap_in_class,
)
# Define the raw SQL query
@@ -41,6 +42,7 @@
LIMIT $2 OFFSET $3;
"""
+
@rewrap_exceptions(
{
asyncpg.exceptions.ForeignKeyViolationError: partialclass(
diff --git a/agents-api/agents_api/queries/agents/patch_agent.py b/agents-api/agents_api/queries/agents/patch_agent.py
index fef682858..06f0b9253 100644
--- a/agents-api/agents_api/queries/agents/patch_agent.py
+++ b/agents-api/agents_api/queries/agents/patch_agent.py
@@ -5,18 +5,18 @@
from uuid import UUID
+import asyncpg
from beartype import beartype
-from sqlglot import parse_one
from fastapi import HTTPException
-import asyncpg
+from sqlglot import parse_one
from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
from ...metrics.counters import increase_counter
from ..utils import (
+ partialclass,
pg_query,
- wrap_in_class,
rewrap_exceptions,
- partialclass,
+ wrap_in_class,
)
# Define the raw SQL query
diff --git a/agents-api/agents_api/queries/agents/update_agent.py b/agents-api/agents_api/queries/agents/update_agent.py
index 5e33fdddd..4d19229d8 100644
--- a/agents-api/agents_api/queries/agents/update_agent.py
+++ b/agents-api/agents_api/queries/agents/update_agent.py
@@ -5,17 +5,18 @@
from uuid import UUID
+import asyncpg
from beartype import beartype
-from sqlglot import parse_one
from fastapi import HTTPException
-import asyncpg
+from sqlglot import parse_one
+
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
from ...metrics.counters import increase_counter
from ..utils import (
+ partialclass,
pg_query,
- wrap_in_class,
rewrap_exceptions,
- partialclass,
+ wrap_in_class,
)
# Define the raw SQL query
diff --git a/agents-api/agents_api/queries/developers/update_developer.py b/agents-api/agents_api/queries/developers/update_developer.py
index 659dcb111..8f3e7cd87 100644
--- a/agents-api/agents_api/queries/developers/update_developer.py
+++ b/agents-api/agents_api/queries/developers/update_developer.py
@@ -33,7 +33,7 @@
HTTPException,
status_code=409,
detail="A developer with this email already exists.",
- )
+ ),
}
)
@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
diff --git a/agents-api/agents_api/queries/files/get_file.py b/agents-api/agents_api/queries/files/get_file.py
index 882a93ab7..04ba8ea71 100644
--- a/agents-api/agents_api/queries/files/get_file.py
+++ b/agents-api/agents_api/queries/files/get_file.py
@@ -6,14 +6,18 @@
from typing import Literal
from uuid import UUID
-from beartype import beartype
-from sqlglot import parse_one
import asyncpg
+from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
from ...autogen.openapi_model import File
-from ..utils import pg_query, wrap_in_class, rewrap_exceptions, partialclass, partialclass
-
+from ..utils import (
+ partialclass,
+ pg_query,
+ rewrap_exceptions,
+ wrap_in_class,
+)
# Define the raw SQL query
file_query = parse_one("""
diff --git a/agents-api/agents_api/queries/files/list_files.py b/agents-api/agents_api/queries/files/list_files.py
index 7908bf37d..d3866dacc 100644
--- a/agents-api/agents_api/queries/files/list_files.py
+++ b/agents-api/agents_api/queries/files/list_files.py
@@ -6,13 +6,13 @@
from typing import Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-import asyncpg
from ...autogen.openapi_model import File
-from ..utils import pg_query, wrap_in_class, rewrap_exceptions, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Base query for listing files
base_files_query = parse_one("""
@@ -22,6 +22,7 @@
WHERE f.developer_id = $1
""").sql(pretty=True)
+
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
diff --git a/agents-api/agents_api/queries/sessions/list_sessions.py b/agents-api/agents_api/queries/sessions/list_sessions.py
index c113c0192..ac3573e61 100644
--- a/agents-api/agents_api/queries/sessions/list_sessions.py
+++ b/agents-api/agents_api/queries/sessions/list_sessions.py
@@ -49,6 +49,7 @@
LIMIT $2 OFFSET $6;
"""
+
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
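
The import shuffles in this lint pass all follow the same isort-style convention: standard library first, then third-party packages (`asyncpg`, `beartype`, `fastapi`, `sqlglot`), then local imports, with `from`-import names alphabetized. The resulting canonical header, as applied across the agent query modules:

    from uuid import UUID            # standard library

    import asyncpg                   # third-party
    from beartype import beartype
    from fastapi import HTTPException
    from sqlglot import parse_one

    from ..utils import (            # local, names alphabetized
        partialclass,
        pg_query,
        rewrap_exceptions,
        wrap_in_class,
    )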
From 831e950ead49c33eaed6972ff47f29067f8dac81 Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Fri, 20 Dec 2024 16:40:38 -0500
Subject: [PATCH 111/274] chore: added embedding reading + docstring updates
---
.../agents_api/queries/docs/create_doc.py | 13 +++++++
.../agents_api/queries/docs/delete_doc.py | 9 +++++
.../agents_api/queries/docs/embed_snippets.py | 37 +++++++++++++++++++
agents-api/agents_api/queries/docs/get_doc.py | 26 ++++++++++---
.../agents_api/queries/docs/list_docs.py | 29 ++++++++++-----
.../queries/docs/search_docs_by_embedding.py | 29 ++++++++++-----
.../queries/docs/search_docs_by_text.py | 29 ++++++++++-----
.../queries/entries/create_entries.py | 22 +++++++++++
.../queries/entries/delete_entries.py | 11 +++++-
.../agents_api/queries/entries/get_history.py | 10 +++++
.../queries/entries/list_entries.py | 15 ++++++++
11 files changed, 194 insertions(+), 36 deletions(-)
diff --git a/agents-api/agents_api/queries/docs/create_doc.py b/agents-api/agents_api/queries/docs/create_doc.py
index bf789fad2..59fd40004 100644
--- a/agents-api/agents_api/queries/docs/create_doc.py
+++ b/agents-api/agents_api/queries/docs/create_doc.py
@@ -107,6 +107,19 @@ async def create_doc(
) -> list[tuple[str, list] | tuple[str, list, str]]:
"""
Insert a new doc record into Timescale and optionally associate it with an owner.
+
+ Parameters:
+ owner_type (Literal["user", "agent"]): The type of the owner of the documents.
+ owner_id (UUID): The ID of the owner of the documents.
+ modality (Literal["text", "image", "mixed"]): The modality of the documents.
+ embedding_model (str): The model used for embedding.
+ embedding_dimensions (int): The dimensions of the embedding.
+ language (str): The language of the documents.
+ index (int): The index of the documents.
+ data (CreateDocRequest): The data for the document.
+
+ Returns:
+ list[tuple[str, list] | tuple[str, list, str]]: SQL query and parameters for creating the document.
"""
# Generate a UUID if not provided
doc_id = doc_id or uuid7()
diff --git a/agents-api/agents_api/queries/docs/delete_doc.py b/agents-api/agents_api/queries/docs/delete_doc.py
index adeb09bd8..5697ca8d6 100644
--- a/agents-api/agents_api/queries/docs/delete_doc.py
+++ b/agents-api/agents_api/queries/docs/delete_doc.py
@@ -67,6 +67,15 @@ async def delete_doc(
"""
Deletes a doc (and associated doc_owners) for the given developer and doc_id.
If owner_type/owner_id is specified, only remove doc if that matches.
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ doc_id (UUID): The ID of the document.
+ owner_type (Literal["user", "agent"]): The type of the owner of the documents.
+ owner_id (UUID): The ID of the owner of the documents.
+
+ Returns:
+ tuple[str, list]: SQL query and parameters for deleting the document.
"""
return (
delete_doc_query,
diff --git a/agents-api/agents_api/queries/docs/embed_snippets.py b/agents-api/agents_api/queries/docs/embed_snippets.py
index e69de29bb..1a20d6a34 100644
--- a/agents-api/agents_api/queries/docs/embed_snippets.py
+++ b/agents-api/agents_api/queries/docs/embed_snippets.py
@@ -0,0 +1,37 @@
+from typing import Literal
+from uuid import UUID
+
+from beartype import beartype
+from sqlglot import parse_one
+
+from ..utils import pg_query
+
+# TODO: This is a placeholder for the actual query
+vectorizer_query = None
+
+
+@pg_query
+@beartype
+async def embed_snippets(
+ *,
+ developer_id: UUID,
+ doc_id: UUID,
+ owner_type: Literal["user", "agent"] | None = None,
+ owner_id: UUID | None = None,
+) -> tuple[str, list]:
+ """
+ Trigger the vectorizer to generate embeddings for documents.
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ doc_id (UUID): The ID of the document.
+ owner_type (Literal["user", "agent"]): The type of the owner of the documents.
+ owner_id (UUID): The ID of the owner of the documents.
+
+ Returns:
+ tuple[str, list]: SQL query and parameters for embedding the snippets.
+ """
+ return (
+ vectorizer_query,
+ [developer_id, doc_id, owner_type, owner_id],
+ )
diff --git a/agents-api/agents_api/queries/docs/get_doc.py b/agents-api/agents_api/queries/docs/get_doc.py
index b46563dbb..8575f77b0 100644
--- a/agents-api/agents_api/queries/docs/get_doc.py
+++ b/agents-api/agents_api/queries/docs/get_doc.py
@@ -8,10 +8,15 @@
from ...autogen.openapi_model import Doc
from ..utils import pg_query, wrap_in_class
-doc_query = parse_one("""
-SELECT d.*
+# Combined query to fetch document details and embedding
+doc_with_embedding_query = parse_one("""
+SELECT d.*, e.embedding
FROM docs d
-LEFT JOIN doc_owners doc_own ON d.developer_id = doc_own.developer_id AND d.doc_id = doc_own.doc_id
+LEFT JOIN doc_owners doc_own
+ ON d.developer_id = doc_own.developer_id
+ AND d.doc_id = doc_own.doc_id
+LEFT JOIN docs_embeddings e
+ ON d.doc_id = e.doc_id
WHERE d.developer_id = $1
AND d.doc_id = $2
AND (
@@ -31,7 +36,7 @@
"content": ast.literal_eval(d["content"])[0]
if len(ast.literal_eval(d["content"])) == 1
else ast.literal_eval(d["content"]),
- # "embeddings": d["embeddings"],
+ "embedding": d["embedding"], # Add embedding to the transformation
},
)
@pg_query
@@ -44,9 +49,18 @@ async def get_doc(
owner_id: UUID | None = None,
) -> tuple[str, list]:
"""
- Fetch a single doc, optionally constrained to a given owner.
+ Fetch a single doc with its embedding, optionally constrained to a given owner.
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ doc_id (UUID): The ID of the document.
+ owner_type (Literal["user", "agent"]): The type of the owner of the documents.
+ owner_id (UUID): The ID of the owner of the documents.
+
+ Returns:
+ tuple[str, list]: SQL query and parameters for fetching the document.
"""
return (
- doc_query,
+ doc_with_embedding_query,
[developer_id, doc_id, owner_type, owner_id],
)
diff --git a/agents-api/agents_api/queries/docs/list_docs.py b/agents-api/agents_api/queries/docs/list_docs.py
index 92cbacf7f..8ea196958 100644
--- a/agents-api/agents_api/queries/docs/list_docs.py
+++ b/agents-api/agents_api/queries/docs/list_docs.py
@@ -9,11 +9,12 @@
from ...autogen.openapi_model import Doc
from ..utils import pg_query, wrap_in_class
-# Base query for listing docs
+# Base query for listing docs with optional embeddings
base_docs_query = parse_one("""
-SELECT d.*
+SELECT d.*, CASE WHEN $2 THEN NULL ELSE e.embedding END AS embedding
FROM docs d
LEFT JOIN doc_owners doc_own ON d.developer_id = doc_own.developer_id AND d.doc_id = doc_own.doc_id
+LEFT JOIN docs_embeddings e ON d.doc_id = e.doc_id
WHERE d.developer_id = $1
""").sql(pretty=True)
@@ -27,7 +28,7 @@
"content": ast.literal_eval(d["content"])[0]
if len(ast.literal_eval(d["content"])) == 1
else ast.literal_eval(d["content"]),
- # "embeddings": d["embeddings"],
+ "embedding": d.get("embedding"), # Add embedding to the transformation
},
)
@pg_query
@@ -46,6 +47,20 @@ async def list_docs(
) -> tuple[str, list]:
"""
Lists docs with optional owner filtering, pagination, and sorting.
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ owner_id (UUID): The ID of the owner of the documents.
+ owner_type (Literal["user", "agent"]): The type of the owner of the documents.
+ limit (int): The number of documents to return.
+ offset (int): The number of documents to skip.
+ sort_by (Literal["created_at", "updated_at"]): The field to sort by.
+ direction (Literal["asc", "desc"]): The direction to sort by.
+ metadata_filter (dict[str, Any]): The metadata filter to apply.
+ include_without_embeddings (bool): Whether to include documents without embeddings.
+
+ Returns:
+ tuple[str, list]: SQL query and parameters for listing the documents.
"""
if direction.lower() not in ["asc", "desc"]:
raise HTTPException(status_code=400, detail="Invalid sort direction")
@@ -61,11 +76,11 @@ async def list_docs(
# Start with the base query
query = base_docs_query
- params = [developer_id]
+ params = [developer_id, include_without_embeddings]
# Add owner filtering
if owner_type and owner_id:
- query += " AND doc_own.owner_type = $2 AND doc_own.owner_id = $3"
+ query += " AND doc_own.owner_type = $3 AND doc_own.owner_id = $4"
params.extend([owner_type, owner_id])
# Add metadata filtering
@@ -74,10 +89,6 @@ async def list_docs(
query += f" AND d.metadata->>'{key}' = ${len(params) + 1}"
params.append(value)
- # Include or exclude documents without embeddings
- # if not include_without_embeddings:
- # query += " AND d.embeddings IS NOT NULL"
-
# Add sorting and pagination
query += f" ORDER BY {sort_by} {direction} LIMIT ${len(params) + 1} OFFSET ${len(params) + 2}"
params.extend([limit, offset])
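
The `${len(params) + 1}` trick keeps the SQL placeholder numbers in lockstep with the growing parameter list. Tracing the branches above at this point in the series:

    params = [developer_id, include_without_embeddings]  # $1, $2
    # the owner filter, when present, appends two more:
    params.extend([owner_type, owner_id])                # $3, $4
    # each metadata key then renders as the next free slot:
    # f" AND d.metadata->>'{key}' = ${len(params) + 1}"  -> $5, $6, ...
    # and limit/offset always land in the last two slots:
    params.extend([limit, offset])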
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
index e3120bd36..c7b15ee64 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
@@ -9,7 +9,7 @@
from fastapi import HTTPException
from sqlglot import parse_one
-from ...autogen.openapi_model import Doc
+from ...autogen.openapi_model import DocReference
from ..utils import pg_query, wrap_in_class
# If you're doing approximate ANN (DiskANN) or IVF, you might use a special function or hint.
@@ -33,11 +33,14 @@
@wrap_in_class(
- Doc,
- one=False,
- transform=lambda rec: {
- **rec,
- "id": rec["doc_id"],
+ DocReference,
+ transform=lambda d: {
+ "owner": {
+ "id": d["owner_id"],
+ "role": d["owner_type"],
+ },
+ "metadata": d.get("metadata", {}),
+ **d,
},
)
@pg_query
@@ -52,10 +55,16 @@ async def search_docs_by_embedding(
) -> tuple[str, list]:
"""
Vector-based doc search:
- - developer_id is required
- - query_embedding: the vector to query
- - k: number of results to return
- - owner_type/owner_id: optional doc ownership filter
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ query_embedding (List[float]): The vector to query.
+ k (int): The number of results to return.
+ owner_type (Literal["user", "agent", "org"]): The type of the owner of the documents.
+ owner_id (UUID): The ID of the owner of the documents.
+
+ Returns:
+ tuple[str, list]: SQL query and parameters for searching the documents.
"""
if k < 1:
raise HTTPException(status_code=400, detail="k must be >= 1")
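
To make the new transform concrete: a row coming back from the search query is reshaped before `DocReference` validation roughly like this (the row values are illustrative, not real output):

    row = {
        "doc_id": "doc-uuid",
        "owner_type": "agent",
        "owner_id": "agent-uuid",
        "metadata": {"test": "test"},
    }
    transformed = {
        "owner": {"id": row["owner_id"], "role": row["owner_type"]},
        "metadata": row.get("metadata", {}),
        **row,  # spread last, so the row's own keys win on any collision
    }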
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_text.py b/agents-api/agents_api/queries/docs/search_docs_by_text.py
index 9f434d438..0ab309ee8 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_text.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_text.py
@@ -9,7 +9,7 @@
from fastapi import HTTPException
from sqlglot import parse_one
-from ...autogen.openapi_model import Doc
+from ...autogen.openapi_model import DocReference
from ..utils import pg_query, wrap_in_class
search_docs_text_query = parse_one("""
@@ -31,11 +31,14 @@
@wrap_in_class(
- Doc,
- one=False,
- transform=lambda rec: {
- **rec,
- "id": rec["doc_id"],
+ DocReference,
+ transform=lambda d: {
+ "owner": {
+ "id": d["owner_id"],
+ "role": d["owner_type"],
+ },
+ "metadata": d.get("metadata", {}),
+ **d,
},
)
@pg_query
@@ -50,10 +53,16 @@ async def search_docs_by_text(
) -> tuple[str, list]:
"""
Full-text search on docs using the search_tsv column.
- - developer_id: required
- - query: the text to look for
- - k: max results
- - owner_type / owner_id: optional doc ownership filter
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ query (str): The text to search for.
+ k (int): The number of results to return.
+ owner_type (Literal["user", "agent", "org"]): The type of the owner of the documents.
+ owner_id (UUID): The ID of the owner of the documents.
+
+ Returns:
+ tuple[str, list]: SQL query and parameters for searching the documents.
"""
if k < 1:
raise HTTPException(status_code=400, detail="k must be >= 1")
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index 95973ad0b..d8439fa21 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -94,6 +94,17 @@ async def create_entries(
session_id: UUID,
data: list[CreateEntryRequest],
) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
+ """
+ Create entries in a session.
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ session_id (UUID): The ID of the session.
+ data (list[CreateEntryRequest]): The list of entries to create.
+
+ Returns:
+ list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]: SQL query and parameters for creating the entries.
+ """
# Convert the data to a list of dictionaries
data_dicts = [item.model_dump(mode="json") for item in data]
@@ -163,6 +174,17 @@ async def add_entry_relations(
session_id: UUID,
data: list[Relation],
) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
+ """
+ Add relations between entries in a session.
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ session_id (UUID): The ID of the session.
+ data (list[Relation]): The list of relations to add.
+
+ Returns:
+ list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]: SQL query and parameters for adding the relations.
+ """
# Convert the data to a list of dictionaries
data_dicts = [item.model_dump(mode="json") for item in data]
diff --git a/agents-api/agents_api/queries/entries/delete_entries.py b/agents-api/agents_api/queries/entries/delete_entries.py
index 47b7379a4..14a9648e5 100644
--- a/agents-api/agents_api/queries/entries/delete_entries.py
+++ b/agents-api/agents_api/queries/entries/delete_entries.py
@@ -134,7 +134,16 @@ async def delete_entries_for_session(
async def delete_entries(
*, developer_id: UUID, session_id: UUID, entry_ids: list[UUID]
) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
- """Delete specific entries by their IDs."""
+ """Delete specific entries by their IDs.
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ session_id (UUID): The ID of the session.
+ entry_ids (list[UUID]): The IDs of the entries to delete.
+
+ Returns:
+ list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]: SQL query and parameters for deleting the entries.
+ """
return [
(
session_exists_query,
diff --git a/agents-api/agents_api/queries/entries/get_history.py b/agents-api/agents_api/queries/entries/get_history.py
index ffa0746c0..6a734d4c5 100644
--- a/agents-api/agents_api/queries/entries/get_history.py
+++ b/agents-api/agents_api/queries/entries/get_history.py
@@ -95,6 +95,16 @@ async def get_history(
session_id: UUID,
allowed_sources: list[str] = ["api_request", "api_response"],
) -> tuple[str, list] | tuple[str, list, str]:
+ """Get the history of a session.
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ session_id (UUID): The ID of the session.
+ allowed_sources (list[str]): The sources to include in the history.
+
+ Returns:
+ tuple[str, list] | tuple[str, list, str]: SQL query and parameters for getting the history.
+ """
return (
history_query,
[session_id, allowed_sources, developer_id],
diff --git a/agents-api/agents_api/queries/entries/list_entries.py b/agents-api/agents_api/queries/entries/list_entries.py
index 89f432734..0153fe778 100644
--- a/agents-api/agents_api/queries/entries/list_entries.py
+++ b/agents-api/agents_api/queries/entries/list_entries.py
@@ -88,6 +88,21 @@ async def list_entries(
direction: Literal["asc", "desc"] = "asc",
exclude_relations: list[str] = [],
) -> list[tuple[str, list] | tuple[str, list, str]]:
+ """List entries in a session.
+
+ Parameters:
+ developer_id (UUID): The ID of the developer.
+ session_id (UUID): The ID of the session.
+ allowed_sources (list[str]): The sources to include in the history.
+ limit (int): The number of entries to return.
+ offset (int): The number of entries to skip.
+ sort_by (Literal["created_at", "timestamp"]): The field to sort by.
+ direction (Literal["asc", "desc"]): The direction to sort by.
+ exclude_relations (list[str]): The relations to exclude.
+
+ Returns:
+ tuple[str, list] | tuple[str, list, str]: SQL query and parameters for listing the entries.
+ """
if limit < 1 or limit > 1000:
raise HTTPException(status_code=400, detail="Limit must be between 1 and 1000")
if offset < 0:
From 74add36fd068a2c16942feb74c91d0cf3541489f Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Fri, 20 Dec 2024 21:41:35 +0000
Subject: [PATCH 112/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/docs/list_docs.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/agents-api/agents_api/queries/docs/list_docs.py b/agents-api/agents_api/queries/docs/list_docs.py
index 8ea196958..bfbc2971e 100644
--- a/agents-api/agents_api/queries/docs/list_docs.py
+++ b/agents-api/agents_api/queries/docs/list_docs.py
@@ -48,7 +48,7 @@ async def list_docs(
"""
Lists docs with optional owner filtering, pagination, and sorting.
- Parameters:
+ Parameters:
developer_id (UUID): The ID of the developer.
owner_id (UUID): The ID of the owner of the documents.
owner_type (Literal["user", "agent"]): The type of the owner of the documents.
From 249513d6c944f77ff579cb4cd7e51b362483178f Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Sat, 21 Dec 2024 03:12:06 -0500
Subject: [PATCH 113/274] chore: updated migrations + added indices support
---
.../queries/developers/get_developer.py | 9 +-
.../agents_api/queries/docs/__init__.py | 6 +-
.../agents_api/queries/docs/create_doc.py | 141 +++++++++++++-----
.../agents_api/queries/docs/delete_doc.py | 24 ++-
.../agents_api/queries/docs/embed_snippets.py | 37 -----
agents-api/agents_api/queries/docs/get_doc.py | 68 +++++----
.../agents_api/queries/docs/list_docs.py | 96 ++++++++----
.../queries/docs/search_docs_by_embedding.py | 4 -
.../queries/docs/search_docs_by_text.py | 76 ++++++----
.../queries/docs/search_docs_hybrid.py | 5 -
agents-api/tests/fixtures.py | 23 +--
agents-api/tests/test_docs_queries.py | 72 ++++-----
agents-api/tests/test_files_queries.py | 2 +-
memory-store/migrations/000006_docs.up.sql | 9 +-
.../migrations/000018_doc_search.up.sql | 57 +++----
15 files changed, 349 insertions(+), 280 deletions(-)
delete mode 100644 agents-api/agents_api/queries/docs/embed_snippets.py
diff --git a/agents-api/agents_api/queries/developers/get_developer.py b/agents-api/agents_api/queries/developers/get_developer.py
index 373a2fb36..79b6e6067 100644
--- a/agents-api/agents_api/queries/developers/get_developer.py
+++ b/agents-api/agents_api/queries/developers/get_developer.py
@@ -24,9 +24,6 @@
SELECT * FROM developers WHERE developer_id = $1 -- developer_id
""").sql(pretty=True)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
@rewrap_exceptions(
{
@@ -37,7 +34,11 @@
)
}
)
-@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
+@wrap_in_class(
+ Developer,
+ one=True,
+ transform=lambda d: {**d, "id": d["developer_id"]},
+)
@pg_query
@beartype
async def get_developer(
diff --git a/agents-api/agents_api/queries/docs/__init__.py b/agents-api/agents_api/queries/docs/__init__.py
index 75f9516a6..51bab2555 100644
--- a/agents-api/agents_api/queries/docs/__init__.py
+++ b/agents-api/agents_api/queries/docs/__init__.py
@@ -8,6 +8,7 @@
- Listing documents based on various criteria, including ownership and metadata filters.
- Deleting documents by their unique identifiers.
- Embedding document snippets for retrieval purposes.
+- Searching documents by text.
The module interacts with other parts of the application, such as the agents and users modules, to provide a comprehensive document management system. Its role is crucial in enabling document search, retrieval, and management features within the context of agents and users.
@@ -22,12 +23,13 @@
from .list_docs import list_docs
# from .search_docs_by_embedding import search_docs_by_embedding
-# from .search_docs_by_text import search_docs_by_text
+from .search_docs_by_text import search_docs_by_text
__all__ = [
"create_doc",
"delete_doc",
"get_doc",
"list_docs",
- # "search_docs_by_embct",
+ # "search_docs_by_embedding",
+ "search_docs_by_text",
]
diff --git a/agents-api/agents_api/queries/docs/create_doc.py b/agents-api/agents_api/queries/docs/create_doc.py
index 59fd40004..d8bcce7d3 100644
--- a/agents-api/agents_api/queries/docs/create_doc.py
+++ b/agents-api/agents_api/queries/docs/create_doc.py
@@ -47,15 +47,38 @@
INSERT INTO doc_owners (
developer_id,
doc_id,
+ index,
owner_type,
owner_id
)
- VALUES ($1, $2, $3, $4)
+ VALUES ($1, $2, $3, $4, $5)
RETURNING doc_id
)
-SELECT d.*
+SELECT DISTINCT ON (docs.doc_id)
+ docs.doc_id,
+ docs.developer_id,
+ docs.title,
+ array_agg(docs.content ORDER BY docs.index) as content,
+ array_agg(docs.index ORDER BY docs.index) as indices,
+ docs.modality,
+ docs.embedding_model,
+ docs.embedding_dimensions,
+ docs.language,
+ docs.metadata,
+ docs.created_at
+
FROM inserted_owner io
-JOIN docs d ON d.doc_id = io.doc_id;
+JOIN docs ON docs.doc_id = io.doc_id
+GROUP BY
+ docs.doc_id,
+ docs.developer_id,
+ docs.title,
+ docs.modality,
+ docs.embedding_model,
+ docs.embedding_dimensions,
+ docs.language,
+ docs.metadata,
+ docs.created_at;
""").sql(pretty=True)
@@ -82,11 +105,10 @@
Doc,
one=True,
transform=lambda d: {
- **d,
"id": d["doc_id"],
- "content": ast.literal_eval(d["content"])[0]
- if len(ast.literal_eval(d["content"])) == 1
- else ast.literal_eval(d["content"]),
+ "index": d["indices"][0],
+ "content": d["content"][0] if len(d["content"]) == 1 else d["content"],
+ **d,
},
)
@increase_counter("create_doc")
@@ -97,56 +119,99 @@ async def create_doc(
developer_id: UUID,
doc_id: UUID | None = None,
data: CreateDocRequest,
- owner_type: Literal["user", "agent"] | None = None,
- owner_id: UUID | None = None,
+ owner_type: Literal["user", "agent"],
+ owner_id: UUID,
modality: Literal["text", "image", "mixed"] | None = "text",
embedding_model: str | None = "voyage-3",
embedding_dimensions: int | None = 1024,
language: str | None = "english",
index: int | None = 0,
-) -> list[tuple[str, list] | tuple[str, list, str]]:
+) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
"""
- Insert a new doc record into Timescale and optionally associate it with an owner.
+ Insert a new doc record into Timescale and associate it with an owner.
Parameters:
- owner_type (Literal["user", "agent"]): The type of the owner of the documents.
- owner_id (UUID): The ID of the owner of the documents.
+ developer_id (UUID): The ID of the developer.
+ doc_id (UUID | None): Optional custom UUID for the document. If not provided, one will be generated.
+ data (CreateDocRequest): The data for the document.
+ owner_type (Literal["user", "agent"]): The type of the owner (required).
+ owner_id (UUID): The ID of the owner (required).
modality (Literal["text", "image", "mixed"]): The modality of the documents.
embedding_model (str): The model used for embedding.
embedding_dimensions (int): The dimensions of the embedding.
language (str): The language of the documents.
index (int): The index of the documents.
- data (CreateDocRequest): The data for the document.
Returns:
list[tuple[str, list] | tuple[str, list, str]]: SQL query and parameters for creating the document.
"""
+ queries = []
# Generate a UUID if not provided
- doc_id = doc_id or uuid7()
+ current_doc_id = uuid7() if doc_id is None else doc_id
- # check if content is a string
- if isinstance(data.content, str):
- data.content = [data.content]
+ # Check if content is a list
+ if isinstance(data.content, list):
+ final_params_doc = []
+ final_params_owner = []
+
+ for idx, content in enumerate(data.content):
+ doc_params = [
+ developer_id,
+ current_doc_id,
+ data.title,
+ content,
+ idx,
+ modality,
+ embedding_model,
+ embedding_dimensions,
+ language,
+ data.metadata or {},
+ ]
+ final_params_doc.append(doc_params)
- # Create the doc record
- doc_params = [
- developer_id,
- doc_id,
- data.title,
- str(data.content),
- index,
- modality,
- embedding_model,
- embedding_dimensions,
- language,
- data.metadata or {},
- ]
-
- queries = [(doc_query, doc_params)]
-
- # If an owner is specified, associate it:
- if owner_type and owner_id:
- owner_params = [developer_id, doc_id, owner_type, owner_id]
- queries.append((doc_owner_query, owner_params))
+ owner_params = [
+ developer_id,
+ current_doc_id,
+ idx,
+ owner_type,
+ owner_id,
+ ]
+ final_params_owner.append(owner_params)
+
+ # Add the doc query for each content
+ queries.append((doc_query, final_params_doc, "fetchmany"))
+
+ # Add the owner query
+ queries.append((doc_owner_query, final_params_owner, "fetchmany"))
+
+ else:
+
+ # Create the doc record
+ doc_params = [
+ developer_id,
+ current_doc_id,
+ data.title,
+ data.content,
+ index,
+ modality,
+ embedding_model,
+ embedding_dimensions,
+ language,
+ data.metadata or {},
+ ]
+
+ owner_params = [
+ developer_id,
+ current_doc_id,
+ index,
+ owner_type,
+ owner_id,
+ ]
+
+ # Add the doc query for single content
+ queries.append((doc_query, doc_params, "fetch"))
+
+ # Add the owner query
+ queries.append((doc_owner_query, owner_params, "fetch"))
return queries
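
So a document whose content is a list fans out into one row per chunk, batched with fetchmany. For a two-chunk doc the returned plan is, schematically (UUIDs abbreviated to names):

    queries = [
        (doc_query, [
            [developer_id, doc_id, "Hello", "World",  0, "text", "voyage-3", 1024, "english", {}],
            [developer_id, doc_id, "Hello", "World2", 1, "text", "voyage-3", 1024, "english", {}],
        ], "fetchmany"),
        (doc_owner_query, [
            [developer_id, doc_id, 0, "agent", owner_id],
            [developer_id, doc_id, 1, "agent", owner_id],
        ], "fetchmany"),
    ]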
diff --git a/agents-api/agents_api/queries/docs/delete_doc.py b/agents-api/agents_api/queries/docs/delete_doc.py
index 5697ca8d6..b0a9ea1a1 100644
--- a/agents-api/agents_api/queries/docs/delete_doc.py
+++ b/agents-api/agents_api/queries/docs/delete_doc.py
@@ -16,22 +16,18 @@
DELETE FROM doc_owners
WHERE developer_id = $1
AND doc_id = $2
- AND (
- ($3::text IS NULL AND $4::uuid IS NULL)
- OR (owner_type = $3 AND owner_id = $4)
- )
+ AND owner_type = $3
+ AND owner_id = $4
)
DELETE FROM docs
WHERE developer_id = $1
AND doc_id = $2
- AND (
- $3::text IS NULL OR EXISTS (
- SELECT 1 FROM doc_owners
- WHERE developer_id = $1
- AND doc_id = $2
- AND owner_type = $3
- AND owner_id = $4
- )
+ AND EXISTS (
+ SELECT 1 FROM doc_owners
+ WHERE developer_id = $1
+ AND doc_id = $2
+ AND owner_type = $3
+ AND owner_id = $4
)
RETURNING doc_id;
""").sql(pretty=True)
@@ -61,8 +57,8 @@ async def delete_doc(
*,
developer_id: UUID,
doc_id: UUID,
- owner_type: Literal["user", "agent"] | None = None,
- owner_id: UUID | None = None,
+ owner_type: Literal["user", "agent"],
+ owner_id: UUID,
) -> tuple[str, list]:
"""
Deletes a doc (and associated doc_owners) for the given developer and doc_id.
diff --git a/agents-api/agents_api/queries/docs/embed_snippets.py b/agents-api/agents_api/queries/docs/embed_snippets.py
deleted file mode 100644
index 1a20d6a34..000000000
--- a/agents-api/agents_api/queries/docs/embed_snippets.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from typing import Literal
-from uuid import UUID
-
-from beartype import beartype
-from sqlglot import parse_one
-
-from ..utils import pg_query
-
-# TODO: This is a placeholder for the actual query
-vectorizer_query = None
-
-
-@pg_query
-@beartype
-async def embed_snippets(
- *,
- developer_id: UUID,
- doc_id: UUID,
- owner_type: Literal["user", "agent"] | None = None,
- owner_id: UUID | None = None,
-) -> tuple[str, list]:
- """
- Trigger the vectorizer to generate embeddings for documents.
-
- Parameters:
- developer_id (UUID): The ID of the developer.
- doc_id (UUID): The ID of the document.
- owner_type (Literal["user", "agent"]): The type of the owner of the documents.
- owner_id (UUID): The ID of the owner of the documents.
-
- Returns:
- tuple[str, list]: SQL query and parameters for embedding the snippets.
- """
- return (
- vectorizer_query,
- [developer_id, doc_id, owner_type, owner_id],
- )
diff --git a/agents-api/agents_api/queries/docs/get_doc.py b/agents-api/agents_api/queries/docs/get_doc.py
index 8575f77b0..3f071cf87 100644
--- a/agents-api/agents_api/queries/docs/get_doc.py
+++ b/agents-api/agents_api/queries/docs/get_doc.py
@@ -8,35 +8,51 @@
from ...autogen.openapi_model import Doc
from ..utils import pg_query, wrap_in_class
-# Combined query to fetch document details and embedding
+# Update the query to use DISTINCT ON to prevent duplicates
doc_with_embedding_query = parse_one("""
-SELECT d.*, e.embedding
-FROM docs d
-LEFT JOIN doc_owners doc_own
- ON d.developer_id = doc_own.developer_id
- AND d.doc_id = doc_own.doc_id
-LEFT JOIN docs_embeddings e
- ON d.doc_id = e.doc_id
-WHERE d.developer_id = $1
- AND d.doc_id = $2
- AND (
- ($3::text IS NULL AND $4::uuid IS NULL)
- OR (doc_own.owner_type = $3 AND doc_own.owner_id = $4)
- )
-LIMIT 1;
+WITH doc_data AS (
+ SELECT DISTINCT ON (d.doc_id)
+ d.doc_id,
+ d.developer_id,
+ d.title,
+ array_agg(d.content ORDER BY d.index) as content,
+ array_agg(d.index ORDER BY d.index) as indices,
+ array_agg(e.embedding ORDER BY d.index) as embeddings,
+ d.modality,
+ d.embedding_model,
+ d.embedding_dimensions,
+ d.language,
+ d.metadata,
+ d.created_at
+ FROM docs d
+ LEFT JOIN docs_embeddings e
+ ON d.doc_id = e.doc_id
+ WHERE d.developer_id = $1
+ AND d.doc_id = $2
+ GROUP BY
+ d.doc_id,
+ d.developer_id,
+ d.title,
+ d.modality,
+ d.embedding_model,
+ d.embedding_dimensions,
+ d.language,
+ d.metadata,
+ d.created_at
+)
+SELECT * FROM doc_data;
""").sql(pretty=True)
@wrap_in_class(
Doc,
- one=True,
+ one=True, # Changed to True since we're now returning one grouped record
transform=lambda d: {
- **d,
"id": d["doc_id"],
- "content": ast.literal_eval(d["content"])[0]
- if len(ast.literal_eval(d["content"])) == 1
- else ast.literal_eval(d["content"]),
- "embedding": d["embedding"], # Add embedding to the transformation
+ "index": d["indices"][0],
+ "content": d["content"][0] if len(d["content"]) == 1 else d["content"],
+ "embeddings": d["embeddings"][0] if len(d["embeddings"]) == 1 else d["embeddings"],
+ **d,
},
)
@pg_query
@@ -45,22 +61,18 @@ async def get_doc(
*,
developer_id: UUID,
doc_id: UUID,
- owner_type: Literal["user", "agent"] | None = None,
- owner_id: UUID | None = None,
) -> tuple[str, list]:
"""
- Fetch a single doc with its embedding, optionally constrained to a given owner.
-
+ Fetch a single doc with its embedding, grouping all content chunks and embeddings.
+
Parameters:
developer_id (UUID): The ID of the developer.
doc_id (UUID): The ID of the document.
- owner_type (Literal["user", "agent"]): The type of the owner of the documents.
- owner_id (UUID): The ID of the owner of the documents.
Returns:
tuple[str, list]: SQL query and parameters for fetching the document.
"""
return (
doc_with_embedding_query,
- [developer_id, doc_id, owner_type, owner_id],
+ [developer_id, doc_id],
)
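
With the aggregation in place, get_doc yields a single row per document whose chunked fields arrive as parallel arrays, which the transform above then unpacks. An assumed row shape for a three-chunk doc:

    row = {
        "doc_id": "doc-uuid",
        "title": "Hello",
        "content": ["World", "World2", "World3"],  # array_agg ... ORDER BY d.index
        "indices": [0, 1, 2],
        "embeddings": [None, None, None],          # one vector per chunk; elided here
        "modality": "text",
    }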
diff --git a/agents-api/agents_api/queries/docs/list_docs.py b/agents-api/agents_api/queries/docs/list_docs.py
index bfbc2971e..2b31df250 100644
--- a/agents-api/agents_api/queries/docs/list_docs.py
+++ b/agents-api/agents_api/queries/docs/list_docs.py
@@ -1,34 +1,82 @@
-import ast
+"""
+This module contains the functionality for listing documents from the PostgreSQL database.
+It constructs and executes SQL queries to fetch document details based on various filters.
+"""
+
from typing import Any, Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
from ...autogen.openapi_model import Doc
-from ..utils import pg_query, wrap_in_class
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-# Base query for listing docs with optional embeddings
+# Base query for listing docs with aggregated content and embeddings
base_docs_query = parse_one("""
-SELECT d.*, CASE WHEN $2 THEN NULL ELSE e.embedding END AS embedding
-FROM docs d
-LEFT JOIN doc_owners doc_own ON d.developer_id = doc_own.developer_id AND d.doc_id = doc_own.doc_id
-LEFT JOIN docs_embeddings e ON d.doc_id = e.doc_id
-WHERE d.developer_id = $1
+WITH doc_data AS (
+ SELECT DISTINCT ON (d.doc_id)
+ d.doc_id,
+ d.developer_id,
+ d.title,
+ array_agg(d.content ORDER BY d.index) as content,
+ array_agg(d.index ORDER BY d.index) as indices,
+ array_agg(CASE WHEN $2 THEN NULL ELSE e.embedding END ORDER BY d.index) as embeddings,
+ d.modality,
+ d.embedding_model,
+ d.embedding_dimensions,
+ d.language,
+ d.metadata,
+ d.created_at
+ FROM docs d
+ JOIN doc_owners doc_own
+ ON d.developer_id = doc_own.developer_id
+ AND d.doc_id = doc_own.doc_id
+ LEFT JOIN docs_embeddings e
+ ON d.doc_id = e.doc_id
+ WHERE d.developer_id = $1
+ AND doc_own.owner_type = $3
+ AND doc_own.owner_id = $4
+ GROUP BY
+ d.doc_id,
+ d.developer_id,
+ d.title,
+ d.modality,
+ d.embedding_model,
+ d.embedding_dimensions,
+ d.language,
+ d.metadata,
+ d.created_at
+)
+SELECT * FROM doc_data
""").sql(pretty=True)
+@rewrap_exceptions(
+ {
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="No documents found",
+ ),
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or owner does not exist",
+ ),
+ }
+)
@wrap_in_class(
Doc,
one=False,
transform=lambda d: {
- **d,
"id": d["doc_id"],
- "content": ast.literal_eval(d["content"])[0]
- if len(ast.literal_eval(d["content"])) == 1
- else ast.literal_eval(d["content"]),
- "embedding": d.get("embedding"), # Add embedding to the transformation
+ "index": d["indices"][0],
+ "content": d["content"][0] if len(d["content"]) == 1 else d["content"],
+ "embedding": d["embeddings"][0] if d.get("embeddings") and len(d["embeddings"]) == 1 else d.get("embeddings"),
+ **d,
},
)
@pg_query
@@ -36,8 +84,8 @@
async def list_docs(
*,
developer_id: UUID,
- owner_id: UUID | None = None,
- owner_type: Literal["user", "agent"] | None = None,
+ owner_id: UUID,
+ owner_type: Literal["user", "agent"],
limit: int = 100,
offset: int = 0,
sort_by: Literal["created_at", "updated_at"] = "created_at",
@@ -46,12 +94,12 @@ async def list_docs(
include_without_embeddings: bool = False,
) -> tuple[str, list]:
"""
- Lists docs with optional owner filtering, pagination, and sorting.
+ Lists docs with pagination and sorting, aggregating content chunks and embeddings.
Parameters:
developer_id (UUID): The ID of the developer.
- owner_id (UUID): The ID of the owner of the documents.
- owner_type (Literal["user", "agent"]): The type of the owner of the documents.
+ owner_id (UUID): The ID of the owner of the documents (required).
+ owner_type (Literal["user", "agent"]): The type of the owner of the documents (required).
limit (int): The number of documents to return.
offset (int): The number of documents to skip.
sort_by (Literal["created_at", "updated_at"]): The field to sort by.
@@ -61,6 +109,9 @@ async def list_docs(
Returns:
tuple[str, list]: SQL query and parameters for listing the documents.
+
+ Raises:
+ HTTPException: If invalid parameters are provided.
"""
if direction.lower() not in ["asc", "desc"]:
raise HTTPException(status_code=400, detail="Invalid sort direction")
@@ -76,17 +127,12 @@ async def list_docs(
# Start with the base query
query = base_docs_query
- params = [developer_id, include_without_embeddings]
-
- # Add owner filtering
- if owner_type and owner_id:
- query += " AND doc_own.owner_type = $3 AND doc_own.owner_id = $4"
- params.extend([owner_type, owner_id])
+ params = [developer_id, include_without_embeddings, owner_type, owner_id]
# Add metadata filtering
if metadata_filter:
for key, value in metadata_filter.items():
- query += f" AND d.metadata->>'{key}' = ${len(params) + 1}"
+ query += f" AND metadata->>'{key}' = ${len(params) + 1}"
params.append(value)
# Add sorting and pagination
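
Note that $2 is consumed inside the CASE expression of the base query rather than in a WHERE clause: when include_without_embeddings is true the per-chunk embeddings are nulled out instead of filtered. The fixed parameter slots are therefore:

    params = [
        developer_id,               # $1  WHERE d.developer_id = $1
        include_without_embeddings, # $2  CASE WHEN $2 THEN NULL ELSE e.embedding END
        owner_type,                 # $3  doc_own.owner_type = $3
        owner_id,                   # $4  doc_own.owner_id = $4
    ]
    # metadata filters, limit and offset continue from $5 onward as before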
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
index c7b15ee64..5a89803ee 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
@@ -1,7 +1,3 @@
-"""
-Timescale-based doc embedding search using the `embedding` column.
-"""
-
from typing import List, Literal
from uuid import UUID
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_text.py b/agents-api/agents_api/queries/docs/search_docs_by_text.py
index 0ab309ee8..79f9ac305 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_text.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_text.py
@@ -1,35 +1,36 @@
-"""
-Timescale-based doc text search using the `search_tsv` column.
-"""
-
-from typing import Literal
+from typing import Any, Literal, List
from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
+import asyncpg
+import json
from ...autogen.openapi_model import DocReference
-from ..utils import pg_query, wrap_in_class
+from ..utils import pg_query, wrap_in_class, rewrap_exceptions, partialclass
-search_docs_text_query = parse_one("""
-SELECT d.*,
- ts_rank_cd(d.search_tsv, websearch_to_tsquery($3)) AS rank
-FROM docs d
-LEFT JOIN doc_owners do
- ON d.developer_id = do.developer_id
- AND d.doc_id = do.doc_id
-WHERE d.developer_id = $1
- AND (
- ($4 IS NULL AND $5 IS NULL)
- OR (do.owner_type = $4 AND do.owner_id = $5)
- )
- AND d.search_tsv @@ websearch_to_tsquery($3)
-ORDER BY rank DESC
-LIMIT $2;
-""").sql(pretty=True)
+search_docs_text_query = (
+    """
+    SELECT * FROM search_by_text(
+        $1, -- developer_id
+        $2, -- query
+        $3, -- owner_types
+        $4, -- owner_ids
+        $5, -- search_language
+        $6, -- k
+        $7  -- metadata_filter
+    )
+    """
+)
+@rewrap_exceptions(
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
@wrap_in_class(
DocReference,
transform=lambda d: {
@@ -41,15 +42,16 @@
**d,
},
)
-@pg_query
+@pg_query(debug=True)
@beartype
async def search_docs_by_text(
*,
developer_id: UUID,
+ owners: list[tuple[Literal["user", "agent"], UUID]],
query: str,
- k: int = 10,
- owner_type: Literal["user", "agent", "org"] | None = None,
- owner_id: UUID | None = None,
+ k: int = 3,
+ metadata_filter: dict[str, Any] = {},
+ search_language: str | None = "english",
) -> tuple[str, list]:
"""
Full-text search on docs using the search_tsv column.
@@ -57,9 +59,11 @@ async def search_docs_by_text(
Parameters:
developer_id (UUID): The ID of the developer.
query (str): The text to search for.
- k (int): The number of results to return.
- owner_type (Literal["user", "agent", "org"]): The type of the owner of the documents.
- owner_id (UUID): The ID of the owner of the documents.
+ owners (list[tuple[Literal["user", "agent"], UUID]]): List of (owner_type, owner_id) tuples.
+ k (int): Maximum number of results to return.
+ search_language (str): Language for text search (default: "english").
+ metadata_filter (dict): Metadata filter criteria.
+ connection_pool (asyncpg.Pool): Database connection pool.
Returns:
tuple[str, list]: SQL query and parameters for searching the documents.
@@ -67,7 +71,19 @@ async def search_docs_by_text(
if k < 1:
raise HTTPException(status_code=400, detail="k must be >= 1")
+ # Extract owner types and IDs
+ owner_types = [owner[0] for owner in owners]
+ owner_ids = [owner[1] for owner in owners]
+
return (
search_docs_text_query,
- [developer_id, k, query, owner_type, owner_id],
+ [
+ developer_id,
+ query,
+ owner_types,
+ owner_ids,
+ search_language,
+ k,
+ metadata_filter,
+ ],
)
diff --git a/agents-api/agents_api/queries/docs/search_docs_hybrid.py b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
index a879e3b6b..184ba7e8e 100644
--- a/agents-api/agents_api/queries/docs/search_docs_hybrid.py
+++ b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
@@ -1,8 +1,3 @@
-"""
-Hybrid doc search that merges text search and embedding search results
-via a simple distribution-based score fusion or direct weighting in Python.
-"""
-
from typing import List, Literal
from uuid import UUID
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 2f7de580e..a34c7e2aa 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -63,23 +63,6 @@ def test_developer_id():
developer_id = uuid7()
return developer_id
-
-# @fixture(scope="global")
-# async def test_file(dsn=pg_dsn, developer_id=test_developer_id):
-# async with get_pg_client(dsn=dsn) as client:
-# file = await create_file(
-# developer_id=developer_id,
-# data=CreateFileRequest(
-# name="Hello",
-# description="World",
-# mime_type="text/plain",
-# content="eyJzYW1wbGUiOiAidGVzdCJ9",
-# ),
-# client=client,
-# )
-# yield file
-
-
@fixture(scope="global")
async def test_developer(dsn=pg_dsn, developer_id=test_developer_id):
pool = await create_db_pool(dsn=dsn)
@@ -150,16 +133,18 @@ async def test_file(dsn=pg_dsn, developer=test_developer, user=test_user):
@fixture(scope="test")
-async def test_doc(dsn=pg_dsn, developer=test_developer):
+async def test_doc(dsn=pg_dsn, developer=test_developer, agent=test_agent):
pool = await create_db_pool(dsn=dsn)
doc = await create_doc(
developer_id=developer.id,
data=CreateDocRequest(
title="Hello",
- content=["World"],
+ content=["World", "World2", "World3"],
metadata={"test": "test"},
embed_instruction="Embed the document",
),
+ owner_type="agent",
+ owner_id=agent.id,
connection_pool=pool,
)
return doc
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index 1410c88c9..71553ee83 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -8,36 +8,13 @@
from agents_api.queries.docs.list_docs import list_docs
# If you wish to test text/embedding/hybrid search, import them:
-# from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
+from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
# from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
# from agents_api.queries.docs.search_docs_hybrid import search_docs_hybrid
# You can rename or remove these imports to match your actual fixtures
from tests.fixtures import pg_dsn, test_agent, test_developer, test_doc, test_user
-@test("query: create doc")
-async def _(dsn=pg_dsn, developer=test_developer):
- pool = await create_db_pool(dsn=dsn)
- doc = await create_doc(
- developer_id=developer.id,
- data=CreateDocRequest(
- title="Hello Doc",
- content="This is sample doc content",
- embed_instruction="Embed the document",
- metadata={"test": "test"},
- ),
- connection_pool=pool,
- )
-
- assert doc.title == "Hello Doc"
- assert doc.content == "This is sample doc content"
- assert doc.modality == "text"
- assert doc.embedding_model == "voyage-3"
- assert doc.embedding_dimensions == 1024
- assert doc.language == "english"
- assert doc.index == 0
-
-
@test("query: create user doc")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
pool = await create_db_pool(dsn=dsn)
@@ -92,7 +69,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
assert any(d.id == doc.id for d in docs_list)
-@test("model: get doc")
+@test("query: get doc")
async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
pool = await create_db_pool(dsn=dsn)
doc_test = await get_doc(
@@ -102,18 +79,7 @@ async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
)
assert doc_test.id == doc.id
assert doc_test.title == doc.title
-
-
-@test("query: list docs")
-async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
- pool = await create_db_pool(dsn=dsn)
- docs_list = await list_docs(
- developer_id=developer.id,
- connection_pool=pool,
- )
- assert len(docs_list) >= 1
- assert any(d.id == doc.id for d in docs_list)
-
+ assert doc_test.content == doc.content
@test("query: list user docs")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
@@ -246,12 +212,34 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
)
assert not any(d.id == doc_agent.id for d in docs_list)
-
-@test("query: delete doc")
-async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
+@test("query: search docs by text")
+async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
pool = await create_db_pool(dsn=dsn)
- await delete_doc(
+
+ # Create a test document
+ await create_doc(
developer_id=developer.id,
- doc_id=doc.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ data=CreateDocRequest(
+ title="Hello",
+ content="The world is a funny little thing",
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
connection_pool=pool,
)
+
+ # Search using the correct parameter types
+ result = await search_docs_by_text(
+ developer_id=developer.id,
+ owners=[("agent", agent.id)],
+ query="funny",
+ k=3, # Add k parameter
+ search_language="english", # Add language parameter
+ metadata_filter={}, # Add metadata filter
+ connection_pool=pool,
+ )
+
+ assert len(result) >= 1
+ assert result[0].metadata is not None
\ No newline at end of file
diff --git a/agents-api/tests/test_files_queries.py b/agents-api/tests/test_files_queries.py
index c83c7a6f6..68409ef5c 100644
--- a/agents-api/tests/test_files_queries.py
+++ b/agents-api/tests/test_files_queries.py
@@ -82,7 +82,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
assert any(f.id == file.id for f in files)
-@test("model: get file")
+@test("query: get file")
async def _(dsn=pg_dsn, file=test_file, developer=test_developer):
pool = await create_db_pool(dsn=dsn)
file_test = await get_file(
diff --git a/memory-store/migrations/000006_docs.up.sql b/memory-store/migrations/000006_docs.up.sql
index 193fae122..97bdad43c 100644
--- a/memory-store/migrations/000006_docs.up.sql
+++ b/memory-store/migrations/000006_docs.up.sql
@@ -24,8 +24,7 @@ CREATE TABLE IF NOT EXISTS docs (
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
- CONSTRAINT pk_docs PRIMARY KEY (developer_id, doc_id),
- CONSTRAINT uq_docs_doc_id_index UNIQUE (doc_id, index),
+ CONSTRAINT pk_docs PRIMARY KEY (developer_id, doc_id, index),
CONSTRAINT ct_docs_embedding_dimensions_positive CHECK (embedding_dimensions > 0),
CONSTRAINT ct_docs_valid_modality CHECK (modality IN ('text', 'image', 'mixed')),
CONSTRAINT ct_docs_index_positive CHECK (index >= 0),
@@ -67,10 +66,12 @@ END $$;
CREATE TABLE IF NOT EXISTS doc_owners (
developer_id UUID NOT NULL,
doc_id UUID NOT NULL,
+ index INTEGER NOT NULL,
owner_type TEXT NOT NULL, -- 'user' or 'agent'
owner_id UUID NOT NULL,
- CONSTRAINT pk_doc_owners PRIMARY KEY (developer_id, doc_id),
- CONSTRAINT fk_doc_owners_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id),
+ CONSTRAINT pk_doc_owners PRIMARY KEY (developer_id, doc_id, index),
+ -- TODO: Add foreign key constraint
+ -- CONSTRAINT fk_doc_owners_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id),
CONSTRAINT ct_doc_owners_owner_type CHECK (owner_type IN ('user', 'agent'))
);
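With index promoted into both primary keys, a chunked document is stored as one row per chunk: all rows share doc_id and differ only in index, and each chunk carries its own owner row. A minimal sketch — placeholder UUIDs, and the column list trimmed to the keys under discussion (the real table has more NOT NULL columns):

    -- Two chunks of one logical doc under the composite key.
    INSERT INTO docs (developer_id, doc_id, index, title, content)
    VALUES
        ('00000000-0000-0000-0000-000000000001',
         '00000000-0000-0000-0000-00000000000d', 0, 'Hello Doc', 'chunk one'),
        ('00000000-0000-0000-0000-000000000001',
         '00000000-0000-0000-0000-00000000000d', 1, 'Hello Doc', 'chunk two');
    -- A second row with index 0 for the same doc would violate pk_docs.

The commented-out foreign key marks the trade-off: doc_owners rows are now per-chunk, so ownership must be written once per index until the constraint returns.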
diff --git a/memory-store/migrations/000018_doc_search.up.sql b/memory-store/migrations/000018_doc_search.up.sql
index 5293cc81a..2f5b2baf1 100644
--- a/memory-store/migrations/000018_doc_search.up.sql
+++ b/memory-store/migrations/000018_doc_search.up.sql
@@ -101,6 +101,7 @@ END $$;
-- Create the search function
CREATE
OR REPLACE FUNCTION search_by_vector (
+ developer_id UUID,
query_embedding vector (1024),
owner_types TEXT[],
owner_ids UUID [],
@@ -134,9 +135,7 @@ BEGIN
IF owner_types IS NOT NULL AND owner_ids IS NOT NULL THEN
owner_filter_sql := '
AND (
- (ud.user_id = ANY($5) AND ''user'' = ANY($4))
- OR
- (ad.agent_id = ANY($5) AND ''agent'' = ANY($4))
+ doc_owners.owner_id = ANY($5::uuid[]) AND doc_owners.owner_type = ANY($4::text[])
)';
ELSE
owner_filter_sql := '';
@@ -153,6 +152,7 @@ BEGIN
RETURN QUERY EXECUTE format(
'WITH ranked_docs AS (
SELECT
+ d.developer_id,
d.doc_id,
d.index,
d.title,
@@ -160,15 +160,12 @@ BEGIN
(1 - (d.embedding <=> $1)) as distance,
d.embedding,
d.metadata,
- CASE
- WHEN ud.user_id IS NOT NULL THEN ''user''
- WHEN ad.agent_id IS NOT NULL THEN ''agent''
- END as owner_type,
- COALESCE(ud.user_id, ad.agent_id) as owner_id
+ doc_owners.owner_type,
+ doc_owners.owner_id
FROM docs_embeddings d
- LEFT JOIN user_docs ud ON d.doc_id = ud.doc_id
- LEFT JOIN agent_docs ad ON d.doc_id = ad.doc_id
- WHERE 1 - (d.embedding <=> $1) >= $2
+ LEFT JOIN doc_owners ON d.doc_id = doc_owners.doc_id
+ WHERE d.developer_id = $7
+ AND 1 - (d.embedding <=> $1) >= $2
%s
%s
)
@@ -185,7 +182,9 @@ BEGIN
k,
owner_types,
owner_ids,
- metadata_filter;
+ metadata_filter,
+ developer_id;
+
END;
$$;
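The rewritten body scopes every candidate row to the caller's developer before the similarity threshold applies, and resolves ownership through the unified doc_owners table instead of the old user_docs/agent_docs join pair. A hedged sketch of the call shape — the trailing parameters are assumed from the USING list above, and the embedding is borrowed from an existing row rather than spelled out as a 1024-element literal:

    SELECT doc_id, title, distance
    FROM search_by_vector(
        '00000000-0000-0000-0000-000000000001'::uuid,           -- developer_id
        (SELECT embedding FROM docs_embeddings LIMIT 1),        -- query_embedding
        ARRAY['user', 'agent'],                                 -- owner_types
        ARRAY['00000000-0000-0000-0000-000000000002']::uuid[],  -- owner_ids
        10,                                                     -- k (assumed)
        0.5,                                                    -- confidence (assumed)
        '{}'::jsonb                                             -- metadata_filter (assumed)
    );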
@@ -238,6 +237,7 @@ COMMENT ON FUNCTION embed_and_search_by_vector IS 'Convenience function that com
-- Create the text search function
CREATE
OR REPLACE FUNCTION search_by_text (
+ developer_id UUID,
query_text text,
owner_types TEXT[],
owner_ids UUID [],
@@ -267,9 +267,7 @@ BEGIN
IF owner_types IS NOT NULL AND owner_ids IS NOT NULL THEN
owner_filter_sql := '
AND (
- (ud.user_id = ANY($5) AND ''user'' = ANY($4))
- OR
- (ad.agent_id = ANY($5) AND ''agent'' = ANY($4))
+ doc_owners.owner_id = ANY($5::uuid[]) AND doc_owners.owner_type = ANY($4::text[])
)';
ELSE
owner_filter_sql := '';
@@ -286,6 +284,7 @@ BEGIN
RETURN QUERY EXECUTE format(
'WITH ranked_docs AS (
SELECT
+ d.developer_id,
d.doc_id,
d.index,
d.title,
@@ -293,15 +292,12 @@ BEGIN
ts_rank_cd(d.search_tsv, $1, 32)::double precision as distance,
d.embedding,
d.metadata,
- CASE
- WHEN ud.user_id IS NOT NULL THEN ''user''
- WHEN ad.agent_id IS NOT NULL THEN ''agent''
- END as owner_type,
- COALESCE(ud.user_id, ad.agent_id) as owner_id
+ doc_owners.owner_type,
+ doc_owners.owner_id
FROM docs_embeddings d
- LEFT JOIN user_docs ud ON d.doc_id = ud.doc_id
- LEFT JOIN agent_docs ad ON d.doc_id = ad.doc_id
- WHERE d.search_tsv @@ $1
+ LEFT JOIN doc_owners ON d.doc_id = doc_owners.doc_id
+ WHERE d.developer_id = $6
+ AND d.search_tsv @@ $1
%s
%s
)
@@ -314,11 +310,11 @@ BEGIN
)
USING
ts_query,
- search_language,
k,
owner_types,
owner_ids,
- metadata_filter;
+ metadata_filter,
+ developer_id;
END;
$$;
@@ -372,6 +368,7 @@ $$ LANGUAGE plpgsql;
-- Hybrid search function combining text and vector search
CREATE
OR REPLACE FUNCTION search_hybrid (
+ developer_id UUID,
query_text text,
query_embedding vector (1024),
owner_types TEXT[],
@@ -397,6 +394,7 @@ BEGIN
RETURN QUERY
WITH text_results AS (
SELECT * FROM search_by_text(
+ developer_id,
query_text,
owner_types,
owner_ids,
@@ -407,6 +405,7 @@ BEGIN
),
embedding_results AS (
SELECT * FROM search_by_vector(
+ developer_id,
query_embedding,
owner_types,
owner_ids,
@@ -426,6 +425,7 @@ BEGIN
),
scores AS (
SELECT
+ r.developer_id,
r.doc_id,
r.title,
r.content,
@@ -437,8 +437,8 @@ BEGIN
COALESCE(t.distance, 0.0) as text_score,
COALESCE(e.distance, 0.0) as embedding_score
FROM all_results r
- LEFT JOIN text_results t ON r.doc_id = t.doc_id
- LEFT JOIN embedding_results e ON r.doc_id = e.doc_id
+ LEFT JOIN text_results t ON r.doc_id = t.doc_id AND r.developer_id = t.developer_id
+ LEFT JOIN embedding_results e ON r.doc_id = e.doc_id AND r.developer_id = e.developer_id
),
normalized_scores AS (
SELECT
@@ -448,6 +448,7 @@ BEGIN
FROM scores
)
SELECT
+ developer_id,
doc_id,
index,
title,
@@ -468,6 +469,7 @@ COMMENT ON FUNCTION search_hybrid IS 'Hybrid search combining text and vector se
-- Convenience function that handles embedding generation
CREATE
OR REPLACE FUNCTION embed_and_search_hybrid (
+ developer_id UUID,
query_text text,
owner_types TEXT[],
owner_ids UUID [],
@@ -497,6 +499,7 @@ BEGIN
-- Perform hybrid search
RETURN QUERY SELECT * FROM search_hybrid(
+ developer_id,
query_text,
query_embedding,
owner_types,
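Taken together, the hybrid path now threads developer_id from the convenience wrapper down through both branch searches, so a single call yields tenant-scoped text and vector rankings blended by the normalized scores above. A sketch of the call, hedging on the trailing arguments the hunk truncates (k, blend weight, confidence, metadata filter, and embedding-provider settings):

    SELECT doc_id, title, distance
    FROM embed_and_search_hybrid(
        '00000000-0000-0000-0000-000000000001'::uuid,          -- developer_id
        'The world is a funny little thing',                   -- query_text
        ARRAY['agent'],                                        -- owner_types
        ARRAY['00000000-0000-0000-0000-000000000002']::uuid[]  -- owner_ids
        -- remaining parameters left at their defaults, assuming the
        -- signature declares defaults beyond this point
    );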
From d7d9cd49f83b6606c0c6bd2aa68cd1c044eae5cb Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Sat, 21 Dec 2024 08:13:04 +0000
Subject: [PATCH 114/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/docs/create_doc.py | 3 +--
agents-api/agents_api/queries/docs/get_doc.py | 6 ++++--
agents-api/agents_api/queries/docs/list_docs.py | 4 +++-
.../queries/docs/search_docs_by_text.py | 16 +++++++---------
agents-api/tests/fixtures.py | 1 +
agents-api/tests/test_docs_queries.py | 9 ++++++---
6 files changed, 22 insertions(+), 17 deletions(-)
diff --git a/agents-api/agents_api/queries/docs/create_doc.py b/agents-api/agents_api/queries/docs/create_doc.py
index d8bcce7d3..d3c2fe3c1 100644
--- a/agents-api/agents_api/queries/docs/create_doc.py
+++ b/agents-api/agents_api/queries/docs/create_doc.py
@@ -153,7 +153,7 @@ async def create_doc(
if isinstance(data.content, list):
final_params_doc = []
final_params_owner = []
-
+
for idx, content in enumerate(data.content):
doc_params = [
developer_id,
@@ -185,7 +185,6 @@ async def create_doc(
queries.append((doc_owner_query, final_params_owner, "fetchmany"))
else:
-
# Create the doc record
doc_params = [
developer_id,
diff --git a/agents-api/agents_api/queries/docs/get_doc.py b/agents-api/agents_api/queries/docs/get_doc.py
index 3f071cf87..1cee8f354 100644
--- a/agents-api/agents_api/queries/docs/get_doc.py
+++ b/agents-api/agents_api/queries/docs/get_doc.py
@@ -51,7 +51,9 @@
"id": d["doc_id"],
"index": d["indices"][0],
"content": d["content"][0] if len(d["content"]) == 1 else d["content"],
- "embeddings": d["embeddings"][0] if len(d["embeddings"]) == 1 else d["embeddings"],
+ "embeddings": d["embeddings"][0]
+ if len(d["embeddings"]) == 1
+ else d["embeddings"],
**d,
},
)
@@ -64,7 +66,7 @@ async def get_doc(
) -> tuple[str, list]:
"""
Fetch a single doc with its embedding, grouping all content chunks and embeddings.
-
+
Parameters:
developer_id (UUID): The ID of the developer.
doc_id (UUID): The ID of the document.
diff --git a/agents-api/agents_api/queries/docs/list_docs.py b/agents-api/agents_api/queries/docs/list_docs.py
index 2b31df250..9788b0daa 100644
--- a/agents-api/agents_api/queries/docs/list_docs.py
+++ b/agents-api/agents_api/queries/docs/list_docs.py
@@ -75,7 +75,9 @@
"id": d["doc_id"],
"index": d["indices"][0],
"content": d["content"][0] if len(d["content"]) == 1 else d["content"],
- "embedding": d["embeddings"][0] if d.get("embeddings") and len(d["embeddings"]) == 1 else d.get("embeddings"),
+ "embedding": d["embeddings"][0]
+ if d.get("embeddings") and len(d["embeddings"]) == 1
+ else d.get("embeddings"),
**d,
},
)
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_text.py b/agents-api/agents_api/queries/docs/search_docs_by_text.py
index 79f9ac305..9c22a60ce 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_text.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_text.py
@@ -1,17 +1,16 @@
-from typing import Any, Literal, List
+import json
+from typing import Any, List, Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-import asyncpg
-import json
from ...autogen.openapi_model import DocReference
-from ..utils import pg_query, wrap_in_class, rewrap_exceptions, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-search_docs_text_query = (
- """
+search_docs_text_query = """
SELECT * FROM search_by_text(
$1, -- developer_id
$2, -- query
@@ -19,7 +18,6 @@
( SELECT array_agg(*)::UUID[] FROM jsonb_array_elements($4) )
)
"""
-)
@rewrap_exceptions(
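One thing the linter cannot catch: array_agg(*) in the query above is not valid PostgreSQL — array_agg takes an expression, not * — so the statement would fail at prepare time. A form that does work for turning a JSONB array of id strings into UUID[], shown here with an inline literal standing in for $4:

    -- Working jsonb-array -> uuid[] conversion.
    SELECT array_agg(value::uuid)
    FROM jsonb_array_elements_text(
        '["00000000-0000-0000-0000-000000000002"]'::jsonb
    );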
@@ -74,10 +72,10 @@ async def search_docs_by_text(
# Extract owner types and IDs
owner_types = [owner[0] for owner in owners]
owner_ids = [owner[1] for owner in owners]
-
+
return (
search_docs_text_query,
- [
+ [
developer_id,
query,
owner_types,
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index a34c7e2aa..2ad6bfeeb 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -63,6 +63,7 @@ def test_developer_id():
developer_id = uuid7()
return developer_id
+
@fixture(scope="global")
async def test_developer(dsn=pg_dsn, developer_id=test_developer_id):
pool = await create_db_pool(dsn=dsn)
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index 71553ee83..82490cb77 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -9,6 +9,7 @@
# If you wish to test text/embedding/hybrid search, import them:
from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
+
# from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
# from agents_api.queries.docs.search_docs_hybrid import search_docs_hybrid
# You can rename or remove these imports to match your actual fixtures
@@ -81,6 +82,7 @@ async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
assert doc_test.title == doc.title
assert doc_test.content == doc.content
+
@test("query: list user docs")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
pool = await create_db_pool(dsn=dsn)
@@ -212,17 +214,18 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
)
assert not any(d.id == doc_agent.id for d in docs_list)
+
@test("query: search docs by text")
async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
pool = await create_db_pool(dsn=dsn)
-
+
# Create a test document
await create_doc(
developer_id=developer.id,
owner_type="agent",
owner_id=agent.id,
data=CreateDocRequest(
- title="Hello",
+ title="Hello",
content="The world is a funny little thing",
metadata={"test": "test"},
embed_instruction="Embed the document",
@@ -242,4 +245,4 @@ async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
)
assert len(result) >= 1
- assert result[0].metadata is not None
\ No newline at end of file
+ assert result[0].metadata is not None
From 2900786f11dbf5af8d647b105e8aa15195b3db56 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sat, 21 Dec 2024 16:37:45 +0530
Subject: [PATCH 115/274] fix(memory-store): Remove redundant indices
Signed-off-by: Diwank Singh Tomer
---
.../migrations/000002_developers.up.sql | 7 +--
memory-store/migrations/000003_users.down.sql | 6 +--
memory-store/migrations/000003_users.up.sql | 13 ++---
.../migrations/000004_agents.down.sql | 6 +--
memory-store/migrations/000004_agents.up.sql | 5 --
memory-store/migrations/000005_files.up.sql | 27 +++-------
memory-store/migrations/000006_docs.down.sql | 7 ++-
memory-store/migrations/000006_docs.up.sql | 54 +++++++++----------
memory-store/migrations/000008_tools.up.sql | 14 ++---
.../migrations/000009_sessions.up.sql | 14 ++---
memory-store/migrations/000010_tasks.up.sql | 14 ++---
.../migrations/000011_executions.up.sql | 7 +--
.../migrations/000012_transitions.down.sql | 4 --
.../migrations/000012_transitions.up.sql | 24 +++------
.../000014_temporal_lookup.down.sql | 2 +-
.../migrations/000014_temporal_lookup.up.sql | 3 --
memory-store/migrations/000015_entries.up.sql | 21 +++++---
.../migrations/000016_entry_relations.up.sql | 4 +-
.../migrations/000018_doc_search.down.sql | 3 --
.../migrations/000018_doc_search.up.sql | 37 ++++---------
20 files changed, 91 insertions(+), 181 deletions(-)
diff --git a/memory-store/migrations/000002_developers.up.sql b/memory-store/migrations/000002_developers.up.sql
index 9ca9dca69..57e5bd2d5 100644
--- a/memory-store/migrations/000002_developers.up.sql
+++ b/memory-store/migrations/000002_developers.up.sql
@@ -15,9 +15,6 @@ CREATE TABLE IF NOT EXISTS developers (
CONSTRAINT uq_developers_email UNIQUE (email)
);
--- Create sorted index on developer_id (optimized for UUID v7)
-CREATE INDEX IF NOT EXISTS idx_developers_id_sorted ON developers (developer_id DESC);
-
-- Create index on email
CREATE INDEX IF NOT EXISTS idx_developers_email ON developers (email);
@@ -30,7 +27,7 @@ WHERE
active = TRUE;
-- Create trigger to automatically update updated_at
-DO $$
+DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_trigger WHERE tgname = 'trg_developers_updated_at') THEN
CREATE TRIGGER trg_developers_updated_at
@@ -44,4 +41,4 @@ $$;
-- Add comment to table
COMMENT ON TABLE developers IS 'Stores developer information including their settings and tags';
-COMMIT;
\ No newline at end of file
+COMMIT;
diff --git a/memory-store/migrations/000003_users.down.sql b/memory-store/migrations/000003_users.down.sql
index 41a27bfc4..6bae2529e 100644
--- a/memory-store/migrations/000003_users.down.sql
+++ b/memory-store/migrations/000003_users.down.sql
@@ -6,10 +6,6 @@ DROP TRIGGER IF EXISTS update_users_updated_at ON users;
-- Drop indexes
DROP INDEX IF EXISTS users_metadata_gin_idx;
-DROP INDEX IF EXISTS users_developer_id_idx;
-
-DROP INDEX IF EXISTS users_id_sorted_idx;
-
-- Drop foreign key constraint
ALTER TABLE IF EXISTS users
DROP CONSTRAINT IF EXISTS users_developer_id_fkey;
@@ -17,4 +13,4 @@ DROP CONSTRAINT IF EXISTS users_developer_id_fkey;
-- Finally drop the table
DROP TABLE IF EXISTS users;
-COMMIT;
\ No newline at end of file
+COMMIT;
diff --git a/memory-store/migrations/000003_users.up.sql b/memory-store/migrations/000003_users.up.sql
index 028e40ef5..480d39b6c 100644
--- a/memory-store/migrations/000003_users.up.sql
+++ b/memory-store/migrations/000003_users.up.sql
@@ -12,23 +12,18 @@ CREATE TABLE IF NOT EXISTS users (
CONSTRAINT pk_users PRIMARY KEY (developer_id, user_id)
);
--- Create sorted index on user_id if it doesn't exist
-CREATE INDEX IF NOT EXISTS users_id_sorted_idx ON users (user_id DESC);
-
-- Create foreign key constraint and index if they don't exist
DO $$ BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint WHERE conname = 'users_developer_id_fkey'
) THEN
- ALTER TABLE users
- ADD CONSTRAINT users_developer_id_fkey
- FOREIGN KEY (developer_id)
+ ALTER TABLE users
+ ADD CONSTRAINT users_developer_id_fkey
+ FOREIGN KEY (developer_id)
REFERENCES developers(developer_id);
END IF;
END $$;
-CREATE INDEX IF NOT EXISTS users_developer_id_idx ON users (developer_id);
-
-- Create a GIN index on the entire metadata column if it doesn't exist
CREATE INDEX IF NOT EXISTS users_metadata_gin_idx ON users USING GIN (metadata);
@@ -47,4 +42,4 @@ END $$;
-- Add comment to table (comments are idempotent by default)
COMMENT ON TABLE users IS 'Stores user information linked to developers';
-COMMIT;
\ No newline at end of file
+COMMIT;
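The dropped indexes were redundant rather than merely unused: a b-tree on a composite primary key already serves equality lookups on its leading column, so pk_users (developer_id, user_id) covers everything users_developer_id_idx did, and the same reasoning applies to the other per-developer and *_id_sorted indexes removed throughout this patch. The typical query shape the PK index still serves:

    -- Served by pk_users (developer_id, user_id): leading-column equality
    -- plus an ordered scan on the second key column.
    SELECT user_id, metadata
    FROM users
    WHERE developer_id = '00000000-0000-0000-0000-000000000001'::uuid
    ORDER BY user_id;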
diff --git a/memory-store/migrations/000004_agents.down.sql b/memory-store/migrations/000004_agents.down.sql
index be81aaa30..98d75058d 100644
--- a/memory-store/migrations/000004_agents.down.sql
+++ b/memory-store/migrations/000004_agents.down.sql
@@ -6,11 +6,7 @@ DROP TRIGGER IF EXISTS trg_agents_updated_at ON agents;
-- Drop indexes
DROP INDEX IF EXISTS idx_agents_metadata;
-DROP INDEX IF EXISTS idx_agents_developer;
-
-DROP INDEX IF EXISTS idx_agents_id_sorted;
-
-- Drop table (this will automatically drop associated constraints)
-DROP TABLE IF EXISTS agents;
+DROP TABLE IF EXISTS agents CASCADE;
COMMIT;
diff --git a/memory-store/migrations/000004_agents.up.sql b/memory-store/migrations/000004_agents.up.sql
index 32e066f71..1f3715793 100644
--- a/memory-store/migrations/000004_agents.up.sql
+++ b/memory-store/migrations/000004_agents.up.sql
@@ -38,16 +38,11 @@ CREATE TABLE IF NOT EXISTS agents (
CONSTRAINT ct_agents_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$')
);
--- Create sorted index on agent_id (optimized for UUID v7)
-CREATE INDEX IF NOT EXISTS idx_agents_id_sorted ON agents (agent_id DESC);
-
-- Create foreign key constraint and index on developer_id
ALTER TABLE agents
DROP CONSTRAINT IF EXISTS fk_agents_developer,
ADD CONSTRAINT fk_agents_developer FOREIGN KEY (developer_id) REFERENCES developers (developer_id);
-CREATE INDEX IF NOT EXISTS idx_agents_developer ON agents (developer_id);
-
-- Create a GIN index on the entire metadata column
CREATE INDEX IF NOT EXISTS idx_agents_metadata ON agents USING GIN (metadata);
diff --git a/memory-store/migrations/000005_files.up.sql b/memory-store/migrations/000005_files.up.sql
index 40a2cbccf..d51bb0826 100644
--- a/memory-store/migrations/000005_files.up.sql
+++ b/memory-store/migrations/000005_files.up.sql
@@ -23,26 +23,13 @@ CREATE TABLE IF NOT EXISTS files (
CONSTRAINT pk_files PRIMARY KEY (developer_id, file_id)
);
--- Create sorted index on file_id if it doesn't exist
-CREATE INDEX IF NOT EXISTS idx_files_id_sorted ON files (file_id DESC);
-
-- Create foreign key constraint and index if they don't exist
DO $$ BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'fk_files_developer') THEN
- ALTER TABLE files
- ADD CONSTRAINT fk_files_developer
- FOREIGN KEY (developer_id)
- REFERENCES developers(developer_id);
- END IF;
-END $$;
-
-CREATE INDEX IF NOT EXISTS idx_files_developer ON files (developer_id);
-
--- Add unique constraint if it doesn't exist
-DO $$ BEGIN
- IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_files_developer_id_file_id') THEN
ALTER TABLE files
- ADD CONSTRAINT uq_files_developer_id_file_id UNIQUE (developer_id, file_id);
+ ADD CONSTRAINT fk_files_developer
+ FOREIGN KEY (developer_id)
+ REFERENCES developers(developer_id);
END IF;
END $$;
@@ -68,7 +55,7 @@ CREATE TABLE IF NOT EXISTS file_owners (
);
-- Create indexes
-CREATE INDEX IF NOT EXISTS idx_file_owners_owner
+CREATE INDEX IF NOT EXISTS idx_file_owners_owner
ON file_owners (developer_id, owner_type, owner_id);
-- Create function to validate owner reference
@@ -77,14 +64,14 @@ RETURNS TRIGGER AS $$
BEGIN
IF NEW.owner_type = 'user' THEN
IF NOT EXISTS (
- SELECT 1 FROM users
+ SELECT 1 FROM users
WHERE developer_id = NEW.developer_id AND user_id = NEW.owner_id
) THEN
RAISE EXCEPTION 'Invalid user reference';
END IF;
ELSIF NEW.owner_type = 'agent' THEN
IF NOT EXISTS (
- SELECT 1 FROM agents
+ SELECT 1 FROM agents
WHERE developer_id = NEW.developer_id AND agent_id = NEW.owner_id
) THEN
RAISE EXCEPTION 'Invalid agent reference';
@@ -100,4 +87,4 @@ BEFORE INSERT OR UPDATE ON file_owners
FOR EACH ROW
EXECUTE FUNCTION validate_file_owner();
-COMMIT;
\ No newline at end of file
+COMMIT;
diff --git a/memory-store/migrations/000006_docs.down.sql b/memory-store/migrations/000006_docs.down.sql
index ea67b0005..f0df5a8e4 100644
--- a/memory-store/migrations/000006_docs.down.sql
+++ b/memory-store/migrations/000006_docs.down.sql
@@ -3,7 +3,8 @@ BEGIN;
-- Drop doc_owners table and its dependencies
DROP TRIGGER IF EXISTS trg_validate_doc_owner ON doc_owners;
DROP FUNCTION IF EXISTS validate_doc_owner();
-DROP TABLE IF EXISTS doc_owners;
+DROP INDEX IF EXISTS idx_doc_owners_owner;
+DROP TABLE IF EXISTS doc_owners CASCADE;
-- Drop docs table and its dependencies
DROP TRIGGER IF EXISTS trg_docs_search_tsv ON docs;
@@ -15,11 +16,9 @@ DROP INDEX IF EXISTS idx_docs_content_trgm;
DROP INDEX IF EXISTS idx_docs_title_trgm;
DROP INDEX IF EXISTS idx_docs_search_tsv;
DROP INDEX IF EXISTS idx_docs_metadata;
-DROP INDEX IF EXISTS idx_docs_developer;
-DROP INDEX IF EXISTS idx_docs_id_sorted;
-- Drop docs table
-DROP TABLE IF EXISTS docs;
+DROP TABLE IF EXISTS docs CASCADE;
-- Drop language validation function
DROP FUNCTION IF EXISTS is_valid_language(text);
diff --git a/memory-store/migrations/000006_docs.up.sql b/memory-store/migrations/000006_docs.up.sql
index 97bdad43c..37d17a590 100644
--- a/memory-store/migrations/000006_docs.up.sql
+++ b/memory-store/migrations/000006_docs.up.sql
@@ -24,34 +24,30 @@ CREATE TABLE IF NOT EXISTS docs (
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
- CONSTRAINT pk_docs PRIMARY KEY (developer_id, doc_id, index),
+ CONSTRAINT pk_docs PRIMARY KEY (developer_id, doc_id),
CONSTRAINT ct_docs_embedding_dimensions_positive CHECK (embedding_dimensions > 0),
CONSTRAINT ct_docs_valid_modality CHECK (modality IN ('text', 'image', 'mixed')),
CONSTRAINT ct_docs_index_positive CHECK (index >= 0),
- CONSTRAINT ct_docs_valid_language CHECK (is_valid_language (language))
+ CONSTRAINT ct_docs_valid_language CHECK (is_valid_language (language)),
+ UNIQUE (developer_id, doc_id, index)
);
--- Create sorted index on doc_id if not exists
-CREATE INDEX IF NOT EXISTS idx_docs_id_sorted ON docs (doc_id DESC);
-
-- Create foreign key constraint if not exists (using DO block for safety)
-DO $$
-BEGIN
+DO $$
+BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint WHERE conname = 'fk_docs_developer'
) THEN
- ALTER TABLE docs
- ADD CONSTRAINT fk_docs_developer
- FOREIGN KEY (developer_id)
+ ALTER TABLE docs
+ ADD CONSTRAINT fk_docs_developer
+ FOREIGN KEY (developer_id)
REFERENCES developers(developer_id);
END IF;
END $$;
-CREATE INDEX IF NOT EXISTS idx_docs_developer ON docs (developer_id);
-
-- Create trigger if not exists
-DO $$
-BEGIN
+DO $$
+BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_trigger WHERE tgname = 'trg_docs_updated_at'
) THEN
@@ -66,12 +62,10 @@ END $$;
CREATE TABLE IF NOT EXISTS doc_owners (
developer_id UUID NOT NULL,
doc_id UUID NOT NULL,
- index INTEGER NOT NULL,
owner_type TEXT NOT NULL, -- 'user' or 'agent'
owner_id UUID NOT NULL,
- CONSTRAINT pk_doc_owners PRIMARY KEY (developer_id, doc_id, index),
- -- TODO: Add foreign key constraint
- -- CONSTRAINT fk_doc_owners_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id),
+ CONSTRAINT pk_doc_owners PRIMARY KEY (developer_id, doc_id),
+ CONSTRAINT fk_doc_owners_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id),
CONSTRAINT ct_doc_owners_owner_type CHECK (owner_type IN ('user', 'agent'))
);
@@ -85,14 +79,14 @@ RETURNS TRIGGER AS $$
BEGIN
IF NEW.owner_type = 'user' THEN
IF NOT EXISTS (
- SELECT 1 FROM users
+ SELECT 1 FROM users
WHERE developer_id = NEW.developer_id AND user_id = NEW.owner_id
) THEN
RAISE EXCEPTION 'Invalid user reference';
END IF;
ELSIF NEW.owner_type = 'agent' THEN
IF NOT EXISTS (
- SELECT 1 FROM agents
+ SELECT 1 FROM agents
WHERE developer_id = NEW.developer_id AND agent_id = NEW.owner_id
) THEN
RAISE EXCEPTION 'Invalid agent reference';
@@ -128,29 +122,29 @@ DECLARE
lang text;
BEGIN
FOR lang IN (SELECT cfgname FROM pg_ts_config WHERE cfgname IN (
- 'arabic', 'danish', 'dutch', 'english', 'finnish', 'french',
+ 'arabic', 'danish', 'dutch', 'english', 'finnish', 'french',
'german', 'greek', 'hungarian', 'indonesian', 'irish', 'italian',
'lithuanian', 'nepali', 'norwegian', 'portuguese', 'romanian',
'russian', 'spanish', 'swedish', 'tamil', 'turkish'
))
LOOP
-- Configure integer dictionary
- EXECUTE format('ALTER TEXT SEARCH CONFIGURATION %I
+ EXECUTE format('ALTER TEXT SEARCH CONFIGURATION %I
ALTER MAPPING FOR int, uint WITH intdict', lang);
-
+
-- Configure synonym and stemming
EXECUTE format('ALTER TEXT SEARCH CONFIGURATION %I
- ALTER MAPPING FOR asciihword, hword_asciipart, hword, hword_part, word, asciiword
+ ALTER MAPPING FOR asciihword, hword_asciipart, hword, hword_part, word, asciiword
WITH xsyn, %I_stem', lang, lang);
END LOOP;
END
$$;
-- Add the search_tsv column if it doesn't exist
-DO $$
-BEGIN
+DO $$
+BEGIN
IF NOT EXISTS (
- SELECT 1 FROM information_schema.columns
+ SELECT 1 FROM information_schema.columns
WHERE table_name = 'docs' AND column_name = 'search_tsv'
) THEN
ALTER TABLE docs ADD COLUMN search_tsv tsvector;
@@ -169,8 +163,8 @@ END;
$$ LANGUAGE plpgsql;
-- Create trigger if not exists
-DO $$
-BEGIN
+DO $$
+BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_trigger WHERE tgname = 'trg_docs_search_tsv'
) THEN
@@ -208,4 +202,4 @@ SET
WHERE
search_tsv IS NULL;
-COMMIT;
\ No newline at end of file
+COMMIT;
diff --git a/memory-store/migrations/000008_tools.up.sql b/memory-store/migrations/000008_tools.up.sql
index 159ef3688..993c1b64a 100644
--- a/memory-store/migrations/000008_tools.up.sql
+++ b/memory-store/migrations/000008_tools.up.sql
@@ -22,12 +22,10 @@ CREATE TABLE IF NOT EXISTS tools (
spec JSONB NOT NULL,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- CONSTRAINT pk_tools PRIMARY KEY (developer_id, agent_id, tool_id, type, name)
+ CONSTRAINT pk_tools PRIMARY KEY (developer_id, agent_id, tool_id),
+ UNIQUE (developer_id, agent_id, task_id, task_version, name)
);
--- Create sorted index on tool_id if it doesn't exist
-CREATE INDEX IF NOT EXISTS idx_tools_id_sorted ON tools (tool_id DESC);
-
-- Create sorted index on task_id if it doesn't exist
CREATE INDEX IF NOT EXISTS idx_tools_task_id_sorted ON tools (task_id DESC)
WHERE
@@ -38,15 +36,13 @@ DO $$ BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint WHERE conname = 'fk_tools_agent'
) THEN
- ALTER TABLE tools
+ ALTER TABLE tools
ADD CONSTRAINT fk_tools_agent
- FOREIGN KEY (developer_id, agent_id)
+ FOREIGN KEY (developer_id, agent_id)
REFERENCES agents(developer_id, agent_id);
END IF;
END $$;
-CREATE INDEX IF NOT EXISTS idx_tools_developer_agent ON tools (developer_id, agent_id);
-
-- Drop trigger if exists and recreate
DROP TRIGGER IF EXISTS trg_tools_updated_at ON tools;
@@ -57,4 +53,4 @@ EXECUTE FUNCTION update_updated_at_column ();
-- Add comment to table
COMMENT ON TABLE tools IS 'Stores tool configurations and specifications for AI agents';
-COMMIT;
\ No newline at end of file
+COMMIT;
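The tools primary key shrinks to row identity while the new UNIQUE enforces name uniqueness per (developer, agent, task, task_version) scope. One hedged caveat: PostgreSQL's default UNIQUE semantics treat NULLs as distinct, so two agent-level tools with task_id IS NULL and the same name would not conflict under this constraint alone. A sketch with placeholder ids and the column list assumed from the constraints above:

    -- Second row violates the new UNIQUE: same name in the same scope.
    INSERT INTO tools (developer_id, agent_id, tool_id,
                       task_id, task_version, type, name, spec)
    VALUES ('00000000-0000-0000-0000-000000000001',
            '00000000-0000-0000-0000-000000000002',
            '00000000-0000-0000-0000-000000000003',
            '00000000-0000-0000-0000-000000000004', 1,
            'function', 'browser', '{}'),
           ('00000000-0000-0000-0000-000000000001',
            '00000000-0000-0000-0000-000000000002',
            '00000000-0000-0000-0000-000000000005',
            '00000000-0000-0000-0000-000000000004', 1,
            'function', 'browser', '{}');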
diff --git a/memory-store/migrations/000009_sessions.up.sql b/memory-store/migrations/000009_sessions.up.sql
index 75b5fde9a..d8bd0b2b3 100644
--- a/memory-store/migrations/000009_sessions.up.sql
+++ b/memory-store/migrations/000009_sessions.up.sql
@@ -33,9 +33,6 @@ CREATE TABLE IF NOT EXISTS sessions (
CONSTRAINT chk_sessions_recall_options_valid CHECK (jsonb_typeof(recall_options) = 'object')
);
--- Create indexes if they don't exist
-CREATE INDEX IF NOT EXISTS idx_sessions_id_sorted ON sessions (session_id DESC);
-
CREATE INDEX IF NOT EXISTS idx_sessions_metadata ON sessions USING GIN (metadata);
-- Create foreign key if it doesn't exist
@@ -44,9 +41,9 @@ BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint WHERE conname = 'fk_sessions_developer'
) THEN
- ALTER TABLE sessions
+ ALTER TABLE sessions
ADD CONSTRAINT fk_sessions_developer
- FOREIGN KEY (developer_id)
+ FOREIGN KEY (developer_id)
REFERENCES developers(developer_id);
END IF;
END $$;
@@ -87,10 +84,7 @@ CREATE TABLE IF NOT EXISTS session_lookup (
FOREIGN KEY (developer_id, session_id) REFERENCES sessions (developer_id, session_id)
);
--- Create indexes if they don't exist
-CREATE INDEX IF NOT EXISTS idx_session_lookup_by_session ON session_lookup (developer_id, session_id);
-
-CREATE INDEX IF NOT EXISTS idx_session_lookup_by_participant ON session_lookup (developer_id, participant_id);
+CREATE INDEX IF NOT EXISTS idx_session_lookup_by_participant ON session_lookup (developer_id, participant_type, participant_id);
-- Create or replace the validation function
CREATE
@@ -134,4 +128,4 @@ BEGIN
END IF;
END $$;
-COMMIT;
\ No newline at end of file
+COMMIT;
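Collapsing the two lookup indexes into one composite on (developer_id, participant_type, participant_id) matches the read pattern the table exists for — which sessions a given user or agent participates in — while by-session scans ride on the primary key side. The query shape it serves:

    -- Served by idx_session_lookup_by_participant.
    SELECT session_id
    FROM session_lookup
    WHERE developer_id = '00000000-0000-0000-0000-000000000001'::uuid
      AND participant_type = 'agent'
      AND participant_id = '00000000-0000-0000-0000-000000000002'::uuid;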
diff --git a/memory-store/migrations/000010_tasks.up.sql b/memory-store/migrations/000010_tasks.up.sql
index ad27d5bdc..090b2dfd7 100644
--- a/memory-store/migrations/000010_tasks.up.sql
+++ b/memory-store/migrations/000010_tasks.up.sql
@@ -46,11 +46,11 @@ BEGIN
END IF;
END $$;
--- Create index on developer_id if it doesn't exist
+-- Create index on canonical_name if it doesn't exist
DO $$
BEGIN
- IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tasks_developer') THEN
- CREATE INDEX idx_tasks_developer ON tasks (developer_id);
+ IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tasks_canonical_name') THEN
+ CREATE INDEX idx_tasks_canonical_name ON tasks (developer_id DESC, canonical_name);
END IF;
END $$;
@@ -114,14 +114,6 @@ CREATE TABLE IF NOT EXISTS workflows (
REFERENCES tasks (developer_id, task_id, version) ON DELETE CASCADE
);
--- Create index for 'workflows' table if it doesn't exist
-DO $$
-BEGIN
- IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_workflows_developer') THEN
- CREATE INDEX idx_workflows_developer ON workflows (developer_id, task_id, version);
- END IF;
-END $$;
-
-- Add comment to 'workflows' table
COMMENT ON TABLE workflows IS 'Stores normalized workflows for tasks';
diff --git a/memory-store/migrations/000011_executions.up.sql b/memory-store/migrations/000011_executions.up.sql
index 976ead369..b57313769 100644
--- a/memory-store/migrations/000011_executions.up.sql
+++ b/memory-store/migrations/000011_executions.up.sql
@@ -19,14 +19,11 @@ CREATE TABLE IF NOT EXISTS executions (
CONSTRAINT fk_executions_task FOREIGN KEY (developer_id, task_id, task_version) REFERENCES tasks (developer_id, task_id, "version")
);
--- Create sorted index on execution_id (optimized for UUID v7)
-CREATE INDEX IF NOT EXISTS idx_executions_execution_id_sorted ON executions (execution_id DESC);
-
-- Create index on developer_id
CREATE INDEX IF NOT EXISTS idx_executions_developer_id ON executions (developer_id);
-- Create index on task_id
-CREATE INDEX IF NOT EXISTS idx_executions_task_id ON executions (task_id);
+CREATE INDEX IF NOT EXISTS idx_executions_task_id ON executions (task_id, task_version);
-- Create a GIN index on the metadata column
CREATE INDEX IF NOT EXISTS idx_executions_metadata ON executions USING GIN (metadata);
@@ -34,4 +31,4 @@ CREATE INDEX IF NOT EXISTS idx_executions_metadata ON executions USING GIN (meta
-- Add comment to table (comments are idempotent by default)
COMMENT ON TABLE executions IS 'Stores executions associated with AI agents for developers';
-COMMIT;
\ No newline at end of file
+COMMIT;
diff --git a/memory-store/migrations/000012_transitions.down.sql b/memory-store/migrations/000012_transitions.down.sql
index faac2e308..e6171b495 100644
--- a/memory-store/migrations/000012_transitions.down.sql
+++ b/memory-store/migrations/000012_transitions.down.sql
@@ -7,10 +7,6 @@ DROP CONSTRAINT IF EXISTS fk_transitions_execution;
-- Drop indexes if they exist
DROP INDEX IF EXISTS idx_transitions_metadata;
-DROP INDEX IF EXISTS idx_transitions_execution_id_sorted;
-
-DROP INDEX IF EXISTS idx_transitions_transition_id_sorted;
-
DROP INDEX IF EXISTS idx_transitions_label;
DROP INDEX IF EXISTS idx_transitions_next;
diff --git a/memory-store/migrations/000012_transitions.up.sql b/memory-store/migrations/000012_transitions.up.sql
index 7bbcf2ad5..0edf4d636 100644
--- a/memory-store/migrations/000012_transitions.up.sql
+++ b/memory-store/migrations/000012_transitions.up.sql
@@ -8,7 +8,7 @@ BEGIN;
*/
-- Create transition type enum if it doesn't exist
-DO $$
+DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'transition_type') THEN
CREATE TYPE transition_type AS ENUM (
@@ -26,7 +26,7 @@ BEGIN
END $$;
-- Create transition cursor type if it doesn't exist
-DO $$
+DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'transition_cursor') THEN
CREATE TYPE transition_cursor AS (
@@ -68,40 +68,32 @@ SELECT
);
-- Create indexes if they don't exist
-DO $$
+DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_current') THEN
CREATE UNIQUE INDEX idx_transitions_current ON transitions (execution_id, current_step, created_at DESC);
END IF;
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_next') THEN
- CREATE UNIQUE INDEX idx_transitions_next ON transitions (execution_id, next_step, created_at DESC)
+ CREATE UNIQUE INDEX idx_transitions_next ON transitions (execution_id, next_step, created_at DESC)
WHERE next_step IS NOT NULL;
END IF;
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_label') THEN
- CREATE UNIQUE INDEX idx_transitions_label ON transitions (execution_id, step_label, created_at DESC)
+ CREATE UNIQUE INDEX idx_transitions_label ON transitions (execution_id, step_label, created_at DESC)
WHERE step_label IS NOT NULL;
END IF;
- IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_transition_id_sorted') THEN
- CREATE INDEX idx_transitions_transition_id_sorted ON transitions (transition_id DESC, created_at DESC);
- END IF;
-
- IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_execution_id_sorted') THEN
- CREATE INDEX idx_transitions_execution_id_sorted ON transitions (execution_id DESC, created_at DESC);
- END IF;
-
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_transitions_metadata') THEN
CREATE INDEX idx_transitions_metadata ON transitions USING GIN (metadata);
END IF;
END $$;
-- Add foreign key constraint if it doesn't exist
-DO $$
+DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'fk_transitions_execution') THEN
- ALTER TABLE transitions
+ ALTER TABLE transitions
ADD CONSTRAINT fk_transitions_execution
FOREIGN KEY (execution_id)
REFERENCES executions(execution_id);
@@ -168,4 +160,4 @@ $$ LANGUAGE plpgsql;
CREATE TRIGGER validate_transition BEFORE INSERT ON transitions FOR EACH ROW
EXECUTE FUNCTION check_valid_transition ();
-COMMIT;
\ No newline at end of file
+COMMIT;
diff --git a/memory-store/migrations/000014_temporal_lookup.down.sql b/memory-store/migrations/000014_temporal_lookup.down.sql
index 4c836f911..ff501819b 100644
--- a/memory-store/migrations/000014_temporal_lookup.down.sql
+++ b/memory-store/migrations/000014_temporal_lookup.down.sql
@@ -1,5 +1,5 @@
BEGIN;
-DROP TABLE IF EXISTS temporal_executions_lookup;
+DROP TABLE IF EXISTS temporal_executions_lookup CASCADE;
COMMIT;
\ No newline at end of file
diff --git a/memory-store/migrations/000014_temporal_lookup.up.sql b/memory-store/migrations/000014_temporal_lookup.up.sql
index 724ee1340..40b2e6755 100644
--- a/memory-store/migrations/000014_temporal_lookup.up.sql
+++ b/memory-store/migrations/000014_temporal_lookup.up.sql
@@ -12,9 +12,6 @@ CREATE TABLE IF NOT EXISTS temporal_executions_lookup (
CONSTRAINT fk_temporal_executions_lookup_execution FOREIGN KEY (execution_id) REFERENCES executions (execution_id)
);
--- Create sorted index on execution_id (optimized for UUID v7)
-CREATE INDEX IF NOT EXISTS idx_temporal_executions_lookup_execution_id_sorted ON temporal_executions_lookup (execution_id DESC);
-
-- Add comment to table
COMMENT ON TABLE temporal_executions_lookup IS 'Stores temporal workflow execution lookup data for AI agent executions';
diff --git a/memory-store/migrations/000015_entries.up.sql b/memory-store/migrations/000015_entries.up.sql
index 73723a8bc..f8080b485 100644
--- a/memory-store/migrations/000015_entries.up.sql
+++ b/memory-store/migrations/000015_entries.up.sql
@@ -1,7 +1,13 @@
BEGIN;
-- Create chat_role enum
-CREATE TYPE chat_role AS ENUM('user', 'assistant', 'tool', 'system', 'developer');
+CREATE TYPE chat_role AS ENUM(
+ 'user',
+ 'assistant',
+ 'tool',
+ 'system',
+ 'developer'
+);
-- Create entries table
CREATE TABLE IF NOT EXISTS entries (
@@ -38,7 +44,7 @@ SELECT
);
-- Create indexes for efficient querying
-CREATE INDEX IF NOT EXISTS idx_entries_by_session ON entries (session_id DESC, entry_id DESC);
+CREATE INDEX IF NOT EXISTS idx_entries_by_session ON entries (session_id DESC);
-- Add foreign key constraint to sessions table
DO $$
@@ -87,8 +93,8 @@ UPDATE ON entries FOR EACH ROW
EXECUTE FUNCTION optimized_update_token_count_after ();
-- Add trigger to update parent session's updated_at
-CREATE OR REPLACE FUNCTION update_session_updated_at()
-RETURNS TRIGGER AS $$
+CREATE
+OR REPLACE FUNCTION update_session_updated_at () RETURNS TRIGGER AS $$
BEGIN
UPDATE sessions
SET updated_at = CURRENT_TIMESTAMP
@@ -98,8 +104,9 @@ END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER trg_update_session_updated_at
-AFTER INSERT OR UPDATE ON entries
-FOR EACH ROW
-EXECUTE FUNCTION update_session_updated_at();
+AFTER INSERT
+OR
+UPDATE ON entries FOR EACH ROW
+EXECUTE FUNCTION update_session_updated_at ();
COMMIT;
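Because the new trigger bumps the parent session on every entry write, "most recently active" ordering no longer needs a join or aggregate over entries:

    -- Recent-activity ordering straight off sessions.updated_at,
    -- kept fresh by trg_update_session_updated_at.
    SELECT session_id, updated_at
    FROM sessions
    ORDER BY updated_at DESC
    LIMIT 20;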
diff --git a/memory-store/migrations/000016_entry_relations.up.sql b/memory-store/migrations/000016_entry_relations.up.sql
index bcdb7fb72..4a70d02c8 100644
--- a/memory-store/migrations/000016_entry_relations.up.sql
+++ b/memory-store/migrations/000016_entry_relations.up.sql
@@ -27,9 +27,7 @@ BEGIN
END $$;
-- Create indexes for efficient querying
-CREATE INDEX idx_entry_relations_components ON entry_relations (session_id, head, relation, tail);
-
-CREATE INDEX idx_entry_relations_leaf ON entry_relations (session_id, relation, is_leaf);
+CREATE INDEX idx_entry_relations_leaf ON entry_relations (session_id, is_leaf);
CREATE OR REPLACE FUNCTION auto_update_leaf_status() RETURNS TRIGGER AS $$
BEGIN
diff --git a/memory-store/migrations/000018_doc_search.down.sql b/memory-store/migrations/000018_doc_search.down.sql
index d32c51a0a..1ccbc5af8 100644
--- a/memory-store/migrations/000018_doc_search.down.sql
+++ b/memory-store/migrations/000018_doc_search.down.sql
@@ -21,9 +21,6 @@ DROP TYPE IF EXISTS doc_search_result;
-- Drop the embed_with_cache function
DROP FUNCTION IF EXISTS embed_with_cache;
--- Drop the index on embeddings_cache
-DROP INDEX IF EXISTS idx_embeddings_cache_provider_model_input_text;
-
-- Drop the embeddings cache table
DROP TABLE IF EXISTS embeddings_cache CASCADE;
diff --git a/memory-store/migrations/000018_doc_search.up.sql b/memory-store/migrations/000018_doc_search.up.sql
index 2f5b2baf1..593d00a7f 100644
--- a/memory-store/migrations/000018_doc_search.up.sql
+++ b/memory-store/migrations/000018_doc_search.up.sql
@@ -2,19 +2,11 @@ BEGIN;
-- Create unlogged table for caching embeddings
CREATE UNLOGGED TABLE IF NOT EXISTS embeddings_cache (
- provider TEXT NOT NULL,
- model TEXT NOT NULL,
- input_text TEXT NOT NULL,
- input_type TEXT DEFAULT NULL,
- api_key TEXT DEFAULT NULL,
- api_key_name TEXT DEFAULT NULL,
+ model_input_md5 TEXT NOT NULL,
embedding vector (1024) NOT NULL,
- CONSTRAINT pk_embeddings_cache PRIMARY KEY (provider, model, input_text)
+ CONSTRAINT pk_embeddings_cache PRIMARY KEY (model_input_md5)
);
--- Add index on provider, model, input_text for faster lookups
-CREATE INDEX IF NOT EXISTS idx_embeddings_cache_provider_model_input_text ON embeddings_cache (provider, model, input_text ASC);
-
-- Add comment explaining table purpose
COMMENT ON TABLE embeddings_cache IS 'Unlogged table that caches embedding requests to avoid duplicate API calls';
@@ -31,16 +23,17 @@ OR REPLACE function embed_with_cache (
-- Try to get cached embedding first
declare
cached_embedding vector(1024);
+ model_input_md5 text;
begin
if _provider != 'voyageai' then
raise exception 'Only voyageai provider is supported';
end if;
+ model_input_md5 := md5(_provider || '++' || _model || '++' || _input_text || '++' || _input_type);
+
select embedding into cached_embedding
from embeddings_cache c
- where c.provider = _provider
- and c.model = _model
- and c.input_text = _input_text;
+ where c.model_input_md5 = model_input_md5;
if found then
return cached_embedding;
@@ -57,22 +50,12 @@ begin
-- Cache the result
insert into embeddings_cache (
- provider,
- model,
- input_text,
- input_type,
- api_key,
- api_key_name,
+ model_input_md5,
embedding
) values (
- _provider,
- _model,
- _input_text,
- _input_type,
- _api_key,
- _api_key_name,
+ model_input_md5,
cached_embedding
- ) on conflict (provider, model, input_text) do update set embedding = cached_embedding;
+ ) on conflict (model_input_md5) do update set embedding = cached_embedding;
return cached_embedding;
end;
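Keying the cache on a single md5 digest keeps the unlogged table narrow, but two details are worth flagging. First, text || NULL is NULL in PostgreSQL, so a NULL _input_type (its declared default) would null out the whole digest unless coalesced. Second, the plpgsql variable model_input_md5 shares its name with the column it is compared against, which PostgreSQL flags as an ambiguous reference under the default plpgsql.variable_conflict = error setting. A sketch of the key computation with a NULL guard, assuming '++' never occurs inside the inputs themselves:

    -- Digest as computed in embed_with_cache, guarded against NULL input_type.
    SELECT md5('voyageai' || '++' || 'voyage-3' || '++' ||
               'The world is a funny little thing' || '++' ||
               coalesce(NULL::text, ''));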
@@ -195,6 +178,7 @@ COMMENT ON FUNCTION search_by_vector IS 'Search documents by vector similarity w
-- Create the combined embed and search function
CREATE
OR REPLACE FUNCTION embed_and_search_by_vector (
+ developer_id UUID,
query_text text,
owner_types TEXT[],
owner_ids UUID [],
@@ -222,6 +206,7 @@ BEGIN
-- Then perform the search using the generated embedding
RETURN QUERY SELECT * FROM search_by_vector(
+ developer_id,
query_embedding,
owner_types,
owner_ids,
From 1a0fe16f42b2c552e0d3ddc2e6ea67100ec51745 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Sat, 21 Dec 2024 17:39:05 +0300
Subject: [PATCH 116/274] feat(agents-api, memory-store): Add tasks queries and
tests, and other misc fixes
---
agents-api/agents_api/autogen/Tasks.py | 58 ++-
.../agents_api/queries/agents/create_agent.py | 6 +-
.../queries/agents/create_or_update_agent.py | 26 +-
.../queries/developers/create_developer.py | 4 +-
.../queries/entries/create_entries.py | 5 +-
.../agents_api/queries/entries/get_history.py | 1 -
.../queries/entries/list_entries.py | 12 +-
.../agents_api/queries/files/create_file.py | 6 +-
.../agents_api/queries/files/get_file.py | 4 +-
.../agents_api/queries/files/list_files.py | 5 +-
.../queries/sessions/create_session.py | 5 +-
.../agents_api/queries/tasks/__init__.py | 21 +-
.../queries/tasks/create_or_update_task.py | 143 +++--
.../agents_api/queries/tasks/create_task.py | 99 +++-
.../agents_api/queries/tasks/delete_task.py | 77 +++
.../agents_api/queries/tasks/get_task.py | 93 ++++
.../agents_api/queries/tasks/list_tasks.py | 124 +++++
.../agents_api/queries/tasks/patch_task.py | 217 ++++++++
.../agents_api/queries/tasks/update_task.py | 187 +++++++
agents-api/agents_api/queries/utils.py | 21 +-
agents-api/tests/fixtures.py | 29 +-
agents-api/tests/test_developer_queries.py | 13 +-
agents-api/tests/test_entry_queries.py | 9 +-
agents-api/tests/test_files_queries.py | 4 +-
agents-api/tests/test_session_queries.py | 8 +-
agents-api/tests/test_task_queries.py | 493 ++++++++++++------
.../integrations/autogen/Tasks.py | 58 ++-
memory-store/migrations/000005_files.up.sql | 4 -
memory-store/migrations/000008_tools.up.sql | 16 -
memory-store/migrations/000010_tasks.up.sql | 2 +-
typespec/tasks/models.tsp | 9 +-
.../@typespec/openapi3/openapi-1.0.0.yaml | 37 +-
32 files changed, 1478 insertions(+), 318 deletions(-)
create mode 100644 agents-api/agents_api/queries/tasks/delete_task.py
create mode 100644 agents-api/agents_api/queries/tasks/get_task.py
create mode 100644 agents-api/agents_api/queries/tasks/list_tasks.py
create mode 100644 agents-api/agents_api/queries/tasks/patch_task.py
create mode 100644 agents-api/agents_api/queries/tasks/update_task.py
diff --git a/agents-api/agents_api/autogen/Tasks.py b/agents-api/agents_api/autogen/Tasks.py
index b9212d8cb..f6bf58ddf 100644
--- a/agents-api/agents_api/autogen/Tasks.py
+++ b/agents-api/agents_api/autogen/Tasks.py
@@ -161,8 +161,21 @@ class CreateTaskRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
- name: str
+ name: Annotated[str, Field(max_length=255, min_length=1)]
+ """
+ The name of the task.
+ """
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ The canonical name of the task.
+ """
description: str = ""
+ """
+ The description of the task.
+ """
main: Annotated[
list[
EvaluateStep
@@ -650,7 +663,21 @@ class PatchTaskRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
+ name: Annotated[str | None, Field(max_length=255, min_length=1)] = None
+ """
+ The name of the task.
+ """
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ The canonical name of the task.
+ """
description: str = ""
+ """
+ The description of the task.
+ """
main: Annotated[
list[
EvaluateStep
@@ -966,8 +993,21 @@ class Task(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
- name: str
+ name: Annotated[str, Field(max_length=255, min_length=1)]
+ """
+ The name of the task.
+ """
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ The canonical name of the task.
+ """
description: str = ""
+ """
+ The description of the task.
+ """
main: Annotated[
list[
EvaluateStep
@@ -1124,7 +1164,21 @@ class UpdateTaskRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
+ name: Annotated[str, Field(max_length=255, min_length=1)]
+ """
+ The name of the task.
+ """
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ The canonical name of the task.
+ """
description: str = ""
+ """
+ The description of the task.
+ """
main: Annotated[
list[
EvaluateStep
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 76c96f46b..b5a4af75a 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -9,7 +9,7 @@
from sqlglot import parse_one
from uuid_extensions import uuid7
-from ...autogen.openapi_model import Agent, CreateAgentRequest
+from ...autogen.openapi_model import CreateAgentRequest, ResourceCreatedResponse
from ...metrics.counters import increase_counter
from ..utils import (
generate_canonical_name,
@@ -75,9 +75,9 @@
# }
# )
@wrap_in_class(
- Agent,
+ ResourceCreatedResponse,
one=True,
- transform=lambda d: {"id": d["agent_id"], **d},
+ transform=lambda d: {"id": d["agent_id"], "created_at": d["created_at"]},
)
@increase_counter("create_agent")
@pg_query
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index ef3a0abe5..258badc93 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -18,6 +18,11 @@
# Define the raw SQL query
agent_query = parse_one("""
+WITH existing_agent AS (
+ SELECT canonical_name
+ FROM agents
+ WHERE developer_id = $1 AND agent_id = $2
+)
INSERT INTO agents (
developer_id,
agent_id,
@@ -30,15 +35,18 @@
default_settings
)
VALUES (
- $1,
- $2,
- $3,
- $4,
- $5,
- $6,
- $7,
- $8,
- $9
+ $1, -- developer_id
+ $2, -- agent_id
+ COALESCE( -- canonical_name
+ (SELECT canonical_name FROM existing_agent),
+ $3
+ ),
+ $4, -- name
+ $5, -- about
+ $6, -- instructions
+ $7, -- model
+ $8, -- metadata
+ $9 -- default_settings
)
RETURNING *;
""").sql(pretty=True)
diff --git a/agents-api/agents_api/queries/developers/create_developer.py b/agents-api/agents_api/queries/developers/create_developer.py
index bed6371c4..4cb505a14 100644
--- a/agents-api/agents_api/queries/developers/create_developer.py
+++ b/agents-api/agents_api/queries/developers/create_developer.py
@@ -6,7 +6,7 @@
from sqlglot import parse_one
from uuid_extensions import uuid7
-from ...common.protocol.developers import Developer
+from ...autogen.openapi_model import ResourceCreatedResponse
from ..utils import (
partialclass,
pg_query,
@@ -43,7 +43,7 @@
)
}
)
-@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]})
+@wrap_in_class(ResourceCreatedResponse, one=True, transform=lambda d: {**d, "id": d["developer_id"], "created_at": d["created_at"]})
@pg_query
@beartype
async def create_developer(
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index 95973ad0b..c11986d3c 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -7,7 +7,7 @@
from litellm.utils import _select_tokenizer as select_tokenizer
from uuid_extensions import uuid7
-from ...autogen.openapi_model import CreateEntryRequest, Entry, Relation
+from ...autogen.openapi_model import CreateEntryRequest, Relation, ResourceCreatedResponse
from ...common.utils.datetime import utcnow
from ...common.utils.messages import content_to_json
from ...metrics.counters import increase_counter
@@ -79,9 +79,10 @@
}
)
@wrap_in_class(
- Entry,
+ ResourceCreatedResponse,
transform=lambda d: {
"id": d.pop("entry_id"),
+ "created_at": d.pop("created_at"),
**d,
},
)
diff --git a/agents-api/agents_api/queries/entries/get_history.py b/agents-api/agents_api/queries/entries/get_history.py
index e6967a6cc..ffa0746c0 100644
--- a/agents-api/agents_api/queries/entries/get_history.py
+++ b/agents-api/agents_api/queries/entries/get_history.py
@@ -1,5 +1,4 @@
import json
-from typing import Any, List, Tuple
from uuid import UUID
import asyncpg
diff --git a/agents-api/agents_api/queries/entries/list_entries.py b/agents-api/agents_api/queries/entries/list_entries.py
index 89f432734..55384b633 100644
--- a/agents-api/agents_api/queries/entries/list_entries.py
+++ b/agents-api/agents_api/queries/entries/list_entries.py
@@ -11,14 +11,10 @@
# Query for checking if the session exists
session_exists_query = """
-SELECT CASE
- WHEN EXISTS (
- SELECT 1 FROM sessions
- WHERE session_id = $1 AND developer_id = $2
- )
- THEN TRUE
- ELSE (SELECT NULL::boolean WHERE FALSE) -- This raises a NO_DATA_FOUND error
-END;
+SELECT EXISTS (
+ SELECT 1 FROM sessions
+ WHERE session_id = $1 AND developer_id = $2
+) AS exists;
"""
list_entries_query = """
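The old CASE form signaled a missing session by collapsing to a scalar subquery over zero rows; the replacement always returns exactly one row with a plain boolean, leaving the 404 decision to the Python layer. Standalone equivalent, with placeholder ids:

    -- Always one row; true iff the session exists for this developer.
    SELECT EXISTS (
        SELECT 1 FROM sessions
        WHERE session_id   = '00000000-0000-0000-0000-000000000001'::uuid
          AND developer_id = '00000000-0000-0000-0000-000000000002'::uuid
    ) AS exists;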
diff --git a/agents-api/agents_api/queries/files/create_file.py b/agents-api/agents_api/queries/files/create_file.py
index 48251fa5e..00d07bce7 100644
--- a/agents-api/agents_api/queries/files/create_file.py
+++ b/agents-api/agents_api/queries/files/create_file.py
@@ -5,18 +5,16 @@
import base64
import hashlib
-from typing import Any, Literal
+from typing import Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateFileRequest, File
from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, wrap_in_class
# Create file
file_query = parse_one("""
diff --git a/agents-api/agents_api/queries/files/get_file.py b/agents-api/agents_api/queries/files/get_file.py
index 4d5dca4c0..36bfc42c6 100644
--- a/agents-api/agents_api/queries/files/get_file.py
+++ b/agents-api/agents_api/queries/files/get_file.py
@@ -6,13 +6,11 @@
from typing import Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
-from fastapi import HTTPException
from sqlglot import parse_one
from ...autogen.openapi_model import File
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, wrap_in_class
# Define the raw SQL query
file_query = parse_one("""
diff --git a/agents-api/agents_api/queries/files/list_files.py b/agents-api/agents_api/queries/files/list_files.py
index 2bc42f842..ee4f70d95 100644
--- a/agents-api/agents_api/queries/files/list_files.py
+++ b/agents-api/agents_api/queries/files/list_files.py
@@ -3,16 +3,15 @@
It constructs and executes SQL queries to fetch a list of files based on developer ID with pagination.
"""
-from typing import Any, Literal
+from typing import Literal
from uuid import UUID
-import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
from ...autogen.openapi_model import File
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, wrap_in_class
# Query to list all files for a developer (uses developer_id index)
developer_files_query = parse_one("""
diff --git a/agents-api/agents_api/queries/sessions/create_session.py b/agents-api/agents_api/queries/sessions/create_session.py
index 058462cf8..fb1168b0f 100644
--- a/agents-api/agents_api/queries/sessions/create_session.py
+++ b/agents-api/agents_api/queries/sessions/create_session.py
@@ -8,7 +8,7 @@
from ...autogen.openapi_model import (
CreateSessionRequest,
- Session,
+ ResourceCreatedResponse,
)
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
@@ -68,11 +68,12 @@
}
)
@wrap_in_class(
- Session,
+ ResourceCreatedResponse,
one=True,
transform=lambda d: {
**d,
"id": d["session_id"],
+ "created_at": d["created_at"],
},
)
@increase_counter("create_session")
diff --git a/agents-api/agents_api/queries/tasks/__init__.py b/agents-api/agents_api/queries/tasks/__init__.py
index d2f8b3c35..63b4bed22 100644
--- a/agents-api/agents_api/queries/tasks/__init__.py
+++ b/agents-api/agents_api/queries/tasks/__init__.py
@@ -11,19 +11,18 @@
from .create_or_update_task import create_or_update_task
from .create_task import create_task
-
-# from .delete_task import delete_task
-# from .get_task import get_task
-# from .list_tasks import list_tasks
-# from .patch_task import patch_task
-# from .update_task import update_task
+from .delete_task import delete_task
+from .get_task import get_task
+from .list_tasks import list_tasks
+from .patch_task import patch_task
+from .update_task import update_task
__all__ = [
"create_or_update_task",
"create_task",
- # "delete_task",
- # "get_task",
- # "list_tasks",
- # "patch_task",
- # "update_task",
+ "delete_task",
+ "get_task",
+ "list_tasks",
+ "patch_task",
+ "update_task",
]
diff --git a/agents-api/agents_api/queries/tasks/create_or_update_task.py b/agents-api/agents_api/queries/tasks/create_or_update_task.py
index a302a38e1..1f259ac16 100644
--- a/agents-api/agents_api/queries/tasks/create_or_update_task.py
+++ b/agents-api/agents_api/queries/tasks/create_or_update_task.py
@@ -10,12 +10,18 @@
from ...autogen.openapi_model import CreateOrUpdateTaskRequest, ResourceUpdatedResponse
from ...common.protocol.tasks import task_to_spec
from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import (
+ generate_canonical_name,
+ partialclass,
+ pg_query,
+ rewrap_exceptions,
+ wrap_in_class,
+)
# Define the raw SQL query for creating or updating a task
tools_query = parse_one("""
-WITH current_version AS (
- SELECT COALESCE(MAX("version"), 0) + 1 as next_version
+WITH version AS (
+ SELECT COALESCE(MAX("version"), 0) as current_version
FROM tasks
WHERE developer_id = $1
AND task_id = $3
@@ -32,7 +38,7 @@
spec
)
SELECT
- next_version, -- task_version
+ current_version, -- task_version
$1, -- developer_id
$2, -- agent_id
$3, -- task_id
@@ -41,15 +47,27 @@
$6, -- name
$7, -- description
$8 -- spec
-FROM current_version
+FROM version
""").sql(pretty=True)
task_query = parse_one("""
WITH current_version AS (
- SELECT COALESCE(MAX("version"), 0) + 1 as next_version
- FROM tasks
- WHERE developer_id = $1
- AND task_id = $4
+ SELECT COALESCE(
+ (SELECT MAX("version")
+ FROM tasks
+ WHERE developer_id = $1
+ AND task_id = $4),
+ 0
+ ) + 1 as next_version,
+ COALESCE(
+ (SELECT canonical_name
+ FROM tasks
+ WHERE developer_id = $1 AND task_id = $4
+ ORDER BY version DESC
+ LIMIT 1),
+ $2
+ ) as effective_canonical_name
+ FROM (SELECT 1) as dummy
)
INSERT INTO tasks (
"version",
@@ -59,23 +77,51 @@
task_id,
name,
description,
+ inherit_tools,
input_schema,
- spec,
metadata
)
SELECT
- next_version, -- version
- $1, -- developer_id
- $2, -- canonical_name
- $3, -- agent_id
- $4, -- task_id
- $5, -- name
- $6, -- description
- $7::jsonb, -- input_schema
- $8::jsonb, -- spec
- $9::jsonb -- metadata
+ next_version, -- version
+ $1, -- developer_id
+ effective_canonical_name, -- canonical_name
+ $3, -- agent_id
+ $4, -- task_id
+ $5, -- name
+ $6, -- description
+ $7, -- inherit_tools
+ $8::jsonb, -- input_schema
+ $9::jsonb -- metadata
FROM current_version
-RETURNING *, (SELECT next_version FROM current_version) as next_version
+RETURNING *, (SELECT next_version FROM current_version) as next_version;
+""").sql(pretty=True)
+
+# Define the raw SQL query for inserting workflows
+workflows_query = parse_one("""
+WITH version AS (
+ SELECT COALESCE(MAX("version"), 0) as current_version
+ FROM tasks
+ WHERE developer_id = $1
+ AND task_id = $2
+)
+INSERT INTO workflows (
+ developer_id,
+ task_id,
+ "version",
+ name,
+ step_idx,
+ step_type,
+ step_definition
+)
+SELECT
+ $1, -- developer_id
+ $2, -- task_id
+ current_version, -- version
+ $3, -- name
+ $4, -- step_idx
+ $5, -- step_type
+ $6 -- step_definition
+FROM version
""").sql(pretty=True)
@@ -98,13 +144,12 @@
one=True,
transform=lambda d: {
"id": d["task_id"],
- "jobs": [],
"updated_at": d["updated_at"].timestamp(),
**d,
},
)
@increase_counter("create_or_update_task")
-@pg_query
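+# return_index=0: wrap only the first (task) query's rows in the response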
+@pg_query(return_index=0)
@beartype
async def create_or_update_task(
*,
@@ -128,10 +173,9 @@ async def create_or_update_task(
Raises:
HTTPException: If developer/agent doesn't exist (404) or on unique constraint violation (409)
"""
- task_data = task_to_spec(data).model_dump(exclude_none=True, mode="json")
# Generate canonical name from task name if not provided
- canonical_name = data.canonical_name or task_data["name"].lower().replace(" ", "_")
+ canonical_name = data.canonical_name or generate_canonical_name(data.name)
# Version will be determined by the CTE
task_params = [
@@ -139,15 +183,14 @@ async def create_or_update_task(
canonical_name, # $2
agent_id, # $3
task_id, # $4
- task_data["name"], # $5
- task_data.get("description"), # $6
- data.input_schema or {}, # $7
- task_data["spec"], # $8
+ data.name, # $5
+ data.description, # $6
+ data.inherit_tools, # $7
+ data.input_schema or {}, # $8
data.metadata or {}, # $9
]
- queries = [(task_query, task_params, "fetch")]
-
+ # Prepare tool parameters for the tools table
tool_params = [
[
developer_id,
@@ -162,8 +205,38 @@ async def create_or_update_task(
for tool in data.tools or []
]
- # Add tools query if there are tools
- if tool_params:
- queries.append((tools_query, tool_params, "fetchmany"))
+ # Generate workflows from task data using task_to_spec
+ workflows_spec = task_to_spec(data).model_dump(exclude_none=True, mode="json")
+ workflow_params = []
+ for workflow in workflows_spec.get("workflows", []):
+ workflow_name = workflow.get("name")
+ steps = workflow.get("steps", [])
+ for step_idx, step in enumerate(steps):
+ workflow_params.append(
+ [
+ developer_id, # $1
+ task_id, # $2
+ workflow_name, # $3
+ step_idx, # $4
+ step["kind_"], # $5
+ step[step["kind_"]], # $6
+ ]
+ )
- return queries
+ return [
+ (
+ task_query,
+ task_params,
+ "fetch",
+ ),
+ (
+ tools_query,
+ tool_params,
+ "fetchmany",
+ ),
+ (
+ workflows_query,
+ workflow_params,
+ "fetchmany",
+ ),
+ ]
diff --git a/agents-api/agents_api/queries/tasks/create_task.py b/agents-api/agents_api/queries/tasks/create_task.py
index 2587e63ff..58287fbbc 100644
--- a/agents-api/agents_api/queries/tasks/create_task.py
+++ b/agents-api/agents_api/queries/tasks/create_task.py
@@ -7,10 +7,16 @@
from sqlglot import parse_one
from uuid_extensions import uuid7
-from ...autogen.openapi_model import CreateTaskRequest, ResourceUpdatedResponse
+from ...autogen.openapi_model import CreateTaskRequest, ResourceCreatedResponse
from ...common.protocol.tasks import task_to_spec
from ...metrics.counters import increase_counter
-from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import (
+ generate_canonical_name,
+ partialclass,
+ pg_query,
+ rewrap_exceptions,
+ wrap_in_class,
+)
# Define the raw SQL query for creating or updating a task
tools_query = parse_one("""
@@ -45,9 +51,10 @@
agent_id,
task_id,
name,
+ canonical_name,
description,
+ inherit_tools,
input_schema,
- spec,
metadata
)
VALUES (
@@ -56,14 +63,37 @@
$2, -- agent_id
$3, -- task_id
$4, -- name
- $5, -- description
- $6::jsonb, -- input_schema
- $7::jsonb, -- spec
- $8::jsonb -- metadata
+ $5, -- canonical_name
+ $6, -- description
+ $7, -- inherit_tools
+ $8::jsonb, -- input_schema
+ $9::jsonb -- metadata
)
RETURNING *
""").sql(pretty=True)
+# Define the raw SQL query for inserting workflows
+workflows_query = parse_one("""
+INSERT INTO workflows (
+ developer_id,
+ task_id,
+ "version",
+ name,
+ step_idx,
+ step_type,
+ step_definition
+)
+VALUES (
+ $1, -- developer_id
+ $2, -- task_id
+ $3, -- version
+ $4, -- name
+ $5, -- step_idx
+ $6, -- step_type
+ $7 -- step_definition
+)
+""").sql(pretty=True)
+
@rewrap_exceptions(
{
@@ -80,7 +110,7 @@
}
)
@wrap_in_class(
- ResourceUpdatedResponse,
+ ResourceCreatedResponse,
one=True,
transform=lambda d: {
"id": d["task_id"],
@@ -90,18 +120,22 @@
},
)
@increase_counter("create_task")
-@pg_query
+@pg_query(return_index=0)
@beartype
async def create_task(
- *, developer_id: UUID, agent_id: UUID, task_id: UUID, data: CreateTaskRequest
+ *,
+ developer_id: UUID,
+ agent_id: UUID,
+ task_id: UUID | None = None,
+ data: CreateTaskRequest,
) -> list[tuple[str, list, Literal["fetch", "fetchmany"]]]:
"""
- Constructs an SQL query to create or update a task.
+ Constructs SQL queries to create or update a task along with its associated tools and workflows.
Args:
developer_id (UUID): The UUID of the developer.
agent_id (UUID): The UUID of the agent.
- task_id (UUID): The UUID of the task.
+ task_id (UUID, optional): The UUID of the task. If not provided, a new UUID is generated.
data (CreateTaskRequest): The task data to insert or update.
Returns:
@@ -110,19 +144,22 @@ async def create_task(
Raises:
HTTPException: If developer/agent doesn't exist (404) or on unique constraint violation (409)
"""
- task_data = task_to_spec(data).model_dump(exclude_none=True, mode="json")
+ task_id = task_id or uuid7()
- params = [
+ # Insert parameters for the tasks table
+ task_params = [
developer_id, # $1
agent_id, # $2
task_id, # $3
data.name, # $4
- data.description, # $5
- data.input_schema or {}, # $6
- task_data["spec"], # $7
- data.metadata or {}, # $8
+ data.canonical_name or generate_canonical_name(data.name), # $5
+ data.description, # $6
+ data.inherit_tools, # $7
+ data.input_schema or {}, # $8
+ data.metadata or {}, # $9
]
+ # Prepare tool parameters for the tools table
tool_params = [
[
developer_id,
@@ -137,10 +174,29 @@ async def create_task(
for tool in data.tools or []
]
+ # Generate workflows from task data using task_to_spec
+ workflows_spec = task_to_spec(data).model_dump(exclude_none=True, mode="json")
+ workflow_params = []
+ for workflow in workflows_spec.get("workflows", []):
+ workflow_name = workflow.get("name")
+ steps = workflow.get("steps", [])
+ for step_idx, step in enumerate(steps):
+ workflow_params.append(
+ [
+ developer_id, # $1
+ task_id, # $2
+ 1, # $3 (version)
+ workflow_name, # $4
+ step_idx, # $5
+ step["kind_"], # $6
+ step[step["kind_"]], # $7
+ ]
+ )
+
return [
(
task_query,
- params,
+ task_params,
"fetch",
),
(
@@ -148,4 +204,9 @@ async def create_task(
tool_params,
"fetchmany",
),
+ (
+ workflows_query,
+ workflow_params,
+ "fetchmany",
+ ),
]
diff --git a/agents-api/agents_api/queries/tasks/delete_task.py b/agents-api/agents_api/queries/tasks/delete_task.py
new file mode 100644
index 000000000..8a058591e
--- /dev/null
+++ b/agents-api/agents_api/queries/tasks/delete_task.py
@@ -0,0 +1,77 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+
+from ...common.utils.datetime import utcnow
+from ...autogen.openapi_model import ResourceDeletedResponse
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+workflow_query = """
+DELETE FROM workflows
+WHERE developer_id = $1 AND task_id = $2;
+"""
+
+task_query = """
+DELETE FROM tasks
+WHERE developer_id = $1 AND task_id = $2
+RETURNING task_id;
+"""
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or agent does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A task with this ID already exists for this agent.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Task not found",
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceDeletedResponse,
+ one=True,
+ transform=lambda d: {
+ "id": d["task_id"],
+ "deleted_at": utcnow(),
+ },
+)
+@increase_counter("delete_task")
+@pg_query
+@beartype
+async def delete_task(
+ *,
+ developer_id: UUID,
+ task_id: UUID,
+) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
+ """
+ Deletes a task by its unique identifier along with its associated workflows.
+
+ Parameters:
+ developer_id (UUID): The unique identifier of the developer associated with the task.
+ task_id (UUID): The unique identifier of the task to delete.
+
+ Returns:
+        list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]: List of SQL queries with their parameters and fetch methods.
+
+ Raises:
+ HTTPException: If developer/agent doesn't exist (404) or on unique constraint violation (409)
+ """
+
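+    # Delete dependent workflow rows first, then the task row itself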
+ return [
+ (workflow_query, [developer_id, task_id], "fetch"),
+ (task_query, [developer_id, task_id], "fetchrow"),
+ ]
diff --git a/agents-api/agents_api/queries/tasks/get_task.py b/agents-api/agents_api/queries/tasks/get_task.py
new file mode 100644
index 000000000..292eabd35
--- /dev/null
+++ b/agents-api/agents_api/queries/tasks/get_task.py
@@ -0,0 +1,93 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+
+
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ...common.protocol.tasks import spec_to_task
+
+get_task_query = """
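+-- Fetch the latest version of the task, aggregating its workflow steps into a JSONB array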
+SELECT
+ t.*,
+ COALESCE(
+ jsonb_agg(
+ CASE WHEN w.name IS NOT NULL THEN
+ jsonb_build_object(
+ 'name', w.name,
+ 'steps', jsonb_build_array(
+ jsonb_build_object(
+ w.step_type, w.step_definition,
+ 'step_idx', w.step_idx -- Not sure if this is needed
+ )
+ )
+ )
+ END
+ ) FILTER (WHERE w.name IS NOT NULL),
+ '[]'::jsonb
+ ) as workflows
+FROM
+ tasks t
+LEFT JOIN
+ workflows w ON t.developer_id = w.developer_id AND t.task_id = w.task_id AND t.version = w.version
+WHERE
+ t.developer_id = $1 AND t.task_id = $2
+ AND t.version = (
+ SELECT MAX(version)
+ FROM tasks
+ WHERE developer_id = $1 AND task_id = $2
+ )
+GROUP BY t.developer_id, t.task_id, t.canonical_name, t.agent_id, t.version;
+"""
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or agent does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A task with this ID already exists for this agent.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Task not found",
+ ),
+ }
+)
+@wrap_in_class(spec_to_task, one=True)
+@increase_counter("get_task")
+@pg_query
+@beartype
+async def get_task(
+ *,
+ developer_id: UUID,
+ task_id: UUID,
+) -> tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]:
+ """
+ Retrieves a task by its unique identifier along with its associated workflows.
+
+ Parameters:
+ developer_id (UUID): The unique identifier of the developer associated with the task.
+ task_id (UUID): The unique identifier of the task to retrieve.
+
+ Returns:
+ tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]: SQL query, parameters, and fetch method.
+
+ Raises:
+ HTTPException: If developer/agent doesn't exist (404) or on unique constraint violation (409)
+ """
+
+ return (
+ get_task_query,
+ [developer_id, task_id],
+ "fetchrow",
+ )
diff --git a/agents-api/agents_api/queries/tasks/list_tasks.py b/agents-api/agents_api/queries/tasks/list_tasks.py
new file mode 100644
index 000000000..8cd0980a5
--- /dev/null
+++ b/agents-api/agents_api/queries/tasks/list_tasks.py
@@ -0,0 +1,124 @@
+from typing import Any, Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+
+
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ...common.protocol.tasks import spec_to_task
+
+list_tasks_query = """
+SELECT
+ t.*,
+ COALESCE(
+ jsonb_agg(
+ CASE WHEN w.name IS NOT NULL THEN
+ jsonb_build_object(
+ 'name', w.name,
+ 'steps', jsonb_build_array(
+ jsonb_build_object(
+ w.step_type, w.step_definition,
+ 'step_idx', w.step_idx -- Not sure if this is needed
+ )
+ )
+ )
+ END
+ ) FILTER (WHERE w.name IS NOT NULL),
+ '[]'::jsonb
+ ) as workflows
+FROM
+ tasks t
+LEFT JOIN
+ workflows w ON t.developer_id = w.developer_id AND t.task_id = w.task_id AND t.version = w.version
+WHERE
+ t.developer_id = $1
+ {metadata_filter_query}
+GROUP BY t.developer_id, t.task_id, t.canonical_name, t.agent_id, t.version
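+-- CASE expressions emulate a dynamic ORDER BY: $4 selects the sort column, $5 the direction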
+ORDER BY
+ CASE WHEN $4 = 'created_at' AND $5 = 'asc' THEN t.created_at END ASC NULLS LAST,
+ CASE WHEN $4 = 'created_at' AND $5 = 'desc' THEN t.created_at END DESC NULLS LAST,
+ CASE WHEN $4 = 'updated_at' AND $5 = 'asc' THEN t.updated_at END ASC NULLS LAST,
+ CASE WHEN $4 = 'updated_at' AND $5 = 'desc' THEN t.updated_at END DESC NULLS LAST
+LIMIT $2 OFFSET $3;
+"""
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or agent does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A task with this ID already exists for this agent.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Task not found",
+ ),
+ }
+)
+@wrap_in_class(spec_to_task)
+@increase_counter("list_tasks")
+@pg_query
+@beartype
+async def list_tasks(
+ *,
+ developer_id: UUID,
+ limit: int = 100,
+ offset: int = 0,
+ sort_by: Literal["created_at", "updated_at"] = "created_at",
+ direction: Literal["asc", "desc"] = "desc",
+ metadata_filter: dict[str, Any] = {},
+) -> tuple[str, list]:
+ """
+ Retrieves all tasks for a given developer with pagination and sorting.
+
+ Parameters:
+ developer_id (UUID): The unique identifier of the developer.
+ limit (int): Maximum number of records to return (default: 100)
+ offset (int): Number of records to skip (default: 0)
+ sort_by (str): Field to sort by ("created_at" or "updated_at")
+ direction (str): Sort direction ("asc" or "desc")
+ metadata_filter (dict): Optional metadata filters
+
+ Returns:
+ tuple[str, list]: SQL query and parameters.
+
+ Raises:
+ HTTPException: If parameters are invalid or developer/agent doesn't exist
+ """
+ if direction.lower() not in ["asc", "desc"]:
+ raise HTTPException(status_code=400, detail="Invalid sort direction")
+
+ if limit > 100 or limit < 1:
+ raise HTTPException(status_code=400, detail="Limit must be between 1 and 100")
+
+ if offset < 0:
+ raise HTTPException(status_code=400, detail="Offset must be non-negative")
+
+ # Format query with metadata filter if needed
+ query = list_tasks_query.format(
+ metadata_filter_query="AND metadata @> $6::jsonb" if metadata_filter else ""
+ )
+
+ # Build parameters list
+ params = [
+ developer_id,
+ limit,
+ offset,
+ sort_by,
+ direction,
+ ]
+
+ if metadata_filter:
+ params.append(metadata_filter)
+
+ return (query, params)
diff --git a/agents-api/agents_api/queries/tasks/patch_task.py b/agents-api/agents_api/queries/tasks/patch_task.py
new file mode 100644
index 000000000..0d82f9c91
--- /dev/null
+++ b/agents-api/agents_api/queries/tasks/patch_task.py
@@ -0,0 +1,217 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import ResourceUpdatedResponse, PatchTaskRequest
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ...common.utils.datetime import utcnow
+from ...common.protocol.tasks import task_to_spec
+
+# # Update task query using UPDATE
+# update_task_query = parse_one("""
+# UPDATE tasks
+# SET
+# version = version + 1,
+# canonical_name = $2,
+# agent_id = $4,
+# metadata = $5,
+# name = $6,
+# description = $7,
+# inherit_tools = $8,
+# input_schema = $9::jsonb,
+# updated_at = NOW()
+# WHERE
+# developer_id = $1
+# AND task_id = $3
+# RETURNING *;
+# """).sql(pretty=True)
+
+# Update task query using INSERT with version increment
+patch_task_query = parse_one("""
+WITH current_version AS (
+ SELECT MAX("version") as current_version,
+ canonical_name as existing_canonical_name,
+ metadata as existing_metadata,
+ name as existing_name,
+ description as existing_description,
+ inherit_tools as existing_inherit_tools,
+ input_schema as existing_input_schema
+ FROM tasks
+ WHERE developer_id = $1
+ AND task_id = $3
+ GROUP BY canonical_name, metadata, name, description, inherit_tools, input_schema
+ HAVING MAX("version") IS NOT NULL -- This ensures we only proceed if a version exists
+)
+INSERT INTO tasks (
+ "version",
+ developer_id, -- $1
+ canonical_name, -- $2
+ task_id, -- $3
+ agent_id, -- $4
+ metadata, -- $5
+ name, -- $6
+ description, -- $7
+ inherit_tools, -- $8
+ input_schema -- $9
+)
+SELECT
+ current_version + 1, -- version
+ $1, -- developer_id
+ COALESCE($2, existing_canonical_name), -- canonical_name
+ $3, -- task_id
+ $4, -- agent_id
+ COALESCE($5::jsonb, existing_metadata), -- metadata
+ COALESCE($6, existing_name), -- name
+ COALESCE($7, existing_description), -- description
+ COALESCE($8, existing_inherit_tools), -- inherit_tools
+ COALESCE($9::jsonb, existing_input_schema) -- input_schema
+FROM current_version
+RETURNING *;
+""").sql(pretty=True)
+
+# When main is None - just copy existing workflows with new version
+copy_workflows_query = parse_one("""
+WITH current_version AS (
+ SELECT MAX(version) - 1 as current_version
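+    -- the bumped task row is already inserted, so MAX(version) - 1 is the version whose workflows we copy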
+ FROM tasks
+ WHERE developer_id = $1 AND task_id = $2
+)
+INSERT INTO workflows (
+ developer_id,
+ task_id,
+ version,
+ name,
+ step_idx,
+ step_type,
+ step_definition
+)
+SELECT
+ developer_id,
+ task_id,
+ (SELECT current_version + 1 FROM current_version), -- new version
+ name,
+ step_idx,
+ step_type,
+ step_definition
+FROM workflows
+WHERE developer_id = $1
+AND task_id = $2
+AND version = (SELECT current_version FROM current_version)
+""").sql(pretty=True)
+
+# When main is provided - insert freshly generated workflow rows for the new version
+new_workflows_query = parse_one("""
+WITH current_version AS (
+ SELECT COALESCE(MAX(version), 0) - 1 as next_version
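+    -- MAX(version) already includes the just-inserted task row; next_version + 1 below resolves back to it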
+ FROM tasks
+ WHERE developer_id = $1 AND task_id = $2
+)
+INSERT INTO workflows (
+ developer_id,
+ task_id,
+ version,
+ name,
+ step_idx,
+ step_type,
+ step_definition
+)
+SELECT
+ $1, -- developer_id
+ $2, -- task_id
+ next_version + 1, -- version
+ $3, -- name
+ $4, -- step_idx
+ $5, -- step_type
+ $6 -- step_definition
+FROM current_version
+""").sql(pretty=True)
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or agent does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A task with this ID already exists for this agent.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Task not found",
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {"id": d["task_id"], "updated_at": utcnow()},
+)
+@increase_counter("patch_task")
+@pg_query(return_index=0)
+@beartype
+async def patch_task(
+ *,
+ developer_id: UUID,
+ task_id: UUID,
+ agent_id: UUID,
+ data: PatchTaskRequest,
+) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
+ """
+ Updates a task and its associated workflows with version control.
+ Only updates the fields that are provided in the request.
+
+ Parameters:
+ developer_id (UUID): The unique identifier of the developer.
+ task_id (UUID): The unique identifier of the task to update.
+ data (PatchTaskRequest): The partial update data.
+ agent_id (UUID): The unique identifier of the agent.
+ Returns:
+ list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]: List of queries to execute.
+ """
+ # Parameters for patching the task
+
+ patch_task_params = [
+ developer_id, # $1
+ data.canonical_name, # $2
+ task_id, # $3
+ agent_id, # $4
+ data.metadata or None, # $5
+ data.name or None, # $6
+ data.description or None, # $7
+ data.inherit_tools, # $8
+ data.input_schema, # $9
+ ]
+
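+    # If main is untouched, carry the prior version's workflows forward; otherwise rebuild them from the patch data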
+ if data.main is None:
+ workflow_query = copy_workflows_query
+ workflow_params = [[developer_id, task_id]] # Only need these params
+ else:
+ workflow_query = new_workflows_query
+ workflow_params = []
+ workflows_spec = task_to_spec(data).model_dump(exclude_none=True, mode="json")
+ for workflow in workflows_spec.get("workflows", []):
+ workflow_name = workflow.get("name")
+ steps = workflow.get("steps", [])
+ for step_idx, step in enumerate(steps):
+ workflow_params.append([
+ developer_id, # $1
+ task_id, # $2
+ workflow_name, # $3
+ step_idx, # $4
+ step["kind_"], # $5
+ step[step["kind_"]], # $6
+ ])
+
+ return [
+ (patch_task_query, patch_task_params, "fetchrow"),
+ (workflow_query, workflow_params, "fetchmany"),
+ ]
diff --git a/agents-api/agents_api/queries/tasks/update_task.py b/agents-api/agents_api/queries/tasks/update_task.py
new file mode 100644
index 000000000..d14f915ac
--- /dev/null
+++ b/agents-api/agents_api/queries/tasks/update_task.py
@@ -0,0 +1,187 @@
+from typing import Literal
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateTaskRequest
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+from ...common.utils.datetime import utcnow
+from ...common.protocol.tasks import task_to_spec
+
+# # Update task query using UPDATE
+# update_task_query = parse_one("""
+# UPDATE tasks
+# SET
+# version = version + 1,
+# canonical_name = $2,
+# agent_id = $4,
+# metadata = $5,
+# name = $6,
+# description = $7,
+# inherit_tools = $8,
+# input_schema = $9::jsonb,
+# updated_at = NOW()
+# WHERE
+# developer_id = $1
+# AND task_id = $3
+# RETURNING *;
+# """).sql(pretty=True)
+
+# Update task query using INSERT with version increment
+update_task_query = parse_one("""
+WITH current_version AS (
+ SELECT MAX("version") as current_version,
+ canonical_name as existing_canonical_name
+ FROM tasks
+ WHERE developer_id = $1
+ AND task_id = $3
+ GROUP BY task_id, canonical_name
+ HAVING MAX("version") IS NOT NULL -- This ensures we only proceed if a version exists
+)
+INSERT INTO tasks (
+ "version",
+ developer_id, -- $1
+ canonical_name, -- $2
+ task_id, -- $3
+ agent_id, -- $4
+ metadata, -- $5
+ name, -- $6
+ description, -- $7
+ inherit_tools, -- $8
+    input_schema      -- $9
+)
+SELECT
+ current_version + 1, -- version
+ $1, -- developer_id
+ COALESCE($2, existing_canonical_name), -- canonical_name
+ $3, -- task_id
+ $4, -- agent_id
+ $5::jsonb, -- metadata
+ $6, -- name
+ $7, -- description
+ $8, -- inherit_tools
+ $9::jsonb -- input_schema
+FROM current_version
+RETURNING *;
+""").sql(pretty=True)
+
+# Insert workflow rows pointing at the task's current (just-bumped) version
+workflows_query = parse_one("""
+WITH version AS (
+ SELECT COALESCE(MAX(version), 0) as current_version
+ FROM tasks
+ WHERE developer_id = $1 AND task_id = $2
+)
+INSERT INTO workflows (
+ developer_id,
+ task_id,
+ version,
+ name,
+ step_idx,
+ step_type,
+ step_definition
+)
+SELECT
+ $1, -- developer_id
+ $2, -- task_id
+ current_version, -- version (from CTE)
+ $3, -- name
+ $4, -- step_idx
+ $5, -- step_type
+ $6 -- step_definition
+FROM version
+""").sql(pretty=True)
+
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer or agent does not exist.",
+ ),
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A task with this ID already exists for this agent.",
+ ),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Task not found",
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {"id": d["task_id"], "updated_at": utcnow()},
+)
+@increase_counter("update_task")
+@pg_query(return_index=0)
+@beartype
+async def update_task(
+ *,
+ developer_id: UUID,
+ task_id: UUID,
+ agent_id: UUID,
+ data: UpdateTaskRequest,
+) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
+ """
+ Updates a task and its associated workflows with version control.
+
+ Parameters:
+ developer_id (UUID): The unique identifier of the developer.
+ task_id (UUID): The unique identifier of the task to update.
+ data (UpdateTaskRequest): The update data.
+ agent_id (UUID): The unique identifier of the agent.
+ Returns:
+ list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]: List of queries to execute.
+ """
+ print("UPDATING TIIIIIME")
+ # Parameters for updating the task
+ update_task_params = [
+ developer_id, # $1
+ data.canonical_name, # $2
+ task_id, # $3
+ agent_id, # $4
+ data.metadata or {}, # $5
+ data.name, # $6
+ data.description, # $7
+ data.inherit_tools, # $8
+ data.input_schema or {}, # $9
+ ]
+
+ # Generate workflows from task data
+ workflows_spec = task_to_spec(data).model_dump(exclude_none=True, mode="json")
+ workflow_params = []
+ for workflow in workflows_spec.get("workflows", []):
+ workflow_name = workflow.get("name")
+ steps = workflow.get("steps", [])
+ for step_idx, step in enumerate(steps):
+ workflow_params.append(
+ [
+ developer_id, # $1
+ task_id, # $2
+ workflow_name, # $3
+ step_idx, # $4
+ step["kind_"], # $5
+ step[step["kind_"]], # $6
+ ]
+ )
+
+ return [
+ (
+ update_task_query,
+ update_task_params,
+ "fetchrow",
+ ),
+ (
+ workflows_query,
+ workflow_params,
+ "fetchmany",
+ ),
+ ]
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 0d139cb91..d736a30c1 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -172,13 +172,20 @@ async def wrapper(
results: list[Record] = await method(
query, *args, timeout=timeout
)
- all_results.append(results)
-
- if method_name == "fetchrow" and (
- len(results) == 0 or results.get("bool", True) is None
- ):
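+                        # Normalize fetchrow output: wrap the single record in a list, or use [] when the
+                        # sentinel "bool"/"exists" columns signal that nothing was found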
+ if method_name == "fetchrow":
+ results = (
+ [results]
+ if results is not None
+ and results.get("bool", False) is not None
+ and results.get("exists", True) is not False
+ else []
+ )
+
+ if method_name == "fetchrow" and len(results) == 0:
raise asyncpg.NoDataFoundError("No data found")
+ all_results.append(results)
+
end = timeit and time.perf_counter()
timeit and print(
@@ -238,6 +245,10 @@ def _return_data(rec: list[Record]):
return obj
objs: list[ModelT] = [cls(**item) for item in map(transform, data)]
+ print("data", data)
+ print("-" * 10)
+ print("objs", objs)
+ print("-" * 100)
return objs
def decorator(
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 430a2e3c5..0e0224aff 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -10,6 +10,7 @@
CreateAgentRequest,
CreateFileRequest,
CreateSessionRequest,
+ CreateTaskRequest,
CreateUserRequest,
)
from agents_api.clients.pg import create_db_pool
@@ -30,7 +31,8 @@
# from agents_api.queries.files.delete_file import delete_file
from agents_api.queries.sessions.create_session import create_session
-# from agents_api.queries.task.create_task import create_task
+from agents_api.queries.tasks.create_task import create_task
+
# from agents_api.queries.task.delete_task import delete_task
# from agents_api.queries.tools.create_tools import create_tools
# from agents_api.queries.tools.delete_tool import delete_tool
@@ -148,6 +150,24 @@ async def test_file(dsn=pg_dsn, developer=test_developer, user=test_user):
return file
+@fixture(scope="test")
+async def test_task(dsn=pg_dsn, developer=test_developer, agent=test_agent):
+ pool = await create_db_pool(dsn=dsn)
+ task = await create_task(
+ developer_id=developer.id,
+ agent_id=agent.id,
+ task_id=uuid7(),
+ data=CreateTaskRequest(
+ name="test task",
+ description="test task about",
+ input_schema={"type": "object", "additionalProperties": True},
+ main=[{"evaluate": {"hi": "_"}}],
+ ),
+ connection_pool=pool,
+ )
+ return task
+
+
@fixture(scope="test")
async def random_email():
return f"{"".join([random.choice(string.ascii_lowercase) for _ in range(10)])}@mail.com"
@@ -157,7 +177,7 @@ async def random_email():
async def test_new_developer(dsn=pg_dsn, email=random_email):
pool = await create_db_pool(dsn=dsn)
dev_id = uuid7()
- developer = await create_developer(
+ await create_developer(
email=email,
active=True,
tags=["tag1"],
@@ -166,6 +186,11 @@ async def test_new_developer(dsn=pg_dsn, email=random_email):
connection_pool=pool,
)
+ developer = await get_developer(
+ developer_id=dev_id,
+ connection_pool=pool,
+ )
+
return developer
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index eedc07dd2..3325b4a69 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -3,6 +3,8 @@
from uuid_extensions import uuid7
from ward import raises, test
+from agents_api.autogen.openapi_model import ResourceCreatedResponse
+from agents_api.common.protocol.developers import Developer
from agents_api.clients.pg import create_db_pool
from agents_api.queries.developers.create_developer import create_developer
from agents_api.queries.developers.get_developer import (
@@ -32,6 +34,7 @@ async def _(dsn=pg_dsn, dev=test_new_developer):
connection_pool=pool,
)
+ assert type(developer) == Developer
assert developer.id == dev.id
assert developer.email == dev.email
assert developer.active
@@ -52,11 +55,9 @@ async def _(dsn=pg_dsn):
connection_pool=pool,
)
+ assert type(developer) == ResourceCreatedResponse
assert developer.id == dev_id
- assert developer.email == "m@mail.com"
- assert developer.active
- assert developer.tags == ["tag1"]
- assert developer.settings == {"key1": "val1"}
+ assert developer.created_at is not None
@test("query: update developer")
@@ -71,10 +72,6 @@ async def _(dsn=pg_dsn, dev=test_new_developer, email=random_email):
)
assert developer.id == dev.id
- assert developer.email == email
- assert developer.active
- assert developer.tags == ["tag2"]
- assert developer.settings == {"key2": "val2"}
@test("query: patch developer")
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 706185c7b..463627d74 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -3,7 +3,6 @@
It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
"""
-from uuid import UUID
from fastapi import HTTPException
from uuid_extensions import uuid7
@@ -48,7 +47,7 @@ async def _(dsn=pg_dsn, developer=test_developer):
assert exc_info.raised.status_code == 404
-@test("query: list entries no session")
+@test("query: list entries sql - no session")
async def _(dsn=pg_dsn, developer=test_developer):
"""Test the retrieval of entries from the database."""
@@ -63,7 +62,7 @@ async def _(dsn=pg_dsn, developer=test_developer):
assert exc_info.raised.status_code == 404
-@test("query: get entries")
+@test("query: list entries sql - session exists")
async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
"""Test the retrieval of entries from the database."""
@@ -101,7 +100,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
assert result is not None
-@test("query: get history")
+@test("query: get history sql - session exists")
async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
"""Test the retrieval of entry history from the database."""
@@ -140,7 +139,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
assert result.entries[0].id
-@test("query: delete entries")
+@test("query: delete entries sql - session exists")
async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
"""Test the deletion of entries from the database."""
diff --git a/agents-api/tests/test_files_queries.py b/agents-api/tests/test_files_queries.py
index 92b52d733..c83c7a6f6 100644
--- a/agents-api/tests/test_files_queries.py
+++ b/agents-api/tests/test_files_queries.py
@@ -1,9 +1,7 @@
# # Tests for entry queries
-from fastapi import HTTPException
-from uuid_extensions import uuid7
-from ward import raises, test
+from ward import test
from agents_api.autogen.openapi_model import CreateFileRequest
from agents_api.clients.pg import create_db_pool
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 4673d6fc5..1d7341b08 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -10,6 +10,7 @@
CreateOrUpdateSessionRequest,
CreateSessionRequest,
PatchSessionRequest,
+ ResourceCreatedResponse,
ResourceDeletedResponse,
ResourceUpdatedResponse,
Session,
@@ -56,7 +57,7 @@ async def _(
)
assert result is not None
- assert isinstance(result, Session), f"Result is not a Session, {result}"
+    assert isinstance(result, ResourceCreatedResponse), f"Result is not a ResourceCreatedResponse, {result}"
assert result.id == session_id
@@ -148,8 +149,8 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, session=test_session):
assert isinstance(result, list)
assert len(result) >= 1
assert all(
- s.situation == session.situation for s in result
- ), f"Result is not a list of sessions, {result}, {session.situation}"
+ isinstance(s, Session) for s in result
+ ), f"Result is not a list of sessions, {result}"
@test("query: count sessions")
@@ -227,7 +228,6 @@ async def _(
session_id=session.id,
connection_pool=pool,
)
- assert patched_session.situation == session.situation
assert patched_session.metadata == {"test": "metadata"}
diff --git a/agents-api/tests/test_task_queries.py b/agents-api/tests/test_task_queries.py
index 1a9fcd544..5e42681d6 100644
--- a/agents-api/tests/test_task_queries.py
+++ b/agents-api/tests/test_task_queries.py
@@ -1,160 +1,333 @@
-# # Tests for task queries
-
-# from uuid_extensions import uuid7
-# from ward import test
-
-# from agents_api.autogen.openapi_model import (
-# CreateTaskRequest,
-# ResourceUpdatedResponse,
-# Task,
-# UpdateTaskRequest,
-# )
-# from agents_api.queries.task.create_or_update_task import create_or_update_task
-# from agents_api.queries.task.create_task import create_task
-# from agents_api.queries.task.delete_task import delete_task
-# from agents_api.queries.task.get_task import get_task
-# from agents_api.queries.task.list_tasks import list_tasks
-# from agents_api.queries.task.update_task import update_task
-# from tests.fixtures import cozo_client, test_agent, test_developer_id, test_task
-
-
-# @test("query: create task")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# task_id = uuid7()
-
-# create_task(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# task_id=task_id,
-# data=CreateTaskRequest(
-# **{
-# "name": "test task",
-# "description": "test task about",
-# "input_schema": {"type": "object", "additionalProperties": True},
-# "main": [{"evaluate": {"hi": "_"}}],
-# }
-# ),
-# client=client,
-# )
-
-
-# @test("query: create or update task")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# task_id = uuid7()
-
-# create_or_update_task(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# task_id=task_id,
-# data=CreateTaskRequest(
-# **{
-# "name": "test task",
-# "description": "test task about",
-# "input_schema": {"type": "object", "additionalProperties": True},
-# "main": [{"evaluate": {"hi": "_"}}],
-# }
-# ),
-# client=client,
-# )
-
-
-# @test("query: get task not exists")
-# def _(client=cozo_client, developer_id=test_developer_id):
-# task_id = uuid7()
-
-# try:
-# get_task(
-# developer_id=developer_id,
-# task_id=task_id,
-# client=client,
-# )
-# except Exception:
-# pass
-# else:
-# assert False, "Task should not exist"
-
-
-# @test("query: get task exists")
-# def _(client=cozo_client, developer_id=test_developer_id, task=test_task):
-# result = get_task(
-# developer_id=developer_id,
-# task_id=task.id,
-# client=client,
-# )
-
-# assert result is not None
-# assert isinstance(result, Task)
-
-
-# @test("query: delete task")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# task = create_task(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# data=CreateTaskRequest(
-# **{
-# "name": "test task",
-# "description": "test task about",
-# "input_schema": {"type": "object", "additionalProperties": True},
-# "main": [{"evaluate": {"hi": "_"}}],
-# }
-# ),
-# client=client,
-# )
-
-# delete_task(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# task_id=task.id,
-# client=client,
-# )
-
-# try:
-# get_task(
-# developer_id=developer_id,
-# task_id=task.id,
-# client=client,
-# )
-# except Exception:
-# pass
-
-# else:
-# assert False, "Task should not exist"
-
-
-# @test("query: update task")
-# def _(
-# client=cozo_client, developer_id=test_developer_id, agent=test_agent, task=test_task
-# ):
-# result = update_task(
-# developer_id=developer_id,
-# task_id=task.id,
-# agent_id=agent.id,
-# data=UpdateTaskRequest(
-# **{
-# "name": "updated task",
-# "description": "updated task about",
-# "input_schema": {"type": "object", "additionalProperties": True},
-# "main": [{"evaluate": {"hi": "_"}}],
-# }
-# ),
-# client=client,
-# )
-
-# assert result is not None
-# assert isinstance(result, ResourceUpdatedResponse)
-
-
-# @test("query: list tasks")
-# def _(
-# client=cozo_client, developer_id=test_developer_id, task=test_task, agent=test_agent
-# ):
-# result = list_tasks(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# client=client,
-# )
-
-# assert isinstance(result, list)
-# assert len(result) > 0
-# assert all(isinstance(task, Task) for task in result)
+# Tests for task queries
+
+from fastapi import HTTPException
+from uuid_extensions import uuid7
+from ward import raises, test
+
+from agents_api.autogen.openapi_model import (
+ CreateTaskRequest,
+ UpdateTaskRequest,
+ ResourceUpdatedResponse,
+ PatchTaskRequest,
+ Task,
+)
+from agents_api.clients.pg import create_db_pool
+from agents_api.queries.tasks.create_or_update_task import create_or_update_task
+from agents_api.queries.tasks.create_task import create_task
+from agents_api.queries.tasks.get_task import get_task
+from agents_api.queries.tasks.delete_task import delete_task
+from agents_api.queries.tasks.list_tasks import list_tasks
+from agents_api.queries.tasks.update_task import update_task
+from agents_api.queries.tasks.patch_task import patch_task
+from tests.fixtures import pg_dsn, test_agent, test_developer_id, test_task
+
+
+@test("query: create task sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ """Test that a task can be successfully created."""
+
+ pool = await create_db_pool(dsn=dsn)
+ await create_task(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ task_id=uuid7(),
+ data=CreateTaskRequest(
+ name="test task",
+ description="test task about",
+ input_schema={"type": "object", "additionalProperties": True},
+ main=[{"evaluate": {"hi": "_"}}],
+ ),
+ connection_pool=pool,
+ )
+
+
+@test("query: create or update task sql")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ """Test that a task can be successfully created or updated."""
+
+ pool = await create_db_pool(dsn=dsn)
+ await create_or_update_task(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ task_id=uuid7(),
+ data=CreateTaskRequest(
+ name="test task",
+ description="test task about",
+ input_schema={"type": "object", "additionalProperties": True},
+ main=[{"evaluate": {"hi": "_"}}],
+ ),
+ connection_pool=pool,
+ )
+
+
+@test("query: get task sql - exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, task=test_task):
+ """Test that an existing task can be successfully retrieved."""
+
+ pool = await create_db_pool(dsn=dsn)
+
+ # Then retrieve it
+ result = await get_task(
+ developer_id=developer_id,
+ task_id=task.id,
+ connection_pool=pool,
+ )
+ assert result is not None
+ assert isinstance(result, Task), f"Result is not a Task, got {type(result)}"
+ assert result.id == task.id
+ assert result.name == "test task"
+ assert result.description == "test task about"
+
+
+@test("query: get task sql - not exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that attempting to retrieve a non-existent task raises an error."""
+
+ pool = await create_db_pool(dsn=dsn)
+ task_id = uuid7()
+
+ with raises(HTTPException) as exc:
+ await get_task(
+ developer_id=developer_id,
+ task_id=task_id,
+ connection_pool=pool,
+ )
+
+ assert exc.raised.status_code == 404
+ assert "Task not found" in str(exc.raised.detail)
+
+
+@test("query: delete task sql - exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, task=test_task):
+ """Test that a task can be successfully deleted."""
+
+ pool = await create_db_pool(dsn=dsn)
+
+ # First verify task exists
+ result = await get_task(
+ developer_id=developer_id,
+ task_id=task.id,
+ connection_pool=pool,
+ )
+ assert result is not None
+ assert result.id == task.id
+
+ # Delete the task
+ deleted = await delete_task(
+ developer_id=developer_id,
+ task_id=task.id,
+ connection_pool=pool,
+ )
+ assert deleted is not None
+ assert deleted.id == task.id
+
+ # Verify task no longer exists
+ with raises(HTTPException) as exc:
+ await get_task(
+ developer_id=developer_id,
+ task_id=task.id,
+ connection_pool=pool,
+ )
+
+ assert exc.raised.status_code == 404
+ assert "Task not found" in str(exc.raised.detail)
+
+
+@test("query: delete task sql - not exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id):
+ """Test that attempting to delete a non-existent task raises an error."""
+
+ pool = await create_db_pool(dsn=dsn)
+ task_id = uuid7()
+
+ with raises(HTTPException) as exc:
+ await delete_task(
+ developer_id=developer_id,
+ task_id=task_id,
+ connection_pool=pool,
+ )
+
+ assert exc.raised.status_code == 404
+ assert "Task not found" in str(exc.raised.detail)
+
+
+# Add tests for list tasks
+@test("query: list tasks sql - with filters")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ """Test that tasks can be successfully filtered and retrieved."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await list_tasks(
+ developer_id=developer_id,
+ limit=10,
+ offset=0,
+ sort_by="updated_at",
+ direction="asc",
+ metadata_filter={"test": True},
+ connection_pool=pool,
+ )
+ assert result is not None
+ assert isinstance(result, list)
+ assert all(isinstance(task, Task) for task in result)
+    assert all(task.metadata.get("test") is True for task in result)
+
+
+@test("query: list tasks sql - no filters")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ """Test that a list of tasks can be successfully retrieved."""
+
+ pool = await create_db_pool(dsn=dsn)
+ result = await list_tasks(
+ developer_id=developer_id,
+ connection_pool=pool,
+ )
+ assert result is not None
+ assert isinstance(result, list)
+ assert len(result) > 0
+ assert all(isinstance(task, Task) for task in result)
+
+@test("query: update task sql - exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, task=test_task):
+ """Test that a task can be successfully updated."""
+
+ pool = await create_db_pool(dsn=dsn)
+ updated = await update_task(
+ developer_id=developer_id,
+ task_id=task.id,
+ agent_id=agent.id,
+ data=UpdateTaskRequest(
+ **{
+ "name": "updated task",
+ "canonical_name": "updated_task",
+ "description": "updated task description",
+ "input_schema": {"type": "object", "additionalProperties": True},
+ "main": [{"evaluate": {"hi": "_"}}],
+ "inherit_tools": False,
+ "metadata": {"updated": True},
+ }
+ ),
+ connection_pool=pool,
+ )
+
+ assert updated is not None
+ assert isinstance(updated, ResourceUpdatedResponse)
+ assert updated.id == task.id
+
+ # Verify task was updated
+ updated_task = await get_task(
+ developer_id=developer_id,
+ task_id=task.id,
+ connection_pool=pool,
+ )
+ assert updated_task.name == "updated task"
+ assert updated_task.description == "updated task description"
+ assert updated_task.metadata == {"updated": True}
+
+
+@test("query: update task sql - not exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ """Test that attempting to update a non-existent task raises an error."""
+
+ pool = await create_db_pool(dsn=dsn)
+ task_id = uuid7()
+
+ with raises(HTTPException) as exc:
+ await update_task(
+ developer_id=developer_id,
+ task_id=task_id,
+ agent_id=agent.id,
+ data=UpdateTaskRequest(
+ **{
+ "canonical_name": "updated_task",
+ "name": "updated task",
+ "description": "updated task description",
+ "input_schema": {"type": "object", "additionalProperties": True},
+ "main": [{"evaluate": {"hi": "_"}}],
+ "inherit_tools": False,
+ }
+ ),
+ connection_pool=pool,
+ )
+
+ assert exc.raised.status_code == 404
+ assert "Task not found" in str(exc.raised.detail)
+
+@test("query: patch task sql - exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ """Test that patching an existing task works correctly."""
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create initial task
+ task = await create_task(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ data=CreateTaskRequest(
+ **{
+ "canonical_name": "test_task",
+ "name": "test task",
+ "description": "test task description",
+ "input_schema": {"type": "object", "additionalProperties": True},
+ "main": [{"evaluate": {"hi": "_"}}],
+ "inherit_tools": False,
+ "metadata": {"initial": True},
+ }
+ ),
+ connection_pool=pool,
+ )
+
+ # Patch the task
+ updated = await patch_task(
+ developer_id=developer_id,
+ task_id=task.id,
+ agent_id=agent.id,
+ data=PatchTaskRequest(
+ **{
+ "name": "patched task",
+ "metadata": {"patched": True},
+ }
+ ),
+ connection_pool=pool,
+ )
+
+ assert updated is not None
+ assert isinstance(updated, ResourceUpdatedResponse)
+ assert updated.id == task.id
+
+ # Verify task was patched correctly
+ patched_task = await get_task(
+ developer_id=developer_id,
+ task_id=task.id,
+ connection_pool=pool,
+ )
+ # Check that patched fields were updated
+ assert patched_task.name == "patched task"
+ assert patched_task.metadata == {"patched": True}
+ # Check that non-patched fields remain unchanged
+ assert patched_task.canonical_name == "test_task"
+ assert patched_task.description == "test task description"
+
+
+@test("query: patch task sql - not exists")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ """Test that attempting to patch a non-existent task raises an error."""
+ pool = await create_db_pool(dsn=dsn)
+ task_id = uuid7()
+
+ with raises(HTTPException) as exc:
+ await patch_task(
+ developer_id=developer_id,
+ task_id=task_id,
+ agent_id=agent.id,
+ data=PatchTaskRequest(
+ **{
+ "name": "patched task",
+ "metadata": {"patched": True},
+ }
+ ),
+ connection_pool=pool,
+ )
+
+ assert exc.raised.status_code == 404
+ assert "Task not found" in str(exc.raised.detail)
+
diff --git a/integrations-service/integrations/autogen/Tasks.py b/integrations-service/integrations/autogen/Tasks.py
index b9212d8cb..f6bf58ddf 100644
--- a/integrations-service/integrations/autogen/Tasks.py
+++ b/integrations-service/integrations/autogen/Tasks.py
@@ -161,8 +161,21 @@ class CreateTaskRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
- name: str
+ name: Annotated[str, Field(max_length=255, min_length=1)]
+ """
+ The name of the task.
+ """
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ The canonical name of the task.
+ """
description: str = ""
+ """
+ The description of the task.
+ """
main: Annotated[
list[
EvaluateStep
@@ -650,7 +663,21 @@ class PatchTaskRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
+ name: Annotated[str | None, Field(max_length=255, min_length=1)] = None
+ """
+ The name of the task.
+ """
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ The canonical name of the task.
+ """
description: str = ""
+ """
+ The description of the task.
+ """
main: Annotated[
list[
EvaluateStep
@@ -966,8 +993,21 @@ class Task(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
- name: str
+ name: Annotated[str, Field(max_length=255, min_length=1)]
+ """
+ The name of the task.
+ """
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ The canonical name of the task.
+ """
description: str = ""
+ """
+ The description of the task.
+ """
main: Annotated[
list[
EvaluateStep
@@ -1124,7 +1164,21 @@ class UpdateTaskRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
+ name: Annotated[str, Field(max_length=255, min_length=1)]
+ """
+ The name of the task.
+ """
+ canonical_name: Annotated[
+ str | None,
+ Field(max_length=255, min_length=1, pattern="^[a-zA-Z][a-zA-Z0-9_]*$"),
+ ] = None
+ """
+ The canonical name of the task.
+ """
description: str = ""
+ """
+ The description of the task.
+ """
main: Annotated[
list[
EvaluateStep
diff --git a/memory-store/migrations/000005_files.up.sql b/memory-store/migrations/000005_files.up.sql
index 39426783a..1a851ca0b 100644
--- a/memory-store/migrations/000005_files.up.sql
+++ b/memory-store/migrations/000005_files.up.sql
@@ -70,15 +70,11 @@ CREATE TABLE IF NOT EXISTS user_files (
CREATE TABLE IF NOT EXISTS file_owners (
developer_id UUID NOT NULL,
file_id UUID NOT NULL,
- user_id UUID NOT NULL,
owner_type TEXT NOT NULL, -- 'user' or 'agent'
owner_id UUID NOT NULL,
CONSTRAINT pk_file_owners PRIMARY KEY (developer_id, file_id),
CONSTRAINT fk_file_owners_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id),
CONSTRAINT ct_file_owners_owner_type CHECK (owner_type IN ('user', 'agent'))
- CONSTRAINT pk_user_files PRIMARY KEY (developer_id, user_id, file_id),
- CONSTRAINT fk_user_files_user FOREIGN KEY (developer_id, user_id) REFERENCES users (developer_id, user_id),
- CONSTRAINT fk_user_files_file FOREIGN KEY (developer_id, file_id) REFERENCES files (developer_id, file_id) ON DELETE CASCADE
);
-- Create the agent_files table
diff --git a/memory-store/migrations/000008_tools.up.sql b/memory-store/migrations/000008_tools.up.sql
index 93e852de2..3318df8d8 100644
--- a/memory-store/migrations/000008_tools.up.sql
+++ b/memory-store/migrations/000008_tools.up.sql
@@ -48,22 +48,6 @@ END $$;
CREATE INDEX IF NOT EXISTS idx_tools_developer_agent ON tools (developer_id, agent_id);
--- Add foreign key constraint referencing tasks(task_id)
-DO $$
-BEGIN
- IF NOT EXISTS (
- SELECT 1
- FROM pg_constraint
- WHERE conname = 'fk_tools_task'
- ) THEN
- ALTER TABLE tools
- ADD CONSTRAINT fk_tools_task
- FOREIGN KEY (developer_id, task_id)
- REFERENCES tasks(developer_id, task_id) ON DELETE CASCADE;
- END IF;
-END
-$$;
-
-- Drop trigger if exists and recreate
DROP TRIGGER IF EXISTS trg_tools_updated_at ON tools;
diff --git a/memory-store/migrations/000010_tasks.up.sql b/memory-store/migrations/000010_tasks.up.sql
index d5a0119d8..918a09255 100644
--- a/memory-store/migrations/000010_tasks.up.sql
+++ b/memory-store/migrations/000010_tasks.up.sql
@@ -30,7 +30,7 @@ CREATE TABLE IF NOT EXISTS tasks (
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
metadata JSONB DEFAULT '{}'::JSONB,
CONSTRAINT pk_tasks PRIMARY KEY (developer_id, task_id, "version"),
- CONSTRAINT uq_tasks_canonical_name_unique UNIQUE (developer_id, canonical_name),
+ CONSTRAINT uq_tasks_canonical_name_unique UNIQUE (developer_id, canonical_name, "version"),
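+    -- "version" is part of the unique key so each new task version can reuse its canonical_name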
CONSTRAINT fk_tasks_agent FOREIGN KEY (developer_id, agent_id) REFERENCES agents (developer_id, agent_id) ON DELETE CASCADE,
CONSTRAINT ct_tasks_canonical_name_valid_identifier CHECK (canonical_name ~ '^[a-zA-Z][a-zA-Z0-9_]*$'),
CONSTRAINT ct_tasks_metadata_is_object CHECK (jsonb_typeof(metadata) = 'object'),
diff --git a/typespec/tasks/models.tsp b/typespec/tasks/models.tsp
index c3b301bd2..ca6b72e00 100644
--- a/typespec/tasks/models.tsp
+++ b/typespec/tasks/models.tsp
@@ -50,9 +50,14 @@ model ToolRef {
/** Object describing a Task */
model Task {
- @visibility("read", "create")
- name: string;
+ /** The name of the task. */
+ @visibility("read", "create", "update")
+ name: displayName;
+
+ /** The canonical name of the task. */
+ canonical_name?: canonicalName;
+ /** The description of the task. */
description: string = "";
/** The entrypoint of the task. */
diff --git a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
index d4835a695..768f27ea3 100644
--- a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
+++ b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
@@ -4574,9 +4574,16 @@ components:
- inherit_tools
properties:
name:
- type: string
+ allOf:
+ - $ref: '#/components/schemas/Common.displayName'
+ description: The name of the task.
+ canonical_name:
+ allOf:
+ - $ref: '#/components/schemas/Common.canonicalName'
+ description: The canonical name of the task.
description:
type: string
+ description: The description of the task.
default: ''
main:
type: array
@@ -5190,8 +5197,17 @@ components:
Tasks.PatchTaskRequest:
type: object
properties:
+ name:
+ allOf:
+ - $ref: '#/components/schemas/Common.displayName'
+ description: The name of the task.
+ canonical_name:
+ allOf:
+ - $ref: '#/components/schemas/Common.canonicalName'
+ description: The canonical name of the task.
description:
type: string
+ description: The description of the task.
default: ''
main:
type: array
@@ -5986,9 +6002,16 @@ components:
- updated_at
properties:
name:
- type: string
+ allOf:
+ - $ref: '#/components/schemas/Common.displayName'
+ description: The name of the task.
+ canonical_name:
+ allOf:
+ - $ref: '#/components/schemas/Common.canonicalName'
+ description: The canonical name of the task.
description:
type: string
+ description: The description of the task.
default: ''
main:
type: array
@@ -6333,14 +6356,24 @@ components:
Tasks.UpdateTaskRequest:
type: object
required:
+ - name
- description
- main
- input_schema
- tools
- inherit_tools
properties:
+ name:
+ allOf:
+ - $ref: '#/components/schemas/Common.displayName'
+ description: The name of the task.
+ canonical_name:
+ allOf:
+ - $ref: '#/components/schemas/Common.canonicalName'
+ description: The canonical name of the task.
description:
type: string
+ description: The description of the task.
default: ''
main:
type: array
From e18756ea739d8a3151c86b2e1ea8a7f643812127 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Sat, 21 Dec 2024 14:40:16 +0000
Subject: [PATCH 117/274] refactor: Lint agents-api (CI)
---
.../queries/developers/create_developer.py | 6 ++-
.../queries/entries/create_entries.py | 6 ++-
.../agents_api/queries/tasks/delete_task.py | 2 +-
.../agents_api/queries/tasks/get_task.py | 3 +-
.../agents_api/queries/tasks/list_tasks.py | 3 +-
.../agents_api/queries/tasks/patch_task.py | 49 ++++++++++---------
.../agents_api/queries/tasks/update_task.py | 39 ++++++++-------
agents-api/tests/fixtures.py | 1 -
agents-api/tests/test_developer_queries.py | 2 +-
agents-api/tests/test_entry_queries.py | 1 -
agents-api/tests/test_session_queries.py | 4 +-
agents-api/tests/test_task_queries.py | 22 +++++----
12 files changed, 75 insertions(+), 63 deletions(-)
diff --git a/agents-api/agents_api/queries/developers/create_developer.py b/agents-api/agents_api/queries/developers/create_developer.py
index 4cb505a14..1e927397c 100644
--- a/agents-api/agents_api/queries/developers/create_developer.py
+++ b/agents-api/agents_api/queries/developers/create_developer.py
@@ -43,7 +43,11 @@
)
}
)
-@wrap_in_class(ResourceCreatedResponse, one=True, transform=lambda d: {**d, "id": d["developer_id"], "created_at": d["created_at"]})
+@wrap_in_class(
+ ResourceCreatedResponse,
+ one=True,
+ transform=lambda d: {**d, "id": d["developer_id"], "created_at": d["created_at"]},
+)
@pg_query
@beartype
async def create_developer(
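
For context, wrap_in_class applies the transform to each returned row before validating it into the response model. A small illustration of the lambda above, with a hypothetical row dict:

    row = {"developer_id": "a1b2...", "created_at": "2024-12-21T14:40:16Z"}
    transform = lambda d: {**d, "id": d["developer_id"], "created_at": d["created_at"]}
    assert transform(row)["id"] == row["developer_id"]
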
diff --git a/agents-api/agents_api/queries/entries/create_entries.py b/agents-api/agents_api/queries/entries/create_entries.py
index c11986d3c..ee931534d 100644
--- a/agents-api/agents_api/queries/entries/create_entries.py
+++ b/agents-api/agents_api/queries/entries/create_entries.py
@@ -7,7 +7,11 @@
from litellm.utils import _select_tokenizer as select_tokenizer
from uuid_extensions import uuid7
-from ...autogen.openapi_model import CreateEntryRequest, Relation, ResourceCreatedResponse
+from ...autogen.openapi_model import (
+ CreateEntryRequest,
+ Relation,
+ ResourceCreatedResponse,
+)
from ...common.utils.datetime import utcnow
from ...common.utils.messages import content_to_json
from ...metrics.counters import increase_counter
diff --git a/agents-api/agents_api/queries/tasks/delete_task.py b/agents-api/agents_api/queries/tasks/delete_task.py
index 8a058591e..20e03e28a 100644
--- a/agents-api/agents_api/queries/tasks/delete_task.py
+++ b/agents-api/agents_api/queries/tasks/delete_task.py
@@ -5,8 +5,8 @@
from beartype import beartype
from fastapi import HTTPException
-from ...common.utils.datetime import utcnow
from ...autogen.openapi_model import ResourceDeletedResponse
+from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
diff --git a/agents-api/agents_api/queries/tasks/get_task.py b/agents-api/agents_api/queries/tasks/get_task.py
index 292eabd35..03da91256 100644
--- a/agents-api/agents_api/queries/tasks/get_task.py
+++ b/agents-api/agents_api/queries/tasks/get_task.py
@@ -5,10 +5,9 @@
from beartype import beartype
from fastapi import HTTPException
-
+from ...common.protocol.tasks import spec_to_task
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-from ...common.protocol.tasks import spec_to_task
get_task_query = """
SELECT
diff --git a/agents-api/agents_api/queries/tasks/list_tasks.py b/agents-api/agents_api/queries/tasks/list_tasks.py
index 8cd0980a5..5cec7103e 100644
--- a/agents-api/agents_api/queries/tasks/list_tasks.py
+++ b/agents-api/agents_api/queries/tasks/list_tasks.py
@@ -5,10 +5,9 @@
from beartype import beartype
from fastapi import HTTPException
-
+from ...common.protocol.tasks import spec_to_task
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-from ...common.protocol.tasks import spec_to_task
list_tasks_query = """
SELECT
diff --git a/agents-api/agents_api/queries/tasks/patch_task.py b/agents-api/agents_api/queries/tasks/patch_task.py
index 0d82f9c91..2349f87c5 100644
--- a/agents-api/agents_api/queries/tasks/patch_task.py
+++ b/agents-api/agents_api/queries/tasks/patch_task.py
@@ -6,11 +6,11 @@
from fastapi import HTTPException
from sqlglot import parse_one
-from ...autogen.openapi_model import ResourceUpdatedResponse, PatchTaskRequest
+from ...autogen.openapi_model import PatchTaskRequest, ResourceUpdatedResponse
+from ...common.protocol.tasks import task_to_spec
+from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-from ...common.utils.datetime import utcnow
-from ...common.protocol.tasks import task_to_spec
# # Update task query using UPDATE
# update_task_query = parse_one("""
@@ -25,8 +25,8 @@
# inherit_tools = $8,
# input_schema = $9::jsonb,
# updated_at = NOW()
-# WHERE
-# developer_id = $1
+# WHERE
+# developer_id = $1
# AND task_id = $3
# RETURNING *;
# """).sql(pretty=True)
@@ -131,6 +131,7 @@
FROM current_version
""").sql(pretty=True)
+
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
@@ -168,7 +169,7 @@ async def patch_task(
"""
Updates a task and its associated workflows with version control.
Only updates the fields that are provided in the request.
-
+
Parameters:
developer_id (UUID): The unique identifier of the developer.
task_id (UUID): The unique identifier of the task to update.
@@ -180,15 +181,15 @@ async def patch_task(
# Parameters for patching the task
patch_task_params = [
- developer_id, # $1
- data.canonical_name, # $2
- task_id, # $3
- agent_id, # $4
- data.metadata or None, # $5
- data.name or None, # $6
- data.description or None, # $7
- data.inherit_tools, # $8
- data.input_schema, # $9
+ developer_id, # $1
+ data.canonical_name, # $2
+ task_id, # $3
+ agent_id, # $4
+ data.metadata or None, # $5
+ data.name or None, # $6
+ data.description or None, # $7
+ data.inherit_tools, # $8
+ data.input_schema, # $9
]
if data.main is None:
@@ -202,14 +203,16 @@ async def patch_task(
workflow_name = workflow.get("name")
steps = workflow.get("steps", [])
for step_idx, step in enumerate(steps):
- workflow_params.append([
- developer_id, # $1
- task_id, # $2
- workflow_name, # $3
- step_idx, # $4
- step["kind_"], # $5
- step[step["kind_"]], # $6
- ])
+ workflow_params.append(
+ [
+ developer_id, # $1
+ task_id, # $2
+ workflow_name, # $3
+ step_idx, # $4
+ step["kind_"], # $5
+ step[step["kind_"]], # $6
+ ]
+ )
return [
(patch_task_query, patch_task_params, "fetchrow"),
diff --git a/agents-api/agents_api/queries/tasks/update_task.py b/agents-api/agents_api/queries/tasks/update_task.py
index d14f915ac..2199da7b0 100644
--- a/agents-api/agents_api/queries/tasks/update_task.py
+++ b/agents-api/agents_api/queries/tasks/update_task.py
@@ -7,10 +7,10 @@
from sqlglot import parse_one
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateTaskRequest
+from ...common.protocol.tasks import task_to_spec
+from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
-from ...common.utils.datetime import utcnow
-from ...common.protocol.tasks import task_to_spec
# # Update task query using UPDATE
# update_task_query = parse_one("""
@@ -25,8 +25,8 @@
# inherit_tools = $8,
# input_schema = $9::jsonb,
# updated_at = NOW()
-# WHERE
-# developer_id = $1
+# WHERE
+# developer_id = $1
# AND task_id = $3
# RETURNING *;
# """).sql(pretty=True)
@@ -96,6 +96,7 @@
FROM version
""").sql(pretty=True)
+
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
@@ -132,7 +133,7 @@ async def update_task(
) -> list[tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]]:
"""
Updates a task and its associated workflows with version control.
-
+
Parameters:
developer_id (UUID): The unique identifier of the developer.
task_id (UUID): The unique identifier of the task to update.
@@ -144,15 +145,15 @@ async def update_task(
print("UPDATING TIIIIIME")
# Parameters for updating the task
update_task_params = [
- developer_id, # $1
- data.canonical_name, # $2
- task_id, # $3
- agent_id, # $4
- data.metadata or {}, # $5
- data.name, # $6
- data.description, # $7
- data.inherit_tools, # $8
- data.input_schema or {}, # $9
+ developer_id, # $1
+ data.canonical_name, # $2
+ task_id, # $3
+ agent_id, # $4
+ data.metadata or {}, # $5
+ data.name, # $6
+ data.description, # $7
+ data.inherit_tools, # $8
+ data.input_schema or {}, # $9
]
# Generate workflows from task data
@@ -164,11 +165,11 @@ async def update_task(
for step_idx, step in enumerate(steps):
workflow_params.append(
[
- developer_id, # $1
- task_id, # $2
- workflow_name, # $3
- step_idx, # $4
- step["kind_"], # $5
+ developer_id, # $1
+ task_id, # $2
+ workflow_name, # $3
+ step_idx, # $4
+ step["kind_"], # $5
step[step["kind_"]], # $6
]
)
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 0e0224aff..fa996f560 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -30,7 +30,6 @@
# from agents_api.queries.files.delete_file import delete_file
from agents_api.queries.sessions.create_session import create_session
-
from agents_api.queries.tasks.create_task import create_task
# from agents_api.queries.task.delete_task import delete_task
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index 3325b4a69..6d94b3209 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -4,8 +4,8 @@
from ward import raises, test
from agents_api.autogen.openapi_model import ResourceCreatedResponse
-from agents_api.common.protocol.developers import Developer
from agents_api.clients.pg import create_db_pool
+from agents_api.common.protocol.developers import Developer
from agents_api.queries.developers.create_developer import create_developer
from agents_api.queries.developers.get_developer import (
get_developer,
diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py
index 463627d74..1b5618974 100644
--- a/agents-api/tests/test_entry_queries.py
+++ b/agents-api/tests/test_entry_queries.py
@@ -3,7 +3,6 @@
It verifies the functionality of adding, retrieving, and processing entries as defined in the schema.
"""
-
from fastapi import HTTPException
from uuid_extensions import uuid7
from ward import raises, test
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 1d7341b08..f70d68a66 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -57,7 +57,9 @@ async def _(
)
assert result is not None
- assert isinstance(result, ResourceCreatedResponse), f"Result is not a Session, {result}"
+ assert isinstance(
+ result, ResourceCreatedResponse
+ ), f"Result is not a Session, {result}"
assert result.id == session_id
diff --git a/agents-api/tests/test_task_queries.py b/agents-api/tests/test_task_queries.py
index 5e42681d6..c4303bb97 100644
--- a/agents-api/tests/test_task_queries.py
+++ b/agents-api/tests/test_task_queries.py
@@ -2,24 +2,23 @@
from fastapi import HTTPException
from uuid_extensions import uuid7
-from ward import test
+from ward import raises, test
from agents_api.autogen.openapi_model import (
CreateTaskRequest,
- UpdateTaskRequest,
- ResourceUpdatedResponse,
PatchTaskRequest,
+ ResourceUpdatedResponse,
Task,
+ UpdateTaskRequest,
)
-from ward import raises
from agents_api.clients.pg import create_db_pool
from agents_api.queries.tasks.create_or_update_task import create_or_update_task
from agents_api.queries.tasks.create_task import create_task
-from agents_api.queries.tasks.get_task import get_task
from agents_api.queries.tasks.delete_task import delete_task
+from agents_api.queries.tasks.get_task import get_task
from agents_api.queries.tasks.list_tasks import list_tasks
-from agents_api.queries.tasks.update_task import update_task
from agents_api.queries.tasks.patch_task import patch_task
+from agents_api.queries.tasks.update_task import update_task
from tests.fixtures import pg_dsn, test_agent, test_developer_id, test_task
@@ -187,8 +186,11 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
assert len(result) > 0
assert all(isinstance(task, Task) for task in result)
+
@test("query: update task sql - exists")
-async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, task=test_task):
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, task=test_task
+):
"""Test that a task can be successfully updated."""
pool = await create_db_pool(dsn=dsn)
@@ -225,7 +227,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, task=t
assert updated_task.metadata == {"updated": True}
-@test("query: update task sql - not exists")
+@test("query: update task sql - not exists")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that attempting to update a non-existent task raises an error."""
@@ -241,7 +243,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
**{
"canonical_name": "updated_task",
"name": "updated task",
- "description": "updated task description",
+ "description": "updated task description",
"input_schema": {"type": "object", "additionalProperties": True},
"main": [{"evaluate": {"hi": "_"}}],
"inherit_tools": False,
@@ -253,6 +255,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
assert exc.raised.status_code == 404
assert "Task not found" in str(exc.raised.detail)
+
@test("query: patch task sql - exists")
async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
"""Test that patching an existing task works correctly."""
@@ -330,4 +333,3 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
assert exc.raised.status_code == 404
assert "Task not found" in str(exc.raised.detail)
-
From 004461c86bbc28fa345f2a71fcf745a4bc7eb05e Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sat, 21 Dec 2024 21:28:48 +0530
Subject: [PATCH 118/274] Update async_s3.py
---
agents-api/agents_api/clients/async_s3.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/agents-api/agents_api/clients/async_s3.py b/agents-api/agents_api/clients/async_s3.py
index b6ba76d8b..0cd5235ee 100644
--- a/agents-api/agents_api/clients/async_s3.py
+++ b/agents-api/agents_api/clients/async_s3.py
@@ -16,7 +16,6 @@
)
-@alru_cache(maxsize=1024)
async def list_buckets() -> list[str]:
session = get_session()
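
For reference, the dropped decorator comes from the async-lru package and memoizes coroutine results per argument tuple. On a zero-argument function like list_buckets there is a single cache key, so the first S3 response would be pinned for the life of the process and buckets created later would never appear; removing it restores a fresh call per invocation. A sketch of the removed behaviour:

    from async_lru import alru_cache

    @alru_cache(maxsize=1024)
    async def list_buckets() -> list[str]:
        # one cache key (no arguments) => first result returned forever
        ...
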
From c2d54a40ab1ca244eab2b432c5211620a2808d78 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sat, 21 Dec 2024 23:20:49 +0530
Subject: [PATCH 119/274] fix: Miscellaneous fixes
Signed-off-by: Diwank Singh Tomer
---
agents-api/agents_api/autogen/Docs.py | 6 +-
.../queries/developers/get_developer.py | 1 -
.../agents_api/queries/docs/create_doc.py | 63 +++++--------------
agents-api/agents_api/queries/docs/get_doc.py | 32 ++++++----
.../agents_api/queries/docs/list_docs.py | 28 ++++++---
.../queries/docs/search_docs_by_text.py | 4 +-
.../agents_api/queries/files/create_file.py | 2 +-
agents-api/tests/test_docs_queries.py | 20 +++---
.../integrations/autogen/Docs.py | 6 +-
memory-store/migrations/000006_docs.up.sql | 25 ++++----
typespec/docs/models.tsp | 10 +--
.../@typespec/openapi3/openapi-1.0.0.yaml | 10 ++-
12 files changed, 84 insertions(+), 123 deletions(-)
diff --git a/agents-api/agents_api/autogen/Docs.py b/agents-api/agents_api/autogen/Docs.py
index af5f60d6a..574317c43 100644
--- a/agents-api/agents_api/autogen/Docs.py
+++ b/agents-api/agents_api/autogen/Docs.py
@@ -81,15 +81,11 @@ class Doc(BaseModel):
"""
Language of the document
"""
- index: Annotated[int | None, Field(json_schema_extra={"readOnly": True})] = None
- """
- Index of the document
- """
embedding_model: Annotated[
str | None, Field(json_schema_extra={"readOnly": True})
] = None
"""
- Embedding model to use for the document
+ Embedding model used for the document
"""
embedding_dimensions: Annotated[
int | None, Field(json_schema_extra={"readOnly": True})
diff --git a/agents-api/agents_api/queries/developers/get_developer.py b/agents-api/agents_api/queries/developers/get_developer.py
index 79b6e6067..b164bad81 100644
--- a/agents-api/agents_api/queries/developers/get_developer.py
+++ b/agents-api/agents_api/queries/developers/get_developer.py
@@ -1,6 +1,5 @@
"""Module for retrieving document snippets from the CozoDB based on document IDs."""
-from typing import Any, TypeVar
from uuid import UUID
import asyncpg
diff --git a/agents-api/agents_api/queries/docs/create_doc.py b/agents-api/agents_api/queries/docs/create_doc.py
index d3c2fe3c1..e63a99c9d 100644
--- a/agents-api/agents_api/queries/docs/create_doc.py
+++ b/agents-api/agents_api/queries/docs/create_doc.py
@@ -1,19 +1,18 @@
-import ast
from typing import Literal
from uuid import UUID
import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from sqlglot import parse_one
from uuid_extensions import uuid7
-from ...autogen.openapi_model import CreateDocRequest, Doc
+from ...autogen.openapi_model import CreateDocRequest, ResourceCreatedResponse
+from ...common.utils.datetime import utcnow
from ...metrics.counters import increase_counter
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Base INSERT for docs
-doc_query = parse_one("""
+doc_query = """
INSERT INTO docs (
developer_id,
doc_id,
@@ -38,48 +37,15 @@
$9, -- language
$10 -- metadata (JSONB)
)
-RETURNING *;
-""").sql(pretty=True)
+"""
# Owner association query for doc_owners
-doc_owner_query = parse_one("""
-WITH inserted_owner AS (
- INSERT INTO doc_owners (
- developer_id,
- doc_id,
- index,
- owner_type,
- owner_id
- )
- VALUES ($1, $2, $3, $4, $5)
- RETURNING doc_id
-)
-SELECT DISTINCT ON (docs.doc_id)
- docs.doc_id,
- docs.developer_id,
- docs.title,
- array_agg(docs.content ORDER BY docs.index) as content,
- array_agg(docs.index ORDER BY docs.index) as indices,
- docs.modality,
- docs.embedding_model,
- docs.embedding_dimensions,
- docs.language,
- docs.metadata,
- docs.created_at
-
-FROM inserted_owner io
-JOIN docs ON docs.doc_id = io.doc_id
-GROUP BY
- docs.doc_id,
- docs.developer_id,
- docs.title,
- docs.modality,
- docs.embedding_model,
- docs.embedding_dimensions,
- docs.language,
- docs.metadata,
- docs.created_at;
-""").sql(pretty=True)
+doc_owner_query = """
+INSERT INTO doc_owners (developer_id, doc_id, owner_type, owner_id)
+VALUES ($1, $2, $3, $4)
+ON CONFLICT DO NOTHING
+RETURNING *;
+"""
@rewrap_exceptions(
@@ -102,12 +68,12 @@
}
)
@wrap_in_class(
- Doc,
+ ResourceCreatedResponse,
one=True,
transform=lambda d: {
"id": d["doc_id"],
- "index": d["indices"][0],
- "content": d["content"][0] if len(d["content"]) == 1 else d["content"],
+ "jobs": [],
+ "created_at": utcnow(),
**d,
},
)
@@ -146,6 +112,7 @@ async def create_doc(
list[tuple[str, list] | tuple[str, list, str]]: SQL query and parameters for creating the document.
"""
queries = []
+
# Generate a UUID if not provided
current_doc_id = uuid7() if doc_id is None else doc_id
@@ -172,7 +139,6 @@ async def create_doc(
owner_params = [
developer_id,
current_doc_id,
- idx,
owner_type,
owner_id,
]
@@ -202,7 +168,6 @@ async def create_doc(
owner_params = [
developer_id,
current_doc_id,
- index,
owner_type,
owner_id,
]
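
After this change the create path is two plain statements: the docs INSERT runs once per content chunk, and the owner link is written once, idempotently via ON CONFLICT DO NOTHING. A sketch of the parameter layout, mirroring the code above:

    owner_params = [
        developer_id,    # $1
        current_doc_id,  # $2
        owner_type,      # $3  'user' or 'agent'
        owner_id,        # $4
    ]
    queries.append((doc_owner_query, owner_params))
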
diff --git a/agents-api/agents_api/queries/docs/get_doc.py b/agents-api/agents_api/queries/docs/get_doc.py
index 1cee8f354..4150a4e03 100644
--- a/agents-api/agents_api/queries/docs/get_doc.py
+++ b/agents-api/agents_api/queries/docs/get_doc.py
@@ -1,5 +1,3 @@
-import ast
-from typing import Literal
from uuid import UUID
from beartype import beartype
@@ -11,7 +9,7 @@
# Update the query to use DISTINCT ON to prevent duplicates
doc_with_embedding_query = parse_one("""
WITH doc_data AS (
- SELECT DISTINCT ON (d.doc_id)
+ SELECT
d.doc_id,
d.developer_id,
d.title,
@@ -44,18 +42,26 @@
""").sql(pretty=True)
+def transform_get_doc(d: dict) -> dict:
+ content = d["content"][0] if len(d["content"]) == 1 else d["content"]
+
+ embeddings = d["embeddings"][0] if len(d["embeddings"]) == 1 else d["embeddings"]
+ if embeddings and all((e is None) for e in embeddings):
+ embeddings = None
+
+ transformed = {
+ **d,
+ "id": d["doc_id"],
+ "content": content,
+ "embeddings": embeddings,
+ }
+ return transformed
+
+
@wrap_in_class(
Doc,
- one=True, # Changed to True since we're now returning one grouped record
- transform=lambda d: {
- "id": d["doc_id"],
- "index": d["indices"][0],
- "content": d["content"][0] if len(d["content"]) == 1 else d["content"],
- "embeddings": d["embeddings"][0]
- if len(d["embeddings"]) == 1
- else d["embeddings"],
- **d,
- },
+ one=True,
+ transform=transform_get_doc,
)
@pg_query
@beartype
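
Pulling the lambda out into transform_get_doc makes the row normalization testable on its own. A worked example of its behaviour on a single-chunk row whose embedding never materialized:

    row = {
        "doc_id": "00000000-0000-0000-0000-000000000000",  # hypothetical
        "content": ["only chunk"],
        "embeddings": [None],
    }
    transform_get_doc(row)
    # -> {..., "id": row["doc_id"], "content": "only chunk", "embeddings": None}
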
diff --git a/agents-api/agents_api/queries/docs/list_docs.py b/agents-api/agents_api/queries/docs/list_docs.py
index 9788b0daa..67bbe83fc 100644
--- a/agents-api/agents_api/queries/docs/list_docs.py
+++ b/agents-api/agents_api/queries/docs/list_docs.py
@@ -17,7 +17,7 @@
# Base query for listing docs with aggregated content and embeddings
base_docs_query = parse_one("""
WITH doc_data AS (
- SELECT DISTINCT ON (d.doc_id)
+ SELECT
d.doc_id,
d.developer_id,
d.title,
@@ -54,6 +54,22 @@
""").sql(pretty=True)
+def transform_list_docs(d: dict) -> dict:
+ content = d["content"][0] if len(d["content"]) == 1 else d["content"]
+
+ embeddings = d["embeddings"][0] if len(d["embeddings"]) == 1 else d["embeddings"]
+ if embeddings and all((e is None) for e in embeddings):
+ embeddings = None
+
+ transformed = {
+ **d,
+ "id": d["doc_id"],
+ "content": content,
+ "embeddings": embeddings,
+ }
+ return transformed
+
+
@rewrap_exceptions(
{
asyncpg.NoDataFoundError: partialclass(
@@ -71,15 +87,7 @@
@wrap_in_class(
Doc,
one=False,
- transform=lambda d: {
- "id": d["doc_id"],
- "index": d["indices"][0],
- "content": d["content"][0] if len(d["content"]) == 1 else d["content"],
- "embedding": d["embeddings"][0]
- if d.get("embeddings") and len(d["embeddings"]) == 1
- else d.get("embeddings"),
- **d,
- },
+ transform=transform_list_docs,
)
@pg_query
@beartype
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_text.py b/agents-api/agents_api/queries/docs/search_docs_by_text.py
index 9c22a60ce..96b13c9d6 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_text.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_text.py
@@ -1,11 +1,9 @@
-import json
-from typing import Any, List, Literal
+from typing import Any, Literal
from uuid import UUID
import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from sqlglot import parse_one
from ...autogen.openapi_model import DocReference
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
diff --git a/agents-api/agents_api/queries/files/create_file.py b/agents-api/agents_api/queries/files/create_file.py
index f2e35a6f4..daa3a4017 100644
--- a/agents-api/agents_api/queries/files/create_file.py
+++ b/agents-api/agents_api/queries/files/create_file.py
@@ -5,7 +5,7 @@
import base64
import hashlib
-from typing import Any, Literal
+from typing import Literal
from uuid import UUID
import asyncpg
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index 82490cb77..1b3670a0e 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -19,11 +19,11 @@
@test("query: create user doc")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
pool = await create_db_pool(dsn=dsn)
- doc = await create_doc(
+ doc_created = await create_doc(
developer_id=developer.id,
data=CreateDocRequest(
title="User Doc",
- content="Docs for user testing",
+ content=["Docs for user testing", "Docs for user testing 2"],
metadata={"test": "test"},
embed_instruction="Embed the document",
),
@@ -31,16 +31,16 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
owner_id=user.id,
connection_pool=pool,
)
- assert doc.title == "User Doc"
+
+ assert doc_created.id is not None
# Verify doc appears in user's docs
- docs_list = await list_docs(
+ found = await get_doc(
developer_id=developer.id,
- owner_type="user",
- owner_id=user.id,
+ doc_id=doc_created.id,
connection_pool=pool,
)
- assert any(d.id == doc.id for d in docs_list)
+ assert found.id == doc_created.id
@test("query: create agent doc")
@@ -58,7 +58,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
owner_id=agent.id,
connection_pool=pool,
)
- assert doc.title == "Agent Doc"
+ assert doc.id is not None
# Verify doc appears in agent's docs
docs_list = await list_docs(
@@ -79,8 +79,8 @@ async def _(dsn=pg_dsn, developer=test_developer, doc=test_doc):
connection_pool=pool,
)
assert doc_test.id == doc.id
- assert doc_test.title == doc.title
- assert doc_test.content == doc.content
+ assert doc_test.title is not None
+ assert doc_test.content is not None
@test("query: list user docs")
diff --git a/integrations-service/integrations/autogen/Docs.py b/integrations-service/integrations/autogen/Docs.py
index af5f60d6a..574317c43 100644
--- a/integrations-service/integrations/autogen/Docs.py
+++ b/integrations-service/integrations/autogen/Docs.py
@@ -81,15 +81,11 @@ class Doc(BaseModel):
"""
Language of the document
"""
- index: Annotated[int | None, Field(json_schema_extra={"readOnly": True})] = None
- """
- Index of the document
- """
embedding_model: Annotated[
str | None, Field(json_schema_extra={"readOnly": True})
] = None
"""
- Embedding model to use for the document
+ Embedding model used for the document
"""
embedding_dimensions: Annotated[
int | None, Field(json_schema_extra={"readOnly": True})
diff --git a/memory-store/migrations/000006_docs.up.sql b/memory-store/migrations/000006_docs.up.sql
index 37d17a590..8abd878bc 100644
--- a/memory-store/migrations/000006_docs.up.sql
+++ b/memory-store/migrations/000006_docs.up.sql
@@ -24,12 +24,11 @@ CREATE TABLE IF NOT EXISTS docs (
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
metadata JSONB NOT NULL DEFAULT '{}'::JSONB,
- CONSTRAINT pk_docs PRIMARY KEY (developer_id, doc_id),
+ CONSTRAINT pk_docs PRIMARY KEY (developer_id, doc_id, index),
CONSTRAINT ct_docs_embedding_dimensions_positive CHECK (embedding_dimensions > 0),
CONSTRAINT ct_docs_valid_modality CHECK (modality IN ('text', 'image', 'mixed')),
CONSTRAINT ct_docs_index_positive CHECK (index >= 0),
- CONSTRAINT ct_docs_valid_language CHECK (is_valid_language (language)),
- UNIQUE (developer_id, doc_id, index)
+ CONSTRAINT ct_docs_valid_language CHECK (is_valid_language (language))
);
-- Create foreign key constraint if not exists (using DO block for safety)
@@ -62,20 +61,20 @@ END $$;
CREATE TABLE IF NOT EXISTS doc_owners (
developer_id UUID NOT NULL,
doc_id UUID NOT NULL,
- owner_type TEXT NOT NULL, -- 'user' or 'agent'
+ owner_type TEXT NOT NULL, -- 'user' or 'agent'
owner_id UUID NOT NULL,
CONSTRAINT pk_doc_owners PRIMARY KEY (developer_id, doc_id),
- CONSTRAINT fk_doc_owners_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id),
+ -- TODO: Ensure that doc exists (this constraint is not working)
+ -- CONSTRAINT fk_doc_owners_doc FOREIGN KEY (developer_id, doc_id) REFERENCES docs (developer_id, doc_id),
CONSTRAINT ct_doc_owners_owner_type CHECK (owner_type IN ('user', 'agent'))
);
-- Create indexes
-CREATE INDEX IF NOT EXISTS idx_doc_owners_owner
- ON doc_owners (developer_id, owner_type, owner_id);
+CREATE INDEX IF NOT EXISTS idx_doc_owners_owner ON doc_owners (developer_id, owner_type, owner_id);
-- Create function to validate owner reference
-CREATE OR REPLACE FUNCTION validate_doc_owner()
-RETURNS TRIGGER AS $$
+CREATE
+OR REPLACE FUNCTION validate_doc_owner () RETURNS TRIGGER AS $$
BEGIN
IF NEW.owner_type = 'user' THEN
IF NOT EXISTS (
@@ -97,10 +96,10 @@ END;
$$ LANGUAGE plpgsql;
-- Create trigger for validation
-CREATE TRIGGER trg_validate_doc_owner
-BEFORE INSERT OR UPDATE ON doc_owners
-FOR EACH ROW
-EXECUTE FUNCTION validate_doc_owner();
+CREATE TRIGGER trg_validate_doc_owner BEFORE INSERT
+OR
+UPDATE ON doc_owners FOR EACH ROW
+EXECUTE FUNCTION validate_doc_owner ();
-- Create indexes if not exists
CREATE INDEX IF NOT EXISTS idx_docs_metadata ON docs USING GIN (metadata);
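
The primary-key change formalizes one-row-per-chunk storage: (developer_id, doc_id, index) identifies a chunk, and a logical document is reassembled by aggregating over index, as the read queries in this series do. A sketch with asyncpg, inside an async function with an open connection conn:

    rows = await conn.fetch(
        """
        SELECT doc_id,
               array_agg(content ORDER BY index) AS content,
               array_agg(index ORDER BY index) AS indices
        FROM docs
        WHERE developer_id = $1 AND doc_id = $2
        GROUP BY doc_id
        """,
        developer_id,
        doc_id,
    )
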
diff --git a/typespec/docs/models.tsp b/typespec/docs/models.tsp
index f4d16cbd5..afc3b36fd 100644
--- a/typespec/docs/models.tsp
+++ b/typespec/docs/models.tsp
@@ -26,7 +26,7 @@ model Doc {
/** Embeddings for the document */
@visibility("read")
- embeddings?: float32[] | float32[][];
+ embeddings: float32[] | float32[][] | null = null;
@visibility("read")
/** Modality of the document */
@@ -37,11 +37,7 @@ model Doc {
language?: string;
@visibility("read")
- /** Index of the document */
- index?: uint16;
-
- @visibility("read")
- /** Embedding model to use for the document */
+ /** Embedding model used for the document */
embedding_model?: string;
@visibility("read")
@@ -172,4 +168,4 @@ model DocSearchResponse {
/** The time taken to search in seconds */
@minValueExclusive(0)
time: float;
-}
\ No newline at end of file
+}
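
With this change, embeddings is always present on a serialized Doc: either the float vector(s) or an explicit null. A minimal illustration of the two shapes a client may now receive:

    doc_pending = {"title": "Hello", "content": "...", "embeddings": None}
    doc_ready = {"title": "Hello", "content": "...", "embeddings": [0.1, 0.2, 0.3]}
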
diff --git a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
index c19bc4ed2..3b7fc0420 100644
--- a/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
+++ b/typespec/tsp-output/@typespec/openapi3/openapi-1.0.0.yaml
@@ -2838,6 +2838,7 @@ components:
- created_at
- title
- content
+ - embeddings
properties:
id:
allOf:
@@ -2874,7 +2875,9 @@ components:
items:
type: number
format: float
+ nullable: true
description: Embeddings for the document
+ default: null
readOnly: true
modality:
type: string
@@ -2884,14 +2887,9 @@ components:
type: string
description: Language of the document
readOnly: true
- index:
- type: integer
- format: uint16
- description: Index of the document
- readOnly: true
embedding_model:
type: string
- description: Embedding model to use for the document
+ description: Embedding model used for the document
readOnly: true
embedding_dimensions:
type: integer
From 6a52a4022ca8a52a70701f0f3878595759380f05 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Sat, 21 Dec 2024 21:05:17 +0300
Subject: [PATCH 120/274] WIP
---
.../tools/get_tool_args_from_metadata.py | 33 +++++--------------
1 file changed, 9 insertions(+), 24 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index 2cdb92cb9..a8a9dba1a 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -2,16 +2,9 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
@@ -51,10 +44,6 @@ def tool_args_for_task(
"""
queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "tasks", task_id=task_id, parents=[("agents", "agent_id")]
- ),
get_query,
]
@@ -95,25 +84,21 @@ def tool_args_for_session(
"""
queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
get_query,
]
return (queries, {"agent_id": agent_id, "session_id": session_id})
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(dict, transform=lambda x: x["values"], one=True)
-@cozo_query
+@pg_query
@beartype
def get_tool_args_from_metadata(
*,
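
Note the WIP seam: tool_args_for_task and tool_args_for_session still return the Cozo-era (queries, params-dict) pair, while pg_query elsewhere in this series consumes SQL text plus positional parameters. A hedged sketch of the target shape, following the conventions of the task queries:

    def tool_args_for_task(*, developer_id, task_id):
        # hypothetical pg_query-style return: SQL text + positional params,
        # optionally with a fetch mode ("fetch" / "fetchrow") as a third element
        return (get_query, [developer_id, task_id])
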
From 8db396f253db06203eafbc6b064ae3dc19e0510b Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Sat, 21 Dec 2024 21:36:36 +0300
Subject: [PATCH 121/274] feat: Add executions queries
---
.../models/execution/count_executions.py | 61 ---------------
.../models/execution/get_execution.py | 78 -------------------
.../executions}/__init__.py | 0
.../executions}/constants.py | 0
.../queries/executions/count_executions.py | 39 ++++++++++
.../executions}/create_execution.py | 0
.../create_execution_transition.py | 0
.../executions}/create_temporal_lookup.py | 0
.../queries/executions/get_execution.py | 52 +++++++++++++
.../executions}/get_execution_transition.py | 0
.../executions}/get_paused_execution_token.py | 0
.../executions}/get_temporal_workflow_data.py | 0
.../executions}/list_execution_transitions.py | 0
.../executions}/list_executions.py | 0
.../executions}/lookup_temporal_data.py | 0
.../executions}/prepare_execution_input.py | 0
.../executions}/update_execution.py | 0
17 files changed, 91 insertions(+), 139 deletions(-)
delete mode 100644 agents-api/agents_api/models/execution/count_executions.py
delete mode 100644 agents-api/agents_api/models/execution/get_execution.py
rename agents-api/agents_api/{models/execution => queries/executions}/__init__.py (100%)
rename agents-api/agents_api/{models/execution => queries/executions}/constants.py (100%)
create mode 100644 agents-api/agents_api/queries/executions/count_executions.py
rename agents-api/agents_api/{models/execution => queries/executions}/create_execution.py (100%)
rename agents-api/agents_api/{models/execution => queries/executions}/create_execution_transition.py (100%)
rename agents-api/agents_api/{models/execution => queries/executions}/create_temporal_lookup.py (100%)
create mode 100644 agents-api/agents_api/queries/executions/get_execution.py
rename agents-api/agents_api/{models/execution => queries/executions}/get_execution_transition.py (100%)
rename agents-api/agents_api/{models/execution => queries/executions}/get_paused_execution_token.py (100%)
rename agents-api/agents_api/{models/execution => queries/executions}/get_temporal_workflow_data.py (100%)
rename agents-api/agents_api/{models/execution => queries/executions}/list_execution_transitions.py (100%)
rename agents-api/agents_api/{models/execution => queries/executions}/list_executions.py (100%)
rename agents-api/agents_api/{models/execution => queries/executions}/lookup_temporal_data.py (100%)
rename agents-api/agents_api/{models/execution => queries/executions}/prepare_execution_input.py (100%)
rename agents-api/agents_api/{models/execution => queries/executions}/update_execution.py (100%)
diff --git a/agents-api/agents_api/models/execution/count_executions.py b/agents-api/agents_api/models/execution/count_executions.py
deleted file mode 100644
index d130f0359..000000000
--- a/agents-api/agents_api/models/execution/count_executions.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(dict, one=True)
-@cozo_query
-@beartype
-def count_executions(
- *,
- developer_id: UUID,
- task_id: UUID,
-) -> tuple[list[str], dict]:
- count_query = """
- input[task_id] <- [[to_uuid($task_id)]]
-
- counter[count(id)] :=
- input[task_id],
- *executions:task_id_execution_id_idx {
- task_id,
- execution_id: id,
- }
-
- ?[count] := counter[count]
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id,
- "tasks",
- task_id=task_id,
- parents=[("agents", "agent_id")],
- ),
- count_query,
- ]
-
- return (queries, {"task_id": str(task_id)})
diff --git a/agents-api/agents_api/models/execution/get_execution.py b/agents-api/agents_api/models/execution/get_execution.py
deleted file mode 100644
index db0279b1f..000000000
--- a/agents-api/agents_api/models/execution/get_execution.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Execution
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- wrap_in_class,
-)
-from .constants import OUTPUT_UNNEST_KEY
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- AssertionError: partialclass(HTTPException, status_code=404),
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- Execution,
- one=True,
- transform=lambda d: {
- **d,
- "output": d["output"][OUTPUT_UNNEST_KEY]
- if isinstance(d["output"], dict) and OUTPUT_UNNEST_KEY in d["output"]
- else d["output"],
- },
-)
-@cozo_query
-@beartype
-def get_execution(
- *,
- execution_id: UUID,
-) -> tuple[str, dict]:
- # Executions are allowed direct GET access if they have execution_id
-
- # NOTE: Do not remove outer curly braces
- query = """
- {
- input[execution_id] <- [[to_uuid($execution_id)]]
-
- ?[id, task_id, status, input, output, error, session_id, metadata, created_at, updated_at] :=
- input[execution_id],
- *executions {
- task_id,
- execution_id,
- status,
- input,
- output,
- error,
- session_id,
- metadata,
- created_at,
- updated_at,
- },
- id = execution_id
-
- :limit 1
- }
- """
-
- return (
- query,
- {
- "execution_id": str(execution_id),
- },
- )
diff --git a/agents-api/agents_api/models/execution/__init__.py b/agents-api/agents_api/queries/executions/__init__.py
similarity index 100%
rename from agents-api/agents_api/models/execution/__init__.py
rename to agents-api/agents_api/queries/executions/__init__.py
diff --git a/agents-api/agents_api/models/execution/constants.py b/agents-api/agents_api/queries/executions/constants.py
similarity index 100%
rename from agents-api/agents_api/models/execution/constants.py
rename to agents-api/agents_api/queries/executions/constants.py
diff --git a/agents-api/agents_api/queries/executions/count_executions.py b/agents-api/agents_api/queries/executions/count_executions.py
new file mode 100644
index 000000000..5ec29a8b6
--- /dev/null
+++ b/agents-api/agents_api/queries/executions/count_executions.py
@@ -0,0 +1,39 @@
+from typing import Any, TypeVar
+from uuid import UUID
+
+import sqlvalidator
+from beartype import beartype
+
+from ..utils import (
+ pg_query,
+ wrap_in_class,
+)
+
+ModelT = TypeVar("ModelT", bound=Any)
+T = TypeVar("T")
+
+sql_query = sqlvalidator.parse(
+ """
+SELECT COUNT(*) FROM executions
+WHERE
+ developer_id = $1
+ AND task_id = $2
+"""
+)
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
+@wrap_in_class(dict, one=True)
+@pg_query
+@beartype
+def count_executions(
+ *,
+ developer_id: UUID,
+ task_id: UUID,
+) -> tuple[str, list]:
+ return (sql_query.format(), [developer_id, task_id])
diff --git a/agents-api/agents_api/models/execution/create_execution.py b/agents-api/agents_api/queries/executions/create_execution.py
similarity index 100%
rename from agents-api/agents_api/models/execution/create_execution.py
rename to agents-api/agents_api/queries/executions/create_execution.py
diff --git a/agents-api/agents_api/models/execution/create_execution_transition.py b/agents-api/agents_api/queries/executions/create_execution_transition.py
similarity index 100%
rename from agents-api/agents_api/models/execution/create_execution_transition.py
rename to agents-api/agents_api/queries/executions/create_execution_transition.py
diff --git a/agents-api/agents_api/models/execution/create_temporal_lookup.py b/agents-api/agents_api/queries/executions/create_temporal_lookup.py
similarity index 100%
rename from agents-api/agents_api/models/execution/create_temporal_lookup.py
rename to agents-api/agents_api/queries/executions/create_temporal_lookup.py
diff --git a/agents-api/agents_api/queries/executions/get_execution.py b/agents-api/agents_api/queries/executions/get_execution.py
new file mode 100644
index 000000000..474e0c63d
--- /dev/null
+++ b/agents-api/agents_api/queries/executions/get_execution.py
@@ -0,0 +1,52 @@
+from typing import Any, TypeVar
+from uuid import UUID
+
+import sqlvalidator
+from beartype import beartype
+
+from ...autogen.openapi_model import Execution
+from ..utils import (
+ pg_query,
+ wrap_in_class,
+)
+from .constants import OUTPUT_UNNEST_KEY
+
+ModelT = TypeVar("ModelT", bound=Any)
+T = TypeVar("T")
+
+sql_query = sqlvalidator.parse("""
+SELECT * FROM executions
+WHERE
+ execution_id = $1
+LIMIT 1
+""")
+
+
+# @rewrap_exceptions(
+# {
+# AssertionError: partialclass(HTTPException, status_code=404),
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
+@wrap_in_class(
+ Execution,
+ one=True,
+ transform=lambda d: {
+ **d,
+ "output": d["output"][OUTPUT_UNNEST_KEY]
+ if isinstance(d["output"], dict) and OUTPUT_UNNEST_KEY in d["output"]
+ else d["output"],
+ },
+)
+@pg_query
+@beartype
+def get_execution(
+ *,
+ execution_id: UUID,
+) -> tuple[str, list]:
+ return (
+ sql_query.format(),
+ [execution_id],
+ )
diff --git a/agents-api/agents_api/models/execution/get_execution_transition.py b/agents-api/agents_api/queries/executions/get_execution_transition.py
similarity index 100%
rename from agents-api/agents_api/models/execution/get_execution_transition.py
rename to agents-api/agents_api/queries/executions/get_execution_transition.py
diff --git a/agents-api/agents_api/models/execution/get_paused_execution_token.py b/agents-api/agents_api/queries/executions/get_paused_execution_token.py
similarity index 100%
rename from agents-api/agents_api/models/execution/get_paused_execution_token.py
rename to agents-api/agents_api/queries/executions/get_paused_execution_token.py
diff --git a/agents-api/agents_api/models/execution/get_temporal_workflow_data.py b/agents-api/agents_api/queries/executions/get_temporal_workflow_data.py
similarity index 100%
rename from agents-api/agents_api/models/execution/get_temporal_workflow_data.py
rename to agents-api/agents_api/queries/executions/get_temporal_workflow_data.py
diff --git a/agents-api/agents_api/models/execution/list_execution_transitions.py b/agents-api/agents_api/queries/executions/list_execution_transitions.py
similarity index 100%
rename from agents-api/agents_api/models/execution/list_execution_transitions.py
rename to agents-api/agents_api/queries/executions/list_execution_transitions.py
diff --git a/agents-api/agents_api/models/execution/list_executions.py b/agents-api/agents_api/queries/executions/list_executions.py
similarity index 100%
rename from agents-api/agents_api/models/execution/list_executions.py
rename to agents-api/agents_api/queries/executions/list_executions.py
diff --git a/agents-api/agents_api/models/execution/lookup_temporal_data.py b/agents-api/agents_api/queries/executions/lookup_temporal_data.py
similarity index 100%
rename from agents-api/agents_api/models/execution/lookup_temporal_data.py
rename to agents-api/agents_api/queries/executions/lookup_temporal_data.py
diff --git a/agents-api/agents_api/models/execution/prepare_execution_input.py b/agents-api/agents_api/queries/executions/prepare_execution_input.py
similarity index 100%
rename from agents-api/agents_api/models/execution/prepare_execution_input.py
rename to agents-api/agents_api/queries/executions/prepare_execution_input.py
diff --git a/agents-api/agents_api/models/execution/update_execution.py b/agents-api/agents_api/queries/executions/update_execution.py
similarity index 100%
rename from agents-api/agents_api/models/execution/update_execution.py
rename to agents-api/agents_api/queries/executions/update_execution.py
From f80ff87c9815dd554066d3461c12596ef622434d Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Sat, 21 Dec 2024 18:44:35 +0000
Subject: [PATCH 122/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/files/create_file.py | 7 +++----
agents-api/agents_api/queries/files/get_file.py | 5 ++---
agents-api/agents_api/queries/files/list_files.py | 3 +--
agents-api/tests/test_session_queries.py | 4 ++--
4 files changed, 8 insertions(+), 11 deletions(-)
diff --git a/agents-api/agents_api/queries/files/create_file.py b/agents-api/agents_api/queries/files/create_file.py
index d763cb7b9..daa3a4017 100644
--- a/agents-api/agents_api/queries/files/create_file.py
+++ b/agents-api/agents_api/queries/files/create_file.py
@@ -8,16 +8,15 @@
from typing import Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
+from fastapi import HTTPException
from sqlglot import parse_one
from uuid_extensions import uuid7
-import asyncpg
-from fastapi import HTTPException
-
from ...autogen.openapi_model import CreateFileRequest, File
from ...metrics.counters import increase_counter
-from ..utils import pg_query, wrap_in_class, rewrap_exceptions, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Create file
file_query = parse_one("""
diff --git a/agents-api/agents_api/queries/files/get_file.py b/agents-api/agents_api/queries/files/get_file.py
index 4fc46264e..04ba8ea71 100644
--- a/agents-api/agents_api/queries/files/get_file.py
+++ b/agents-api/agents_api/queries/files/get_file.py
@@ -6,11 +6,10 @@
from typing import Literal
from uuid import UUID
-from beartype import beartype
-from sqlglot import parse_one
-
import asyncpg
+from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
from ...autogen.openapi_model import File
from ..utils import (
diff --git a/agents-api/agents_api/queries/files/list_files.py b/agents-api/agents_api/queries/files/list_files.py
index d8d8f5064..38363d09c 100644
--- a/agents-api/agents_api/queries/files/list_files.py
+++ b/agents-api/agents_api/queries/files/list_files.py
@@ -6,12 +6,11 @@
from typing import Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
from sqlglot import parse_one
-import asyncpg
-
from ...autogen.openapi_model import File
from ..utils import (
partialclass,
diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py
index 0ce1e9cc5..f70d68a66 100644
--- a/agents-api/tests/test_session_queries.py
+++ b/agents-api/tests/test_session_queries.py
@@ -10,11 +10,11 @@
CreateOrUpdateSessionRequest,
CreateSessionRequest,
PatchSessionRequest,
+ ResourceCreatedResponse,
ResourceDeletedResponse,
ResourceUpdatedResponse,
- UpdateSessionRequest,
- ResourceCreatedResponse,
Session,
+ UpdateSessionRequest,
)
from agents_api.clients.pg import create_db_pool
from agents_api.queries.sessions import (
From 747aceb0c36a7b0edf40cfccc774dc4a9da7434b Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sun, 22 Dec 2024 13:39:41 +0530
Subject: [PATCH 123/274] fix: Fix search_by_text; remove tools.task_version
column
Signed-off-by: Diwank Singh Tomer
---
.../queries/docs/search_docs_by_text.py | 30 ++++----
.../queries/tasks/create_or_update_task.py | 26 +++----
.../agents_api/queries/tasks/create_task.py | 2 -
agents-api/tests/test_docs_queries.py | 62 ++++++++--------
memory-store/migrations/000007_ann.up.sql | 4 +-
memory-store/migrations/000008_tools.up.sql | 6 +-
memory-store/migrations/000010_tasks.up.sql | 46 +++++++-----
.../migrations/000018_doc_search.up.sql | 70 +++++++++----------
8 files changed, 123 insertions(+), 123 deletions(-)
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_text.py b/agents-api/agents_api/queries/docs/search_docs_by_text.py
index 96b13c9d6..86877c752 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_text.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_text.py
@@ -9,13 +9,16 @@
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
search_docs_text_query = """
- SELECT * FROM search_by_text(
- $1, -- developer_id
- $2, -- query
- $3, -- owner_types
- ( SELECT array_agg(*)::UUID[] FROM jsonb_array_elements($4) )
- )
- """
+SELECT * FROM search_by_text(
+ $1, -- developer_id
+ $2, -- query
+ $3, -- owner_types
+ $UUID_LIST::uuid[], -- owner_ids
+ $4, -- search_language
+ $5, -- k
+ $6 -- metadata_filter
+)
+"""
@rewrap_exceptions(
@@ -38,7 +41,7 @@
**d,
},
)
-@pg_query(debug=True)
+@pg_query
@beartype
async def search_docs_by_text(
*,
@@ -68,16 +71,19 @@ async def search_docs_by_text(
raise HTTPException(status_code=400, detail="k must be >= 1")
# Extract owner types and IDs
- owner_types = [owner[0] for owner in owners]
- owner_ids = [owner[1] for owner in owners]
+ owner_types: list[str] = [owner[0] for owner in owners]
+ owner_ids: list[str] = [str(owner[1]) for owner in owners]
+
+    # NOTE: Manually inline the UUID list because asyncpg isn't binding it correctly
+    owner_ids_pg_str = "ARRAY['" + "', '".join(owner_ids) + "']"
+ query = search_docs_text_query.replace("$UUID_LIST", owner_ids_pg_str)
return (
- search_docs_text_query,
+ query,
[
developer_id,
query,
owner_types,
- owner_ids,
search_language,
k,
metadata_filter,
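
The ARRAY[...] splice above is an escape hatch, safe here only because every element has already passed through UUID() and str(). For comparison, the usual asyncpg route is to bind a Python list directly against a uuid[] parameter; a sketch with an illustrative query, inside an async function with a connection conn:

    import uuid

    owner_ids = [uuid.uuid4(), uuid.uuid4()]  # hypothetical IDs
    rows = await conn.fetch(
        "SELECT * FROM doc_owners WHERE owner_id = ANY($1::uuid[])",
        owner_ids,
    )
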
diff --git a/agents-api/agents_api/queries/tasks/create_or_update_task.py b/agents-api/agents_api/queries/tasks/create_or_update_task.py
index 1f259ac16..ed1ebae71 100644
--- a/agents-api/agents_api/queries/tasks/create_or_update_task.py
+++ b/agents-api/agents_api/queries/tasks/create_or_update_task.py
@@ -20,14 +20,7 @@
# Define the raw SQL query for creating or updating a task
tools_query = parse_one("""
-WITH version AS (
- SELECT COALESCE(MAX("version"), 0) as current_version
- FROM tasks
- WHERE developer_id = $1
- AND task_id = $3
-)
INSERT INTO tools (
- task_version,
developer_id,
agent_id,
task_id,
@@ -37,8 +30,7 @@
description,
spec
)
-SELECT
- current_version, -- task_version
+VALUES (
$1, -- developer_id
$2, -- agent_id
$3, -- task_id
@@ -47,23 +39,23 @@
$6, -- name
$7, -- description
$8 -- spec
-FROM version
+)
""").sql(pretty=True)
task_query = parse_one("""
WITH current_version AS (
SELECT COALESCE(
(SELECT MAX("version")
- FROM tasks
- WHERE developer_id = $1
+ FROM tasks
+ WHERE developer_id = $1
AND task_id = $4),
0
) + 1 as next_version,
COALESCE(
- (SELECT canonical_name
- FROM tasks
- WHERE developer_id = $1 AND task_id = $4
- ORDER BY version DESC
+ (SELECT canonical_name
+ FROM tasks
+ WHERE developer_id = $1 AND task_id = $4
+ ORDER BY version DESC
LIMIT 1),
$2
) as effective_canonical_name
@@ -100,7 +92,7 @@
workflows_query = parse_one("""
WITH version AS (
SELECT COALESCE(MAX("version"), 0) as current_version
- FROM tasks
+ FROM tasks
WHERE developer_id = $1
AND task_id = $2
)
diff --git a/agents-api/agents_api/queries/tasks/create_task.py b/agents-api/agents_api/queries/tasks/create_task.py
index 58287fbbc..2e23a2252 100644
--- a/agents-api/agents_api/queries/tasks/create_task.py
+++ b/agents-api/agents_api/queries/tasks/create_task.py
@@ -21,7 +21,6 @@
# Define the raw SQL query for creating or updating a task
tools_query = parse_one("""
INSERT INTO tools (
- task_version,
developer_id,
agent_id,
task_id,
@@ -32,7 +31,6 @@
spec
)
VALUES (
- 1, -- task_version
$1, -- developer_id
$2, -- agent_id
$3, -- task_id
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index 1c49d7bc2..01f2bed47 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -215,34 +215,34 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
assert not any(d.id == doc_agent.id for d in docs_list)
-# @test("query: search docs by text")
-# async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
-# pool = await create_db_pool(dsn=dsn)
-
-# # Create a test document
-# await create_doc(
-# developer_id=developer.id,
-# owner_type="agent",
-# owner_id=agent.id,
-# data=CreateDocRequest(
-# title="Hello",
-# content="The world is a funny little thing",
-# metadata={"test": "test"},
-# embed_instruction="Embed the document",
-# ),
-# connection_pool=pool,
-# )
-
-# # Search using the correct parameter types
-# result = await search_docs_by_text(
-# developer_id=developer.id,
-# owners=[("agent", agent.id)],
-# query="funny",
-# k=3, # Add k parameter
-# search_language="english", # Add language parameter
-# metadata_filter={}, # Add metadata filter
-# connection_pool=pool,
-# )
-
-# assert len(result) >= 1
-# assert result[0].metadata is not None
+@test("query: search docs by text")
+async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a test document
+ await create_doc(
+ developer_id=developer.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ data=CreateDocRequest(
+ title="Hello",
+ content="The world is a funny little thing",
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
+ connection_pool=pool,
+ )
+
+ # Search using the correct parameter types
+ result = await search_docs_by_text(
+ developer_id=developer.id,
+ owners=[("agent", agent.id)],
+ query="funny thing",
+ k=3, # Add k parameter
+ search_language="english", # Add language parameter
+ metadata_filter={"test": "test"}, # Add metadata filter
+ connection_pool=pool,
+ )
+
+ assert len(result) >= 1
+ assert result[0].metadata is not None
diff --git a/memory-store/migrations/000007_ann.up.sql b/memory-store/migrations/000007_ann.up.sql
index c98b9a2be..725a78786 100644
--- a/memory-store/migrations/000007_ann.up.sql
+++ b/memory-store/migrations/000007_ann.up.sql
@@ -10,7 +10,7 @@ SELECT
ai.create_vectorizer (
source => 'docs',
destination => 'docs_embeddings',
- embedding => ai.embedding_voyageai ('voyage-3', 1024), -- need to parameterize this
+ embedding => ai.embedding_voyageai ('voyage-3', 1024, 'document'), -- need to parameterize this
-- actual chunking is managed by the docs table
-- this is to prevent running out of context window
chunking => ai.chunking_recursive_character_text_splitter (
@@ -45,4 +45,4 @@ SELECT
formatting => ai.formatting_python_template (E'Title: $title\n\n$chunk'),
processing => ai.processing_default (),
enqueue_existing => TRUE
- );
\ No newline at end of file
+ );
diff --git a/memory-store/migrations/000008_tools.up.sql b/memory-store/migrations/000008_tools.up.sql
index 70ddbe136..ad5db146c 100644
--- a/memory-store/migrations/000008_tools.up.sql
+++ b/memory-store/migrations/000008_tools.up.sql
@@ -6,7 +6,6 @@ CREATE TABLE IF NOT EXISTS tools (
agent_id UUID NOT NULL,
tool_id UUID NOT NULL,
task_id UUID DEFAULT NULL,
- task_version INT DEFAULT NULL,
type TEXT NOT NULL CONSTRAINT ct_tools_type_length CHECK (
length(type) >= 1
AND length(type) <= 255
@@ -22,7 +21,8 @@ CREATE TABLE IF NOT EXISTS tools (
spec JSONB NOT NULL,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
- CONSTRAINT pk_tools PRIMARY KEY (developer_id, agent_id, tool_id, type, name),
+ CONSTRAINT pk_tools PRIMARY KEY (developer_id, agent_id, tool_id),
+ CONSTRAINT ct_unique_name_per_agent UNIQUE (agent_id, name, task_id),
CONSTRAINT ct_spec_is_object CHECK (jsonb_typeof(spec) = 'object')
);
@@ -38,7 +38,7 @@ DO $$ BEGIN
) THEN
ALTER TABLE tools
ADD CONSTRAINT fk_tools_agent
- FOREIGN KEY (developer_id, agent_id)
+ FOREIGN KEY (developer_id, agent_id)
REFERENCES agents(developer_id, agent_id) ON DELETE CASCADE;
END IF;
END $$;
diff --git a/memory-store/migrations/000010_tasks.up.sql b/memory-store/migrations/000010_tasks.up.sql
index cc873f634..ce711d079 100644
--- a/memory-store/migrations/000010_tasks.up.sql
+++ b/memory-store/migrations/000010_tasks.up.sql
@@ -39,7 +39,7 @@ CREATE TABLE IF NOT EXISTS tasks (
);
-- Create sorted index on task_id if it doesn't exist
-DO $$
+DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tasks_id_sorted') THEN
CREATE INDEX idx_tasks_id_sorted ON tasks (task_id DESC);
@@ -47,7 +47,7 @@ BEGIN
END $$;
-- Create index on canonical_name if it doesn't exist
-DO $$
+DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tasks_canonical_name') THEN
CREATE INDEX idx_tasks_canonical_name ON tasks (developer_id DESC, canonical_name);
@@ -55,33 +55,41 @@ BEGIN
END $$;
-- Create a GIN index on metadata if it doesn't exist
-DO $$
+DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_tasks_metadata') THEN
CREATE INDEX idx_tasks_metadata ON tasks USING GIN (metadata);
END IF;
END $$;
--- Add foreign key constraint if it doesn't exist
-DO $$
+-- Create function to validate owner reference
+CREATE OR REPLACE FUNCTION validate_tool_task()
+RETURNS TRIGGER AS $$
BEGIN
- IF NOT EXISTS (
- SELECT 1
- FROM information_schema.table_constraints
- WHERE constraint_name = 'fk_tools_task_id'
- ) THEN
- ALTER TABLE tools ADD CONSTRAINT fk_tools_task_id
- FOREIGN KEY (developer_id, task_id, task_version) REFERENCES tasks(developer_id, task_id, version)
- DEFERRABLE INITIALLY DEFERRED;
+ IF NEW.task_id IS NOT NULL THEN
+ IF NOT EXISTS (
+ SELECT 1 FROM tasks
+ WHERE developer_id = NEW.developer_id AND task_id = NEW.task_id
+ ) THEN
+ RAISE EXCEPTION 'Invalid task reference';
+ END IF;
END IF;
-END $$;
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Create trigger for validation
+CREATE TRIGGER trg_validate_tool_task
+BEFORE INSERT OR UPDATE ON tools
+FOR EACH ROW
+EXECUTE FUNCTION validate_tool_task();
--- Create trigger if it doesn't exist
-DO $$
+-- Create updated_at trigger if it doesn't exist
+DO $$
BEGIN
IF NOT EXISTS (
- SELECT 1
- FROM pg_trigger
+ SELECT 1
+ FROM pg_trigger
WHERE tgname = 'trg_tasks_updated_at'
) THEN
CREATE TRIGGER trg_tasks_updated_at
@@ -116,4 +124,4 @@ CREATE TABLE IF NOT EXISTS workflows (
-- Add comment to 'workflows' table
COMMENT ON TABLE workflows IS 'Stores normalized workflows for tasks';
-COMMIT;
\ No newline at end of file
+COMMIT;
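The deferred composite foreign key on (developer_id, task_id, task_version) is gone (task_version was dropped from tools in 000008), replaced by a row trigger that checks the referenced task exists. A hedged sketch of the observable behavior, assuming both migrations have been applied:

```python
import uuid
import asyncpg

async def demo_invalid_task_reference(pool: asyncpg.Pool, developer_id, agent_id):
    try:
        await pool.execute(
            """
            INSERT INTO tools (developer_id, agent_id, tool_id, task_id, type, name, spec)
            VALUES ($1, $2, gen_random_uuid(), $3, 'function', 'lookup', '{}'::jsonb)
            """,
            developer_id,
            agent_id,
            uuid.uuid4(),  # no matching row in tasks
        )
    except asyncpg.PostgresError as exc:
        # validate_tool_task() raises before the row is written
        assert "Invalid task reference" in str(exc)
```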
diff --git a/memory-store/migrations/000018_doc_search.up.sql b/memory-store/migrations/000018_doc_search.up.sql
index 593d00a7f..db25e79d2 100644
--- a/memory-store/migrations/000018_doc_search.up.sql
+++ b/memory-store/migrations/000018_doc_search.up.sql
@@ -31,7 +31,7 @@ begin
model_input_md5 := md5(_provider || '++' || _model || '++' || _input_text || '++' || _input_type);
- select embedding into cached_embedding
+ select embedding into cached_embedding
from embeddings_cache c
where c.model_input_md5 = model_input_md5;
@@ -62,12 +62,13 @@ end;
$$;
-- Create a type for the search results if it doesn't exist
-DO $$
+DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_type WHERE typname = 'doc_search_result'
) THEN
CREATE TYPE doc_search_result AS (
+ developer_id uuid,
doc_id uuid,
index integer,
title text,
@@ -106,23 +107,20 @@ BEGIN
RAISE EXCEPTION 'confidence must be between 0 and 1';
END IF;
- IF owner_types IS NOT NULL AND owner_ids IS NOT NULL AND
- array_length(owner_types, 1) != array_length(owner_ids, 1) THEN
+ IF owner_types IS NOT NULL AND owner_ids IS NOT NULL AND
+ array_length(owner_types, 1) != array_length(owner_ids, 1) AND
+ array_length(owner_types, 1) <= 0 THEN
RAISE EXCEPTION 'owner_types and owner_ids arrays must have the same length';
END IF;
-- Calculate search threshold from confidence
search_threshold := 1.0 - confidence;
- -- Build owner filter SQL if provided
- IF owner_types IS NOT NULL AND owner_ids IS NOT NULL THEN
- owner_filter_sql := '
- AND (
- doc_owners.owner_id = ANY($5::uuid[]) AND doc_owners.owner_type = ANY($4::text[])
- )';
- ELSE
- owner_filter_sql := '';
- END IF;
+ -- Build owner filter SQL
+ owner_filter_sql := '
+ AND (
+ doc_owners.owner_id = ANY($5::uuid[]) AND doc_owners.owner_type = ANY($4::text[])
+ )';
-- Build metadata filter SQL if provided
IF metadata_filter IS NOT NULL THEN
@@ -134,7 +132,7 @@ BEGIN
-- Return search results
RETURN QUERY EXECUTE format(
'WITH ranked_docs AS (
- SELECT
+ SELECT
d.developer_id,
d.doc_id,
d.index,
@@ -159,7 +157,7 @@ BEGIN
owner_filter_sql,
metadata_filter_sql
)
- USING
+ USING
query_embedding,
search_threshold,
k,
@@ -167,7 +165,7 @@ BEGIN
owner_ids,
metadata_filter,
developer_id;
-
+
END;
$$;
@@ -186,7 +184,7 @@ OR REPLACE FUNCTION embed_and_search_by_vector (
confidence float DEFAULT 0.5,
metadata_filter jsonb DEFAULT NULL,
embedding_provider text DEFAULT 'voyageai',
- embedding_model text DEFAULT 'voyage-01',
+ embedding_model text DEFAULT 'voyage-3',
input_type text DEFAULT 'query',
api_key text DEFAULT NULL,
api_key_name text DEFAULT NULL
@@ -225,7 +223,7 @@ OR REPLACE FUNCTION search_by_text (
developer_id UUID,
query_text text,
owner_types TEXT[],
- owner_ids UUID [],
+ owner_ids UUID[],
search_language text DEFAULT 'english',
k integer DEFAULT 3,
metadata_filter jsonb DEFAULT NULL
@@ -240,27 +238,25 @@ BEGIN
RAISE EXCEPTION 'k must be greater than 0';
END IF;
- IF owner_types IS NOT NULL AND owner_ids IS NOT NULL AND
- array_length(owner_types, 1) != array_length(owner_ids, 1) THEN
+ IF owner_types IS NOT NULL AND owner_ids IS NOT NULL AND
+ array_length(owner_types, 1) != array_length(owner_ids, 1) AND
+ array_length(owner_types, 1) <= 0 THEN
RAISE EXCEPTION 'owner_types and owner_ids arrays must have the same length';
END IF;
-- Convert search query to tsquery
ts_query := websearch_to_tsquery(search_language::regconfig, query_text);
- -- Build owner filter SQL if provided
- IF owner_types IS NOT NULL AND owner_ids IS NOT NULL THEN
- owner_filter_sql := '
- AND (
- doc_owners.owner_id = ANY($5::uuid[]) AND doc_owners.owner_type = ANY($4::text[])
- )';
- ELSE
- owner_filter_sql := '';
- END IF;
+ -- Build owner filter SQL
+ owner_filter_sql := '
+ AND (
+ doc_owners.owner_id = ANY($4::uuid[]) AND doc_owners.owner_type = ANY($3::text[])
+ )';
+
-- Build metadata filter SQL if provided
IF metadata_filter IS NOT NULL THEN
- metadata_filter_sql := 'AND d.metadata @> $6';
+ metadata_filter_sql := 'AND d.metadata @> $5';
ELSE
metadata_filter_sql := '';
END IF;
@@ -268,7 +264,7 @@ BEGIN
-- Return search results
RETURN QUERY EXECUTE format(
'WITH ranked_docs AS (
- SELECT
+ SELECT
d.developer_id,
d.doc_id,
d.index,
@@ -289,11 +285,11 @@ BEGIN
SELECT DISTINCT ON (doc_id) *
FROM ranked_docs
ORDER BY doc_id, distance DESC
- LIMIT $3',
+ LIMIT $2',
owner_filter_sql,
metadata_filter_sql
)
- USING
+ USING
ts_query,
k,
owner_types,
@@ -409,7 +405,7 @@ BEGIN
) combined
),
scores AS (
- SELECT
+ SELECT
r.developer_id,
r.doc_id,
r.title,
@@ -426,13 +422,13 @@ BEGIN
LEFT JOIN embedding_results e ON r.doc_id = e.doc_id AND r.developer_id = e.developer_id
),
normalized_scores AS (
- SELECT
+ SELECT
*,
unnest(dbsf_normalize(array_agg(text_score) OVER ())) as norm_text_score,
unnest(dbsf_normalize(array_agg(embedding_score) OVER ())) as norm_embedding_score
FROM scores
)
- SELECT
+ SELECT
developer_id,
doc_id,
index,
@@ -464,7 +460,7 @@ OR REPLACE FUNCTION embed_and_search_hybrid (
metadata_filter jsonb DEFAULT NULL,
search_language text DEFAULT 'english',
embedding_provider text DEFAULT 'voyageai',
- embedding_model text DEFAULT 'voyage-01',
+ embedding_model text DEFAULT 'voyage-3',
input_type text DEFAULT 'query',
api_key text DEFAULT NULL,
api_key_name text DEFAULT NULL
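With this change the owner filter is built unconditionally, so callers must always supply owner_types and owner_ids (bound as $3/$4 inside the dynamic query, with k moving to $2). A usage sketch via asyncpg, assuming seeded docs:

```python
import asyncpg

async def text_search(pool: asyncpg.Pool, developer_id, agent_id):
    # Positional arguments follow the function signature in this migration;
    # metadata_filter keeps its NULL default here.
    return await pool.fetch(
        """
        SELECT * FROM search_by_text(
            $1,          -- developer_id
            $2,          -- query_text
            $3::text[],  -- owner_types
            $4::uuid[],  -- owner_ids
            'english',   -- search_language
            3            -- k
        )
        """,
        developer_id,
        "funny little thing",
        ["agent"],
        [agent_id],
    )
```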
From b946119485c729e25b78afe23774b7ccc95fde64 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sun, 22 Dec 2024 17:13:30 +0530
Subject: [PATCH 124/274] fix: Fix canonical name collisions in tests
Signed-off-by: Diwank Singh Tomer
---
.../agents_api/queries/agents/create_agent.py | 2 +-
.../queries/agents/create_or_update_agent.py | 2 +-
.../queries/tasks/create_or_update_task.py | 2 +-
.../agents_api/queries/tasks/create_task.py | 2 +-
agents-api/agents_api/queries/utils.py | 22 +++++--------------
agents-api/pyproject.toml | 1 +
agents-api/tests/fixtures.py | 18 ---------------
agents-api/tests/test_developer_queries.py | 4 ++--
agents-api/tests/test_docs_queries.py | 3 ---
agents-api/tests/test_task_queries.py | 2 +-
agents-api/uv.lock | 11 ++++++++++
11 files changed, 24 insertions(+), 45 deletions(-)
diff --git a/agents-api/agents_api/queries/agents/create_agent.py b/agents-api/agents_api/queries/agents/create_agent.py
index 58141a676..3f7807021 100644
--- a/agents-api/agents_api/queries/agents/create_agent.py
+++ b/agents-api/agents_api/queries/agents/create_agent.py
@@ -114,7 +114,7 @@ async def create_agent(
# Set default values
data.metadata = data.metadata or {}
- data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
+ data.canonical_name = data.canonical_name or generate_canonical_name()
params = [
developer_id,
diff --git a/agents-api/agents_api/queries/agents/create_or_update_agent.py b/agents-api/agents_api/queries/agents/create_or_update_agent.py
index 3140112e7..76ddaa8cc 100644
--- a/agents-api/agents_api/queries/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/queries/agents/create_or_update_agent.py
@@ -117,7 +117,7 @@ async def create_or_update_agent(
# Set default values
data.metadata = data.metadata or {}
- data.canonical_name = data.canonical_name or generate_canonical_name(data.name)
+ data.canonical_name = data.canonical_name or generate_canonical_name()
params = [
developer_id,
diff --git a/agents-api/agents_api/queries/tasks/create_or_update_task.py b/agents-api/agents_api/queries/tasks/create_or_update_task.py
index ed1ebae71..d02814875 100644
--- a/agents-api/agents_api/queries/tasks/create_or_update_task.py
+++ b/agents-api/agents_api/queries/tasks/create_or_update_task.py
@@ -167,7 +167,7 @@ async def create_or_update_task(
"""
# Generate canonical name from task name if not provided
- canonical_name = data.canonical_name or generate_canonical_name(data.name)
+ canonical_name = data.canonical_name or generate_canonical_name()
# Version will be determined by the CTE
task_params = [
diff --git a/agents-api/agents_api/queries/tasks/create_task.py b/agents-api/agents_api/queries/tasks/create_task.py
index 2e23a2252..6deffc3d5 100644
--- a/agents-api/agents_api/queries/tasks/create_task.py
+++ b/agents-api/agents_api/queries/tasks/create_task.py
@@ -150,7 +150,7 @@ async def create_task(
agent_id, # $2
task_id, # $3
data.name, # $4
- data.canonical_name or generate_canonical_name(data.name), # $5
+ data.canonical_name or generate_canonical_name(), # $5
data.description, # $6
data.inherit_tools, # $7
data.input_schema or {}, # $8
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index 5151924ff..1a9ce7dc2 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -1,7 +1,5 @@
import concurrent.futures
import inspect
-import random
-import re
import socket
import time
from functools import partialmethod, wraps
@@ -18,6 +16,7 @@
)
import asyncpg
+import namer
from asyncpg import Record
from beartype import beartype
from fastapi import HTTPException
@@ -32,22 +31,11 @@
ModelT = TypeVar("ModelT", bound=BaseModel)
-def generate_canonical_name(name: str) -> str:
- """Convert a display name to a canonical name.
- Example: "My Cool Agent!" -> "my_cool_agent"
- """
- # Remove special characters, replace spaces with underscores
- canonical = re.sub(r"[^\w\s-]", "", name.lower())
- canonical = re.sub(r"[-\s]+", "_", canonical)
+def generate_canonical_name() -> str:
+ """Generate canonical name"""
- # Ensure it starts with a letter (prepend 'a' if not)
- if not canonical[0].isalpha():
- canonical = f"a_{canonical}"
-
- # Add 3 random numbers to the end
- canonical = f"{canonical}_{random.randint(100, 999)}"
-
- return canonical
+ categories: list[str] = ["astronomy", "physics", "scientists", "math"]
+ return namer.generate(separator="_", suffix_length=3, category=categories)
def partialclass(cls, *args, **kwargs):
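For context on the replacement: unique-namer (imported as `namer`, pinned in pyproject.toml below) draws words from themed lists and appends a short random suffix, which sidesteps the collisions that slugified display names plus a three-digit suffix produced in tests. A quick sketch; the printed names are illustrative only:

```python
import namer

categories: list[str] = ["astronomy", "physics", "scientists", "math"]

for _ in range(3):
    # e.g. "curious_kepler_4fq" (illustrative, not actual output)
    print(namer.generate(separator="_", suffix_length=3, category=categories))
```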
diff --git a/agents-api/pyproject.toml b/agents-api/pyproject.toml
index db271a021..7ce441024 100644
--- a/agents-api/pyproject.toml
+++ b/agents-api/pyproject.toml
@@ -52,6 +52,7 @@ dependencies = [
"asyncpg>=0.30.0",
"sqlglot>=26.0.0",
"testcontainers>=4.9.0",
+ "unique-namer>=1.6.1",
]
[dependency-groups]
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 3c73481b9..df799b701 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -47,8 +47,6 @@
patch_embed_acompletion as patch_embed_acompletion_ctx,
)
-EMBEDDING_SIZE: int = 1024
-
@fixture(scope="global")
def pg_dsn():
@@ -219,22 +217,6 @@ async def test_session(
return session
-# @fixture(scope="global")
-# async def test_doc(
-# dsn=pg_dsn,
-# developer_id=test_developer_id,
-# agent=test_agent,
-# ):
-# async with get_pg_client(dsn=dsn) as client:
-# doc = await create_doc(
-# developer_id=developer_id,
-# owner_type="agent",
-# owner_id=agent.id,
-# data=CreateDocRequest(title="Hello", content=["World"]),
-# client=client,
-# )
-# yield doc
-
# @fixture(scope="global")
# async def test_user_doc(
diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py
index 6d94b3209..1cea37d27 100644
--- a/agents-api/tests/test_developer_queries.py
+++ b/agents-api/tests/test_developer_queries.py
@@ -34,7 +34,7 @@ async def _(dsn=pg_dsn, dev=test_new_developer):
connection_pool=pool,
)
- assert type(developer) == Developer
+ assert type(developer) is Developer
assert developer.id == dev.id
assert developer.email == dev.email
assert developer.active
@@ -55,7 +55,7 @@ async def _(dsn=pg_dsn):
connection_pool=pool,
)
- assert type(developer) == ResourceCreatedResponse
+ assert type(developer) is ResourceCreatedResponse
assert developer.id == dev_id
assert developer.created_at is not None
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index 01f2bed47..69ae65613 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -6,13 +6,10 @@
from agents_api.queries.docs.delete_doc import delete_doc
from agents_api.queries.docs.get_doc import get_doc
from agents_api.queries.docs.list_docs import list_docs
-
-# If you wish to test text/embedding/hybrid search, import them:
from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
# from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
# from agents_api.queries.docs.search_docs_hybrid import search_docs_hybrid
-# You can rename or remove these imports to match your actual fixtures
from tests.fixtures import pg_dsn, test_agent, test_developer, test_doc, test_user
diff --git a/agents-api/tests/test_task_queries.py b/agents-api/tests/test_task_queries.py
index c4303bb97..43394d244 100644
--- a/agents-api/tests/test_task_queries.py
+++ b/agents-api/tests/test_task_queries.py
@@ -169,7 +169,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
assert result is not None
assert isinstance(result, list)
assert all(isinstance(task, Task) for task in result)
- assert all(task.metadata.get("test") == True for task in result)
+ assert all(task.metadata.get("test") is True for task in result)
@test("query: list tasks sql - no filters")
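The lint fix above is behavioral, not cosmetic: `== True` passes for any value that compares equal to True (such as the integer 1), while `is True` checks identity against the bool singleton, which is stricter for values coming back from JSONB metadata. For instance:

```python
value = 1
assert value == True                         # equality: 1 compares equal to True
assert value is not True                     # identity: 1 is not the bool singleton
assert {"test": True}.get("test") is True    # only a real bool passes
```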
diff --git a/agents-api/uv.lock b/agents-api/uv.lock
index 569aa96dc..e7f171c9b 100644
--- a/agents-api/uv.lock
+++ b/agents-api/uv.lock
@@ -53,6 +53,7 @@ dependencies = [
{ name = "testcontainers" },
{ name = "thefuzz" },
{ name = "tiktoken" },
+ { name = "unique-namer" },
{ name = "uuid7" },
{ name = "uvicorn" },
{ name = "uvloop" },
@@ -122,6 +123,7 @@ requires-dist = [
{ name = "testcontainers", specifier = ">=4.9.0" },
{ name = "thefuzz", specifier = "~=0.22.1" },
{ name = "tiktoken", specifier = "~=0.7.0" },
+ { name = "unique-namer", specifier = ">=1.6.1" },
{ name = "uuid7", specifier = ">=0.1.0" },
{ name = "uvicorn", specifier = "~=0.30.6" },
{ name = "uvloop", specifier = "~=0.21.0" },
@@ -3209,6 +3211,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 },
]
+[[package]]
+name = "unique-namer"
+version = "1.6.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/04/47/26e9f45b64ad2d7c77eefb48a0e84ae0c0070fa812bf6ab95584559ce53c/unique_namer-1.6.1.tar.gz", hash = "sha256:7f4e3143f923c24baaed56bb93726e10669333271caa71ffd5d8f1a928a5befe", size = 73334 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fb/72/e06078006bbc3635490b872e8647294cf5921f378634de43520012b7c09e/unique_namer-1.6.1-py3-none-any.whl", hash = "sha256:6e76751c0886244625b43a8e5e7c18168a9205f5a944c0dbbbd9eb219c4812f2", size = 71111 },
+]
+
[[package]]
name = "uri-template"
version = "1.3.0"
From 4fc4f0e1899a6101a29f1dc51f143a1d50b518dc Mon Sep 17 00:00:00 2001
From: creatorrr
Date: Sun, 22 Dec 2024 11:50:22 +0000
Subject: [PATCH 125/274] refactor: Lint agents-api (CI)
---
agents-api/tests/fixtures.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index df799b701..ea3866ff2 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -217,7 +217,6 @@ async def test_session(
return session
-
# @fixture(scope="global")
# async def test_user_doc(
# dsn=pg_dsn,
From e2181fb94126b53a48406af9b6a9d1ab89976ee1 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Sun, 22 Dec 2024 22:06:43 +0530
Subject: [PATCH 126/274] fix: Fix search by embedding
Signed-off-by: Diwank Singh Tomer
---
.../queries/docs/search_docs_by_embedding.py | 65 +++++++++++--------
agents-api/tests/test_docs_queries.py | 34 +++++++++-
2 files changed, 71 insertions(+), 28 deletions(-)
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
index 5a89803ee..6fb6b82eb 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
@@ -1,31 +1,23 @@
-from typing import List, Literal
+from typing import Any, List, Literal
from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
-from sqlglot import parse_one
from ...autogen.openapi_model import DocReference
from ..utils import pg_query, wrap_in_class
-# If you're doing approximate ANN (DiskANN) or IVF, you might use a special function or hint.
-# For a basic vector distance search, you can do something like:
-search_docs_by_embedding_query = parse_one("""
-SELECT d.*,
- (d.embedding <-> $3) AS distance
-FROM docs d
-LEFT JOIN doc_owners do
- ON d.developer_id = do.developer_id
- AND d.doc_id = do.doc_id
-WHERE d.developer_id = $1
- AND (
- ($4::text IS NULL AND $5::uuid IS NULL)
- OR (do.owner_type = $4 AND do.owner_id = $5)
- )
- AND d.embedding IS NOT NULL
-ORDER BY d.embedding <-> $3
-LIMIT $2;
-""").sql(pretty=True)
+search_docs_by_embedding_query = """
+SELECT * FROM search_by_vector(
+ $1, -- developer_id
+ $2::vector(1024), -- query_embedding
+ $3::text[], -- owner_types
+ $UUID_LIST::uuid[], -- owner_ids
+ $4, -- k
+ $5, -- confidence
+ $6 -- metadata_filter
+)
+"""
@wrap_in_class(
@@ -46,8 +38,9 @@ async def search_docs_by_embedding(
developer_id: UUID,
query_embedding: List[float],
k: int = 10,
- owner_type: Literal["user", "agent", "org"] | None = None,
- owner_id: UUID | None = None,
+ owners: list[tuple[Literal["user", "agent"], UUID]],
+ confidence: float = 0.5,
+ metadata_filter: dict[str, Any] = {},
) -> tuple[str, list]:
"""
Vector-based doc search:
@@ -56,8 +49,9 @@ async def search_docs_by_embedding(
developer_id (UUID): The ID of the developer.
query_embedding (List[float]): The vector to query.
k (int): The number of results to return.
- owner_type (Literal["user", "agent", "org"]): The type of the owner of the documents.
- owner_id (UUID): The ID of the owner of the documents.
+ owners (list[tuple[Literal["user", "agent"], UUID]]): List of (owner_type, owner_id) tuples.
+ confidence (float): The confidence threshold for the search.
+ metadata_filter (dict): Metadata filter criteria.
Returns:
tuple[str, list]: SQL query and parameters for searching the documents.
@@ -65,11 +59,28 @@ async def search_docs_by_embedding(
if k < 1:
raise HTTPException(status_code=400, detail="k must be >= 1")
- # Validate embedding length if needed; e.g. 1024 floats
if not query_embedding:
raise HTTPException(status_code=400, detail="Empty embedding provided")
+ # Convert query_embedding to a string
+ query_embedding_str = f"[{', '.join(map(str, query_embedding))}]"
+
+ # Extract owner types and IDs
+ owner_types: list[str] = [owner[0] for owner in owners]
+ owner_ids: list[str] = [str(owner[1]) for owner in owners]
+
+ # NOTE: Manually interpolate the UUID list because asyncpg isn't sending it correctly
+ owner_ids_pg_str = f"ARRAY['{'\', \''.join(owner_ids)}']"
+ query = search_docs_by_embedding_query.replace("$UUID_LIST", owner_ids_pg_str)
+
return (
- search_docs_by_embedding_query,
- [developer_id, k, query_embedding, owner_type, owner_id],
+ query,
+ [
+ developer_id,
+ query_embedding_str,
+ owner_types,
+ k,
+ confidence,
+ metadata_filter,
+ ],
)
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index 69ae65613..6a114ab5c 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -6,9 +6,9 @@
from agents_api.queries.docs.delete_doc import delete_doc
from agents_api.queries.docs.get_doc import get_doc
from agents_api.queries.docs.list_docs import list_docs
+from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
-# from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
# from agents_api.queries.docs.search_docs_hybrid import search_docs_hybrid
from tests.fixtures import pg_dsn, test_agent, test_developer, test_doc, test_user
@@ -243,3 +243,35 @@ async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
assert len(result) >= 1
assert result[0].metadata is not None
+
+
+@test("query: search docs by embedding")
+async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a test document
+ await create_doc(
+ developer_id=developer.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ data=CreateDocRequest(
+ title="Hello",
+ content="The world is a funny little thing",
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
+ connection_pool=pool,
+ )
+
+ # Search using the correct parameter types
+ result = await search_docs_by_embedding(
+ developer_id=developer.id,
+ owners=[("agent", agent.id)],
+ query_embedding=[1.0]*1024,
+ k=3, # Add k parameter
+ metadata_filter={"test": "test"}, # Add metadata filter
+ connection_pool=pool,
+ )
+
+ assert len(result) >= 1
+ assert result[0].metadata is not None
From 934db8a6798c23fdec06f580a7eb3450c3e3af38 Mon Sep 17 00:00:00 2001
From: creatorrr
Date: Sun, 22 Dec 2024 16:37:56 +0000
Subject: [PATCH 127/274] refactor: Lint agents-api (CI)
---
agents-api/tests/test_docs_queries.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index 6a114ab5c..6914b1112 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -267,7 +267,7 @@ async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
result = await search_docs_by_embedding(
developer_id=developer.id,
owners=[("agent", agent.id)],
- query_embedding=[1.0]*1024,
+ query_embedding=[1.0] * 1024,
k=3, # Add k parameter
metadata_filter={"test": "test"}, # Add metadata filter
connection_pool=pool,
From 39589d2b33fa2e4138f74fa5a505109567e8fa2a Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Mon, 23 Dec 2024 08:44:57 +0300
Subject: [PATCH 128/274] wip
---
agents-api/agents_api/routers/agents/create_agent.py | 2 +-
.../agents_api/routers/agents/create_or_update_agent.py | 4 ++--
agents-api/agents_api/routers/agents/delete_agent.py | 2 +-
agents-api/agents_api/routers/agents/get_agent_details.py | 2 +-
4 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/agents-api/agents_api/routers/agents/create_agent.py b/agents-api/agents_api/routers/agents/create_agent.py
index 2e1c4df0a..e861617ba 100644
--- a/agents-api/agents_api/routers/agents/create_agent.py
+++ b/agents-api/agents_api/routers/agents/create_agent.py
@@ -9,7 +9,7 @@
ResourceCreatedResponse,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.agent.create_agent import create_agent as create_agent_query
+from ...queries.agents.create_agent import create_agent as create_agent_query
from .router import router
diff --git a/agents-api/agents_api/routers/agents/create_or_update_agent.py b/agents-api/agents_api/routers/agents/create_or_update_agent.py
index 2dcbcd599..018a679c8 100644
--- a/agents-api/agents_api/routers/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/routers/agents/create_or_update_agent.py
@@ -4,7 +4,7 @@
from fastapi import Depends
from starlette.status import HTTP_201_CREATED
-import agents_api.models as models
+from ...queries.agents.create_or_update_agent import create_or_update_agent as create_or_update_agent_query
from ...autogen.openapi_model import (
CreateOrUpdateAgentRequest,
@@ -21,7 +21,7 @@ async def create_or_update_agent(
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> ResourceCreatedResponse:
# TODO: Validate model name
- agent = models.agent.create_or_update_agent(
+ agent = create_or_update_agent_query(
developer_id=x_developer_id,
agent_id=agent_id,
data=data,
diff --git a/agents-api/agents_api/routers/agents/delete_agent.py b/agents-api/agents_api/routers/agents/delete_agent.py
index 03fcd56a0..fbf482f8d 100644
--- a/agents-api/agents_api/routers/agents/delete_agent.py
+++ b/agents-api/agents_api/routers/agents/delete_agent.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import ResourceDeletedResponse
from ...dependencies.developer_id import get_developer_id
-from ...models.agent.delete_agent import delete_agent as delete_agent_query
+from ...queries.agents.delete_agent import delete_agent as delete_agent_query
from .router import router
diff --git a/agents-api/agents_api/routers/agents/get_agent_details.py b/agents-api/agents_api/routers/agents/get_agent_details.py
index 3d684368e..6d90bc3ab 100644
--- a/agents-api/agents_api/routers/agents/get_agent_details.py
+++ b/agents-api/agents_api/routers/agents/get_agent_details.py
@@ -5,7 +5,7 @@
from ...autogen.openapi_model import Agent
from ...dependencies.developer_id import get_developer_id
-from ...models.agent.get_agent import get_agent as get_agent_query
+from ...queries.agents.get_agent import get_agent as get_agent_query
from .router import router
From 3ae8d9e6af56eb5401efebc9b1e2a48611c18d75 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Mon, 23 Dec 2024 05:46:09 +0000
Subject: [PATCH 129/274] refactor: Lint agents-api (CI)
---
.../agents_api/routers/agents/create_or_update_agent.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/agents-api/agents_api/routers/agents/create_or_update_agent.py b/agents-api/agents_api/routers/agents/create_or_update_agent.py
index 018a679c8..24cca09e4 100644
--- a/agents-api/agents_api/routers/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/routers/agents/create_or_update_agent.py
@@ -4,13 +4,14 @@
from fastapi import Depends
from starlette.status import HTTP_201_CREATED
-from ...queries.agents.create_or_update_agent import create_or_update_agent as create_or_update_agent_query
-
from ...autogen.openapi_model import (
CreateOrUpdateAgentRequest,
ResourceCreatedResponse,
)
from ...dependencies.developer_id import get_developer_id
+from ...queries.agents.create_or_update_agent import (
+ create_or_update_agent as create_or_update_agent_query,
+)
from .router import router
From daa41d6118058dcdddb10ff62d446a3ecb7790b7 Mon Sep 17 00:00:00 2001
From: HamadaSalhab
Date: Mon, 23 Dec 2024 08:49:36 +0300
Subject: [PATCH 130/274] chore: configure `users` router with pg queries
---
agents-api/agents_api/routers/users/create_or_update_user.py | 2 +-
agents-api/agents_api/routers/users/create_user.py | 2 +-
agents-api/agents_api/routers/users/delete_user.py | 2 +-
agents-api/agents_api/routers/users/get_user_details.py | 2 +-
agents-api/agents_api/routers/users/list_users.py | 2 +-
agents-api/agents_api/routers/users/patch_user.py | 2 +-
agents-api/agents_api/routers/users/update_user.py | 2 +-
drafts/cozo | 1 +
8 files changed, 8 insertions(+), 7 deletions(-)
create mode 160000 drafts/cozo
diff --git a/agents-api/agents_api/routers/users/create_or_update_user.py b/agents-api/agents_api/routers/users/create_or_update_user.py
index 0141983c9..746134499 100644
--- a/agents-api/agents_api/routers/users/create_or_update_user.py
+++ b/agents-api/agents_api/routers/users/create_or_update_user.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import CreateOrUpdateUserRequest, ResourceCreatedResponse
from ...dependencies.developer_id import get_developer_id
-from ...models.user.create_or_update_user import (
+from ...queries.users.create_or_update_user import (
create_or_update_user as create_or_update_user_query,
)
from .router import router
diff --git a/agents-api/agents_api/routers/users/create_user.py b/agents-api/agents_api/routers/users/create_user.py
index 4724a77b4..e18ca3c97 100644
--- a/agents-api/agents_api/routers/users/create_user.py
+++ b/agents-api/agents_api/routers/users/create_user.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import CreateUserRequest, ResourceCreatedResponse
from ...dependencies.developer_id import get_developer_id
-from ...models.user.create_user import create_user as create_user_query
+from ...queries.users.create_user import create_user as create_user_query
from .router import router
diff --git a/agents-api/agents_api/routers/users/delete_user.py b/agents-api/agents_api/routers/users/delete_user.py
index d9d8032e7..446c7cf0c 100644
--- a/agents-api/agents_api/routers/users/delete_user.py
+++ b/agents-api/agents_api/routers/users/delete_user.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import ResourceDeletedResponse
from ...dependencies.developer_id import get_developer_id
-from ...models.user.delete_user import delete_user as delete_user_query
+from ...queries.users.delete_user import delete_user as delete_user_query
from .router import router
diff --git a/agents-api/agents_api/routers/users/get_user_details.py b/agents-api/agents_api/routers/users/get_user_details.py
index 71a26c2dc..1a1cfd6d3 100644
--- a/agents-api/agents_api/routers/users/get_user_details.py
+++ b/agents-api/agents_api/routers/users/get_user_details.py
@@ -5,7 +5,7 @@
from ...autogen.openapi_model import User
from ...dependencies.developer_id import get_developer_id
-from ...models.user.get_user import get_user as get_user_query
+from ...queries.users.get_user import get_user as get_user_query
from .router import router
diff --git a/agents-api/agents_api/routers/users/list_users.py b/agents-api/agents_api/routers/users/list_users.py
index 926699d40..c57dec613 100644
--- a/agents-api/agents_api/routers/users/list_users.py
+++ b/agents-api/agents_api/routers/users/list_users.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import ListResponse, User
from ...dependencies.developer_id import get_developer_id
from ...dependencies.query_filter import MetadataFilter, create_filter_extractor
-from ...models.user.list_users import list_users as list_users_query
+from ...queries.users.list_users import list_users as list_users_query
from .router import router
diff --git a/agents-api/agents_api/routers/users/patch_user.py b/agents-api/agents_api/routers/users/patch_user.py
index 8a49aaf93..0e8b5fc53 100644
--- a/agents-api/agents_api/routers/users/patch_user.py
+++ b/agents-api/agents_api/routers/users/patch_user.py
@@ -5,7 +5,7 @@
from ...autogen.openapi_model import PatchUserRequest, ResourceUpdatedResponse
from ...dependencies.developer_id import get_developer_id
-from ...models.user.patch_user import patch_user as patch_user_query
+from ...queries.users.patch_user import patch_user as patch_user_query
from .router import router
diff --git a/agents-api/agents_api/routers/users/update_user.py b/agents-api/agents_api/routers/users/update_user.py
index d9104da73..82069fe94 100644
--- a/agents-api/agents_api/routers/users/update_user.py
+++ b/agents-api/agents_api/routers/users/update_user.py
@@ -5,7 +5,7 @@
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateUserRequest
from ...dependencies.developer_id import get_developer_id
-from ...models.user.update_user import update_user as update_user_query
+from ...queries.users.update_user import update_user as update_user_query
from .router import router
diff --git a/drafts/cozo b/drafts/cozo
new file mode 160000
index 000000000..faf89ef77
--- /dev/null
+++ b/drafts/cozo
@@ -0,0 +1 @@
+Subproject commit faf89ef77e6462460f873e9de618001d968a1a40
From 96e9b0eeb81f88adc2ea4798a6d024c611bc26e2 Mon Sep 17 00:00:00 2001
From: HamadaSalhab
Date: Mon, 23 Dec 2024 09:38:18 +0300
Subject: [PATCH 131/274] chore: configure `tasks` router with pg queries
---
agents-api/agents_api/routers/tasks/create_or_update_task.py | 2 +-
agents-api/agents_api/routers/tasks/create_task.py | 2 +-
agents-api/agents_api/routers/tasks/create_task_execution.py | 5 +++--
agents-api/agents_api/routers/tasks/get_execution_details.py | 1 +
agents-api/agents_api/routers/tasks/get_task_details.py | 2 +-
.../agents_api/routers/tasks/list_execution_transitions.py | 1 +
agents-api/agents_api/routers/tasks/list_task_executions.py | 1 +
agents-api/agents_api/routers/tasks/list_tasks.py | 2 +-
agents-api/agents_api/routers/tasks/patch_execution.py | 1 +
.../agents_api/routers/tasks/stream_transitions_events.py | 1 +
agents-api/agents_api/routers/tasks/update_execution.py | 1 +
monitoring/grafana/provisioning/dashboards/main.yaml | 0
12 files changed, 13 insertions(+), 6 deletions(-)
create mode 100755 monitoring/grafana/provisioning/dashboards/main.yaml
diff --git a/agents-api/agents_api/routers/tasks/create_or_update_task.py b/agents-api/agents_api/routers/tasks/create_or_update_task.py
index f40530dfc..7c93be8b0 100644
--- a/agents-api/agents_api/routers/tasks/create_or_update_task.py
+++ b/agents-api/agents_api/routers/tasks/create_or_update_task.py
@@ -11,7 +11,7 @@
ResourceUpdatedResponse,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.task.create_or_update_task import (
+from ...queries.tasks.create_or_update_task import (
create_or_update_task as create_or_update_task_query,
)
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/create_task.py b/agents-api/agents_api/routers/tasks/create_task.py
index 0e233ac97..0dc4e91e4 100644
--- a/agents-api/agents_api/routers/tasks/create_task.py
+++ b/agents-api/agents_api/routers/tasks/create_task.py
@@ -11,7 +11,7 @@
ResourceCreatedResponse,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.task.create_task import create_task as create_task_query
+from ...queries.tasks.create_task import create_task as create_task_query
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/create_task_execution.py b/agents-api/agents_api/routers/tasks/create_task_execution.py
index bb1497b4c..7fc5c9a79 100644
--- a/agents-api/agents_api/routers/tasks/create_task_execution.py
+++ b/agents-api/agents_api/routers/tasks/create_task_execution.py
@@ -21,7 +21,8 @@
from ...common.protocol.developers import Developer
from ...dependencies.developer_id import get_developer_id
from ...env import max_free_executions
-from ...models.developer.get_developer import get_developer
+from ...queries.developers.get_developer import get_developer
+# TODO: Change these once we have pg queries for executions
from ...models.execution.count_executions import (
count_executions as count_executions_query,
)
@@ -33,7 +34,7 @@
from ...models.execution.update_execution import (
update_execution as update_execution_query,
)
-from ...models.task.get_task import get_task as get_task_query
+from ...queries.tasks.get_task import get_task as get_task_query
from .router import router
logger: logging.Logger = logging.getLogger(__name__)
diff --git a/agents-api/agents_api/routers/tasks/get_execution_details.py b/agents-api/agents_api/routers/tasks/get_execution_details.py
index 95bccbc07..87c4e24b9 100644
--- a/agents-api/agents_api/routers/tasks/get_execution_details.py
+++ b/agents-api/agents_api/routers/tasks/get_execution_details.py
@@ -3,6 +3,7 @@
from ...autogen.openapi_model import (
Execution,
)
+# TODO: Change this once we have pg queries for executions
from ...models.execution.get_execution import (
get_execution as get_execution_query,
)
diff --git a/agents-api/agents_api/routers/tasks/get_task_details.py b/agents-api/agents_api/routers/tasks/get_task_details.py
index 9f8008118..35a7ef747 100644
--- a/agents-api/agents_api/routers/tasks/get_task_details.py
+++ b/agents-api/agents_api/routers/tasks/get_task_details.py
@@ -8,7 +8,7 @@
Task,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.task.get_task import get_task as get_task_query
+from ...queries.tasks.get_task import get_task as get_task_query
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/list_execution_transitions.py b/agents-api/agents_api/routers/tasks/list_execution_transitions.py
index 9ce169509..7a394c103 100644
--- a/agents-api/agents_api/routers/tasks/list_execution_transitions.py
+++ b/agents-api/agents_api/routers/tasks/list_execution_transitions.py
@@ -5,6 +5,7 @@
ListResponse,
Transition,
)
+# TODO: Change this once we have pg queries for executions
from ...models.execution.list_execution_transitions import (
list_execution_transitions as list_execution_transitions_query,
)
diff --git a/agents-api/agents_api/routers/tasks/list_task_executions.py b/agents-api/agents_api/routers/tasks/list_task_executions.py
index 72cbd9b40..abe54a0a8 100644
--- a/agents-api/agents_api/routers/tasks/list_task_executions.py
+++ b/agents-api/agents_api/routers/tasks/list_task_executions.py
@@ -8,6 +8,7 @@
ListResponse,
)
from ...dependencies.developer_id import get_developer_id
+# TODO: Change this once we have pg queries for executions
from ...models.execution.list_executions import (
list_executions as list_task_executions_query,
)
diff --git a/agents-api/agents_api/routers/tasks/list_tasks.py b/agents-api/agents_api/routers/tasks/list_tasks.py
index a53983006..2422cdef3 100644
--- a/agents-api/agents_api/routers/tasks/list_tasks.py
+++ b/agents-api/agents_api/routers/tasks/list_tasks.py
@@ -8,7 +8,7 @@
Task,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.task.list_tasks import list_tasks as list_tasks_query
+from ...queries.tasks.list_tasks import list_tasks as list_tasks_query
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/patch_execution.py b/agents-api/agents_api/routers/tasks/patch_execution.py
index 3cc45ee37..b9a8ddcec 100644
--- a/agents-api/agents_api/routers/tasks/patch_execution.py
+++ b/agents-api/agents_api/routers/tasks/patch_execution.py
@@ -8,6 +8,7 @@
UpdateExecutionRequest,
)
from ...dependencies.developer_id import get_developer_id
+# TODO: Change this once we have pg queries for executions
from ...models.execution.update_execution import (
update_execution as update_execution_query,
)
diff --git a/agents-api/agents_api/routers/tasks/stream_transitions_events.py b/agents-api/agents_api/routers/tasks/stream_transitions_events.py
index 37500b0d6..cebc345c9 100644
--- a/agents-api/agents_api/routers/tasks/stream_transitions_events.py
+++ b/agents-api/agents_api/routers/tasks/stream_transitions_events.py
@@ -18,6 +18,7 @@
from ...autogen.openapi_model import TransitionEvent
from ...clients.temporal import get_workflow_handle
from ...dependencies.developer_id import get_developer_id
+# TODO: Change this once we have pg queries for executions
from ...models.execution.lookup_temporal_data import lookup_temporal_data
from ...worker.codec import from_payload_data
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/update_execution.py b/agents-api/agents_api/routers/tasks/update_execution.py
index e88c36ed9..08f802f51 100644
--- a/agents-api/agents_api/routers/tasks/update_execution.py
+++ b/agents-api/agents_api/routers/tasks/update_execution.py
@@ -10,6 +10,7 @@
)
from ...clients.temporal import get_client
from ...dependencies.developer_id import get_developer_id
+# TODO: Change this once we have pg queries for executions
from ...models.execution.get_paused_execution_token import (
get_paused_execution_token,
)
diff --git a/monitoring/grafana/provisioning/dashboards/main.yaml b/monitoring/grafana/provisioning/dashboards/main.yaml
new file mode 100755
index 000000000..e69de29bb
From d580873e10fb7a9fb857dfff1de8dd13be1528f1 Mon Sep 17 00:00:00 2001
From: HamadaSalhab
Date: Mon, 23 Dec 2024 06:39:32 +0000
Subject: [PATCH 132/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/routers/tasks/create_task_execution.py | 3 ++-
agents-api/agents_api/routers/tasks/get_execution_details.py | 1 +
.../agents_api/routers/tasks/list_execution_transitions.py | 1 +
agents-api/agents_api/routers/tasks/list_task_executions.py | 1 +
agents-api/agents_api/routers/tasks/patch_execution.py | 1 +
.../agents_api/routers/tasks/stream_transitions_events.py | 1 +
agents-api/agents_api/routers/tasks/update_execution.py | 1 +
7 files changed, 8 insertions(+), 1 deletion(-)
diff --git a/agents-api/agents_api/routers/tasks/create_task_execution.py b/agents-api/agents_api/routers/tasks/create_task_execution.py
index 7fc5c9a79..393c9e6d1 100644
--- a/agents-api/agents_api/routers/tasks/create_task_execution.py
+++ b/agents-api/agents_api/routers/tasks/create_task_execution.py
@@ -21,7 +21,7 @@
from ...common.protocol.developers import Developer
from ...dependencies.developer_id import get_developer_id
from ...env import max_free_executions
-from ...queries.developers.get_developer import get_developer
+
# TODO: Change these once we have pg queries for executions
from ...models.execution.count_executions import (
count_executions as count_executions_query,
@@ -34,6 +34,7 @@
from ...models.execution.update_execution import (
update_execution as update_execution_query,
)
+from ...queries.developers.get_developer import get_developer
from ...queries.tasks.get_task import get_task as get_task_query
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/get_execution_details.py b/agents-api/agents_api/routers/tasks/get_execution_details.py
index 87c4e24b9..a2b219d53 100644
--- a/agents-api/agents_api/routers/tasks/get_execution_details.py
+++ b/agents-api/agents_api/routers/tasks/get_execution_details.py
@@ -3,6 +3,7 @@
from ...autogen.openapi_model import (
Execution,
)
+
# TODO: Change this once we have pg queries for executions
from ...models.execution.get_execution import (
get_execution as get_execution_query,
diff --git a/agents-api/agents_api/routers/tasks/list_execution_transitions.py b/agents-api/agents_api/routers/tasks/list_execution_transitions.py
index 7a394c103..8d3fb586c 100644
--- a/agents-api/agents_api/routers/tasks/list_execution_transitions.py
+++ b/agents-api/agents_api/routers/tasks/list_execution_transitions.py
@@ -5,6 +5,7 @@
ListResponse,
Transition,
)
+
# TODO: Change this once we have pg queries for executions
from ...models.execution.list_execution_transitions import (
list_execution_transitions as list_execution_transitions_query,
diff --git a/agents-api/agents_api/routers/tasks/list_task_executions.py b/agents-api/agents_api/routers/tasks/list_task_executions.py
index abe54a0a8..aad2cf124 100644
--- a/agents-api/agents_api/routers/tasks/list_task_executions.py
+++ b/agents-api/agents_api/routers/tasks/list_task_executions.py
@@ -8,6 +8,7 @@
ListResponse,
)
from ...dependencies.developer_id import get_developer_id
+
# TODO: Change this once we have pg queries for executions
from ...models.execution.list_executions import (
list_executions as list_task_executions_query,
diff --git a/agents-api/agents_api/routers/tasks/patch_execution.py b/agents-api/agents_api/routers/tasks/patch_execution.py
index b9a8ddcec..9fbb2f296 100644
--- a/agents-api/agents_api/routers/tasks/patch_execution.py
+++ b/agents-api/agents_api/routers/tasks/patch_execution.py
@@ -8,6 +8,7 @@
UpdateExecutionRequest,
)
from ...dependencies.developer_id import get_developer_id
+
# TODO: Change this once we have pg queries for executions
from ...models.execution.update_execution import (
update_execution as update_execution_query,
diff --git a/agents-api/agents_api/routers/tasks/stream_transitions_events.py b/agents-api/agents_api/routers/tasks/stream_transitions_events.py
index cebc345c9..b3b469c9e 100644
--- a/agents-api/agents_api/routers/tasks/stream_transitions_events.py
+++ b/agents-api/agents_api/routers/tasks/stream_transitions_events.py
@@ -18,6 +18,7 @@
from ...autogen.openapi_model import TransitionEvent
from ...clients.temporal import get_workflow_handle
from ...dependencies.developer_id import get_developer_id
+
# TODO: Change this once we have pg queries for executions
from ...models.execution.lookup_temporal_data import lookup_temporal_data
from ...worker.codec import from_payload_data
diff --git a/agents-api/agents_api/routers/tasks/update_execution.py b/agents-api/agents_api/routers/tasks/update_execution.py
index 08f802f51..f58b65533 100644
--- a/agents-api/agents_api/routers/tasks/update_execution.py
+++ b/agents-api/agents_api/routers/tasks/update_execution.py
@@ -10,6 +10,7 @@
)
from ...clients.temporal import get_client
from ...dependencies.developer_id import get_developer_id
+
# TODO: Change this once we have pg queries for executions
from ...models.execution.get_paused_execution_token import (
get_paused_execution_token,
From 14b57617ea7edaca658feea1f5a5e94c7851d1f6 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Mon, 23 Dec 2024 09:54:48 +0300
Subject: [PATCH 133/274] wip
---
agents-api/agents_api/routers/agents/list_agents.py | 2 +-
agents-api/agents_api/routers/agents/patch_agent.py | 2 +-
agents-api/agents_api/routers/agents/update_agent.py | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/agents-api/agents_api/routers/agents/list_agents.py b/agents-api/agents_api/routers/agents/list_agents.py
index b96bec089..37b14ebad 100644
--- a/agents-api/agents_api/routers/agents/list_agents.py
+++ b/agents-api/agents_api/routers/agents/list_agents.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import Agent, ListResponse
from ...dependencies.developer_id import get_developer_id
from ...dependencies.query_filter import MetadataFilter, create_filter_extractor
-from ...models.agent.list_agents import list_agents as list_agents_query
+from ...queries.agents.list_agents import list_agents as list_agents_query
from .router import router
diff --git a/agents-api/agents_api/routers/agents/patch_agent.py b/agents-api/agents_api/routers/agents/patch_agent.py
index f31f2c63e..b78edc2e5 100644
--- a/agents-api/agents_api/routers/agents/patch_agent.py
+++ b/agents-api/agents_api/routers/agents/patch_agent.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
from ...dependencies.developer_id import get_developer_id
-from ...models.agent.patch_agent import patch_agent as patch_agent_query
+from ...queries.agents.patch_agent import patch_agent as patch_agent_query
from .router import router
diff --git a/agents-api/agents_api/routers/agents/update_agent.py b/agents-api/agents_api/routers/agents/update_agent.py
index d878b7d6b..2c5235971 100644
--- a/agents-api/agents_api/routers/agents/update_agent.py
+++ b/agents-api/agents_api/routers/agents/update_agent.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
from ...dependencies.developer_id import get_developer_id
-from ...models.agent.update_agent import update_agent as update_agent_query
+from ...queries.agents.update_agent import update_agent as update_agent_query
from .router import router
From 5887e12050d47c33e6763240e02c7ac33505c00d Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Mon, 23 Dec 2024 06:57:30 +0000
Subject: [PATCH 134/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/executions/count_executions.py | 1 +
agents-api/agents_api/queries/executions/get_execution.py | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/agents-api/agents_api/queries/executions/count_executions.py b/agents-api/agents_api/queries/executions/count_executions.py
index 5ec29a8b6..764ef6826 100644
--- a/agents-api/agents_api/queries/executions/count_executions.py
+++ b/agents-api/agents_api/queries/executions/count_executions.py
@@ -21,6 +21,7 @@
"""
)
+
# @rewrap_exceptions(
# {
# QueryException: partialclass(HTTPException, status_code=400),
diff --git a/agents-api/agents_api/queries/executions/get_execution.py b/agents-api/agents_api/queries/executions/get_execution.py
index 474e0c63d..4fd948683 100644
--- a/agents-api/agents_api/queries/executions/get_execution.py
+++ b/agents-api/agents_api/queries/executions/get_execution.py
@@ -1,9 +1,9 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-import sqlvalidator
from ...autogen.openapi_model import Execution
from ..utils import (
pg_query,
From ebe9922ec3944365ca35765276e7f84b598f6d3d Mon Sep 17 00:00:00 2001
From: HamadaSalhab
Date: Mon, 23 Dec 2024 10:01:31 +0300
Subject: [PATCH 135/274] chore: configure `sessions` router with pg queries
---
agents-api/agents_api/routers/sessions/chat.py | 8 ++++----
.../routers/sessions/create_or_update_session.py | 2 +-
agents-api/agents_api/routers/sessions/create_session.py | 2 +-
agents-api/agents_api/routers/sessions/delete_session.py | 2 +-
agents-api/agents_api/routers/sessions/get_session.py | 2 +-
.../agents_api/routers/sessions/get_session_history.py | 2 +-
agents-api/agents_api/routers/sessions/list_sessions.py | 2 +-
agents-api/agents_api/routers/sessions/patch_session.py | 2 +-
agents-api/agents_api/routers/sessions/update_session.py | 2 +-
9 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/agents-api/agents_api/routers/sessions/chat.py b/agents-api/agents_api/routers/sessions/chat.py
index 7cf1110fb..63da93dcd 100644
--- a/agents-api/agents_api/routers/sessions/chat.py
+++ b/agents-api/agents_api/routers/sessions/chat.py
@@ -19,10 +19,10 @@
from ...common.utils.template import render_template
from ...dependencies.developer_id import get_developer_data
from ...env import max_free_sessions
-from ...models.chat.gather_messages import gather_messages
-from ...models.chat.prepare_chat_context import prepare_chat_context
-from ...models.entry.create_entries import create_entries
-from ...models.session.count_sessions import count_sessions as count_sessions_query
+from ...queries.chat.gather_messages import gather_messages
+from ...queries.chat.prepare_chat_context import prepare_chat_context
+from ...queries.entries.create_entries import create_entries
+from ...queries.sessions.count_sessions import count_sessions as count_sessions_query
from .metrics import total_tokens_per_user
from .router import router
diff --git a/agents-api/agents_api/routers/sessions/create_or_update_session.py b/agents-api/agents_api/routers/sessions/create_or_update_session.py
index a4efb0444..576d9d27e 100644
--- a/agents-api/agents_api/routers/sessions/create_or_update_session.py
+++ b/agents-api/agents_api/routers/sessions/create_or_update_session.py
@@ -9,7 +9,7 @@
ResourceUpdatedResponse,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.session.create_or_update_session import (
+from ...queries.sessions.create_or_update_session import (
create_or_update_session as create_session_query,
)
from .router import router
diff --git a/agents-api/agents_api/routers/sessions/create_session.py b/agents-api/agents_api/routers/sessions/create_session.py
index a83b71d5a..3dd52ac14 100644
--- a/agents-api/agents_api/routers/sessions/create_session.py
+++ b/agents-api/agents_api/routers/sessions/create_session.py
@@ -9,7 +9,7 @@
ResourceCreatedResponse,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.session.create_session import create_session as create_session_query
+from ...queries.sessions.create_session import create_session as create_session_query
from .router import router
diff --git a/agents-api/agents_api/routers/sessions/delete_session.py b/agents-api/agents_api/routers/sessions/delete_session.py
index 1a664a871..a9d5450d4 100644
--- a/agents-api/agents_api/routers/sessions/delete_session.py
+++ b/agents-api/agents_api/routers/sessions/delete_session.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import ResourceDeletedResponse
from ...dependencies.developer_id import get_developer_id
-from ...models.session.delete_session import delete_session as delete_session_query
+from ...queries.sessions.delete_session import delete_session as delete_session_query
from .router import router
diff --git a/agents-api/agents_api/routers/sessions/get_session.py b/agents-api/agents_api/routers/sessions/get_session.py
index df70a8f72..cce88071b 100644
--- a/agents-api/agents_api/routers/sessions/get_session.py
+++ b/agents-api/agents_api/routers/sessions/get_session.py
@@ -5,7 +5,7 @@
from ...autogen.openapi_model import Session
from ...dependencies.developer_id import get_developer_id
-from ...models.session.get_session import get_session as get_session_query
+from ...queries.sessions.get_session import get_session as get_session_query
from .router import router
diff --git a/agents-api/agents_api/routers/sessions/get_session_history.py b/agents-api/agents_api/routers/sessions/get_session_history.py
index fa993975b..0a76176d1 100644
--- a/agents-api/agents_api/routers/sessions/get_session_history.py
+++ b/agents-api/agents_api/routers/sessions/get_session_history.py
@@ -5,7 +5,7 @@
from ...autogen.openapi_model import History
from ...dependencies.developer_id import get_developer_id
-from ...models.entry.get_history import get_history as get_history_query
+from ...queries.entries.get_history import get_history as get_history_query
from .router import router
diff --git a/agents-api/agents_api/routers/sessions/list_sessions.py b/agents-api/agents_api/routers/sessions/list_sessions.py
index fc9cd2e99..f5a806d06 100644
--- a/agents-api/agents_api/routers/sessions/list_sessions.py
+++ b/agents-api/agents_api/routers/sessions/list_sessions.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import ListResponse, Session
from ...dependencies.developer_id import get_developer_id
from ...dependencies.query_filter import MetadataFilter, create_filter_extractor
-from ...models.session.list_sessions import list_sessions as list_sessions_query
+from ...queries.sessions.list_sessions import list_sessions as list_sessions_query
from .router import router
diff --git a/agents-api/agents_api/routers/sessions/patch_session.py b/agents-api/agents_api/routers/sessions/patch_session.py
index 8eefab4dc..eeda3af65 100644
--- a/agents-api/agents_api/routers/sessions/patch_session.py
+++ b/agents-api/agents_api/routers/sessions/patch_session.py
@@ -8,7 +8,7 @@
ResourceUpdatedResponse,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.session.patch_session import patch_session as patch_session_query
+from ...queries.sessions.patch_session import patch_session as patch_session_query
from .router import router
diff --git a/agents-api/agents_api/routers/sessions/update_session.py b/agents-api/agents_api/routers/sessions/update_session.py
index f35368d84..598a2b4d8 100644
--- a/agents-api/agents_api/routers/sessions/update_session.py
+++ b/agents-api/agents_api/routers/sessions/update_session.py
@@ -8,7 +8,7 @@
UpdateSessionRequest,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.session.update_session import update_session as update_session_query
+from ...queries.sessions.update_session import update_session as update_session_query
from .router import router
From 19e96ba705df38c41ad015d6fd2fce34341745bd Mon Sep 17 00:00:00 2001
From: HamadaSalhab
Date: Mon, 23 Dec 2024 10:05:17 +0300
Subject: [PATCH 136/274] chore: configure `healthz` router with pg queries
---
agents-api/agents_api/routers/healthz/check_health.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/agents-api/agents_api/routers/healthz/check_health.py b/agents-api/agents_api/routers/healthz/check_health.py
index 5a466ba39..a031f3a46 100644
--- a/agents-api/agents_api/routers/healthz/check_health.py
+++ b/agents-api/agents_api/routers/healthz/check_health.py
@@ -1,7 +1,7 @@
import logging
from uuid import UUID
-from ...models.agent.list_agents import list_agents as list_agents_query
+from ...queries.agents.list_agents import list_agents as list_agents_query
from .router import router
From d3e8831be79a9c34466a959e2b987b1becf90055 Mon Sep 17 00:00:00 2001
From: HamadaSalhab
Date: Mon, 23 Dec 2024 10:06:02 +0300
Subject: [PATCH 137/274] chore: configure `files` router with pg queries
---
agents-api/agents_api/routers/files/create_file.py | 2 +-
agents-api/agents_api/routers/files/delete_file.py | 2 +-
agents-api/agents_api/routers/files/get_file.py | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/agents-api/agents_api/routers/files/create_file.py b/agents-api/agents_api/routers/files/create_file.py
index 80d80e6f3..1be9eff90 100644
--- a/agents-api/agents_api/routers/files/create_file.py
+++ b/agents-api/agents_api/routers/files/create_file.py
@@ -12,7 +12,7 @@
)
from ...clients import async_s3
from ...dependencies.developer_id import get_developer_id
-from ...models.files.create_file import create_file as create_file_query
+from ...queries.files.create_file import create_file as create_file_query
from .router import router
diff --git a/agents-api/agents_api/routers/files/delete_file.py b/agents-api/agents_api/routers/files/delete_file.py
index fbe10290e..da8584438 100644
--- a/agents-api/agents_api/routers/files/delete_file.py
+++ b/agents-api/agents_api/routers/files/delete_file.py
@@ -7,7 +7,7 @@
from ...autogen.openapi_model import ResourceDeletedResponse
from ...clients import async_s3
from ...dependencies.developer_id import get_developer_id
-from ...models.files.delete_file import delete_file as delete_file_query
+from ...queries.files.delete_file import delete_file as delete_file_query
from .router import router
diff --git a/agents-api/agents_api/routers/files/get_file.py b/agents-api/agents_api/routers/files/get_file.py
index cc5dcdc35..a0007ba4e 100644
--- a/agents-api/agents_api/routers/files/get_file.py
+++ b/agents-api/agents_api/routers/files/get_file.py
@@ -7,7 +7,7 @@
from ...autogen.openapi_model import File
from ...clients import async_s3
from ...dependencies.developer_id import get_developer_id
-from ...models.files.get_file import get_file as get_file_query
+from ...queries.files.get_file import get_file as get_file_query
from .router import router
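
Editor's note: the files router splits responsibilities, with row-level metadata coming from the `queries.files` functions and the payload itself living in blob storage behind `async_s3`. A sketch of the `get_file` flow under that split, using hypothetical stubs for both halves:

    from uuid import UUID

    # Hypothetical stubs: get_file_query stands in for queries.files.get_file,
    # fetch_file_content for the async_s3-backed content read.
    def get_file_query(*, developer_id: UUID, file_id: UUID) -> dict:
        return {"id": str(file_id), "content": None}

    async def fetch_file_content(file_id: UUID) -> str:
        return "<base64-encoded-bytes>"

    # Metadata from the relational query, payload from blob storage.
    async def get_file(file_id: UUID, x_developer_id: UUID) -> dict:
        file = get_file_query(developer_id=x_developer_id, file_id=file_id)
        file["content"] = await fetch_file_content(file_id)
        return file

Patch 142 later awaits `get_file_query` as well, once the query layer goes async.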
From 985384689f349c4eb25fed0905a6543efe923900 Mon Sep 17 00:00:00 2001
From: HamadaSalhab
Date: Mon, 23 Dec 2024 10:13:49 +0300
Subject: [PATCH 138/274] chore: use `executions` pg queries in `tasks` router
---
.../agents_api/routers/tasks/create_task_execution.py | 11 +++++------
.../agents_api/routers/tasks/get_execution_details.py | 3 +--
.../routers/tasks/list_execution_transitions.py | 3 +--
.../agents_api/routers/tasks/list_task_executions.py | 3 +--
.../agents_api/routers/tasks/patch_execution.py | 3 +--
.../routers/tasks/stream_transitions_events.py | 3 +--
.../agents_api/routers/tasks/update_execution.py | 5 ++---
7 files changed, 12 insertions(+), 19 deletions(-)
diff --git a/agents-api/agents_api/routers/tasks/create_task_execution.py b/agents-api/agents_api/routers/tasks/create_task_execution.py
index 393c9e6d1..c02ba1c7c 100644
--- a/agents-api/agents_api/routers/tasks/create_task_execution.py
+++ b/agents-api/agents_api/routers/tasks/create_task_execution.py
@@ -22,16 +22,15 @@
from ...dependencies.developer_id import get_developer_id
from ...env import max_free_executions
-# TODO: Change these once we have pg queries for executions
-from ...models.execution.count_executions import (
+from ...queries.executions.count_executions import (
count_executions as count_executions_query,
)
-from ...models.execution.create_execution import (
+from ...queries.executions.create_execution import (
create_execution as create_execution_query,
)
-from ...models.execution.create_temporal_lookup import create_temporal_lookup
-from ...models.execution.prepare_execution_input import prepare_execution_input
-from ...models.execution.update_execution import (
+from ...queries.executions.create_temporal_lookup import create_temporal_lookup
+from ...queries.executions.prepare_execution_input import prepare_execution_input
+from ...queries.executions.update_execution import (
update_execution as update_execution_query,
)
from ...queries.developers.get_developer import get_developer
diff --git a/agents-api/agents_api/routers/tasks/get_execution_details.py b/agents-api/agents_api/routers/tasks/get_execution_details.py
index a2b219d53..ca0ced01e 100644
--- a/agents-api/agents_api/routers/tasks/get_execution_details.py
+++ b/agents-api/agents_api/routers/tasks/get_execution_details.py
@@ -4,8 +4,7 @@
Execution,
)
-# TODO: Change this once we have pg queries for executions
-from ...models.execution.get_execution import (
+from ...queries.executions.get_execution import (
get_execution as get_execution_query,
)
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/list_execution_transitions.py b/agents-api/agents_api/routers/tasks/list_execution_transitions.py
index 8d3fb586c..b8ea0dc90 100644
--- a/agents-api/agents_api/routers/tasks/list_execution_transitions.py
+++ b/agents-api/agents_api/routers/tasks/list_execution_transitions.py
@@ -6,8 +6,7 @@
Transition,
)
-# TODO: Change this once we have pg queries for executions
-from ...models.execution.list_execution_transitions import (
+from ...queries.executions.list_execution_transitions import (
list_execution_transitions as list_execution_transitions_query,
)
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/list_task_executions.py b/agents-api/agents_api/routers/tasks/list_task_executions.py
index aad2cf124..1cf3c882a 100644
--- a/agents-api/agents_api/routers/tasks/list_task_executions.py
+++ b/agents-api/agents_api/routers/tasks/list_task_executions.py
@@ -9,8 +9,7 @@
)
from ...dependencies.developer_id import get_developer_id
-# TODO: Change this once we have pg queries for executions
-from ...models.execution.list_executions import (
+from ...queries.executions.list_executions import (
list_executions as list_task_executions_query,
)
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/patch_execution.py b/agents-api/agents_api/routers/tasks/patch_execution.py
index 9fbb2f296..1f37b03da 100644
--- a/agents-api/agents_api/routers/tasks/patch_execution.py
+++ b/agents-api/agents_api/routers/tasks/patch_execution.py
@@ -9,8 +9,7 @@
)
from ...dependencies.developer_id import get_developer_id
-# TODO: Change this once we have pg queries for executions
-from ...models.execution.update_execution import (
+from ...queries.executions.update_execution import (
update_execution as update_execution_query,
)
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/stream_transitions_events.py b/agents-api/agents_api/routers/tasks/stream_transitions_events.py
index b3b469c9e..fd4cf0406 100644
--- a/agents-api/agents_api/routers/tasks/stream_transitions_events.py
+++ b/agents-api/agents_api/routers/tasks/stream_transitions_events.py
@@ -19,8 +19,7 @@
from ...clients.temporal import get_workflow_handle
from ...dependencies.developer_id import get_developer_id
-# TODO: Change this once we have pg queries for executions
-from ...models.execution.lookup_temporal_data import lookup_temporal_data
+from ...queries.executions.lookup_temporal_data import lookup_temporal_data
from ...worker.codec import from_payload_data
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/update_execution.py b/agents-api/agents_api/routers/tasks/update_execution.py
index f58b65533..1b3712ea1 100644
--- a/agents-api/agents_api/routers/tasks/update_execution.py
+++ b/agents-api/agents_api/routers/tasks/update_execution.py
@@ -11,11 +11,10 @@
from ...clients.temporal import get_client
from ...dependencies.developer_id import get_developer_id
-# TODO: Change this once we have pg queries for executions
-from ...models.execution.get_paused_execution_token import (
+from ...queries.executions.get_paused_execution_token import (
get_paused_execution_token,
)
-from ...models.execution.get_temporal_workflow_data import (
+from ...queries.executions.get_temporal_workflow_data import (
get_temporal_workflow_data,
)
from .router import router
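
Editor's note: one reason for the `as *_query` aliasing visible throughout these hunks is that several route handlers share a bare name with the query function they call (`update_execution` is the clearest case), and the alias keeps the import from shadowing the handler at module scope. An illustrative sketch, not the repo's actual code:

    # Query-layer function, imported in the router as
    #   from ...queries.executions.update_execution import (
    #       update_execution as update_execution_query,
    #   )
    def update_execution_query(*, developer_id, task_id, execution_id, data):
        return {"id": str(execution_id), **data}

    # Route handler with the same bare name; without the alias the import
    # would overwrite this definition (or vice versa) in the module namespace.
    async def update_execution(developer_id, task_id, execution_id, data):
        return update_execution_query(
            developer_id=developer_id,
            task_id=task_id,
            execution_id=execution_id,
            data=data,
        )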
From e9760602c015a645a64a08d38e4b8155f7f50688 Mon Sep 17 00:00:00 2001
From: HamadaSalhab
Date: Mon, 23 Dec 2024 07:14:47 +0000
Subject: [PATCH 139/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/routers/tasks/create_task_execution.py | 3 +--
agents-api/agents_api/routers/tasks/get_execution_details.py | 1 -
.../agents_api/routers/tasks/list_execution_transitions.py | 1 -
agents-api/agents_api/routers/tasks/list_task_executions.py | 1 -
agents-api/agents_api/routers/tasks/patch_execution.py | 1 -
.../agents_api/routers/tasks/stream_transitions_events.py | 1 -
agents-api/agents_api/routers/tasks/update_execution.py | 1 -
7 files changed, 1 insertion(+), 8 deletions(-)
diff --git a/agents-api/agents_api/routers/tasks/create_task_execution.py b/agents-api/agents_api/routers/tasks/create_task_execution.py
index c02ba1c7c..eb08c90c0 100644
--- a/agents-api/agents_api/routers/tasks/create_task_execution.py
+++ b/agents-api/agents_api/routers/tasks/create_task_execution.py
@@ -21,7 +21,7 @@
from ...common.protocol.developers import Developer
from ...dependencies.developer_id import get_developer_id
from ...env import max_free_executions
-
+from ...queries.developers.get_developer import get_developer
from ...queries.executions.count_executions import (
count_executions as count_executions_query,
)
@@ -33,7 +33,6 @@
from ...queries.executions.update_execution import (
update_execution as update_execution_query,
)
-from ...queries.developers.get_developer import get_developer
from ...queries.tasks.get_task import get_task as get_task_query
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/get_execution_details.py b/agents-api/agents_api/routers/tasks/get_execution_details.py
index ca0ced01e..387cf41c0 100644
--- a/agents-api/agents_api/routers/tasks/get_execution_details.py
+++ b/agents-api/agents_api/routers/tasks/get_execution_details.py
@@ -3,7 +3,6 @@
from ...autogen.openapi_model import (
Execution,
)
-
from ...queries.executions.get_execution import (
get_execution as get_execution_query,
)
diff --git a/agents-api/agents_api/routers/tasks/list_execution_transitions.py b/agents-api/agents_api/routers/tasks/list_execution_transitions.py
index b8ea0dc90..460e4e764 100644
--- a/agents-api/agents_api/routers/tasks/list_execution_transitions.py
+++ b/agents-api/agents_api/routers/tasks/list_execution_transitions.py
@@ -5,7 +5,6 @@
ListResponse,
Transition,
)
-
from ...queries.executions.list_execution_transitions import (
list_execution_transitions as list_execution_transitions_query,
)
diff --git a/agents-api/agents_api/routers/tasks/list_task_executions.py b/agents-api/agents_api/routers/tasks/list_task_executions.py
index 1cf3c882a..658904efa 100644
--- a/agents-api/agents_api/routers/tasks/list_task_executions.py
+++ b/agents-api/agents_api/routers/tasks/list_task_executions.py
@@ -8,7 +8,6 @@
ListResponse,
)
from ...dependencies.developer_id import get_developer_id
-
from ...queries.executions.list_executions import (
list_executions as list_task_executions_query,
)
diff --git a/agents-api/agents_api/routers/tasks/patch_execution.py b/agents-api/agents_api/routers/tasks/patch_execution.py
index 1f37b03da..3b4b91c8c 100644
--- a/agents-api/agents_api/routers/tasks/patch_execution.py
+++ b/agents-api/agents_api/routers/tasks/patch_execution.py
@@ -8,7 +8,6 @@
UpdateExecutionRequest,
)
from ...dependencies.developer_id import get_developer_id
-
from ...queries.executions.update_execution import (
update_execution as update_execution_query,
)
diff --git a/agents-api/agents_api/routers/tasks/stream_transitions_events.py b/agents-api/agents_api/routers/tasks/stream_transitions_events.py
index fd4cf0406..61168cd86 100644
--- a/agents-api/agents_api/routers/tasks/stream_transitions_events.py
+++ b/agents-api/agents_api/routers/tasks/stream_transitions_events.py
@@ -18,7 +18,6 @@
from ...autogen.openapi_model import TransitionEvent
from ...clients.temporal import get_workflow_handle
from ...dependencies.developer_id import get_developer_id
-
from ...queries.executions.lookup_temporal_data import lookup_temporal_data
from ...worker.codec import from_payload_data
from .router import router
diff --git a/agents-api/agents_api/routers/tasks/update_execution.py b/agents-api/agents_api/routers/tasks/update_execution.py
index 1b3712ea1..613958919 100644
--- a/agents-api/agents_api/routers/tasks/update_execution.py
+++ b/agents-api/agents_api/routers/tasks/update_execution.py
@@ -10,7 +10,6 @@
)
from ...clients.temporal import get_client
from ...dependencies.developer_id import get_developer_id
-
from ...queries.executions.get_paused_execution_token import (
get_paused_execution_token,
)
From 8d40526c2380dc9e157f588108b2fd899b77df63 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Mon, 23 Dec 2024 10:20:31 +0300
Subject: [PATCH 140/274] fix(agents-api): Make query functions async
---
.../agents_api/queries/executions/count_executions.py | 2 +-
.../agents_api/queries/executions/create_execution.py | 2 +-
.../queries/executions/create_execution_transition.py | 1 -
.../queries/executions/create_temporal_lookup.py | 2 +-
.../agents_api/queries/executions/get_execution.py | 2 +-
.../queries/executions/get_execution_transition.py | 2 +-
.../queries/executions/get_paused_execution_token.py | 2 +-
.../queries/executions/get_temporal_workflow_data.py | 2 +-
.../queries/executions/list_execution_transitions.py | 2 +-
.../agents_api/queries/executions/list_executions.py | 2 +-
.../queries/executions/lookup_temporal_data.py | 2 +-
.../queries/executions/prepare_execution_input.py | 2 +-
.../agents_api/queries/executions/update_execution.py | 2 +-
agents-api/agents_api/queries/tools/create_tools.py | 2 +-
agents-api/agents_api/queries/tools/delete_tool.py | 2 +-
agents-api/agents_api/queries/tools/get_tool.py | 2 +-
.../queries/tools/get_tool_args_from_metadata.py | 10 +++++-----
agents-api/agents_api/queries/tools/list_tools.py | 2 +-
agents-api/agents_api/queries/tools/patch_tool.py | 2 +-
agents-api/agents_api/queries/tools/update_tool.py | 2 +-
.../agents_api/routers/agents/create_agent_tool.py | 3 ++-
.../agents_api/routers/agents/delete_agent_tool.py | 4 ++--
.../agents_api/routers/agents/list_agent_tools.py | 2 +-
.../agents_api/routers/agents/patch_agent_tool.py | 2 +-
.../agents_api/routers/agents/update_agent_tool.py | 2 +-
agents-api/agents_api/routers/docs/create_doc.py | 6 +++---
agents-api/agents_api/routers/docs/delete_doc.py | 2 +-
agents-api/agents_api/routers/docs/get_doc.py | 2 +-
agents-api/agents_api/routers/docs/list_docs.py | 6 +++---
agents-api/agents_api/routers/docs/search_docs.py | 8 ++++----
30 files changed, 42 insertions(+), 42 deletions(-)
diff --git a/agents-api/agents_api/queries/executions/count_executions.py b/agents-api/agents_api/queries/executions/count_executions.py
index 764ef6826..21cc130e2 100644
--- a/agents-api/agents_api/queries/executions/count_executions.py
+++ b/agents-api/agents_api/queries/executions/count_executions.py
@@ -32,7 +32,7 @@
@wrap_in_class(dict, one=True)
@pg_query
@beartype
-def count_executions(
+async def count_executions(
*,
developer_id: UUID,
task_id: UUID,
diff --git a/agents-api/agents_api/queries/executions/create_execution.py b/agents-api/agents_api/queries/executions/create_execution.py
index 59efd7ac3..0b93df318 100644
--- a/agents-api/agents_api/queries/executions/create_execution.py
+++ b/agents-api/agents_api/queries/executions/create_execution.py
@@ -41,7 +41,7 @@
@cozo_query
@increase_counter("create_execution")
@beartype
-def create_execution(
+async def create_execution(
*,
developer_id: UUID,
task_id: UUID,
diff --git a/agents-api/agents_api/queries/executions/create_execution_transition.py b/agents-api/agents_api/queries/executions/create_execution_transition.py
index 5cbcb97bc..cb799072a 100644
--- a/agents-api/agents_api/queries/executions/create_execution_transition.py
+++ b/agents-api/agents_api/queries/executions/create_execution_transition.py
@@ -25,7 +25,6 @@
)
from .update_execution import update_execution
-
@beartype
def _create_execution_transition(
*,
diff --git a/agents-api/agents_api/queries/executions/create_temporal_lookup.py b/agents-api/agents_api/queries/executions/create_temporal_lookup.py
index e47a505db..7d694cca1 100644
--- a/agents-api/agents_api/queries/executions/create_temporal_lookup.py
+++ b/agents-api/agents_api/queries/executions/create_temporal_lookup.py
@@ -31,7 +31,7 @@
@cozo_query
@increase_counter("create_temporal_lookup")
@beartype
-def create_temporal_lookup(
+async def create_temporal_lookup(
*,
developer_id: UUID,
execution_id: UUID,
diff --git a/agents-api/agents_api/queries/executions/get_execution.py b/agents-api/agents_api/queries/executions/get_execution.py
index 4fd948683..cf2bfad46 100644
--- a/agents-api/agents_api/queries/executions/get_execution.py
+++ b/agents-api/agents_api/queries/executions/get_execution.py
@@ -42,7 +42,7 @@
)
@pg_query
@beartype
-def get_execution(
+async def get_execution(
*,
execution_id: UUID,
) -> tuple[str, dict]:
diff --git a/agents-api/agents_api/queries/executions/get_execution_transition.py b/agents-api/agents_api/queries/executions/get_execution_transition.py
index e2b38789a..545ed615d 100644
--- a/agents-api/agents_api/queries/executions/get_execution_transition.py
+++ b/agents-api/agents_api/queries/executions/get_execution_transition.py
@@ -30,7 +30,7 @@
@wrap_in_class(Transition, one=True)
@cozo_query
@beartype
-def get_execution_transition(
+async def get_execution_transition(
*,
developer_id: UUID,
transition_id: UUID | None = None,
diff --git a/agents-api/agents_api/queries/executions/get_paused_execution_token.py b/agents-api/agents_api/queries/executions/get_paused_execution_token.py
index 6c32c7692..43121acb1 100644
--- a/agents-api/agents_api/queries/executions/get_paused_execution_token.py
+++ b/agents-api/agents_api/queries/executions/get_paused_execution_token.py
@@ -29,7 +29,7 @@
@wrap_in_class(dict, one=True)
@cozo_query
@beartype
-def get_paused_execution_token(
+async def get_paused_execution_token(
*,
developer_id: UUID,
execution_id: UUID,
diff --git a/agents-api/agents_api/queries/executions/get_temporal_workflow_data.py b/agents-api/agents_api/queries/executions/get_temporal_workflow_data.py
index 8b1bf4604..69af9810c 100644
--- a/agents-api/agents_api/queries/executions/get_temporal_workflow_data.py
+++ b/agents-api/agents_api/queries/executions/get_temporal_workflow_data.py
@@ -27,7 +27,7 @@
@wrap_in_class(dict, one=True)
@cozo_query
@beartype
-def get_temporal_workflow_data(
+async def get_temporal_workflow_data(
*,
execution_id: UUID,
) -> tuple[str, dict]:
diff --git a/agents-api/agents_api/queries/executions/list_execution_transitions.py b/agents-api/agents_api/queries/executions/list_execution_transitions.py
index 8931676f6..f6b022077 100644
--- a/agents-api/agents_api/queries/executions/list_execution_transitions.py
+++ b/agents-api/agents_api/queries/executions/list_execution_transitions.py
@@ -23,7 +23,7 @@
@wrap_in_class(Transition)
@cozo_query
@beartype
-def list_execution_transitions(
+async def list_execution_transitions(
*,
execution_id: UUID,
limit: int = 100,
diff --git a/agents-api/agents_api/queries/executions/list_executions.py b/agents-api/agents_api/queries/executions/list_executions.py
index 64add074f..b7a2b749a 100644
--- a/agents-api/agents_api/queries/executions/list_executions.py
+++ b/agents-api/agents_api/queries/executions/list_executions.py
@@ -39,7 +39,7 @@
)
@cozo_query
@beartype
-def list_executions(
+async def list_executions(
*,
developer_id: UUID,
task_id: UUID,
diff --git a/agents-api/agents_api/queries/executions/lookup_temporal_data.py b/agents-api/agents_api/queries/executions/lookup_temporal_data.py
index 35f09129b..98afd7b92 100644
--- a/agents-api/agents_api/queries/executions/lookup_temporal_data.py
+++ b/agents-api/agents_api/queries/executions/lookup_temporal_data.py
@@ -29,7 +29,7 @@
@wrap_in_class(dict, one=True)
@cozo_query
@beartype
-def lookup_temporal_data(
+async def lookup_temporal_data(
*,
developer_id: UUID,
execution_id: UUID,
diff --git a/agents-api/agents_api/queries/executions/prepare_execution_input.py b/agents-api/agents_api/queries/executions/prepare_execution_input.py
index 5e841b9f2..b2ad12e6a 100644
--- a/agents-api/agents_api/queries/executions/prepare_execution_input.py
+++ b/agents-api/agents_api/queries/executions/prepare_execution_input.py
@@ -55,7 +55,7 @@
)
@cozo_query
@beartype
-def prepare_execution_input(
+async def prepare_execution_input(
*,
developer_id: UUID,
task_id: UUID,
diff --git a/agents-api/agents_api/queries/executions/update_execution.py b/agents-api/agents_api/queries/executions/update_execution.py
index f33368412..17990cc9f 100644
--- a/agents-api/agents_api/queries/executions/update_execution.py
+++ b/agents-api/agents_api/queries/executions/update_execution.py
@@ -45,7 +45,7 @@
@cozo_query
@increase_counter("update_execution")
@beartype
-def update_execution(
+async def update_execution(
*,
developer_id: UUID,
task_id: UUID,
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index d50e98e80..c8946450b 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -70,7 +70,7 @@
@pg_query
@increase_counter("create_tools")
@beartype
-def create_tools(
+async def create_tools(
*,
developer_id: UUID,
agent_id: UUID,
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index 17535e1e4..0f9a1f69b 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -45,7 +45,7 @@
)
@pg_query
@beartype
-def delete_tool(
+async def delete_tool(
*,
developer_id: UUID,
agent_id: UUID,
diff --git a/agents-api/agents_api/queries/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
index af63be0c9..74895f57d 100644
--- a/agents-api/agents_api/queries/tools/get_tool.py
+++ b/agents-api/agents_api/queries/tools/get_tool.py
@@ -45,7 +45,7 @@
)
@pg_query
@beartype
-def get_tool(
+async def get_tool(
*,
developer_id: UUID,
agent_id: UUID,
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index a8a9dba1a..e0449d1e3 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -9,7 +9,7 @@
)
-def tool_args_for_task(
+async def tool_args_for_task(
*,
developer_id: UUID,
agent_id: UUID,
@@ -50,7 +50,7 @@ def tool_args_for_task(
return (queries, {"agent_id": agent_id, "task_id": task_id})
-def tool_args_for_session(
+async def tool_args_for_session(
*,
developer_id: UUID,
session_id: UUID,
@@ -100,7 +100,7 @@ def tool_args_for_session(
@wrap_in_class(dict, transform=lambda x: x["values"], one=True)
@pg_query
@beartype
-def get_tool_args_from_metadata(
+async def get_tool_args_from_metadata(
*,
developer_id: UUID,
agent_id: UUID,
@@ -118,13 +118,13 @@ def get_tool_args_from_metadata(
match session_id, task_id:
case (None, task_id) if task_id is not None:
- return tool_args_for_task(
+ return await tool_args_for_task(
**common,
task_id=task_id,
)
case (session_id, None) if session_id is not None:
- return tool_args_for_session(
+ return await tool_args_for_session(
**common,
session_id=session_id,
)
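
Editor's note: with `tool_args_for_task` and `tool_args_for_session` now coroutine functions, the `match` dispatch in `get_tool_args_from_metadata` has to await whichever branch it selects. A reduced sketch of that dispatch, with stub helpers and a hypothetical error for the cases this excerpt does not show (neither or both IDs set):

    from uuid import UUID

    # Hypothetical async helpers mirroring the two branches above.
    async def tool_args_for_task(*, developer_id: UUID, agent_id: UUID, task_id: UUID):
        return {"scope": "task", "task_id": str(task_id)}

    async def tool_args_for_session(*, developer_id: UUID, agent_id: UUID, session_id: UUID):
        return {"scope": "session", "session_id": str(session_id)}

    async def get_tool_args_from_metadata(
        *,
        developer_id: UUID,
        agent_id: UUID,
        session_id: UUID | None = None,
        task_id: UUID | None = None,
    ):
        common = {"developer_id": developer_id, "agent_id": agent_id}
        # Exactly one of session_id / task_id selects a branch, and the
        # chosen async helper must now be awaited.
        match session_id, task_id:
            case (None, task_id) if task_id is not None:
                return await tool_args_for_task(**common, task_id=task_id)
            case (session_id, None) if session_id is not None:
                return await tool_args_for_session(**common, session_id=session_id)
            case (_, _):
                # Assumed fallback; the real function's behavior here is not shown.
                raise ValueError("provide exactly one of session_id or task_id")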
diff --git a/agents-api/agents_api/queries/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
index 3dac84875..182257790 100644
--- a/agents-api/agents_api/queries/tools/list_tools.py
+++ b/agents-api/agents_api/queries/tools/list_tools.py
@@ -51,7 +51,7 @@
)
@pg_query
@beartype
-def list_tools(
+async def list_tools(
*,
developer_id: UUID,
agent_id: UUID,
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index aa663dec0..31682bfa1 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -53,7 +53,7 @@
@pg_query
@increase_counter("patch_tool")
@beartype
-def patch_tool(
+async def patch_tool(
*, developer_id: UUID, agent_id: UUID, tool_id: UUID, data: PatchToolRequest
) -> tuple[list[str], list]:
"""
diff --git a/agents-api/agents_api/queries/tools/update_tool.py b/agents-api/agents_api/queries/tools/update_tool.py
index 356e28bbf..97ba82477 100644
--- a/agents-api/agents_api/queries/tools/update_tool.py
+++ b/agents-api/agents_api/queries/tools/update_tool.py
@@ -52,7 +52,7 @@
@pg_query
@increase_counter("update_tool")
@beartype
-def update_tool(
+async def update_tool(
*,
developer_id: UUID,
agent_id: UUID,
diff --git a/agents-api/agents_api/routers/agents/create_agent_tool.py b/agents-api/agents_api/routers/agents/create_agent_tool.py
index 21b8e175a..8719fef14 100644
--- a/agents-api/agents_api/routers/agents/create_agent_tool.py
+++ b/agents-api/agents_api/routers/agents/create_agent_tool.py
@@ -5,6 +5,7 @@
from starlette.status import HTTP_201_CREATED
import agents_api.models as models
+from ...queries.tools.create_tools import create_tools as create_tools_query
from ...autogen.openapi_model import (
CreateToolRequest,
@@ -20,7 +21,7 @@ async def create_agent_tool(
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
data: CreateToolRequest,
) -> ResourceCreatedResponse:
- tool = models.tools.create_tools(
+ tool = await create_tools_query(
developer_id=x_developer_id,
agent_id=agent_id,
data=[data],
diff --git a/agents-api/agents_api/routers/agents/delete_agent_tool.py b/agents-api/agents_api/routers/agents/delete_agent_tool.py
index 772116d64..ab89faa24 100644
--- a/agents-api/agents_api/routers/agents/delete_agent_tool.py
+++ b/agents-api/agents_api/routers/agents/delete_agent_tool.py
@@ -5,7 +5,7 @@
from ...autogen.openapi_model import ResourceDeletedResponse
from ...dependencies.developer_id import get_developer_id
-from ...models.tools.delete_tool import delete_tool
+from ...queries.tools.delete_tool import delete_tool as delete_tool_query
from .router import router
@@ -15,7 +15,7 @@ async def delete_agent_tool(
tool_id: UUID,
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> ResourceDeletedResponse:
- return delete_tool(
+ return delete_tool_query(
developer_id=x_developer_id,
agent_id=agent_id,
tool_id=tool_id,
diff --git a/agents-api/agents_api/routers/agents/list_agent_tools.py b/agents-api/agents_api/routers/agents/list_agent_tools.py
index 59d1a6ade..98f5dd109 100644
--- a/agents-api/agents_api/routers/agents/list_agent_tools.py
+++ b/agents-api/agents_api/routers/agents/list_agent_tools.py
@@ -5,7 +5,7 @@
from ...autogen.openapi_model import ListResponse, Tool
from ...dependencies.developer_id import get_developer_id
-from ...models.tools.list_tools import list_tools as list_tools_query
+from ...queries.tools.list_tools import list_tools as list_tools_query
from .router import router
diff --git a/agents-api/agents_api/routers/agents/patch_agent_tool.py b/agents-api/agents_api/routers/agents/patch_agent_tool.py
index e4031810b..a45349340 100644
--- a/agents-api/agents_api/routers/agents/patch_agent_tool.py
+++ b/agents-api/agents_api/routers/agents/patch_agent_tool.py
@@ -8,7 +8,7 @@
ResourceUpdatedResponse,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.tools.patch_tool import patch_tool as patch_tool_query
+from ...queries.tools.patch_tool import patch_tool as patch_tool_query
from .router import router
diff --git a/agents-api/agents_api/routers/agents/update_agent_tool.py b/agents-api/agents_api/routers/agents/update_agent_tool.py
index b736ea686..7ba66fa53 100644
--- a/agents-api/agents_api/routers/agents/update_agent_tool.py
+++ b/agents-api/agents_api/routers/agents/update_agent_tool.py
@@ -8,7 +8,7 @@
UpdateToolRequest,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.tools.update_tool import update_tool as update_tool_query
+from ...queries.tools.update_tool import update_tool as update_tool_query
from .router import router
diff --git a/agents-api/agents_api/routers/docs/create_doc.py b/agents-api/agents_api/routers/docs/create_doc.py
index ce48b9b86..c514fe9ee 100644
--- a/agents-api/agents_api/routers/docs/create_doc.py
+++ b/agents-api/agents_api/routers/docs/create_doc.py
@@ -12,7 +12,7 @@
from ...common.retry_policies import DEFAULT_RETRY_POLICY
from ...dependencies.developer_id import get_developer_id
from ...env import temporal_task_queue, testing
-from ...models.docs.create_doc import create_doc as create_doc_query
+from ...queries.docs.create_doc import create_doc as create_doc_query
from .router import router
@@ -76,7 +76,7 @@ async def create_user_doc(
ResourceCreatedResponse: The created document.
"""
- doc: Doc = create_doc_query(
+ doc: Doc = await create_doc_query(
developer_id=x_developer_id,
owner_type="user",
owner_id=user_id,
@@ -107,7 +107,7 @@ async def create_agent_doc(
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
background_tasks: BackgroundTasks,
) -> ResourceCreatedResponse:
- doc: Doc = create_doc_query(
+ doc: Doc = await create_doc_query(
developer_id=x_developer_id,
owner_type="agent",
owner_id=agent_id,
diff --git a/agents-api/agents_api/routers/docs/delete_doc.py b/agents-api/agents_api/routers/docs/delete_doc.py
index c67e46447..cbe8413b3 100644
--- a/agents-api/agents_api/routers/docs/delete_doc.py
+++ b/agents-api/agents_api/routers/docs/delete_doc.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import ResourceDeletedResponse
from ...dependencies.developer_id import get_developer_id
-from ...models.docs.delete_doc import delete_doc as delete_doc_query
+from ...queries.docs.delete_doc import delete_doc as delete_doc_query
from .router import router
diff --git a/agents-api/agents_api/routers/docs/get_doc.py b/agents-api/agents_api/routers/docs/get_doc.py
index b120bc867..7df55fac4 100644
--- a/agents-api/agents_api/routers/docs/get_doc.py
+++ b/agents-api/agents_api/routers/docs/get_doc.py
@@ -5,7 +5,7 @@
from ...autogen.openapi_model import Doc
from ...dependencies.developer_id import get_developer_id
-from ...models.docs.get_doc import get_doc as get_doc_query
+from ...queries.docs.get_doc import get_doc as get_doc_query
from .router import router
diff --git a/agents-api/agents_api/routers/docs/list_docs.py b/agents-api/agents_api/routers/docs/list_docs.py
index 2f663a324..5f24e42cd 100644
--- a/agents-api/agents_api/routers/docs/list_docs.py
+++ b/agents-api/agents_api/routers/docs/list_docs.py
@@ -6,7 +6,7 @@
from ...autogen.openapi_model import Doc, ListResponse
from ...dependencies.developer_id import get_developer_id
from ...dependencies.query_filter import MetadataFilter, create_filter_extractor
-from ...models.docs.list_docs import list_docs as list_docs_query
+from ...queries.docs.list_docs import list_docs as list_docs_query
from .router import router
@@ -23,7 +23,7 @@ async def list_user_docs(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
) -> ListResponse[Doc]:
- docs = list_docs_query(
+ docs = await list_docs_query(
developer_id=x_developer_id,
owner_type="user",
owner_id=user_id,
@@ -49,7 +49,7 @@ async def list_agent_docs(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
) -> ListResponse[Doc]:
- docs = list_docs_query(
+ docs = await list_docs_query(
developer_id=x_developer_id,
owner_type="agent",
owner_id=agent_id,
diff --git a/agents-api/agents_api/routers/docs/search_docs.py b/agents-api/agents_api/routers/docs/search_docs.py
index 22bba86a1..d4653920a 100644
--- a/agents-api/agents_api/routers/docs/search_docs.py
+++ b/agents-api/agents_api/routers/docs/search_docs.py
@@ -13,10 +13,10 @@
VectorDocSearchRequest,
)
from ...dependencies.developer_id import get_developer_id
-from ...models.docs.mmr import maximal_marginal_relevance
-from ...models.docs.search_docs_by_embedding import search_docs_by_embedding
-from ...models.docs.search_docs_by_text import search_docs_by_text
-from ...models.docs.search_docs_hybrid import search_docs_hybrid
+from ...queries.docs.mmr import maximal_marginal_relevance
+from ...queries.docs.search_docs_by_embedding import search_docs_by_embedding
+from ...queries.docs.search_docs_by_text import search_docs_by_text
+from ...queries.docs.search_docs_hybrid import search_docs_hybrid
from .router import router
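
Editor's note: patch 140's core mechanical change is `def` to `async def` under the existing decorator stacks, which only works if decorators like `pg_query` and `wrap_in_class` compose cleanly around coroutine functions (note that some hunks still carry `@cozo_query` mid-migration, and `@beartype` / `@increase_counter` are omitted here). A reduced, hypothetical sketch of such a stack; the real decorators execute SQL and hydrate models, which is elided:

    import asyncio
    from functools import wraps
    from uuid import UUID

    def pg_query(fn):
        @wraps(fn)
        async def wrapper(*args, **kwargs):
            sql, params = await fn(*args, **kwargs)  # query builders stay cheap
            # The real decorator would run `sql` against Postgres here.
            return [{"sql": sql, "params": params}]
        return wrapper

    def wrap_in_class(cls, one=False):
        def decorator(fn):
            @wraps(fn)
            async def wrapper(*args, **kwargs):
                rows = await fn(*args, **kwargs)
                out = [cls(r) for r in rows]
                return out[0] if one else out
            return wrapper
        return decorator

    @wrap_in_class(dict, one=True)
    @pg_query
    async def count_executions(*, developer_id: UUID, task_id: UUID):
        # Mirrors the converted signature from the diff; the body is a stub.
        return "SELECT count(*) FROM executions WHERE task_id = $1", {"task_id": task_id}

    print(asyncio.run(count_executions(developer_id=UUID(int=0), task_id=UUID(int=1))))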
From b339e031f9f9529a051af424180f621879b27789 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Mon, 23 Dec 2024 07:21:18 +0000
Subject: [PATCH 141/274] refactor: Lint agents-api (CI)
---
.../queries/executions/create_execution_transition.py | 1 +
agents-api/agents_api/routers/agents/create_agent_tool.py | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/agents-api/agents_api/queries/executions/create_execution_transition.py b/agents-api/agents_api/queries/executions/create_execution_transition.py
index cb799072a..5cbcb97bc 100644
--- a/agents-api/agents_api/queries/executions/create_execution_transition.py
+++ b/agents-api/agents_api/queries/executions/create_execution_transition.py
@@ -25,6 +25,7 @@
)
from .update_execution import update_execution
+
@beartype
def _create_execution_transition(
*,
diff --git a/agents-api/agents_api/routers/agents/create_agent_tool.py b/agents-api/agents_api/routers/agents/create_agent_tool.py
index 8719fef14..c70d7f5c3 100644
--- a/agents-api/agents_api/routers/agents/create_agent_tool.py
+++ b/agents-api/agents_api/routers/agents/create_agent_tool.py
@@ -5,13 +5,13 @@
from starlette.status import HTTP_201_CREATED
import agents_api.models as models
-from ...queries.tools.create_tools import create_tools as create_tools_query
from ...autogen.openapi_model import (
CreateToolRequest,
ResourceCreatedResponse,
)
from ...dependencies.developer_id import get_developer_id
+from ...queries.tools.create_tools import create_tools as create_tools_query
from .router import router
From 14d838d079cc6f0ee9d1150e1bd9454d83d56af7 Mon Sep 17 00:00:00 2001
From: HamadaSalhab
Date: Mon, 23 Dec 2024 10:38:22 +0300
Subject: [PATCH 142/274] chore: await asynchronous query functions in all
routers
---
agents-api/agents_api/routers/files/create_file.py | 2 +-
agents-api/agents_api/routers/files/delete_file.py | 4 +++-
agents-api/agents_api/routers/files/get_file.py | 2 +-
.../agents_api/routers/healthz/check_health.py | 2 +-
agents-api/agents_api/routers/sessions/chat.py | 4 ++--
.../routers/sessions/create_or_update_session.py | 4 ++--
.../agents_api/routers/sessions/create_session.py | 2 +-
.../agents_api/routers/sessions/delete_session.py | 4 +++-
.../agents_api/routers/sessions/get_session.py | 2 +-
.../routers/sessions/get_session_history.py | 2 +-
.../agents_api/routers/sessions/list_sessions.py | 2 +-
.../agents_api/routers/sessions/patch_session.py | 2 +-
.../agents_api/routers/sessions/update_session.py | 2 +-
.../routers/tasks/create_or_update_task.py | 2 +-
agents-api/agents_api/routers/tasks/create_task.py | 2 +-
.../routers/tasks/create_task_execution.py | 12 ++++++------
.../routers/tasks/get_execution_details.py | 2 +-
.../agents_api/routers/tasks/get_task_details.py | 2 +-
.../routers/tasks/list_execution_transitions.py | 2 +-
.../agents_api/routers/tasks/list_task_executions.py | 2 +-
agents-api/agents_api/routers/tasks/list_tasks.py | 2 +-
.../agents_api/routers/tasks/patch_execution.py | 2 +-
.../routers/tasks/stream_transitions_events.py | 2 +-
.../agents_api/routers/tasks/update_execution.py | 4 ++--
.../routers/users/create_or_update_user.py | 2 +-
agents-api/agents_api/routers/users/create_user.py | 2 +-
agents-api/agents_api/routers/users/delete_user.py | 2 +-
.../agents_api/routers/users/get_user_details.py | 2 +-
agents-api/agents_api/routers/users/list_users.py | 2 +-
agents-api/agents_api/routers/users/patch_user.py | 2 +-
agents-api/agents_api/routers/users/update_user.py | 2 +-
31 files changed, 43 insertions(+), 39 deletions(-)
diff --git a/agents-api/agents_api/routers/files/create_file.py b/agents-api/agents_api/routers/files/create_file.py
index 1be9eff90..7adc0b74e 100644
--- a/agents-api/agents_api/routers/files/create_file.py
+++ b/agents-api/agents_api/routers/files/create_file.py
@@ -29,7 +29,7 @@ async def create_file(
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
data: CreateFileRequest,
) -> ResourceCreatedResponse:
- file: File = create_file_query(
+ file: File = await create_file_query(
developer_id=x_developer_id,
data=data,
)
diff --git a/agents-api/agents_api/routers/files/delete_file.py b/agents-api/agents_api/routers/files/delete_file.py
index da8584438..72b4c10a7 100644
--- a/agents-api/agents_api/routers/files/delete_file.py
+++ b/agents-api/agents_api/routers/files/delete_file.py
@@ -22,7 +22,9 @@ async def delete_file_content(file_id: UUID) -> None:
async def delete_file(
file_id: UUID, x_developer_id: Annotated[UUID, Depends(get_developer_id)]
) -> ResourceDeletedResponse:
- resource_deleted = delete_file_query(developer_id=x_developer_id, file_id=file_id)
+ resource_deleted = await delete_file_query(
+ developer_id=x_developer_id, file_id=file_id
+ )
# Delete the file content from blob storage
await delete_file_content(file_id)
diff --git a/agents-api/agents_api/routers/files/get_file.py b/agents-api/agents_api/routers/files/get_file.py
index a0007ba4e..6473fc570 100644
--- a/agents-api/agents_api/routers/files/get_file.py
+++ b/agents-api/agents_api/routers/files/get_file.py
@@ -23,7 +23,7 @@ async def fetch_file_content(file_id: UUID) -> str:
async def get_file(
file_id: UUID, x_developer_id: Annotated[UUID, Depends(get_developer_id)]
) -> File:
- file = get_file_query(developer_id=x_developer_id, file_id=file_id)
+ file = await get_file_query(developer_id=x_developer_id, file_id=file_id)
# Fetch the file content from blob storage
file.content = await fetch_file_content(file.id)
diff --git a/agents-api/agents_api/routers/healthz/check_health.py b/agents-api/agents_api/routers/healthz/check_health.py
index a031f3a46..33fb19eff 100644
--- a/agents-api/agents_api/routers/healthz/check_health.py
+++ b/agents-api/agents_api/routers/healthz/check_health.py
@@ -9,7 +9,7 @@
async def check_health() -> dict:
try:
# Check if the database is reachable
- list_agents_query(
+ await list_agents_query(
developer_id=UUID("00000000-0000-0000-0000-000000000000"),
)
except Exception as e:
diff --git a/agents-api/agents_api/routers/sessions/chat.py b/agents-api/agents_api/routers/sessions/chat.py
index 63da93dcd..a5716fcdb 100644
--- a/agents-api/agents_api/routers/sessions/chat.py
+++ b/agents-api/agents_api/routers/sessions/chat.py
@@ -57,7 +57,7 @@ async def chat(
# check if the developer is paid
if "paid" not in developer.tags:
# get the session length
- sessions = count_sessions_query(developer_id=developer.id)
+ sessions = await count_sessions_query(developer_id=developer.id)
session_length = sessions["count"]
if session_length > max_free_sessions:
raise HTTPException(
@@ -69,7 +69,7 @@ async def chat(
raise NotImplementedError("Streaming is not yet implemented")
# First get the chat context
- chat_context: ChatContext = prepare_chat_context(
+ chat_context: ChatContext = await prepare_chat_context(
developer_id=developer.id,
session_id=session_id,
)
diff --git a/agents-api/agents_api/routers/sessions/create_or_update_session.py b/agents-api/agents_api/routers/sessions/create_or_update_session.py
index 576d9d27e..89201710f 100644
--- a/agents-api/agents_api/routers/sessions/create_or_update_session.py
+++ b/agents-api/agents_api/routers/sessions/create_or_update_session.py
@@ -10,7 +10,7 @@
)
from ...dependencies.developer_id import get_developer_id
from ...queries.sessions.create_or_update_session import (
- create_or_update_session as create_session_query,
+ create_or_update_session as create_or_update_session_query,
)
from .router import router
@@ -21,7 +21,7 @@ async def create_or_update_session(
session_id: UUID,
data: CreateOrUpdateSessionRequest,
) -> ResourceUpdatedResponse:
- session_updated = create_session_query(
+ session_updated = await create_or_update_session_query(
developer_id=x_developer_id,
session_id=session_id,
data=data,
diff --git a/agents-api/agents_api/routers/sessions/create_session.py b/agents-api/agents_api/routers/sessions/create_session.py
index 3dd52ac14..8359f808b 100644
--- a/agents-api/agents_api/routers/sessions/create_session.py
+++ b/agents-api/agents_api/routers/sessions/create_session.py
@@ -18,7 +18,7 @@ async def create_session(
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
data: CreateSessionRequest,
) -> ResourceCreatedResponse:
- session = create_session_query(
+ session = await create_session_query(
developer_id=x_developer_id,
data=data,
)
diff --git a/agents-api/agents_api/routers/sessions/delete_session.py b/agents-api/agents_api/routers/sessions/delete_session.py
index a9d5450d4..c59e507bd 100644
--- a/agents-api/agents_api/routers/sessions/delete_session.py
+++ b/agents-api/agents_api/routers/sessions/delete_session.py
@@ -16,4 +16,6 @@
async def delete_session(
session_id: UUID, x_developer_id: Annotated[UUID, Depends(get_developer_id)]
) -> ResourceDeletedResponse:
- return delete_session_query(developer_id=x_developer_id, session_id=session_id)
+ return await delete_session_query(
+ developer_id=x_developer_id, session_id=session_id
+ )
diff --git a/agents-api/agents_api/routers/sessions/get_session.py b/agents-api/agents_api/routers/sessions/get_session.py
index cce88071b..b77a01176 100644
--- a/agents-api/agents_api/routers/sessions/get_session.py
+++ b/agents-api/agents_api/routers/sessions/get_session.py
@@ -13,4 +13,4 @@
async def get_session(
session_id: UUID, x_developer_id: Annotated[UUID, Depends(get_developer_id)]
) -> Session:
- return get_session_query(developer_id=x_developer_id, session_id=session_id)
+ return await get_session_query(developer_id=x_developer_id, session_id=session_id)
diff --git a/agents-api/agents_api/routers/sessions/get_session_history.py b/agents-api/agents_api/routers/sessions/get_session_history.py
index 0a76176d1..e62aa9d2c 100644
--- a/agents-api/agents_api/routers/sessions/get_session_history.py
+++ b/agents-api/agents_api/routers/sessions/get_session_history.py
@@ -13,4 +13,4 @@
async def get_session_history(
session_id: UUID, x_developer_id: Annotated[UUID, Depends(get_developer_id)]
) -> History:
- return get_history_query(developer_id=x_developer_id, session_id=session_id)
+ return await get_history_query(developer_id=x_developer_id, session_id=session_id)
diff --git a/agents-api/agents_api/routers/sessions/list_sessions.py b/agents-api/agents_api/routers/sessions/list_sessions.py
index f5a806d06..108f1528f 100644
--- a/agents-api/agents_api/routers/sessions/list_sessions.py
+++ b/agents-api/agents_api/routers/sessions/list_sessions.py
@@ -21,7 +21,7 @@ async def list_sessions(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
) -> ListResponse[Session]:
- sessions = list_sessions_query(
+ sessions = await list_sessions_query(
developer_id=x_developer_id,
limit=limit,
offset=offset,
diff --git a/agents-api/agents_api/routers/sessions/patch_session.py b/agents-api/agents_api/routers/sessions/patch_session.py
index eeda3af65..87acd3c0d 100644
--- a/agents-api/agents_api/routers/sessions/patch_session.py
+++ b/agents-api/agents_api/routers/sessions/patch_session.py
@@ -18,7 +18,7 @@ async def patch_session(
session_id: UUID,
data: PatchSessionRequest,
) -> ResourceUpdatedResponse:
- return patch_session_query(
+ return await patch_session_query(
developer_id=x_developer_id,
session_id=session_id,
data=data,
diff --git a/agents-api/agents_api/routers/sessions/update_session.py b/agents-api/agents_api/routers/sessions/update_session.py
index 598a2b4d8..0c25e0652 100644
--- a/agents-api/agents_api/routers/sessions/update_session.py
+++ b/agents-api/agents_api/routers/sessions/update_session.py
@@ -18,7 +18,7 @@ async def update_session(
session_id: UUID,
data: UpdateSessionRequest,
) -> ResourceUpdatedResponse:
- return update_session_query(
+ return await update_session_query(
developer_id=x_developer_id,
session_id=session_id,
data=data,
diff --git a/agents-api/agents_api/routers/tasks/create_or_update_task.py b/agents-api/agents_api/routers/tasks/create_or_update_task.py
index 7c93be8b0..2316cef39 100644
--- a/agents-api/agents_api/routers/tasks/create_or_update_task.py
+++ b/agents-api/agents_api/routers/tasks/create_or_update_task.py
@@ -40,7 +40,7 @@ async def create_or_update_task(
except ValidationError:
pass
- return create_or_update_task_query(
+ return await create_or_update_task_query(
developer_id=x_developer_id,
agent_id=agent_id,
task_id=task_id,
diff --git a/agents-api/agents_api/routers/tasks/create_task.py b/agents-api/agents_api/routers/tasks/create_task.py
index 0dc4e91e4..0e8813102 100644
--- a/agents-api/agents_api/routers/tasks/create_task.py
+++ b/agents-api/agents_api/routers/tasks/create_task.py
@@ -35,7 +35,7 @@ async def create_task(
except ValidationError:
pass
- return create_task_query(
+ return await create_task_query(
developer_id=x_developer_id,
agent_id=agent_id,
data=data,
diff --git a/agents-api/agents_api/routers/tasks/create_task_execution.py b/agents-api/agents_api/routers/tasks/create_task_execution.py
index eb08c90c0..6cc1e3e4f 100644
--- a/agents-api/agents_api/routers/tasks/create_task_execution.py
+++ b/agents-api/agents_api/routers/tasks/create_task_execution.py
@@ -50,7 +50,7 @@ async def start_execution(
) -> tuple[Execution, WorkflowHandle]:
execution_id = uuid7()
- execution = create_execution_query(
+ execution = await create_execution_query(
developer_id=developer_id,
task_id=task_id,
execution_id=execution_id,
@@ -58,7 +58,7 @@ async def start_execution(
client=client,
)
- execution_input = prepare_execution_input(
+ execution_input = await prepare_execution_input(
developer_id=developer_id,
task_id=task_id,
execution_id=execution_id,
@@ -76,7 +76,7 @@ async def start_execution(
except Exception as e:
logger.exception(e)
- update_execution_query(
+ await update_execution_query(
developer_id=developer_id,
task_id=task_id,
execution_id=execution_id,
@@ -104,7 +104,7 @@ async def create_task_execution(
background_tasks: BackgroundTasks,
) -> ResourceCreatedResponse:
try:
- task = get_task_query(task_id=task_id, developer_id=x_developer_id)
+ task = await get_task_query(task_id=task_id, developer_id=x_developer_id)
validate(data.input, task.input_schema)
except ValidationError:
@@ -121,11 +121,11 @@ async def create_task_execution(
raise
# get developer data
- developer: Developer = get_developer(developer_id=x_developer_id)
+ developer: Developer = await get_developer(developer_id=x_developer_id)
# # check if the developer is paid
if "paid" not in developer.tags:
- executions = count_executions_query(
+ executions = await count_executions_query(
developer_id=x_developer_id, task_id=task_id
)
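
Editor's note: the `create_task_execution` hunks above show the free-tier gate: developers without the `"paid"` tag get their execution count checked against `max_free_executions` before a new execution starts. A sketch of that gate with hypothetical stubs and an assumed quota value:

    from uuid import UUID

    MAX_FREE_EXECUTIONS = 50  # hypothetical; the repo reads max_free_executions from env

    # Hypothetical stand-ins for get_developer and count_executions.
    async def get_developer(*, developer_id: UUID) -> dict:
        return {"id": str(developer_id), "tags": []}

    async def count_executions_query(*, developer_id: UUID, task_id: UUID) -> dict:
        return {"count": 51}

    async def enforce_free_tier(developer_id: UUID, task_id: UUID) -> None:
        developer = await get_developer(developer_id=developer_id)
        if "paid" in developer["tags"]:
            return  # paid developers skip the quota check
        executions = await count_executions_query(
            developer_id=developer_id, task_id=task_id
        )
        if executions["count"] > MAX_FREE_EXECUTIONS:
            # Assumed failure mode; the real handler presumably raises an
            # HTTP error, as the chat router does for session quotas.
            raise RuntimeError("free-tier execution quota exceeded")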
diff --git a/agents-api/agents_api/routers/tasks/get_execution_details.py b/agents-api/agents_api/routers/tasks/get_execution_details.py
index 387cf41c0..53b6ad6d5 100644
--- a/agents-api/agents_api/routers/tasks/get_execution_details.py
+++ b/agents-api/agents_api/routers/tasks/get_execution_details.py
@@ -11,4 +11,4 @@
@router.get("/executions/{execution_id}", tags=["executions"])
async def get_execution_details(execution_id: UUID) -> Execution:
- return get_execution_query(execution_id=execution_id)
+ return await get_execution_query(execution_id=execution_id)
diff --git a/agents-api/agents_api/routers/tasks/get_task_details.py b/agents-api/agents_api/routers/tasks/get_task_details.py
index 35a7ef747..452ab961d 100644
--- a/agents-api/agents_api/routers/tasks/get_task_details.py
+++ b/agents-api/agents_api/routers/tasks/get_task_details.py
@@ -22,7 +22,7 @@ async def get_task_details(
)
try:
- task = get_task_query(developer_id=x_developer_id, task_id=task_id)
+ task = await get_task_query(developer_id=x_developer_id, task_id=task_id)
task_data = task.model_dump()
except AssertionError:
raise not_found
diff --git a/agents-api/agents_api/routers/tasks/list_execution_transitions.py b/agents-api/agents_api/routers/tasks/list_execution_transitions.py
index 460e4e764..9b2aad042 100644
--- a/agents-api/agents_api/routers/tasks/list_execution_transitions.py
+++ b/agents-api/agents_api/routers/tasks/list_execution_transitions.py
@@ -19,7 +19,7 @@ async def list_execution_transitions(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
) -> ListResponse[Transition]:
- transitions = list_execution_transitions_query(
+ transitions = await list_execution_transitions_query(
execution_id=execution_id,
limit=limit,
offset=offset,
diff --git a/agents-api/agents_api/routers/tasks/list_task_executions.py b/agents-api/agents_api/routers/tasks/list_task_executions.py
index 658904efa..17256f038 100644
--- a/agents-api/agents_api/routers/tasks/list_task_executions.py
+++ b/agents-api/agents_api/routers/tasks/list_task_executions.py
@@ -23,7 +23,7 @@ async def list_task_executions(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
) -> ListResponse[Execution]:
- executions = list_task_executions_query(
+ executions = await list_task_executions_query(
task_id=task_id,
developer_id=x_developer_id,
limit=limit,
diff --git a/agents-api/agents_api/routers/tasks/list_tasks.py b/agents-api/agents_api/routers/tasks/list_tasks.py
index 2422cdef3..529700c09 100644
--- a/agents-api/agents_api/routers/tasks/list_tasks.py
+++ b/agents-api/agents_api/routers/tasks/list_tasks.py
@@ -21,7 +21,7 @@ async def list_tasks(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
) -> ListResponse[Task]:
- query_results = list_tasks_query(
+ query_results = await list_tasks_query(
agent_id=agent_id,
developer_id=x_developer_id,
limit=limit,
diff --git a/agents-api/agents_api/routers/tasks/patch_execution.py b/agents-api/agents_api/routers/tasks/patch_execution.py
index 3b4b91c8c..15b3162be 100644
--- a/agents-api/agents_api/routers/tasks/patch_execution.py
+++ b/agents-api/agents_api/routers/tasks/patch_execution.py
@@ -21,7 +21,7 @@ async def patch_execution(
execution_id: UUID,
data: UpdateExecutionRequest,
) -> ResourceUpdatedResponse:
- return update_execution_query(
+ return await update_execution_query(
developer_id=x_developer_id,
task_id=task_id,
execution_id=execution_id,
diff --git a/agents-api/agents_api/routers/tasks/stream_transitions_events.py b/agents-api/agents_api/routers/tasks/stream_transitions_events.py
index 61168cd86..cb9ded05a 100644
--- a/agents-api/agents_api/routers/tasks/stream_transitions_events.py
+++ b/agents-api/agents_api/routers/tasks/stream_transitions_events.py
@@ -87,7 +87,7 @@ async def stream_transitions_events(
next_page_token: Annotated[str | None, Query()] = None,
):
# Get temporal id
- temporal_data = lookup_temporal_data(
+ temporal_data = await lookup_temporal_data(
developer_id=x_developer_id,
execution_id=execution_id,
)
diff --git a/agents-api/agents_api/routers/tasks/update_execution.py b/agents-api/agents_api/routers/tasks/update_execution.py
index 613958919..281fc8e2a 100644
--- a/agents-api/agents_api/routers/tasks/update_execution.py
+++ b/agents-api/agents_api/routers/tasks/update_execution.py
@@ -31,14 +31,14 @@ async def update_execution(
case StopExecutionRequest():
try:
wf_handle = temporal_client.get_workflow_handle_for(
- *get_temporal_workflow_data(execution_id=execution_id)
+ *await get_temporal_workflow_data(execution_id=execution_id)
)
await wf_handle.cancel()
except Exception:
raise HTTPException(status_code=500, detail="Failed to stop execution")
case ResumeExecutionRequest():
- token_data = get_paused_execution_token(
+ token_data = await get_paused_execution_token(
developer_id=x_developer_id, execution_id=execution_id
)
activity_id = token_data["metadata"].get("x-activity-id", None)
diff --git a/agents-api/agents_api/routers/users/create_or_update_user.py b/agents-api/agents_api/routers/users/create_or_update_user.py
index 746134499..0a1f9db37 100644
--- a/agents-api/agents_api/routers/users/create_or_update_user.py
+++ b/agents-api/agents_api/routers/users/create_or_update_user.py
@@ -18,7 +18,7 @@ async def create_or_update_user(
user_id: UUID,
data: CreateOrUpdateUserRequest,
) -> ResourceCreatedResponse:
- user = create_or_update_user_query(
+ user = await create_or_update_user_query(
developer_id=x_developer_id,
user_id=user_id,
data=data,
diff --git a/agents-api/agents_api/routers/users/create_user.py b/agents-api/agents_api/routers/users/create_user.py
index e18ca3c97..1ac42bc36 100644
--- a/agents-api/agents_api/routers/users/create_user.py
+++ b/agents-api/agents_api/routers/users/create_user.py
@@ -15,7 +15,7 @@ async def create_user(
data: CreateUserRequest,
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> ResourceCreatedResponse:
- user = create_user_query(
+ user = await create_user_query(
developer_id=x_developer_id,
data=data,
)
diff --git a/agents-api/agents_api/routers/users/delete_user.py b/agents-api/agents_api/routers/users/delete_user.py
index 446c7cf0c..bbc7f8736 100644
--- a/agents-api/agents_api/routers/users/delete_user.py
+++ b/agents-api/agents_api/routers/users/delete_user.py
@@ -14,4 +14,4 @@
async def delete_user(
user_id: UUID, x_developer_id: Annotated[UUID, Depends(get_developer_id)]
) -> ResourceDeletedResponse:
- return delete_user_query(developer_id=x_developer_id, user_id=user_id)
+ return await delete_user_query(developer_id=x_developer_id, user_id=user_id)
diff --git a/agents-api/agents_api/routers/users/get_user_details.py b/agents-api/agents_api/routers/users/get_user_details.py
index 1a1cfd6d3..4a219869c 100644
--- a/agents-api/agents_api/routers/users/get_user_details.py
+++ b/agents-api/agents_api/routers/users/get_user_details.py
@@ -14,4 +14,4 @@ async def get_user_details(
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
user_id: UUID,
) -> User:
- return get_user_query(developer_id=x_developer_id, user_id=user_id)
+ return await get_user_query(developer_id=x_developer_id, user_id=user_id)
diff --git a/agents-api/agents_api/routers/users/list_users.py b/agents-api/agents_api/routers/users/list_users.py
index c57dec613..4c027bbd3 100644
--- a/agents-api/agents_api/routers/users/list_users.py
+++ b/agents-api/agents_api/routers/users/list_users.py
@@ -21,7 +21,7 @@ async def list_users(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
) -> ListResponse[User]:
- users = list_users_query(
+ users = await list_users_query(
developer_id=x_developer_id,
limit=limit,
offset=offset,
diff --git a/agents-api/agents_api/routers/users/patch_user.py b/agents-api/agents_api/routers/users/patch_user.py
index 0e8b5fc53..03cd9bcfe 100644
--- a/agents-api/agents_api/routers/users/patch_user.py
+++ b/agents-api/agents_api/routers/users/patch_user.py
@@ -15,7 +15,7 @@ async def patch_user(
data: PatchUserRequest,
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> ResourceUpdatedResponse:
- return patch_user_query(
+ return await patch_user_query(
developer_id=x_developer_id,
user_id=user_id,
data=data,
diff --git a/agents-api/agents_api/routers/users/update_user.py b/agents-api/agents_api/routers/users/update_user.py
index 82069fe94..8071657d7 100644
--- a/agents-api/agents_api/routers/users/update_user.py
+++ b/agents-api/agents_api/routers/users/update_user.py
@@ -15,7 +15,7 @@ async def update_user(
data: UpdateUserRequest,
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> ResourceUpdatedResponse:
- return update_user_query(
+ return await update_user_query(
developer_id=x_developer_id,
user_id=user_id,
data=data,
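
Editor's note: patch 142 exists because patch 140 made the query layer async while router call sites stayed synchronous; calling a coroutine function without `await` returns a coroutine object instead of the result. A self-contained demonstration of the failure mode and the fix, using a hypothetical query stub:

    import asyncio
    from uuid import UUID

    # Hypothetical async query, as produced by the previous patches.
    async def delete_user_query(*, developer_id: UUID, user_id: UUID) -> dict:
        return {"id": str(user_id), "deleted": True}

    async def main() -> None:
        dev, user = UUID(int=0), UUID(int=1)

        # Before this patch: the handler returned the bare coroutine object,
        # not the payload, because the newly-async query was never awaited.
        result = delete_user_query(developer_id=dev, user_id=user)
        print(type(result))  # <class 'coroutine'>
        result.close()       # silence the "never awaited" warning

        # After this patch: awaiting yields the actual response payload.
        print(await delete_user_query(developer_id=dev, user_id=user))

    asyncio.run(main())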
From 3bc5875ada34e3e59524fd8f3870c30466f13603 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Mon, 23 Dec 2024 10:48:04 +0300
Subject: [PATCH 143/274] fix(agents-api): await async functions in routers
---
agents-api/agents_api/routers/agents/create_agent.py | 2 +-
.../agents_api/routers/agents/create_agent_tool.py | 2 +-
.../routers/agents/create_or_update_agent.py | 2 +-
agents-api/agents_api/routers/agents/delete_agent.py | 2 +-
.../agents_api/routers/agents/delete_agent_tool.py | 2 +-
.../agents_api/routers/agents/get_agent_details.py | 2 +-
.../agents_api/routers/agents/list_agent_tools.py | 2 +-
agents-api/agents_api/routers/agents/list_agents.py | 2 +-
agents-api/agents_api/routers/agents/patch_agent.py | 2 +-
.../agents_api/routers/agents/patch_agent_tool.py | 2 +-
agents-api/agents_api/routers/agents/update_agent.py | 2 +-
.../agents_api/routers/agents/update_agent_tool.py | 2 +-
agents-api/agents_api/routers/docs/delete_doc.py | 4 ++--
agents-api/agents_api/routers/docs/get_doc.py | 2 +-
agents-api/agents_api/routers/docs/search_docs.py | 12 ++++++------
15 files changed, 21 insertions(+), 21 deletions(-)
diff --git a/agents-api/agents_api/routers/agents/create_agent.py b/agents-api/agents_api/routers/agents/create_agent.py
index e861617ba..f630d5251 100644
--- a/agents-api/agents_api/routers/agents/create_agent.py
+++ b/agents-api/agents_api/routers/agents/create_agent.py
@@ -19,7 +19,7 @@ async def create_agent(
data: CreateAgentRequest,
) -> ResourceCreatedResponse:
# TODO: Validate model name
- agent = create_agent_query(
+ agent = await create_agent_query(
developer_id=x_developer_id,
data=data,
)
diff --git a/agents-api/agents_api/routers/agents/create_agent_tool.py b/agents-api/agents_api/routers/agents/create_agent_tool.py
index c70d7f5c3..80c90a4de 100644
--- a/agents-api/agents_api/routers/agents/create_agent_tool.py
+++ b/agents-api/agents_api/routers/agents/create_agent_tool.py
@@ -4,7 +4,7 @@
from fastapi import Depends
from starlette.status import HTTP_201_CREATED
-import agents_api.models as models
+from ...queries.tools.create_tools import create_tools as create_tools_query
from ...autogen.openapi_model import (
CreateToolRequest,
diff --git a/agents-api/agents_api/routers/agents/create_or_update_agent.py b/agents-api/agents_api/routers/agents/create_or_update_agent.py
index 24cca09e4..fd2fc124c 100644
--- a/agents-api/agents_api/routers/agents/create_or_update_agent.py
+++ b/agents-api/agents_api/routers/agents/create_or_update_agent.py
@@ -22,7 +22,7 @@ async def create_or_update_agent(
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> ResourceCreatedResponse:
# TODO: Validate model name
- agent = create_or_update_agent_query(
+ agent = await create_or_update_agent_query(
developer_id=x_developer_id,
agent_id=agent_id,
data=data,
diff --git a/agents-api/agents_api/routers/agents/delete_agent.py b/agents-api/agents_api/routers/agents/delete_agent.py
index fbf482f8d..3acb56aa2 100644
--- a/agents-api/agents_api/routers/agents/delete_agent.py
+++ b/agents-api/agents_api/routers/agents/delete_agent.py
@@ -14,4 +14,4 @@
async def delete_agent(
agent_id: UUID, x_developer_id: Annotated[UUID, Depends(get_developer_id)]
) -> ResourceDeletedResponse:
- return delete_agent_query(developer_id=x_developer_id, agent_id=agent_id)
+ return await delete_agent_query(developer_id=x_developer_id, agent_id=agent_id)
diff --git a/agents-api/agents_api/routers/agents/delete_agent_tool.py b/agents-api/agents_api/routers/agents/delete_agent_tool.py
index ab89faa24..6f82e0768 100644
--- a/agents-api/agents_api/routers/agents/delete_agent_tool.py
+++ b/agents-api/agents_api/routers/agents/delete_agent_tool.py
@@ -15,7 +15,7 @@ async def delete_agent_tool(
tool_id: UUID,
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> ResourceDeletedResponse:
- return delete_tool_query(
+ return await delete_tool_query(
developer_id=x_developer_id,
agent_id=agent_id,
tool_id=tool_id,
diff --git a/agents-api/agents_api/routers/agents/get_agent_details.py b/agents-api/agents_api/routers/agents/get_agent_details.py
index 6d90bc3ab..30f7d3a34 100644
--- a/agents-api/agents_api/routers/agents/get_agent_details.py
+++ b/agents-api/agents_api/routers/agents/get_agent_details.py
@@ -14,4 +14,4 @@ async def get_agent_details(
agent_id: UUID,
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> Agent:
- return get_agent_query(developer_id=x_developer_id, agent_id=agent_id)
+ return await get_agent_query(developer_id=x_developer_id, agent_id=agent_id)
diff --git a/agents-api/agents_api/routers/agents/list_agent_tools.py b/agents-api/agents_api/routers/agents/list_agent_tools.py
index 98f5dd109..7712cbf26 100644
--- a/agents-api/agents_api/routers/agents/list_agent_tools.py
+++ b/agents-api/agents_api/routers/agents/list_agent_tools.py
@@ -20,7 +20,7 @@ async def list_agent_tools(
) -> ListResponse[Tool]:
# FIXME: list agent tools is returning an empty list
# SCRUM-22
- tools = list_tools_query(
+ tools = await list_tools_query(
agent_id=agent_id,
developer_id=x_developer_id,
limit=limit,
diff --git a/agents-api/agents_api/routers/agents/list_agents.py b/agents-api/agents_api/routers/agents/list_agents.py
index 37b14ebad..f3b74f7a4 100644
--- a/agents-api/agents_api/routers/agents/list_agents.py
+++ b/agents-api/agents_api/routers/agents/list_agents.py
@@ -24,7 +24,7 @@ async def list_agents(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
) -> ListResponse[Agent]:
- agents = list_agents_query(
+ agents = await list_agents_query(
developer_id=x_developer_id,
limit=limit,
offset=offset,
diff --git a/agents-api/agents_api/routers/agents/patch_agent.py b/agents-api/agents_api/routers/agents/patch_agent.py
index b78edc2e5..bb7c16d5c 100644
--- a/agents-api/agents_api/routers/agents/patch_agent.py
+++ b/agents-api/agents_api/routers/agents/patch_agent.py
@@ -21,7 +21,7 @@ async def patch_agent(
agent_id: UUID,
data: PatchAgentRequest,
) -> ResourceUpdatedResponse:
- return patch_agent_query(
+ return await patch_agent_query(
agent_id=agent_id,
developer_id=x_developer_id,
data=data,
diff --git a/agents-api/agents_api/routers/agents/patch_agent_tool.py b/agents-api/agents_api/routers/agents/patch_agent_tool.py
index a45349340..cef29dea2 100644
--- a/agents-api/agents_api/routers/agents/patch_agent_tool.py
+++ b/agents-api/agents_api/routers/agents/patch_agent_tool.py
@@ -19,7 +19,7 @@ async def patch_agent_tool(
tool_id: UUID,
data: PatchToolRequest,
) -> ResourceUpdatedResponse:
- return patch_tool_query(
+ return await patch_tool_query(
developer_id=x_developer_id,
agent_id=agent_id,
tool_id=tool_id,
diff --git a/agents-api/agents_api/routers/agents/update_agent.py b/agents-api/agents_api/routers/agents/update_agent.py
index 2c5235971..608da0b20 100644
--- a/agents-api/agents_api/routers/agents/update_agent.py
+++ b/agents-api/agents_api/routers/agents/update_agent.py
@@ -21,7 +21,7 @@ async def update_agent(
agent_id: UUID,
data: UpdateAgentRequest,
) -> ResourceUpdatedResponse:
- return update_agent_query(
+ return await update_agent_query(
developer_id=x_developer_id,
agent_id=agent_id,
data=data,
diff --git a/agents-api/agents_api/routers/agents/update_agent_tool.py b/agents-api/agents_api/routers/agents/update_agent_tool.py
index 7ba66fa53..790cff39c 100644
--- a/agents-api/agents_api/routers/agents/update_agent_tool.py
+++ b/agents-api/agents_api/routers/agents/update_agent_tool.py
@@ -19,7 +19,7 @@ async def update_agent_tool(
tool_id: UUID,
data: UpdateToolRequest,
) -> ResourceUpdatedResponse:
- return update_tool_query(
+ return await update_tool_query(
developer_id=x_developer_id,
agent_id=agent_id,
tool_id=tool_id,
diff --git a/agents-api/agents_api/routers/docs/delete_doc.py b/agents-api/agents_api/routers/docs/delete_doc.py
index cbe8413b3..a639db17b 100644
--- a/agents-api/agents_api/routers/docs/delete_doc.py
+++ b/agents-api/agents_api/routers/docs/delete_doc.py
@@ -18,7 +18,7 @@ async def delete_agent_doc(
agent_id: UUID,
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> ResourceDeletedResponse:
- return delete_doc_query(
+ return await delete_doc_query(
developer_id=x_developer_id,
owner_id=agent_id,
owner_type="agent",
@@ -34,7 +34,7 @@ async def delete_user_doc(
user_id: UUID,
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> ResourceDeletedResponse:
- return delete_doc_query(
+ return await delete_doc_query(
developer_id=x_developer_id,
owner_id=user_id,
owner_type="user",
diff --git a/agents-api/agents_api/routers/docs/get_doc.py b/agents-api/agents_api/routers/docs/get_doc.py
index 7df55fac4..498fb46e0 100644
--- a/agents-api/agents_api/routers/docs/get_doc.py
+++ b/agents-api/agents_api/routers/docs/get_doc.py
@@ -14,4 +14,4 @@ async def get_doc(
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
doc_id: UUID,
) -> Doc:
- return get_doc_query(developer_id=x_developer_id, doc_id=doc_id)
+ return await get_doc_query(developer_id=x_developer_id, doc_id=doc_id)
diff --git a/agents-api/agents_api/routers/docs/search_docs.py b/agents-api/agents_api/routers/docs/search_docs.py
index d4653920a..ead9e1edb 100644
--- a/agents-api/agents_api/routers/docs/search_docs.py
+++ b/agents-api/agents_api/routers/docs/search_docs.py
@@ -20,7 +20,7 @@
from .router import router
-def get_search_fn_and_params(
+async def get_search_fn_and_params(
search_params,
) -> Tuple[
Any, Optional[Dict[str, Union[float, int, str, Dict[str, float], List[float]]]]
@@ -31,7 +31,7 @@ def get_search_fn_and_params(
case TextOnlyDocSearchRequest(
text=query, limit=k, metadata_filter=metadata_filter
):
- search_fn = search_docs_by_text
+ search_fn = await search_docs_by_text
params = dict(
query=query,
k=k,
@@ -44,7 +44,7 @@ def get_search_fn_and_params(
confidence=confidence,
metadata_filter=metadata_filter,
):
- search_fn = search_docs_by_embedding
+ search_fn = await search_docs_by_embedding
params = dict(
query_embedding=query_embedding,
k=k * 3 if search_params.mmr_strength > 0 else k,
@@ -60,7 +60,7 @@ def get_search_fn_and_params(
alpha=alpha,
metadata_filter=metadata_filter,
):
- search_fn = search_docs_hybrid
+ search_fn = await search_docs_hybrid
params = dict(
query=query,
query_embedding=query_embedding,
@@ -94,7 +94,7 @@ async def search_user_docs(
"""
# MMR here
- search_fn, params = get_search_fn_and_params(search_params)
+ search_fn, params = await get_search_fn_and_params(search_params)
start = time.time()
docs: list[DocReference] = search_fn(
@@ -145,7 +145,7 @@ async def search_agent_docs(
DocSearchResponse: The search results.
"""
- search_fn, params = get_search_fn_and_params(search_params)
+ search_fn, params = await get_search_fn_and_params(search_params)
start = time.time()
docs: list[DocReference] = search_fn(
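
One wrinkle in the search_docs.py hunks: if the search helpers are ordinary async functions, `search_fn = await search_docs_by_text` awaits the function object itself, which raises TypeError at runtime — only the result of calling a coroutine function is awaitable. The likely intended shape is to keep storing the reference and await the call site instead; roughly (names as imported at the top of search_docs.py, call-site kwargs assumed):

    async def get_search_fn_and_params(search_params):
        match search_params:
            case TextOnlyDocSearchRequest(
                text=query, limit=k, metadata_filter=metadata_filter
            ):
                search_fn = search_docs_by_text  # keep the reference; do not await it
                params = dict(query=query, k=k, metadata_filter=metadata_filter)
        return search_fn, params

    search_fn, params = await get_search_fn_and_params(search_params)
    docs: list[DocReference] = await search_fn(developer_id=x_developer_id, **params)
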
From 969e38c38c3e1b36a7fe49e37fb3290a139083cb Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Mon, 23 Dec 2024 07:48:54 +0000
Subject: [PATCH 144/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/routers/agents/create_agent_tool.py | 2 --
1 file changed, 2 deletions(-)
diff --git a/agents-api/agents_api/routers/agents/create_agent_tool.py b/agents-api/agents_api/routers/agents/create_agent_tool.py
index 80c90a4de..74e98b3f9 100644
--- a/agents-api/agents_api/routers/agents/create_agent_tool.py
+++ b/agents-api/agents_api/routers/agents/create_agent_tool.py
@@ -4,8 +4,6 @@
from fastapi import Depends
from starlette.status import HTTP_201_CREATED
-from ...queries.tools.create_tools import create_tools as create_tools_query
-
from ...autogen.openapi_model import (
CreateToolRequest,
ResourceCreatedResponse,
From 9d0068eb75c2923caf7d1e5034dca8f042718f34 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Wed, 18 Dec 2024 15:39:35 +0300
Subject: [PATCH 145/274] chore: Move to queries directory
---
.../models/chat/get_cached_response.py | 15 ---------------
.../models/chat/set_cached_response.py | 19 -------------------
.../{models => queries}/chat/__init__.py | 2 --
.../chat/gather_messages.py | 0
.../chat/prepare_chat_context.py | 15 +++++++--------
5 files changed, 7 insertions(+), 44 deletions(-)
delete mode 100644 agents-api/agents_api/models/chat/get_cached_response.py
delete mode 100644 agents-api/agents_api/models/chat/set_cached_response.py
rename agents-api/agents_api/{models => queries}/chat/__init__.py (92%)
rename agents-api/agents_api/{models => queries}/chat/gather_messages.py (100%)
rename agents-api/agents_api/{models => queries}/chat/prepare_chat_context.py (92%)
diff --git a/agents-api/agents_api/models/chat/get_cached_response.py b/agents-api/agents_api/models/chat/get_cached_response.py
deleted file mode 100644
index 368c88567..000000000
--- a/agents-api/agents_api/models/chat/get_cached_response.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from beartype import beartype
-
-from ..utils import cozo_query
-
-
-@cozo_query
-@beartype
-def get_cached_response(key: str) -> tuple[str, dict]:
- query = """
- input[key] <- [[$key]]
- ?[key, value] := input[key], *session_cache{key, value}
- :limit 1
- """
-
- return (query, {"key": key})
diff --git a/agents-api/agents_api/models/chat/set_cached_response.py b/agents-api/agents_api/models/chat/set_cached_response.py
deleted file mode 100644
index 8625f3f1b..000000000
--- a/agents-api/agents_api/models/chat/set_cached_response.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from beartype import beartype
-
-from ..utils import cozo_query
-
-
-@cozo_query
-@beartype
-def set_cached_response(key: str, value: dict) -> tuple[str, dict]:
- query = """
- ?[key, value] <- [[$key, $value]]
-
- :insert session_cache {
- key => value
- }
-
- :returning
- """
-
- return (query, {"key": key, "value": value})
diff --git a/agents-api/agents_api/models/chat/__init__.py b/agents-api/agents_api/queries/chat/__init__.py
similarity index 92%
rename from agents-api/agents_api/models/chat/__init__.py
rename to agents-api/agents_api/queries/chat/__init__.py
index 428b72572..2c05b4f8b 100644
--- a/agents-api/agents_api/models/chat/__init__.py
+++ b/agents-api/agents_api/queries/chat/__init__.py
@@ -17,6 +17,4 @@
# ruff: noqa: F401, F403, F405
from .gather_messages import gather_messages
-from .get_cached_response import get_cached_response
from .prepare_chat_context import prepare_chat_context
-from .set_cached_response import set_cached_response
diff --git a/agents-api/agents_api/models/chat/gather_messages.py b/agents-api/agents_api/queries/chat/gather_messages.py
similarity index 100%
rename from agents-api/agents_api/models/chat/gather_messages.py
rename to agents-api/agents_api/queries/chat/gather_messages.py
diff --git a/agents-api/agents_api/models/chat/prepare_chat_context.py b/agents-api/agents_api/queries/chat/prepare_chat_context.py
similarity index 92%
rename from agents-api/agents_api/models/chat/prepare_chat_context.py
rename to agents-api/agents_api/queries/chat/prepare_chat_context.py
index f77686d7a..4731618f8 100644
--- a/agents-api/agents_api/models/chat/prepare_chat_context.py
+++ b/agents-api/agents_api/queries/chat/prepare_chat_context.py
@@ -3,7 +3,6 @@
from beartype import beartype
from fastapi import HTTPException
-from pycozo.client import QueryException
from pydantic import ValidationError
from ...common.protocol.sessions import ChatContext, make_session
@@ -22,13 +21,13 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+# TODO: implement this part
+# @rewrap_exceptions(
+# {
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
ChatContext,
one=True,
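
For reference while the decorator is commented out: `rewrap_exceptions` plus `partialclass` converts low-level errors into HTTP responses at the query boundary. A hedged sketch of how these two helpers from queries/utils.py are typically wired (the real implementations may differ):

    import functools

    def partialclass(cls, **preset):
        # Subclass with constructor kwargs pre-bound,
        # e.g. partialclass(HTTPException, status_code=400)
        class _Partial(cls):
            __init__ = functools.partialmethod(cls.__init__, **preset)
        return _Partial

    def rewrap_exceptions(mapping):
        def decorator(fn):
            @functools.wraps(fn)
            async def wrapper(*args, **kwargs):
                try:
                    return await fn(*args, **kwargs)
                except BaseException as exc:
                    for exc_type, wrapped in mapping.items():
                        if isinstance(exc, exc_type):
                            raise wrapped(detail=str(exc)) from exc
                    raise
            return wrapper
        return decorator
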
From 780100b1f2a6ce87a4918b6b45c1a03ee9d4f10b Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Thu, 19 Dec 2024 15:34:37 +0300
Subject: [PATCH 146/274] feat: Add prepare chat context query
---
.../queries/chat/gather_messages.py | 12 +-
.../queries/chat/prepare_chat_context.py | 225 ++++++++++--------
2 files changed, 129 insertions(+), 108 deletions(-)
diff --git a/agents-api/agents_api/queries/chat/gather_messages.py b/agents-api/agents_api/queries/chat/gather_messages.py
index 28dc6607f..34a7c564f 100644
--- a/agents-api/agents_api/queries/chat/gather_messages.py
+++ b/agents-api/agents_api/queries/chat/gather_messages.py
@@ -3,18 +3,17 @@
from beartype import beartype
from fastapi import HTTPException
-from pycozo.client import QueryException
from pydantic import ValidationError
from ...autogen.openapi_model import ChatInput, DocReference, History
from ...clients import litellm
from ...common.protocol.developers import Developer
from ...common.protocol.sessions import ChatContext
-from ..docs.search_docs_by_embedding import search_docs_by_embedding
-from ..docs.search_docs_by_text import search_docs_by_text
-from ..docs.search_docs_hybrid import search_docs_hybrid
-from ..entry.get_history import get_history
-from ..session.get_session import get_session
+# from ..docs.search_docs_by_embedding import search_docs_by_embedding
+# from ..docs.search_docs_by_text import search_docs_by_text
+# from ..docs.search_docs_hybrid import search_docs_hybrid
+# from ..entry.get_history import get_history
+from ..sessions.get_session import get_session
from ..utils import (
partialclass,
rewrap_exceptions,
@@ -25,7 +24,6 @@
@rewrap_exceptions(
{
- QueryException: partialclass(HTTPException, status_code=400),
ValidationError: partialclass(HTTPException, status_code=400),
TypeError: partialclass(HTTPException, status_code=400),
}
diff --git a/agents-api/agents_api/queries/chat/prepare_chat_context.py b/agents-api/agents_api/queries/chat/prepare_chat_context.py
index 4731618f8..23926ea4c 100644
--- a/agents-api/agents_api/queries/chat/prepare_chat_context.py
+++ b/agents-api/agents_api/queries/chat/prepare_chat_context.py
@@ -2,18 +2,10 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
-from pydantic import ValidationError
from ...common.protocol.sessions import ChatContext, make_session
-from ..session.prepare_session_data import prepare_session_data
from ..utils import (
- cozo_query,
- fix_uuid_if_present,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
@@ -21,17 +13,107 @@
T = TypeVar("T")
-# TODO: implement this part
-# @rewrap_exceptions(
-# {
-# ValidationError: partialclass(HTTPException, status_code=400),
-# TypeError: partialclass(HTTPException, status_code=400),
-# }
-# )
-@wrap_in_class(
- ChatContext,
- one=True,
- transform=lambda d: {
+query = """
+SELECT * FROM
+(
+ SELECT jsonb_agg(u) AS users FROM (
+ SELECT
+ session_lookup.participant_id,
+ users.user_id AS id,
+ users.developer_id,
+ users.name,
+ users.about,
+ users.created_at,
+ users.updated_at,
+ users.metadata
+ FROM session_lookup
+ INNER JOIN users ON session_lookup.participant_id = users.user_id
+ WHERE
+ session_lookup.developer_id = $1 AND
+ session_id = $2 AND
+ session_lookup.participant_type = 'user'
+ ) u
+) AS users,
+(
+ SELECT jsonb_agg(a) AS agents FROM (
+ SELECT
+ session_lookup.participant_id,
+ agents.agent_id AS id,
+ agents.developer_id,
+ agents.canonical_name,
+ agents.name,
+ agents.about,
+ agents.instructions,
+ agents.model,
+ agents.created_at,
+ agents.updated_at,
+ agents.metadata,
+ agents.default_settings
+ FROM session_lookup
+ INNER JOIN agents ON session_lookup.participant_id = agents.agent_id
+ WHERE
+ session_lookup.developer_id = $1 AND
+ session_id = $2 AND
+ session_lookup.participant_type = 'agent'
+ ) a
+) AS agents,
+(
+ SELECT to_jsonb(s) AS session FROM (
+ SELECT
+ sessions.session_id AS id,
+ sessions.developer_id,
+ sessions.situation,
+ sessions.system_template,
+ sessions.created_at,
+ sessions.metadata,
+ sessions.render_templates,
+ sessions.token_budget,
+ sessions.context_overflow,
+ sessions.forward_tool_calls,
+ sessions.recall_options
+ FROM sessions
+ WHERE
+ developer_id = $1 AND
+ session_id = $2
+ LIMIT 1
+ ) s
+) AS session,
+(
+ SELECT jsonb_agg(r) AS toolsets FROM (
+ SELECT
+ session_lookup.participant_id,
+ tools.tool_id as id,
+ tools.developer_id,
+ tools.agent_id,
+ tools.task_id,
+ tools.task_version,
+ tools.type,
+ tools.name,
+ tools.description,
+ tools.spec,
+ tools.updated_at,
+ tools.created_at
+ FROM session_lookup
+ INNER JOIN tools ON session_lookup.participant_id = tools.agent_id
+ WHERE
+ session_lookup.developer_id = $1 AND
+ session_id = $2 AND
+ session_lookup.participant_type = 'agent'
+ ) r
+) AS toolsets
+"""
+
+
+def _transform(d):
+ toolsets = {}
+ for tool in d["toolsets"]:
+ agent_id = tool["agent_id"]
+ if agent_id in toolsets:
+ toolsets[agent_id].append(tool)
+ else:
+ toolsets[agent_id] = [tool]
+
+ return {
**d,
"session": make_session(
agents=[a["id"] for a in d["agents"]],
@@ -40,103 +122,44 @@
),
"toolsets": [
{
- **ts,
+ "agent_id": agent_id,
"tools": [
{
tool["type"]: tool.pop("spec"),
**tool,
}
- for tool in map(fix_uuid_if_present, ts["tools"])
+ for tool in tools
],
}
- for ts in d["toolsets"]
+ for agent_id, tools in toolsets.items()
],
- },
+ }
+
+
+# TODO: implement this part
+# @rewrap_exceptions(
+# {
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
+@wrap_in_class(
+ ChatContext,
+ one=True,
+ transform=_transform,
)
-@cozo_query
+@pg_query
@beartype
-def prepare_chat_context(
+async def prepare_chat_context(
*,
developer_id: UUID,
session_id: UUID,
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
"""
Executes a complex query to retrieve memory context based on session ID.
"""
- [*_, session_data_query], sd_vars = prepare_session_data.__wrapped__(
- developer_id=developer_id, session_id=session_id
- )
-
- session_data_fields = ("session", "agents", "users")
-
- session_data_query += """
- :create _session_data_json {
- agents: [Json],
- users: [Json],
- session: Json,
- }
- """
-
- toolsets_query = """
- input[session_id] <- [[to_uuid($session_id)]]
-
- tools_by_agent[agent_id, collect(tool)] :=
- input[session_id],
- *session_lookup{
- session_id,
- participant_id: agent_id,
- participant_type: "agent",
- },
-
- *tools { agent_id, tool_id, name, type, spec, description, updated_at, created_at },
- tool = {
- "id": tool_id,
- "name": name,
- "type": type,
- "spec": spec,
- "description": description,
- "updated_at": updated_at,
- "created_at": created_at,
- }
-
- agent_toolsets[collect(toolset)] :=
- tools_by_agent[agent_id, tools],
- toolset = {
- "agent_id": agent_id,
- "tools": tools,
- }
-
- ?[toolsets] :=
- agent_toolsets[toolsets]
-
- :create _toolsets_json {
- toolsets: [Json],
- }
- """
-
- combine_query = f"""
- ?[{', '.join(session_data_fields)}, toolsets] :=
- *_session_data_json {{ {', '.join(session_data_fields)} }},
- *_toolsets_json {{ toolsets }}
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- session_data_query,
- toolsets_query,
- combine_query,
- ]
-
return (
- queries,
- {
- "session_id": str(session_id),
- **sd_vars,
- },
+ [query],
+ [developer_id, session_id],
)
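
The returned pair — one SQL string plus a positional parameter list — is what the new `pg_query` decorator consumes. A hedged sketch of the execution side, assuming asyncpg-style `$1`/`$2` placeholders (pooling and the actual dispatch live in queries/utils.py; `dsn` is a hypothetical connection string):

    import asyncpg

    async def run_queries(dsn: str, queries: list[str], params: list):
        # Each statement gets the same positional parameters:
        # $1 <- params[0], $2 <- params[1], ...
        conn = await asyncpg.connect(dsn)
        try:
            rows = []
            for q in queries:
                rows = await conn.fetch(q, *params)
            return [dict(r) for r in rows]  # plain dicts for the transform step
        finally:
            await conn.close()
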
From c9fc7579c08b65c1203deec0a81deb5b5e6060ec Mon Sep 17 00:00:00 2001
From: whiterabbit1983
Date: Thu, 19 Dec 2024 12:38:09 +0000
Subject: [PATCH 147/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/chat/gather_messages.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/agents-api/agents_api/queries/chat/gather_messages.py b/agents-api/agents_api/queries/chat/gather_messages.py
index 34a7c564f..4fd574368 100644
--- a/agents-api/agents_api/queries/chat/gather_messages.py
+++ b/agents-api/agents_api/queries/chat/gather_messages.py
@@ -9,6 +9,7 @@
from ...clients import litellm
from ...common.protocol.developers import Developer
from ...common.protocol.sessions import ChatContext
+
# from ..docs.search_docs_by_embedding import search_docs_by_embedding
# from ..docs.search_docs_by_text import search_docs_by_text
# from ..docs.search_docs_hybrid import search_docs_hybrid
From 1d2bd9a4342c0e7bb095dfcfa6087c182084afd2 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Thu, 19 Dec 2024 15:49:10 +0300
Subject: [PATCH 148/274] feat: Add SQL validation
---
agents-api/agents_api/exceptions.py | 9 ++
.../queries/chat/prepare_chat_context.py | 90 ++++++++++---------
2 files changed, 56 insertions(+), 43 deletions(-)
diff --git a/agents-api/agents_api/exceptions.py b/agents-api/agents_api/exceptions.py
index 615958a87..f6fcc4741 100644
--- a/agents-api/agents_api/exceptions.py
+++ b/agents-api/agents_api/exceptions.py
@@ -49,3 +49,12 @@ class FailedEncodingSentinel:
"""Sentinel object returned when failed to encode payload."""
payload_data: bytes
+
+
+class QueriesBaseException(AgentsBaseException):
+ pass
+
+
+class InvalidSQLQuery(QueriesBaseException):
+ def __init__(self, query_name: str):
+ super().__init__(f"invalid query: {query_name}")
diff --git a/agents-api/agents_api/queries/chat/prepare_chat_context.py b/agents-api/agents_api/queries/chat/prepare_chat_context.py
index 23926ea4c..1d9bd52fb 100644
--- a/agents-api/agents_api/queries/chat/prepare_chat_context.py
+++ b/agents-api/agents_api/queries/chat/prepare_chat_context.py
@@ -1,9 +1,11 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
from ...common.protocol.sessions import ChatContext, make_session
+from ...exceptions import InvalidSQLQuery
from ..utils import (
pg_query,
wrap_in_class,
@@ -13,19 +15,19 @@
T = TypeVar("T")
-query = """
-SELECT * FROM
+sql_query = sqlvalidator.parse(
+ """SELECT * FROM
(
SELECT jsonb_agg(u) AS users FROM (
SELECT
session_lookup.participant_id,
users.user_id AS id,
- users.developer_id,
- users.name,
- users.about,
- users.created_at,
- users.updated_at,
- users.metadata
+ users.developer_id,
+ users.name,
+ users.about,
+ users.created_at,
+ users.updated_at,
+ users.metadata
FROM session_lookup
INNER JOIN users ON session_lookup.participant_id = users.user_id
WHERE
@@ -39,16 +41,16 @@
SELECT
session_lookup.participant_id,
agents.agent_id AS id,
- agents.developer_id,
- agents.canonical_name,
- agents.name,
- agents.about,
- agents.instructions,
- agents.model,
- agents.created_at,
- agents.updated_at,
- agents.metadata,
- agents.default_settings
+ agents.developer_id,
+ agents.canonical_name,
+ agents.name,
+ agents.about,
+ agents.instructions,
+ agents.model,
+ agents.created_at,
+ agents.updated_at,
+ agents.metadata,
+ agents.default_settings
FROM session_lookup
INNER JOIN agents ON session_lookup.participant_id = agents.agent_id
WHERE
@@ -58,24 +60,24 @@
) a
) AS agents,
(
- SELECT to_jsonb(s) AS session FROM (
+ SELECT to_jsonb(s) AS session FROM (
SELECT
sessions.session_id AS id,
- sessions.developer_id,
- sessions.situation,
- sessions.system_template,
- sessions.created_at,
- sessions.metadata,
- sessions.render_templates,
- sessions.token_budget,
- sessions.context_overflow,
- sessions.forward_tool_calls,
- sessions.recall_options
+ sessions.developer_id,
+ sessions.situation,
+ sessions.system_template,
+ sessions.created_at,
+ sessions.metadata,
+ sessions.render_templates,
+ sessions.token_budget,
+ sessions.context_overflow,
+ sessions.forward_tool_calls,
+ sessions.recall_options
FROM sessions
WHERE
developer_id = $1 AND
session_id = $2
- LIMIT 1
+ LIMIT 1
) s
) AS session,
(
@@ -83,16 +85,16 @@
SELECT
session_lookup.participant_id,
tools.tool_id as id,
- tools.developer_id,
- tools.agent_id,
- tools.task_id,
- tools.task_version,
- tools.type,
- tools.name,
- tools.description,
- tools.spec,
- tools.updated_at,
- tools.created_at
+ tools.developer_id,
+ tools.agent_id,
+ tools.task_id,
+ tools.task_version,
+ tools.type,
+ tools.name,
+ tools.description,
+ tools.spec,
+ tools.updated_at,
+ tools.created_at
FROM session_lookup
INNER JOIN tools ON session_lookup.participant_id = tools.agent_id
WHERE
@@ -100,8 +102,10 @@
session_id = $2 AND
session_lookup.participant_type = 'agent'
) r
-) AS toolsets
-"""
+) AS toolsets"""
+)
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("prepare_chat_context")
def _transform(d):
@@ -160,6 +164,6 @@ async def prepare_chat_context(
"""
return (
- [query],
+ [sql_query.format()],
[developer_id, session_id],
)
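
The pattern introduced here validates each statement once, at import time, so a malformed query aborts module load instead of failing on the first request. In isolation (query and name hypothetical):

    import sqlvalidator

    sql_query = sqlvalidator.parse("SELECT * FROM tools WHERE developer_id = $1")

    if not sql_query.is_valid():       # runs when the module is imported
        raise InvalidSQLQuery("example_query")

    text = sql_query.format()          # formatted SQL string handed to the driver
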
From 1bc8fe3439f38bede9615aa784dbfcd50d21b89c Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 11:49:21 +0300
Subject: [PATCH 149/274] chore: Import other required queries
---
agents-api/agents_api/queries/chat/gather_messages.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/agents-api/agents_api/queries/chat/gather_messages.py b/agents-api/agents_api/queries/chat/gather_messages.py
index 4fd574368..94d5fe71a 100644
--- a/agents-api/agents_api/queries/chat/gather_messages.py
+++ b/agents-api/agents_api/queries/chat/gather_messages.py
@@ -10,10 +10,10 @@
from ...common.protocol.developers import Developer
from ...common.protocol.sessions import ChatContext
-# from ..docs.search_docs_by_embedding import search_docs_by_embedding
-# from ..docs.search_docs_by_text import search_docs_by_text
-# from ..docs.search_docs_hybrid import search_docs_hybrid
-# from ..entry.get_history import get_history
+from ..docs.search_docs_by_embedding import search_docs_by_embedding
+from ..docs.search_docs_by_text import search_docs_by_text
+from ..docs.search_docs_hybrid import search_docs_hybrid
+from ..entries.get_history import get_history
from ..sessions.get_session import get_session
from ..utils import (
partialclass,
From 2975407ab98d19bfde1ecac7ce2f574310efa7d1 Mon Sep 17 00:00:00 2001
From: whiterabbit1983
Date: Fri, 20 Dec 2024 08:50:13 +0000
Subject: [PATCH 150/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/chat/gather_messages.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/agents-api/agents_api/queries/chat/gather_messages.py b/agents-api/agents_api/queries/chat/gather_messages.py
index 94d5fe71a..cbf3bf209 100644
--- a/agents-api/agents_api/queries/chat/gather_messages.py
+++ b/agents-api/agents_api/queries/chat/gather_messages.py
@@ -9,7 +9,6 @@
from ...clients import litellm
from ...common.protocol.developers import Developer
from ...common.protocol.sessions import ChatContext
-
from ..docs.search_docs_by_embedding import search_docs_by_embedding
from ..docs.search_docs_by_text import search_docs_by_text
from ..docs.search_docs_hybrid import search_docs_hybrid
From ba3027b0f94fa7e05d75959ae0ebe74711846168 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 12:37:19 +0300
Subject: [PATCH 151/274] chore: Move queries to another folder
---
.../{models => queries}/tools/__init__.py | 0
.../{models => queries}/tools/create_tools.py | 21 +++++++++----------
.../{models => queries}/tools/delete_tool.py | 0
.../{models => queries}/tools/get_tool.py | 0
.../tools/get_tool_args_from_metadata.py | 0
.../{models => queries}/tools/list_tools.py | 0
.../{models => queries}/tools/patch_tool.py | 0
.../{models => queries}/tools/update_tool.py | 0
8 files changed, 10 insertions(+), 11 deletions(-)
rename agents-api/agents_api/{models => queries}/tools/__init__.py (100%)
rename agents-api/agents_api/{models => queries}/tools/create_tools.py (89%)
rename agents-api/agents_api/{models => queries}/tools/delete_tool.py (100%)
rename agents-api/agents_api/{models => queries}/tools/get_tool.py (100%)
rename agents-api/agents_api/{models => queries}/tools/get_tool_args_from_metadata.py (100%)
rename agents-api/agents_api/{models => queries}/tools/list_tools.py (100%)
rename agents-api/agents_api/{models => queries}/tools/patch_tool.py (100%)
rename agents-api/agents_api/{models => queries}/tools/update_tool.py (100%)
diff --git a/agents-api/agents_api/models/tools/__init__.py b/agents-api/agents_api/queries/tools/__init__.py
similarity index 100%
rename from agents-api/agents_api/models/tools/__init__.py
rename to agents-api/agents_api/queries/tools/__init__.py
diff --git a/agents-api/agents_api/models/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
similarity index 89%
rename from agents-api/agents_api/models/tools/create_tools.py
rename to agents-api/agents_api/queries/tools/create_tools.py
index 578a1268d..0d2e0984c 100644
--- a/agents-api/agents_api/models/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -1,18 +1,18 @@
"""This module contains functions for creating tools in the CozoDB database."""
+import sqlvalidator
from typing import Any, TypeVar
from uuid import UUID
from beartype import beartype
from fastapi import HTTPException
-from pycozo.client import QueryException
from pydantic import ValidationError
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateToolRequest, Tool
from ...metrics.counters import increase_counter
from ..utils import (
- cozo_query,
+ pg_query,
partialclass,
rewrap_exceptions,
verify_developer_id_query,
@@ -24,14 +24,13 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- AssertionError: partialclass(HTTPException, status_code=400),
- }
-)
+# @rewrap_exceptions(
+# {
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# AssertionError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
Tool,
transform=lambda d: {
@@ -41,7 +40,7 @@
},
_kind="inserted",
)
-@cozo_query
+@pg_query
@increase_counter("create_tools")
@beartype
def create_tools(
diff --git a/agents-api/agents_api/models/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
similarity index 100%
rename from agents-api/agents_api/models/tools/delete_tool.py
rename to agents-api/agents_api/queries/tools/delete_tool.py
diff --git a/agents-api/agents_api/models/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
similarity index 100%
rename from agents-api/agents_api/models/tools/get_tool.py
rename to agents-api/agents_api/queries/tools/get_tool.py
diff --git a/agents-api/agents_api/models/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
similarity index 100%
rename from agents-api/agents_api/models/tools/get_tool_args_from_metadata.py
rename to agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
diff --git a/agents-api/agents_api/models/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
similarity index 100%
rename from agents-api/agents_api/models/tools/list_tools.py
rename to agents-api/agents_api/queries/tools/list_tools.py
diff --git a/agents-api/agents_api/models/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
similarity index 100%
rename from agents-api/agents_api/models/tools/patch_tool.py
rename to agents-api/agents_api/queries/tools/patch_tool.py
diff --git a/agents-api/agents_api/models/tools/update_tool.py b/agents-api/agents_api/queries/tools/update_tool.py
similarity index 100%
rename from agents-api/agents_api/models/tools/update_tool.py
rename to agents-api/agents_api/queries/tools/update_tool.py
From 5c060acea43bb44765ee6b716072296cad4e0a86 Mon Sep 17 00:00:00 2001
From: whiterabbit1983
Date: Fri, 20 Dec 2024 09:38:56 +0000
Subject: [PATCH 152/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/queries/tools/create_tools.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index 0d2e0984c..a54fa6973 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -1,9 +1,9 @@
"""This module contains functions for creating tools in the CozoDB database."""
-import sqlvalidator
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
from fastapi import HTTPException
from pydantic import ValidationError
@@ -12,8 +12,8 @@
from ...autogen.openapi_model import CreateToolRequest, Tool
from ...metrics.counters import increase_counter
from ..utils import (
- pg_query,
partialclass,
+ pg_query,
rewrap_exceptions,
verify_developer_id_query,
verify_developer_owns_resource_query,
From 47b7c7e7ee7794491941e8a5c479978b3cc79c5d Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 14:10:53 +0300
Subject: [PATCH 153/274] feat: Add create tools query
---
.../agents_api/queries/tools/create_tools.py | 103 +++++++-----------
1 file changed, 41 insertions(+), 62 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index a54fa6973..d50e98e80 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -5,18 +5,14 @@
import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pydantic import ValidationError
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateToolRequest, Tool
+from ...exceptions import InvalidSQLQuery
from ...metrics.counters import increase_counter
from ..utils import (
- partialclass,
pg_query,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ # rewrap_exceptions,
wrap_in_class,
)
@@ -24,6 +20,37 @@
T = TypeVar("T")
+sql_query = sqlvalidator.parse(
+ """INSERT INTO tools
+(
+ developer_id,
+ agent_id,
+ tool_id,
+ type,
+ name,
+ spec,
+ description
+)
+SELECT
+ $1,
+ $2,
+ $3,
+ $4,
+ $5,
+ $6,
+ $7
+WHERE NOT EXISTS (
+ SELECT null FROM tools
+ WHERE (agent_id, name) = ($2, $5)
+)
+RETURNING *
+"""
+)
+
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("create_tools")
+
+
# @rewrap_exceptions(
# {
# ValidationError: partialclass(HTTPException, status_code=400),
@@ -48,8 +75,8 @@ def create_tools(
developer_id: UUID,
agent_id: UUID,
data: list[CreateToolRequest],
- ignore_existing: bool = False,
-) -> tuple[list[str], dict]:
+ ignore_existing: bool = False, # TODO: what to do with this flag?
+) -> tuple[list[str], list]:
"""
Constructs a datalog query for inserting tool records into the 'agent_functions' relation in the CozoDB.
@@ -69,6 +96,7 @@ def create_tools(
tools_data = [
[
+ developer_id,
str(agent_id),
str(uuid7()),
tool.type,
@@ -79,57 +107,8 @@ def create_tools(
for tool in data
]
- ensure_tool_name_unique_query = """
- input[agent_id, tool_id, type, name, spec, description] <- $records
- ?[tool_id] :=
- input[agent_id, _, type, name, _, _],
- *tools{
- agent_id: to_uuid(agent_id),
- tool_id,
- type,
- name,
- spec,
- description,
- }
-
- :limit 1
- :assert none
- """
-
- # Datalog query for inserting new tool records into the 'tools' relation
- create_query = """
- input[agent_id, tool_id, type, name, spec, description] <- $records
-
- # Do not add duplicate
- ?[agent_id, tool_id, type, name, spec, description] :=
- input[agent_id, tool_id, type, name, spec, description],
- not *tools{
- agent_id: to_uuid(agent_id),
- type,
- name,
- }
-
- :insert tools {
- agent_id,
- tool_id,
- type,
- name,
- spec,
- description,
- }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- create_query,
- ]
-
- if not ignore_existing:
- queries.insert(
- -1,
- ensure_tool_name_unique_query,
- )
-
- return (queries, {"records": tools_data})
+ return (
+ sql_query.format(),
+ tools_data,
+ "fetchmany",
+ )
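
Two things worth noting in the new query: the `WHERE NOT EXISTS (... (agent_id, name) = ($2, $5))` guard turns a duplicate tool name into a silent no-op rather than a constraint error, and the third element of the return value (`"fetchmany"`) presumably tells `pg_query` to run the statement once per parameter row. A hedged sketch of that dispatch, assuming asyncpg:

    async def fetchmany(conn, query: str, parameter_rows: list[list]):
        # One INSERT ... RETURNING per tool; asyncpg's executemany() discards
        # the RETURNING rows, so fetch each row's result explicitly.
        results = []
        for row in parameter_rows:
            results.extend(await conn.fetch(query, *row))
        return results
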
From b774589abfd3f7569fe31f2d36393492a8b20dad Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 14:27:45 +0300
Subject: [PATCH 154/274] feat: Add delete tool query
---
.../agents_api/queries/tools/delete_tool.py | 69 +++++++++----------
1 file changed, 33 insertions(+), 36 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index c79cdfd29..59f561cf1 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -1,19 +1,14 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
+from ...exceptions import InvalidSQLQuery
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
@@ -21,20 +16,34 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+sql_query = sqlvalidator.parse("""
+DELETE FROM
+ tools
+WHERE
+ developer_id = $1 AND
+ agent_id = $2 AND
+ tool_id = $3
+RETURNING *
+""")
+
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("delete_tool")
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
ResourceDeletedResponse,
one=True,
transform=lambda d: {"id": d["tool_id"], "deleted_at": utcnow(), "jobs": [], **d},
_kind="deleted",
)
-@cozo_query
+@pg_query
@beartype
def delete_tool(
*,
@@ -42,27 +51,15 @@ def delete_tool(
agent_id: UUID,
tool_id: UUID,
) -> tuple[list[str], dict]:
+ developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
- delete_query = """
- # Delete function
- ?[tool_id, agent_id] <- [[
- to_uuid($tool_id),
- to_uuid($agent_id),
- ]]
-
- :delete tools {
- tool_id,
+ return (
+ sql_query.format(),
+ [
+ developer_id,
agent_id,
- }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- delete_query,
- ]
-
- return (queries, {"tool_id": tool_id, "agent_id": agent_id})
+ tool_id,
+ ],
+ )
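
`DELETE ... RETURNING *` lets the handler build the ResourceDeletedResponse from the deleted row without a follow-up SELECT; an empty result means the tool did not exist. In isolation, against an asyncpg connection:

    row = await conn.fetchrow(
        """
        DELETE FROM tools
        WHERE developer_id = $1 AND agent_id = $2 AND tool_id = $3
        RETURNING *
        """,
        developer_id, agent_id, tool_id,
    )
    if row is None:
        ...  # nothing matched; a caller would surface this as a 404
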
From b2806ac80c2cfb99cef9022b8f957797150c38d5 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 14:32:39 +0300
Subject: [PATCH 155/274] feat: Add get tool query
---
.../agents_api/queries/tools/get_tool.py | 76 ++++++++-----------
1 file changed, 30 insertions(+), 46 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
index 465fd2efe..3662725b8 100644
--- a/agents-api/agents_api/queries/tools/get_tool.py
+++ b/agents-api/agents_api/queries/tools/get_tool.py
@@ -1,32 +1,39 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ...autogen.openapi_model import Tool
+from ...exceptions import InvalidSQLQuery
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+sql_query = sqlvalidator.parse("""
+SELECT * FROM tools
+WHERE
+ developer_id = $1 AND
+ agent_id = $2 AND
+ tool_id = $3
+LIMIT 1
+""")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("get_tool")
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
Tool,
transform=lambda d: {
@@ -36,7 +43,7 @@
},
one=True,
)
-@cozo_query
+@pg_query
@beartype
def get_tool(
*,
@@ -44,38 +51,15 @@ def get_tool(
agent_id: UUID,
tool_id: UUID,
) -> tuple[list[str], dict]:
+ developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
- get_query = """
- input[agent_id, tool_id] <- [[to_uuid($agent_id), to_uuid($tool_id)]]
-
- ?[
+ return (
+ sql_query.format(),
+ [
+ developer_id,
agent_id,
tool_id,
- type,
- name,
- spec,
- updated_at,
- created_at,
- ] := input[agent_id, tool_id],
- *tools {
- agent_id,
- tool_id,
- name,
- type,
- spec,
- updated_at,
- created_at,
- }
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- get_query,
- ]
-
- return (queries, {"agent_id": agent_id, "tool_id": tool_id})
+ ],
+ )
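
`LIMIT 1` pairs with `one=True` on `wrap_in_class`: the decorator takes the first row, applies the transform, and validates it into the Tool model. A rough sketch of that helper, assuming the decorated query is awaitable by this point (the real one in queries/utils.py also handles `_kind` and empty results):

    def wrap_in_class(cls, one=False, transform=None, _kind=None):
        def decorator(fn):
            async def wrapper(*args, **kwargs):
                rows = await fn(*args, **kwargs)   # fn is the pg_query-wrapped query
                transform_ = transform or (lambda d: d)
                if one:
                    return cls(**transform_(dict(rows[0])))
                return [cls(**transform_(dict(r))) for r in rows]
            return wrapper
        return decorator
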
From cf184e0e5f2d1487f22e89561474ca976b85e80c Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 14:45:28 +0300
Subject: [PATCH 156/274] feat: Add list tools query
---
.../agents_api/queries/tools/list_tools.py | 92 ++++++++-----------
1 file changed, 37 insertions(+), 55 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
index 727bf8028..59fb1eff5 100644
--- a/agents-api/agents_api/queries/tools/list_tools.py
+++ b/agents-api/agents_api/queries/tools/list_tools.py
@@ -1,32 +1,43 @@
from typing import Any, Literal, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ...autogen.openapi_model import Tool
+from ...exceptions import InvalidSQLQuery
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
+sql_query = sqlvalidator.parse("""
+SELECT * FROM tools
+WHERE
+ developer_id = $1 AND
+ agent_id = $2
+ORDER BY
+    CASE WHEN $5 = 'created_at' AND $6 = 'desc' THEN tools.created_at END DESC,
+    CASE WHEN $5 = 'created_at' AND $6 = 'asc' THEN tools.created_at END ASC,
+    CASE WHEN $5 = 'updated_at' AND $6 = 'desc' THEN tools.updated_at END DESC,
+    CASE WHEN $5 = 'updated_at' AND $6 = 'asc' THEN tools.updated_at END ASC
+LIMIT $3 OFFSET $4;
+""")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("get_tool")
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
Tool,
transform=lambda d: {
@@ -38,7 +49,7 @@
**d,
},
)
-@cozo_query
+@pg_query
@beartype
def list_tools(
*,
@@ -49,46 +60,17 @@ def list_tools(
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
) -> tuple[list[str], dict]:
+ developer_id = str(developer_id)
agent_id = str(agent_id)
- sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
-
- list_query = f"""
- input[agent_id] <- [[to_uuid($agent_id)]]
-
- ?[
- agent_id,
- id,
- name,
- type,
- spec,
- description,
- updated_at,
- created_at,
- ] := input[agent_id],
- *tools {{
- agent_id,
- tool_id: id,
- name,
- type,
- spec,
- description,
- updated_at,
- created_at,
- }}
-
- :limit $limit
- :offset $offset
- :sort {sort}
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- list_query,
- ]
-
return (
- queries,
- {"agent_id": agent_id, "limit": limit, "offset": offset},
+ sql_query.format(),
+ [
+ developer_id,
+ agent_id,
+ limit,
+ offset,
+ sort_by,
+ direction,
+ ],
)
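
Column names and sort directions cannot be bound as `$n` parameters, so the query enumerates every (column, direction) pair as its own ORDER BY branch; branches whose condition is false evaluate to NULL and do not affect ordering. The trick in isolation:

    rows = await conn.fetch(
        """
        SELECT * FROM tools
        WHERE developer_id = $1 AND agent_id = $2
        ORDER BY
            CASE WHEN $5 = 'created_at' AND $6 = 'desc' THEN tools.created_at END DESC,
            CASE WHEN $5 = 'created_at' AND $6 = 'asc'  THEN tools.created_at END ASC,
            CASE WHEN $5 = 'updated_at' AND $6 = 'desc' THEN tools.updated_at END DESC,
            CASE WHEN $5 = 'updated_at' AND $6 = 'asc'  THEN tools.updated_at END ASC
        LIMIT $3 OFFSET $4
        """,
        developer_id, agent_id, limit, offset, sort_by, direction,
    )
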
From 53b65a19fea18a285f62d6758693ed44653de74c Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 15:21:09 +0300
Subject: [PATCH 157/274] feat: Add patch tool query
---
.../agents_api/queries/tools/patch_tool.py | 94 +++++++++----------
1 file changed, 43 insertions(+), 51 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index bc49b8121..aa663dec0 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -1,20 +1,14 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ...autogen.openapi_model import PatchToolRequest, ResourceUpdatedResponse
-from ...common.utils.cozo import cozo_process_mutate_data
+from ...exceptions import InvalidSQLQuery
from ...metrics.counters import increase_counter
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
@@ -22,25 +16,46 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
+sql_query = sqlvalidator.parse("""
+WITH updated_tools AS (
+ UPDATE tools
+ SET
+ type = COALESCE($4, type),
+ name = COALESCE($5, name),
+ description = COALESCE($6, description),
+ spec = COALESCE($7, spec)
+ WHERE
+ developer_id = $1 AND
+ agent_id = $2 AND
+ tool_id = $3
+ RETURNING *
)
+SELECT * FROM updated_tools;
+""")
+
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("patch_tool")
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
transform=lambda d: {"id": d["tool_id"], "jobs": [], **d},
_kind="inserted",
)
-@cozo_query
+@pg_query
@increase_counter("patch_tool")
@beartype
def patch_tool(
*, developer_id: UUID, agent_id: UUID, tool_id: UUID, data: PatchToolRequest
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
"""
Execute the datalog query and return the results as a DataFrame
Updates the tool information for a given agent and tool ID in the 'cozodb' database.
@@ -54,6 +69,7 @@ def patch_tool(
ResourceUpdatedResponse: The updated tool data.
"""
+ developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
@@ -78,39 +94,15 @@ def patch_tool(
if tool_spec:
del patch_data[tool_type]
- tool_cols, tool_vals = cozo_process_mutate_data(
- {
- **patch_data,
- "agent_id": agent_id,
- "tool_id": tool_id,
- }
- )
-
- # Construct the datalog query for updating the tool information
- patch_query = f"""
- input[{tool_cols}] <- $input
-
- ?[{tool_cols}, spec, updated_at] :=
- *tools {{
- agent_id: to_uuid($agent_id),
- tool_id: to_uuid($tool_id),
- spec: old_spec,
- }},
- input[{tool_cols}],
- spec = concat(old_spec, $spec),
- updated_at = now()
-
- :update tools {{ {tool_cols}, spec, updated_at }}
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- patch_query,
- ]
-
return (
- queries,
- dict(input=tool_vals, spec=tool_spec, agent_id=agent_id, tool_id=tool_id),
+ sql_query.format(),
+ [
+ developer_id,
+ agent_id,
+ tool_id,
+ tool_type,
+ data.name,
+ data.description,
+ tool_spec,
+ ],
)
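
`COALESCE($n, column)` is what gives this endpoint PATCH semantics: a field bound as NULL keeps its stored value, while a non-NULL binding replaces it. Reduced to two columns:

    row = await conn.fetchrow(
        """
        UPDATE tools
        SET name        = COALESCE($2, name),
            description = COALESCE($3, description)
        WHERE tool_id = $1
        RETURNING *
        """,
        tool_id, None, "new description",  # name kept, description replaced
    )
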
From 3299d54a16fb0485394f1a4d3658e6db4a768d73 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 15:21:21 +0300
Subject: [PATCH 158/274] fix: Fix return types
---
agents-api/agents_api/queries/tools/delete_tool.py | 2 +-
agents-api/agents_api/queries/tools/get_tool.py | 2 +-
agents-api/agents_api/queries/tools/list_tools.py | 4 ++--
3 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index 59f561cf1..17535e1e4 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -50,7 +50,7 @@ def delete_tool(
developer_id: UUID,
agent_id: UUID,
tool_id: UUID,
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
diff --git a/agents-api/agents_api/queries/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
index 3662725b8..af63be0c9 100644
--- a/agents-api/agents_api/queries/tools/get_tool.py
+++ b/agents-api/agents_api/queries/tools/get_tool.py
@@ -50,7 +50,7 @@ def get_tool(
developer_id: UUID,
agent_id: UUID,
tool_id: UUID,
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
diff --git a/agents-api/agents_api/queries/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
index 59fb1eff5..3dac84875 100644
--- a/agents-api/agents_api/queries/tools/list_tools.py
+++ b/agents-api/agents_api/queries/tools/list_tools.py
@@ -28,7 +28,7 @@
""")
if not sql_query.is_valid():
- raise InvalidSQLQuery("get_tool")
+ raise InvalidSQLQuery("list_tools")
# @rewrap_exceptions(
@@ -59,7 +59,7 @@ def list_tools(
offset: int = 0,
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
developer_id = str(developer_id)
agent_id = str(agent_id)
From 5e94d332119c88dca9c7dbba5cf601818958e4d5 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Fri, 20 Dec 2024 15:31:48 +0300
Subject: [PATCH 159/274] feat: Add update tool query
---
.../agents_api/queries/tools/update_tool.py | 93 ++++++++-----------
1 file changed, 41 insertions(+), 52 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/update_tool.py b/agents-api/agents_api/queries/tools/update_tool.py
index ef700a5f6..356e28bbf 100644
--- a/agents-api/agents_api/queries/tools/update_tool.py
+++ b/agents-api/agents_api/queries/tools/update_tool.py
@@ -1,44 +1,55 @@
from typing import Any, TypeVar
from uuid import UUID
+import sqlvalidator
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ...autogen.openapi_model import (
ResourceUpdatedResponse,
UpdateToolRequest,
)
-from ...common.utils.cozo import cozo_process_mutate_data
+from ...exceptions import InvalidSQLQuery
from ...metrics.counters import increase_counter
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+sql_query = sqlvalidator.parse("""
+UPDATE tools
+SET
+ type = $4,
+ name = $5,
+ description = $6,
+ spec = $7
+WHERE
+ developer_id = $1 AND
+ agent_id = $2 AND
+ tool_id = $3
+RETURNING *;
+""")
+
+if not sql_query.is_valid():
+ raise InvalidSQLQuery("update_tool")
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
transform=lambda d: {"id": d["tool_id"], "jobs": [], **d},
_kind="inserted",
)
-@cozo_query
+@pg_query
@increase_counter("update_tool")
@beartype
def update_tool(
@@ -48,7 +59,8 @@ def update_tool(
tool_id: UUID,
data: UpdateToolRequest,
**kwargs,
-) -> tuple[list[str], dict]:
+) -> tuple[list[str], list]:
+ developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
@@ -72,38 +84,15 @@ def update_tool(
update_data["spec"] = tool_spec
del update_data[tool_type]
- tool_cols, tool_vals = cozo_process_mutate_data(
- {
- **update_data,
- "agent_id": agent_id,
- "tool_id": tool_id,
- }
- )
-
- # Construct the datalog query for updating the tool information
- patch_query = f"""
- input[{tool_cols}] <- $input
-
- ?[{tool_cols}, created_at, updated_at] :=
- *tools {{
- agent_id: to_uuid($agent_id),
- tool_id: to_uuid($tool_id),
- created_at
- }},
- input[{tool_cols}],
- updated_at = now()
-
- :put tools {{ {tool_cols}, created_at, updated_at }}
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- patch_query,
- ]
-
return (
- queries,
- dict(input=tool_vals, spec=tool_spec, agent_id=agent_id, tool_id=tool_id),
+ sql_query.format(),
+ [
+ developer_id,
+ agent_id,
+ tool_id,
+ tool_type,
+ data.name,
+ data.description,
+ tool_spec,
+ ],
)
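
Contrast with patch_tool two patches up: here every column is assigned directly, so this is PUT-style replacement — a None bound for $5 or $6 really writes NULL rather than preserving the stored value, which is why UpdateToolRequest is expected to carry the complete tool definition. Schematically (with `update_sql` standing in for the statement above):

    # PUT semantics: all columns overwritten, no COALESCE guard
    await conn.fetchrow(update_sql, developer_id, agent_id, tool_id,
                        tool_type, data.name, data.description, tool_spec)
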
From 0252a8870aafdba70213e7271a18f083b4b948be Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Sat, 21 Dec 2024 21:05:17 +0300
Subject: [PATCH 160/274] wip: Migrate get_tool_args_from_metadata to pg_query
---
.../tools/get_tool_args_from_metadata.py | 33 +++++--------------
1 file changed, 9 insertions(+), 24 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index 2cdb92cb9..a8a9dba1a 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -2,16 +2,9 @@
from uuid import UUID
from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
+ pg_query,
wrap_in_class,
)
@@ -51,10 +44,6 @@ def tool_args_for_task(
"""
queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "tasks", task_id=task_id, parents=[("agents", "agent_id")]
- ),
get_query,
]
@@ -95,25 +84,21 @@ def tool_args_for_session(
"""
queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
get_query,
]
return (queries, {"agent_id": agent_id, "session_id": session_id})
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
@wrap_in_class(dict, transform=lambda x: x["values"], one=True)
-@cozo_query
+@pg_query
@beartype
def get_tool_args_from_metadata(
*,
From d209e773c6340d618333fecea4b78309774bac6e Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Mon, 23 Dec 2024 11:32:13 +0300
Subject: [PATCH 161/274] feat: Add tools args from metadata query
---
.../tools/get_tool_args_from_metadata.py | 151 +++++++-----------
1 file changed, 59 insertions(+), 92 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index a8a9dba1a..57453cd34 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -1,93 +1,62 @@
from typing import Literal
from uuid import UUID
+import sqlvalidator
from beartype import beartype
+from ...exceptions import InvalidSQLQuery
from ..utils import (
pg_query,
wrap_in_class,
)
+tools_args_for_task_query = sqlvalidator.parse(
+ """SELECT COALESCE(agents_md || tasks_md, agents_md, tasks_md, '{}') as values FROM (
+ SELECT
+ CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
+ WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
+ WHEN $3 = 'x-integrations-setup' then metadata->'x-integrations-setup'
+ WHEN $3 = 'x-api_call-setup' then metadata->'x-api_call-setup' END AS agents_md
+ FROM agents
+ WHERE agent_id = $1 AND developer_id = $4 LIMIT 1
+) AS agents_md,
+(
+ SELECT
+ CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
+ WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
+ WHEN $3 = 'x-integrations-setup' then metadata->'x-integrations-setup'
+ WHEN $3 = 'x-api_call-setup' then metadata->'x-api_call-setup' END AS tasks_md
+ FROM tasks
+ WHERE task_id = $2 AND developer_id = $4 LIMIT 1
+) AS tasks_md"""
+)
-def tool_args_for_task(
- *,
- developer_id: UUID,
- agent_id: UUID,
- task_id: UUID,
- tool_type: Literal["integration", "api_call"] = "integration",
- arg_type: Literal["args", "setup"] = "args",
-) -> tuple[list[str], dict]:
- agent_id = str(agent_id)
- task_id = str(task_id)
-
- get_query = f"""
- input[agent_id, task_id] <- [[to_uuid($agent_id), to_uuid($task_id)]]
-
- ?[values] :=
- input[agent_id, task_id],
- *tasks {{
- task_id,
- metadata: task_metadata,
- }},
- *agents {{
- agent_id,
- metadata: agent_metadata,
- }},
- task_{arg_type} = get(task_metadata, "x-{tool_type}-{arg_type}", {{}}),
- agent_{arg_type} = get(agent_metadata, "x-{tool_type}-{arg_type}", {{}}),
-
- # Right values overwrite left values
- # See: https://docs.cozodb.org/en/latest/functions.html#Func.Vector.concat
- values = concat(agent_{arg_type}, task_{arg_type}),
-
- :limit 1
- """
-
- queries = [
- get_query,
- ]
-
- return (queries, {"agent_id": agent_id, "task_id": task_id})
-
-
-def tool_args_for_session(
- *,
- developer_id: UUID,
- session_id: UUID,
- agent_id: UUID,
- arg_type: Literal["args", "setup"] = "args",
- tool_type: Literal["integration", "api_call"] = "integration",
-) -> tuple[list[str], dict]:
- session_id = str(session_id)
-
- get_query = f"""
- input[session_id, agent_id] <- [[to_uuid($session_id), to_uuid($agent_id)]]
-
- ?[values] :=
- input[session_id, agent_id],
- *sessions {{
- session_id,
- metadata: session_metadata,
- }},
- *agents {{
- agent_id,
- metadata: agent_metadata,
- }},
- session_{arg_type} = get(session_metadata, "x-{tool_type}-{arg_type}", {{}}),
- agent_{arg_type} = get(agent_metadata, "x-{tool_type}-{arg_type}", {{}}),
-
- # Right values overwrite left values
- # See: https://docs.cozodb.org/en/latest/functions.html#Func.Vector.concat
- values = concat(agent_{arg_type}, session_{arg_type}),
-
- :limit 1
- """
-
- queries = [
- get_query,
- ]
+if not tools_args_for_task_query.is_valid():
+ raise InvalidSQLQuery("tools_args_for_task_query")
+
+tool_args_for_session_query = sqlvalidator.parse(
+ """SELECT COALESCE(agents_md || sessions_md, agents_md, sessions_md, '{}') as values FROM (
+ SELECT
+ CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
+ WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
+ WHEN $3 = 'x-integrations-setup' then metadata->'x-integrations-setup'
+ WHEN $3 = 'x-api_call-setup' then metadata->'x-api_call-setup' END AS agents_md
+ FROM agents
+ WHERE agent_id = $1 AND developer_id = $4 LIMIT 1
+) AS agents_md,
+(
+ SELECT
+ CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
+ WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
+ WHEN $3 = 'x-integrations-setup' then metadata->'x-integrations-setup'
+        WHEN $3 = 'x-api_call-setup' then metadata->'x-api_call-setup' END AS sessions_md
+ FROM sessions
+ WHERE session_id = $2 AND developer_id = $4 LIMIT 1
+) AS sessions_md"""
+)
- return (queries, {"agent_id": agent_id, "session_id": session_id})
+if not tool_args_for_session_query.is_valid():
+ raise InvalidSQLQuery("tool_args_for_session")
# @rewrap_exceptions(
@@ -108,25 +77,23 @@ def get_tool_args_from_metadata(
task_id: UUID | None = None,
tool_type: Literal["integration", "api_call"] = "integration",
arg_type: Literal["args", "setup", "headers"] = "args",
-) -> tuple[list[str], dict]:
- common: dict = dict(
- developer_id=developer_id,
- agent_id=agent_id,
- tool_type=tool_type,
- arg_type=arg_type,
- )
-
+) -> tuple[list[str], list]:
match session_id, task_id:
case (None, task_id) if task_id is not None:
- return tool_args_for_task(
- **common,
- task_id=task_id,
+ return (
+ tools_args_for_task_query.format(),
+ [
+ agent_id,
+ task_id,
+ f"x-{tool_type}-{arg_type}",
+ developer_id,
+ ],
)
case (session_id, None) if session_id is not None:
- return tool_args_for_session(
- **common,
- session_id=session_id,
+ return (
+ tool_args_for_session_query.format(),
+ [agent_id, session_id, f"x-{tool_type}-{arg_type}", developer_id],
)
case (_, _):
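
In the rewritten queries, agents_md and tasks_md (or sessions_md) are one-column subselects holding the matching JSONB blob, or NULL when the metadata key is absent. Postgres's || operator on two jsonb objects keeps the right-hand value on key conflicts, and COALESCE falls back to whichever operand is non-NULL, then to the empty object, so task- or session-level settings override agent-level ones. The same right-biased merge as a sketch in Python:

    def merge_metadata(agent_md: dict | None, other_md: dict | None) -> dict:
        # jsonb `||` with both sides present is {**left, **right}: right wins.
        if agent_md is not None and other_md is not None:
            return {**agent_md, **other_md}
        # COALESCE(..., agents_md, tasks_md, '{}') handles the NULL cases.
        return agent_md if agent_md is not None else (other_md or {})

    assert merge_metadata({"key": "a", "base": 1}, {"key": "t"}) == {"key": "t", "base": 1}
    assert merge_metadata(None, None) == {}
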
From 8b54c80e56a3885397ae6b3dd2dd924ac3c01417 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Mon, 23 Dec 2024 15:54:33 +0530
Subject: [PATCH 162/274] fix(agents-api): Fix fixtures and initialization for
app postgres client
Signed-off-by: Diwank Singh Tomer
---
agents-api/agents_api/app.py | 10 +++-
agents-api/agents_api/routers/__init__.py | 14 ++---
.../agents_api/routers/agents/__init__.py | 24 ++++----
agents-api/agents_api/web.py | 12 ++--
agents-api/tests/fixtures.py | 10 ++--
agents-api/tests/test_agent_routes.py | 58 +++++++++----------
6 files changed, 66 insertions(+), 62 deletions(-)
diff --git a/agents-api/agents_api/app.py b/agents-api/agents_api/app.py
index ced41decb..654561dd2 100644
--- a/agents-api/agents_api/app.py
+++ b/agents-api/agents_api/app.py
@@ -1,4 +1,5 @@
from contextlib import asynccontextmanager
+import os
from fastapi import FastAPI
from prometheus_fastapi_instrumentator import Instrumentator
@@ -9,13 +10,16 @@
@asynccontextmanager
async def lifespan(app: FastAPI):
- if not app.state.postgres_pool:
- app.state.postgres_pool = await create_db_pool()
+ db_dsn = os.environ.get("DB_DSN")
+
+ if not getattr(app.state, "postgres_pool", None):
+ app.state.postgres_pool = await create_db_pool(db_dsn)
yield
- if app.state.postgres_pool:
+ if getattr(app.state, "postgres_pool", None):
await app.state.postgres_pool.close()
+ app.state.postgres_pool = None
app: FastAPI = FastAPI(
diff --git a/agents-api/agents_api/routers/__init__.py b/agents-api/agents_api/routers/__init__.py
index 4e2d7b881..328e1e918 100644
--- a/agents-api/agents_api/routers/__init__.py
+++ b/agents-api/agents_api/routers/__init__.py
@@ -18,10 +18,10 @@
# SCRUM-21
from .agents import router as agents_router
-from .docs import router as docs_router
-from .files import router as files_router
-from .internal import router as internal_router
-from .jobs import router as jobs_router
-from .sessions import router as sessions_router
-from .tasks import router as tasks_router
-from .users import router as users_router
+# from .docs import router as docs_router
+# from .files import router as files_router
+# from .internal import router as internal_router
+# from .jobs import router as jobs_router
+# from .sessions import router as sessions_router
+# from .tasks import router as tasks_router
+# from .users import router as users_router
diff --git a/agents-api/agents_api/routers/agents/__init__.py b/agents-api/agents_api/routers/agents/__init__.py
index 2eadecb3d..484be3363 100644
--- a/agents-api/agents_api/routers/agents/__init__.py
+++ b/agents-api/agents_api/routers/agents/__init__.py
@@ -1,15 +1,15 @@
# ruff: noqa: F401
from .create_agent import create_agent
-from .create_agent_tool import create_agent_tool
-from .create_or_update_agent import create_or_update_agent
-from .delete_agent import delete_agent
-from .delete_agent_tool import delete_agent_tool
-from .get_agent_details import get_agent_details
-from .list_agent_tools import list_agent_tools
-from .list_agents import list_agents
-from .patch_agent import patch_agent
-from .patch_agent_tool import patch_agent_tool
-from .router import router
-from .update_agent import update_agent
-from .update_agent_tool import update_agent_tool
+# from .create_agent_tool import create_agent_tool
+# from .create_or_update_agent import create_or_update_agent
+# from .delete_agent import delete_agent
+# from .delete_agent_tool import delete_agent_tool
+# from .get_agent_details import get_agent_details
+# from .list_agent_tools import list_agent_tools
+# from .list_agents import list_agents
+# from .patch_agent import patch_agent
+# from .patch_agent_tool import patch_agent_tool
+# from .router import router
+# from .update_agent import update_agent
+# from .update_agent_tool import update_agent_tool
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index a04a7fc66..419070b29 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -9,7 +9,7 @@
import sentry_sdk
import uvicorn
import uvloop
-from fastapi import APIRouter, FastAPI, Request, status
+from fastapi import APIRouter, Depends, FastAPI, Request, status
from fastapi.exceptions import HTTPException, RequestValidationError
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
@@ -20,11 +20,12 @@
from .app import app
from .common.exceptions import BaseCommonException
+from .dependencies.auth import get_api_key
from .env import api_prefix, hostname, protocol, public_port, sentry_dsn
from .exceptions import PromptTooBigError
-# from .routers import (
-# agents,
+from .routers import (
+ agents,
# docs,
# files,
# internal,
@@ -32,7 +33,7 @@
# sessions,
# tasks,
# users,
-# )
+)
if not sentry_dsn:
print("Sentry DSN not found. Sentry will not be enabled.")
@@ -144,7 +145,6 @@ def register_exceptions(app: FastAPI) -> None:
# See: https://fastapi.tiangolo.com/tutorial/bigger-applications/
#
-
# Create a new router for the docs
scalar_router = APIRouter()
@@ -162,7 +162,7 @@ async def scalar_html():
app.include_router(scalar_router)
# Add other routers with the get_api_key dependency
-# app.include_router(agents.router, dependencies=[Depends(get_api_key)])
+app.include_router(agents.router.router, dependencies=[Depends(get_api_key)])
# app.include_router(sessions.router, dependencies=[Depends(get_api_key)])
# app.include_router(users.router, dependencies=[Depends(get_api_key)])
# app.include_router(jobs.router, dependencies=[Depends(get_api_key)])
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index ea3866ff2..4da4eb6fd 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,3 +1,4 @@
+import os
import random
import string
from uuid import UUID
@@ -384,12 +385,11 @@ async def test_session(
@fixture(scope="global")
-async def client(dsn=pg_dsn):
- pool = await create_db_pool(dsn=dsn)
+def client(dsn=pg_dsn):
+ os.environ["DB_DSN"] = dsn
- client = TestClient(app=app)
- client.state.postgres_pool = pool
- return client
+ with TestClient(app=app) as client:
+ yield client
@fixture(scope="global")
diff --git a/agents-api/tests/test_agent_routes.py b/agents-api/tests/test_agent_routes.py
index 95e8e7558..d4e4a3a61 100644
--- a/agents-api/tests/test_agent_routes.py
+++ b/agents-api/tests/test_agent_routes.py
@@ -1,43 +1,43 @@
# # Tests for agent queries
-# from uuid_extensions import uuid7
-# from ward import test
+from uuid_extensions import uuid7
+from ward import test
-# from tests.fixtures import client, make_request, test_agent
+from tests.fixtures import client, make_request, test_agent
-# @test("route: unauthorized should fail")
-# def _(client=client):
-# data = dict(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# )
+@test("route: unauthorized should fail")
+def _(client=client):
+ data = dict(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ )
-# response = client.request(
-# method="POST",
-# url="/agents",
-# json=data,
-# )
+ response = client.request(
+ method="POST",
+ url="/agents",
+ json=data,
+ )
-# assert response.status_code == 403
+ assert response.status_code == 403
-# @test("route: create agent")
-# def _(make_request=make_request):
-# data = dict(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# )
+@test("route: create agent")
+def _(make_request=make_request):
+ data = dict(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ )
-# response = make_request(
-# method="POST",
-# url="/agents",
-# json=data,
-# )
+ response = make_request(
+ method="POST",
+ url="/agents",
+ json=data,
+ )
-# assert response.status_code == 201
+ assert response.status_code == 201
# @test("route: create agent with instructions")
From dfa578547e1d14c94b7c95fb936d350960fac935 Mon Sep 17 00:00:00 2001
From: creatorrr
Date: Mon, 23 Dec 2024 10:25:29 +0000
Subject: [PATCH 163/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/app.py | 2 +-
agents-api/agents_api/web.py | 15 +++++++--------
2 files changed, 8 insertions(+), 9 deletions(-)
diff --git a/agents-api/agents_api/app.py b/agents-api/agents_api/app.py
index 654561dd2..e7903f175 100644
--- a/agents-api/agents_api/app.py
+++ b/agents-api/agents_api/app.py
@@ -1,5 +1,5 @@
-from contextlib import asynccontextmanager
import os
+from contextlib import asynccontextmanager
from fastapi import FastAPI
from prometheus_fastapi_instrumentator import Instrumentator
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index 419070b29..61e6f5ea6 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -23,16 +23,15 @@
from .dependencies.auth import get_api_key
from .env import api_prefix, hostname, protocol, public_port, sentry_dsn
from .exceptions import PromptTooBigError
-
from .routers import (
agents,
-# docs,
-# files,
-# internal,
-# jobs,
-# sessions,
-# tasks,
-# users,
+ # docs,
+ # files,
+ # internal,
+ # jobs,
+ # sessions,
+ # tasks,
+ # users,
)
if not sentry_dsn:
From 583bf66c89fc35063f6c9afc0d030a279f2595d0 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Mon, 23 Dec 2024 14:42:53 +0300
Subject: [PATCH 164/274] fix: Remove sql validation, fix tests
---
.../agents_api/queries/tools/create_tools.py | 18 +-
.../agents_api/queries/tools/delete_tool.py | 13 +-
.../agents_api/queries/tools/get_tool.py | 16 +-
.../tools/get_tool_args_from_metadata.py | 26 +-
.../agents_api/queries/tools/list_tools.py | 23 +-
.../agents_api/queries/tools/patch_tool.py | 15 +-
.../agents_api/queries/tools/update_tool.py | 15 +-
agents-api/tests/fixtures.py | 88 ++---
agents-api/tests/test_tool_queries.py | 344 +++++++++---------
9 files changed, 281 insertions(+), 277 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index d50e98e80..075497541 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -20,8 +20,7 @@
T = TypeVar("T")
-sql_query = sqlvalidator.parse(
- """INSERT INTO tools
+sql_query = """INSERT INTO tools
(
developer_id,
agent_id,
@@ -45,10 +44,10 @@
)
RETURNING *
"""
-)
-if not sql_query.is_valid():
- raise InvalidSQLQuery("create_tools")
+
+# if not sql_query.is_valid():
+# raise InvalidSQLQuery("create_tools")
# @rewrap_exceptions(
@@ -61,22 +60,21 @@
@wrap_in_class(
Tool,
transform=lambda d: {
- "id": UUID(d.pop("tool_id")),
+ "id": d.pop("tool_id"),
d["type"]: d.pop("spec"),
**d,
},
- _kind="inserted",
)
@pg_query
@increase_counter("create_tools")
@beartype
-def create_tools(
+async def create_tools(
*,
developer_id: UUID,
agent_id: UUID,
data: list[CreateToolRequest],
ignore_existing: bool = False, # TODO: what to do with this flag?
-) -> tuple[list[str], list]:
+) -> tuple[str, list] | tuple[str, list, str]:
"""
Constructs the SQL query for inserting tool records into the 'tools' table in PostgreSQL.
@@ -108,7 +106,7 @@ def create_tools(
]
return (
- sql_query.format(),
+ sql_query,
tools_data,
"fetchmany",
)
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index 17535e1e4..c67cdaba5 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -16,7 +16,7 @@
T = TypeVar("T")
-sql_query = sqlvalidator.parse("""
+sql_query = """
DELETE FROM
tools
WHERE
@@ -24,10 +24,10 @@
agent_id = $2 AND
tool_id = $3
RETURNING *
-""")
+"""
-if not sql_query.is_valid():
- raise InvalidSQLQuery("delete_tool")
+# if not sql_query.is_valid():
+# raise InvalidSQLQuery("delete_tool")
# @rewrap_exceptions(
@@ -41,11 +41,10 @@
ResourceDeletedResponse,
one=True,
transform=lambda d: {"id": d["tool_id"], "deleted_at": utcnow(), "jobs": [], **d},
- _kind="deleted",
)
@pg_query
@beartype
-def delete_tool(
+async def delete_tool(
*,
developer_id: UUID,
agent_id: UUID,
@@ -56,7 +55,7 @@ def delete_tool(
tool_id = str(tool_id)
return (
- sql_query.format(),
+ sql_query,
[
developer_id,
agent_id,
diff --git a/agents-api/agents_api/queries/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
index af63be0c9..7581714e9 100644
--- a/agents-api/agents_api/queries/tools/get_tool.py
+++ b/agents-api/agents_api/queries/tools/get_tool.py
@@ -14,17 +14,17 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
-sql_query = sqlvalidator.parse("""
+sql_query = """
SELECT * FROM tools
WHERE
developer_id = $1 AND
agent_id = $2 AND
tool_id = $3
LIMIT 1
-""")
+"""
-if not sql_query.is_valid():
- raise InvalidSQLQuery("get_tool")
+# if not sql_query.is_valid():
+# raise InvalidSQLQuery("get_tool")
# @rewrap_exceptions(
@@ -37,7 +37,7 @@
@wrap_in_class(
Tool,
transform=lambda d: {
- "id": UUID(d.pop("tool_id")),
+ "id": d.pop("tool_id"),
d["type"]: d.pop("spec"),
**d,
},
@@ -45,18 +45,18 @@
)
@pg_query
@beartype
-def get_tool(
+async def get_tool(
*,
developer_id: UUID,
agent_id: UUID,
tool_id: UUID,
-) -> tuple[list[str], list]:
+) -> tuple[str, list] | tuple[str, list, str]:
developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
return (
- sql_query.format(),
+ sql_query,
[
developer_id,
agent_id,
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index 57453cd34..f4caf5524 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -10,8 +10,7 @@
wrap_in_class,
)
-tools_args_for_task_query = sqlvalidator.parse(
- """SELECT COALESCE(agents_md || tasks_md, agents_md, tasks_md, '{}') as values FROM (
+tools_args_for_task_query = """SELECT COALESCE(agents_md || tasks_md, agents_md, tasks_md, '{}') as values FROM (
SELECT
CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
@@ -29,13 +28,12 @@
FROM tasks
WHERE task_id = $2 AND developer_id = $4 LIMIT 1
) AS tasks_md"""
-)
-if not tools_args_for_task_query.is_valid():
- raise InvalidSQLQuery("tools_args_for_task_query")
-tool_args_for_session_query = sqlvalidator.parse(
- """SELECT COALESCE(agents_md || sessions_md, agents_md, sessions_md, '{}') as values FROM (
+# if not tools_args_for_task_query.is_valid():
+# raise InvalidSQLQuery("tools_args_for_task_query")
+
+tool_args_for_session_query = """SELECT COALESCE(agents_md || sessions_md, agents_md, sessions_md, '{}') as values FROM (
SELECT
CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
@@ -53,10 +51,10 @@
FROM sessions
WHERE session_id = $2 AND developer_id = $4 LIMIT 1
) AS sessions_md"""
-)
-if not tool_args_for_session_query.is_valid():
- raise InvalidSQLQuery("tool_args_for_session")
+
+# if not tool_args_for_session_query.is_valid():
+# raise InvalidSQLQuery("tool_args_for_session")
# @rewrap_exceptions(
@@ -69,7 +67,7 @@
@wrap_in_class(dict, transform=lambda x: x["values"], one=True)
@pg_query
@beartype
-def get_tool_args_from_metadata(
+async def get_tool_args_from_metadata(
*,
developer_id: UUID,
agent_id: UUID,
@@ -77,11 +75,11 @@ def get_tool_args_from_metadata(
task_id: UUID | None = None,
tool_type: Literal["integration", "api_call"] = "integration",
arg_type: Literal["args", "setup", "headers"] = "args",
-) -> tuple[list[str], list]:
+) -> tuple[str, list] | tuple[str, list, str]:
match session_id, task_id:
case (None, task_id) if task_id is not None:
return (
- tools_args_for_task_query.format(),
+ tools_args_for_task_query,
[
agent_id,
task_id,
@@ -92,7 +90,7 @@ def get_tool_args_from_metadata(
case (session_id, None) if session_id is not None:
return (
- tool_args_for_session_query.format(),
+ tool_args_for_session_query,
[agent_id, session_id, f"x-{tool_type}-{arg_type}", developer_id],
)
diff --git a/agents-api/agents_api/queries/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
index 3dac84875..01460e16b 100644
--- a/agents-api/agents_api/queries/tools/list_tools.py
+++ b/agents-api/agents_api/queries/tools/list_tools.py
@@ -14,21 +14,21 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
-sql_query = sqlvalidator.parse("""
+sql_query = """
SELECT * FROM tools
WHERE
developer_id = $1 AND
agent_id = $2
ORDER BY
- CASE WHEN $5 = 'created_at' AND $6 = 'desc' THEN s.created_at END DESC,
- CASE WHEN $5 = 'created_at' AND $6 = 'asc' THEN s.created_at END ASC,
- CASE WHEN $5 = 'updated_at' AND $6 = 'desc' THEN s.updated_at END DESC,
- CASE WHEN $5 = 'updated_at' AND $6 = 'asc' THEN s.updated_at END ASC
+ CASE WHEN $5 = 'created_at' AND $6 = 'desc' THEN tools.created_at END DESC NULLS LAST,
+ CASE WHEN $5 = 'created_at' AND $6 = 'asc' THEN tools.created_at END ASC NULLS LAST,
+ CASE WHEN $5 = 'updated_at' AND $6 = 'desc' THEN tools.updated_at END DESC NULLS LAST,
+ CASE WHEN $5 = 'updated_at' AND $6 = 'asc' THEN tools.updated_at END ASC NULLS LAST
LIMIT $3 OFFSET $4;
-""")
+"""
-if not sql_query.is_valid():
- raise InvalidSQLQuery("list_tools")
+# if not sql_query.is_valid():
+# raise InvalidSQLQuery("list_tools")
# @rewrap_exceptions(
@@ -46,12 +46,13 @@
"name": d["name"],
"description": d["description"],
},
+ "id": d.pop("tool_id"),
**d,
},
)
@pg_query
@beartype
-def list_tools(
+async def list_tools(
*,
developer_id: UUID,
agent_id: UUID,
@@ -59,12 +60,12 @@ def list_tools(
offset: int = 0,
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
-) -> tuple[list[str], list]:
+) -> tuple[str, list] | tuple[str, list, str]:
developer_id = str(developer_id)
agent_id = str(agent_id)
return (
- sql_query.format(),
+ sql_query,
[
developer_id,
agent_id,
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index aa663dec0..a8adf1fa6 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -16,7 +16,7 @@
T = TypeVar("T")
-sql_query = sqlvalidator.parse("""
+sql_query = """
WITH updated_tools AS (
UPDATE tools
SET
@@ -31,10 +31,10 @@
RETURNING *
)
SELECT * FROM updated_tools;
-""")
+"""
-if not sql_query.is_valid():
- raise InvalidSQLQuery("patch_tool")
+# if not sql_query.is_valid():
+# raise InvalidSQLQuery("patch_tool")
# @rewrap_exceptions(
@@ -48,14 +48,13 @@
ResourceUpdatedResponse,
one=True,
transform=lambda d: {"id": d["tool_id"], "jobs": [], **d},
- _kind="inserted",
)
@pg_query
@increase_counter("patch_tool")
@beartype
-def patch_tool(
+async def patch_tool(
*, developer_id: UUID, agent_id: UUID, tool_id: UUID, data: PatchToolRequest
-) -> tuple[list[str], list]:
+) -> tuple[str, list] | tuple[str, list, str]:
"""
Execute the SQL query and return the updated tool record.
Patches the tool information for a given agent and tool ID in the PostgreSQL database.
@@ -95,7 +94,7 @@ def patch_tool(
del patch_data[tool_type]
return (
- sql_query.format(),
+ sql_query,
[
developer_id,
agent_id,
diff --git a/agents-api/agents_api/queries/tools/update_tool.py b/agents-api/agents_api/queries/tools/update_tool.py
index 356e28bbf..bb1d8dc87 100644
--- a/agents-api/agents_api/queries/tools/update_tool.py
+++ b/agents-api/agents_api/queries/tools/update_tool.py
@@ -18,7 +18,7 @@
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
-sql_query = sqlvalidator.parse("""
+sql_query = """
UPDATE tools
SET
type = $4,
@@ -30,10 +30,10 @@
agent_id = $2 AND
tool_id = $3
RETURNING *;
-""")
+"""
-if not sql_query.is_valid():
- raise InvalidSQLQuery("update_tool")
+# if not sql_query.is_valid():
+# raise InvalidSQLQuery("update_tool")
# @rewrap_exceptions(
@@ -47,19 +47,18 @@
ResourceUpdatedResponse,
one=True,
transform=lambda d: {"id": d["tool_id"], "jobs": [], **d},
- _kind="inserted",
)
@pg_query
@increase_counter("update_tool")
@beartype
-def update_tool(
+async def update_tool(
*,
developer_id: UUID,
agent_id: UUID,
tool_id: UUID,
data: UpdateToolRequest,
**kwargs,
-) -> tuple[list[str], list]:
+) -> tuple[str, list] | tuple[str, list, str]:
developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
@@ -85,7 +84,7 @@ def update_tool(
del update_data[tool_type]
return (
- sql_query.format(),
+ sql_query,
[
developer_id,
agent_id,
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index ea3866ff2..b342cd0b7 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -13,29 +13,30 @@
CreateSessionRequest,
CreateTaskRequest,
CreateUserRequest,
+ CreateToolRequest,
)
from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
from agents_api.queries.agents.create_agent import create_agent
from agents_api.queries.developers.create_developer import create_developer
-# from agents_api.queries.agents.delete_agent import delete_agent
+from agents_api.queries.agents.delete_agent import delete_agent
from agents_api.queries.developers.get_developer import get_developer
from agents_api.queries.docs.create_doc import create_doc
-# from agents_api.queries.docs.delete_doc import delete_doc
-# from agents_api.queries.execution.create_execution import create_execution
-# from agents_api.queries.execution.create_execution_transition import create_execution_transition
-# from agents_api.queries.execution.create_temporal_lookup import create_temporal_lookup
+from agents_api.queries.docs.delete_doc import delete_doc
+# from agents_api.queries.executions.create_execution import create_execution
+# from agents_api.queries.executions.create_execution_transition import create_execution_transition
+# from agents_api.queries.executions.create_temporal_lookup import create_temporal_lookup
from agents_api.queries.files.create_file import create_file
-# from agents_api.queries.files.delete_file import delete_file
+from agents_api.queries.files.delete_file import delete_file
from agents_api.queries.sessions.create_session import create_session
from agents_api.queries.tasks.create_task import create_task
-# from agents_api.queries.task.delete_task import delete_task
-# from agents_api.queries.tools.create_tools import create_tools
-# from agents_api.queries.tools.delete_tool import delete_tool
+from agents_api.queries.tasks.delete_task import delete_task
+from agents_api.queries.tools.create_tools import create_tools
+from agents_api.queries.tools.delete_tool import delete_tool
from agents_api.queries.users.create_user import create_user
from agents_api.web import app
@@ -347,40 +348,41 @@ async def test_session(
# yield transition
-# @fixture(scope="global")
-# async def test_tool(
-# dsn=pg_dsn,
-# developer_id=test_developer_id,
-# agent=test_agent,
-# ):
-# function = {
-# "description": "A function that prints hello world",
-# "parameters": {"type": "object", "properties": {}},
-# }
-
-# tool = {
-# "function": function,
-# "name": "hello_world1",
-# "type": "function",
-# }
-
-# [tool, *_] = await create_tools(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# data=[CreateToolRequest(**tool)],
-# connection_pool=pool,
-# )
-# yield tool
-
-# # Cleanup
-# try:
-# await delete_tool(
-# developer_id=developer_id,
-# tool_id=tool.id,
-# connection_pool=pool,
-# )
-# finally:
-# await pool.close()
+@fixture(scope="global")
+async def test_tool(
+ dsn=pg_dsn,
+ developer_id=test_developer_id,
+ agent=test_agent,
+):
+ pool = await create_db_pool(dsn=dsn)
+ function = {
+ "description": "A function that prints hello world",
+ "parameters": {"type": "object", "properties": {}},
+ }
+
+ tool = {
+ "function": function,
+ "name": "hello_world1",
+ "type": "function",
+ }
+
+ [tool, *_] = await create_tools(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ data=[CreateToolRequest(**tool)],
+ connection_pool=pool,
+ )
+ yield tool
+
+ # Cleanup
+ try:
+ await delete_tool(
+ developer_id=developer_id,
+ tool_id=tool.id,
+ connection_pool=pool,
+ )
+ finally:
+ await pool.close()
@fixture(scope="global")
diff --git a/agents-api/tests/test_tool_queries.py b/agents-api/tests/test_tool_queries.py
index f6f4bac47..43bdf8159 100644
--- a/agents-api/tests/test_tool_queries.py
+++ b/agents-api/tests/test_tool_queries.py
@@ -1,170 +1,178 @@
# # Tests for tool queries
-# from ward import test
-
-# from agents_api.autogen.openapi_model import (
-# CreateToolRequest,
-# PatchToolRequest,
-# Tool,
-# UpdateToolRequest,
-# )
-# from agents_api.queries.tools.create_tools import create_tools
-# from agents_api.queries.tools.delete_tool import delete_tool
-# from agents_api.queries.tools.get_tool import get_tool
-# from agents_api.queries.tools.list_tools import list_tools
-# from agents_api.queries.tools.patch_tool import patch_tool
-# from agents_api.queries.tools.update_tool import update_tool
-# from tests.fixtures import cozo_client, test_agent, test_developer_id, test_tool
-
-
-# @test("query: create tool")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# function = {
-# "name": "hello_world",
-# "description": "A function that prints hello world",
-# "parameters": {"type": "object", "properties": {}},
-# }
-
-# tool = {
-# "function": function,
-# "name": "hello_world",
-# "type": "function",
-# }
-
-# result = create_tools(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# data=[CreateToolRequest(**tool)],
-# client=client,
-# )
-
-# assert result is not None
-# assert isinstance(result[0], Tool)
-
-
-# @test("query: delete tool")
-# def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent):
-# function = {
-# "name": "temp_temp",
-# "description": "A function that prints hello world",
-# "parameters": {"type": "object", "properties": {}},
-# }
-
-# tool = {
-# "function": function,
-# "name": "temp_temp",
-# "type": "function",
-# }
-
-# [tool, *_] = create_tools(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# data=[CreateToolRequest(**tool)],
-# client=client,
-# )
-
-# result = delete_tool(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# tool_id=tool.id,
-# client=client,
-# )
-
-# assert result is not None
-
-
-# @test("query: get tool")
-# def _(
-# client=cozo_client, developer_id=test_developer_id, tool=test_tool, agent=test_agent
-# ):
-# result = get_tool(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# tool_id=tool.id,
-# client=client,
-# )
-
-# assert result is not None
-
-
-# @test("query: list tools")
-# def _(
-# client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool
-# ):
-# result = list_tools(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# client=client,
-# )
-
-# assert result is not None
-# assert all(isinstance(tool, Tool) for tool in result)
-
-
-# @test("query: patch tool")
-# def _(
-# client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool
-# ):
-# patch_data = PatchToolRequest(
-# **{
-# "name": "patched_tool",
-# "function": {
-# "description": "A patched function that prints hello world",
-# },
-# }
-# )
-
-# result = patch_tool(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# tool_id=tool.id,
-# data=patch_data,
-# client=client,
-# )
-
-# assert result is not None
-
-# tool = get_tool(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# tool_id=tool.id,
-# client=client,
-# )
-
-# assert tool.name == "patched_tool"
-# assert tool.function.description == "A patched function that prints hello world"
-# assert tool.function.parameters
-
-
-# @test("query: update tool")
-# def _(
-# client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool
-# ):
-# update_data = UpdateToolRequest(
-# name="updated_tool",
-# description="An updated description",
-# type="function",
-# function={
-# "description": "An updated function that prints hello world",
-# },
-# )
-
-# result = update_tool(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# tool_id=tool.id,
-# data=update_data,
-# client=client,
-# )
-
-# assert result is not None
-
-# tool = get_tool(
-# developer_id=developer_id,
-# agent_id=agent.id,
-# tool_id=tool.id,
-# client=client,
-# )
-
-# assert tool.name == "updated_tool"
-# assert not tool.function.parameters
+from ward import test
+
+from agents_api.autogen.openapi_model import (
+ CreateToolRequest,
+ PatchToolRequest,
+ Tool,
+ UpdateToolRequest,
+)
+from agents_api.queries.tools.create_tools import create_tools
+from agents_api.queries.tools.delete_tool import delete_tool
+from agents_api.queries.tools.get_tool import get_tool
+from agents_api.queries.tools.list_tools import list_tools
+from agents_api.queries.tools.patch_tool import patch_tool
+from agents_api.queries.tools.update_tool import update_tool
+from tests.fixtures import test_agent, test_developer_id, pg_dsn, test_tool
+from agents_api.clients.pg import create_db_pool
+
+
+@test("query: create tool")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ pool = await create_db_pool(dsn=dsn)
+ function = {
+ "name": "hello_world",
+ "description": "A function that prints hello world",
+ "parameters": {"type": "object", "properties": {}},
+ }
+
+ tool = {
+ "function": function,
+ "name": "hello_world",
+ "type": "function",
+ }
+
+ result = await create_tools(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ data=[CreateToolRequest(**tool)],
+ connection_pool=pool,
+ )
+
+ assert result is not None
+ assert isinstance(result[0], Tool)
+
+
+@test("query: delete tool")
+async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
+ pool = await create_db_pool(dsn=dsn)
+ function = {
+ "name": "temp_temp",
+ "description": "A function that prints hello world",
+ "parameters": {"type": "object", "properties": {}},
+ }
+
+ tool = {
+ "function": function,
+ "name": "temp_temp",
+ "type": "function",
+ }
+
+ [tool, *_] = await create_tools(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ data=[CreateToolRequest(**tool)],
+ connection_pool=pool,
+ )
+
+ result = delete_tool(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ tool_id=tool.id,
+ connection_pool=pool,
+ )
+
+ assert result is not None
+
+
+@test("query: get tool")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, tool=test_tool, agent=test_agent
+):
+ pool = await create_db_pool(dsn=dsn)
+    result = await get_tool(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ tool_id=tool.id,
+ connection_pool=pool,
+ )
+
+ assert result is not None
+
+
+@test("query: list tools")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, tool=test_tool
+):
+ pool = await create_db_pool(dsn=dsn)
+ result = await list_tools(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ connection_pool=pool,
+ )
+
+ assert result is not None
+ assert all(isinstance(tool, Tool) for tool in result)
+
+
+@test("query: patch tool")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, tool=test_tool
+):
+ pool = await create_db_pool(dsn=dsn)
+ patch_data = PatchToolRequest(
+ **{
+ "name": "patched_tool",
+ "function": {
+ "description": "A patched function that prints hello world",
+ "parameters": {"param1": "value1"},
+ },
+ }
+ )
+
+ result = await patch_tool(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ tool_id=tool.id,
+ data=patch_data,
+ connection_pool=pool,
+ )
+
+ assert result is not None
+
+ tool = await get_tool(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ tool_id=tool.id,
+ connection_pool=pool,
+ )
+
+ assert tool.name == "patched_tool"
+ assert tool.function.description == "A patched function that prints hello world"
+ assert tool.function.parameters
+
+
+@test("query: update tool")
+async def _(
+ dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent, tool=test_tool
+):
+ pool = await create_db_pool(dsn=dsn)
+ update_data = UpdateToolRequest(
+ name="updated_tool",
+ description="An updated description",
+ type="function",
+ function={
+ "description": "An updated function that prints hello world",
+ },
+ )
+
+ result = await update_tool(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ tool_id=tool.id,
+ data=update_data,
+ connection_pool=pool,
+ )
+
+ assert result is not None
+
+ tool = await get_tool(
+ developer_id=developer_id,
+ agent_id=agent.id,
+ tool_id=tool.id,
+ connection_pool=pool,
+ )
+
+ assert tool.name == "updated_tool"
+ assert not tool.function.parameters
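
With validation dropped, every query function now returns either (query, params) or (query, params, fetch_method); create_tools keeps the three-element form with "fetchmany" so one row is inserted per parameter tuple. A hedged sketch of how a pg_query-style executor might dispatch on that third slot, assuming asyncpg's fetch/fetchrow methods and the fetchmany helper available in recent asyncpg releases (the real decorator lives in agents_api.queries.utils and may differ):

    import asyncpg

    async def run_query(pool: asyncpg.Pool, query: str, params: list, method: str = "fetch"):
        async with pool.acquire() as conn:
            match method:
                case "fetchrow":
                    # Single row or None, for the one=True queries.
                    return await conn.fetchrow(query, *params)
                case "fetchmany":
                    # One parameter sequence per row, as create_tools returns.
                    return await conn.fetchmany(query, params)
                case _:
                    return await conn.fetch(query, *params)
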
From 128cc2fa6031cddb1aa63e4972f95ff66d54ca07 Mon Sep 17 00:00:00 2001
From: whiterabbit1983
Date: Mon, 23 Dec 2024 11:44:18 +0000
Subject: [PATCH 165/274] refactor: Lint agents-api (CI)
---
agents-api/tests/fixtures.py | 9 +++------
agents-api/tests/test_tool_queries.py | 4 ++--
2 files changed, 5 insertions(+), 8 deletions(-)
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index b342cd0b7..a98fef531 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -12,28 +12,25 @@
CreateFileRequest,
CreateSessionRequest,
CreateTaskRequest,
- CreateUserRequest,
CreateToolRequest,
+ CreateUserRequest,
)
from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
from agents_api.queries.agents.create_agent import create_agent
-from agents_api.queries.developers.create_developer import create_developer
-
from agents_api.queries.agents.delete_agent import delete_agent
+from agents_api.queries.developers.create_developer import create_developer
from agents_api.queries.developers.get_developer import get_developer
from agents_api.queries.docs.create_doc import create_doc
-
from agents_api.queries.docs.delete_doc import delete_doc
+
# from agents_api.queries.executions.create_execution import create_execution
# from agents_api.queries.executions.create_execution_transition import create_execution_transition
# from agents_api.queries.executions.create_temporal_lookup import create_temporal_lookup
from agents_api.queries.files.create_file import create_file
-
from agents_api.queries.files.delete_file import delete_file
from agents_api.queries.sessions.create_session import create_session
from agents_api.queries.tasks.create_task import create_task
-
from agents_api.queries.tasks.delete_task import delete_task
from agents_api.queries.tools.create_tools import create_tools
from agents_api.queries.tools.delete_tool import delete_tool
diff --git a/agents-api/tests/test_tool_queries.py b/agents-api/tests/test_tool_queries.py
index 43bdf8159..12698e1be 100644
--- a/agents-api/tests/test_tool_queries.py
+++ b/agents-api/tests/test_tool_queries.py
@@ -8,14 +8,14 @@
Tool,
UpdateToolRequest,
)
+from agents_api.clients.pg import create_db_pool
from agents_api.queries.tools.create_tools import create_tools
from agents_api.queries.tools.delete_tool import delete_tool
from agents_api.queries.tools.get_tool import get_tool
from agents_api.queries.tools.list_tools import list_tools
from agents_api.queries.tools.patch_tool import patch_tool
from agents_api.queries.tools.update_tool import update_tool
-from tests.fixtures import test_agent, test_developer_id, pg_dsn, test_tool
-from agents_api.clients.pg import create_db_pool
+from tests.fixtures import pg_dsn, test_agent, test_developer_id, test_tool
@test("query: create tool")
From 3d6d02344b549c64dc236269f440d7b8f2a4ef7a Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Mon, 23 Dec 2024 14:48:55 +0300
Subject: [PATCH 166/274] fix: Fix awaitable and type hint
---
agents-api/agents_api/queries/tools/delete_tool.py | 2 +-
agents-api/tests/test_tool_queries.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index c67cdaba5..91a57bd2f 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -49,7 +49,7 @@ async def delete_tool(
developer_id: UUID,
agent_id: UUID,
tool_id: UUID,
-) -> tuple[list[str], list]:
+) -> tuple[str, list] | tuple[str, list, str]:
developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
diff --git a/agents-api/tests/test_tool_queries.py b/agents-api/tests/test_tool_queries.py
index 12698e1be..5056f03ca 100644
--- a/agents-api/tests/test_tool_queries.py
+++ b/agents-api/tests/test_tool_queries.py
@@ -66,7 +66,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
connection_pool=pool,
)
- result = delete_tool(
+ result = await delete_tool(
developer_id=developer_id,
agent_id=agent.id,
tool_id=tool.id,
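
The bug fixed here is easy to reproduce: once delete_tool became `async def`, calling it without await yields a coroutine object, which is truthy, so `assert result is not None` passed without the query ever running (Python only emits a "coroutine was never awaited" warning). A minimal demonstration with a stand-in coroutine:

    import asyncio

    async def delete_tool_stub() -> dict:
        # Stand-in for the real delete_tool query function.
        return {"deleted": True}

    async def main():
        result = delete_tool_stub()        # bug: a coroutine object, never executed
        assert result is not None          # passes vacuously
        result = await delete_tool_stub()  # fix: actually runs the coroutine
        assert result == {"deleted": True}

    asyncio.run(main())
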
From 1c97bc3a0ae41af6fed18d6571397ba43fe1e795 Mon Sep 17 00:00:00 2001
From: Dmitry Paramonov
Date: Mon, 23 Dec 2024 15:00:21 +0300
Subject: [PATCH 167/274] chore: Update type annotations
---
agents-api/agents_api/queries/tools/create_tools.py | 2 +-
agents-api/agents_api/queries/tools/delete_tool.py | 2 +-
agents-api/agents_api/queries/tools/get_tool.py | 2 +-
.../agents_api/queries/tools/get_tool_args_from_metadata.py | 2 +-
agents-api/agents_api/queries/tools/list_tools.py | 2 +-
agents-api/agents_api/queries/tools/patch_tool.py | 2 +-
agents-api/agents_api/queries/tools/update_tool.py | 2 +-
7 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index 075497541..70b0525a8 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -74,7 +74,7 @@ async def create_tools(
agent_id: UUID,
data: list[CreateToolRequest],
ignore_existing: bool = False, # TODO: what to do with this flag?
-) -> tuple[str, list] | tuple[str, list, str]:
+) -> tuple[str, list, str]:
"""
Constructs the SQL query for inserting tool records into the 'tools' table in PostgreSQL.
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index 91a57bd2f..cd666ee42 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -49,7 +49,7 @@ async def delete_tool(
developer_id: UUID,
agent_id: UUID,
tool_id: UUID,
-) -> tuple[str, list] | tuple[str, list, str]:
+) -> tuple[str, list]:
developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
diff --git a/agents-api/agents_api/queries/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
index 7581714e9..29a7ae9b6 100644
--- a/agents-api/agents_api/queries/tools/get_tool.py
+++ b/agents-api/agents_api/queries/tools/get_tool.py
@@ -50,7 +50,7 @@ async def get_tool(
developer_id: UUID,
agent_id: UUID,
tool_id: UUID,
-) -> tuple[str, list] | tuple[str, list, str]:
+) -> tuple[str, list]:
developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index f4caf5524..8d53a4e1b 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -75,7 +75,7 @@ async def get_tool_args_from_metadata(
task_id: UUID | None = None,
tool_type: Literal["integration", "api_call"] = "integration",
arg_type: Literal["args", "setup", "headers"] = "args",
-) -> tuple[str, list] | tuple[str, list, str]:
+) -> tuple[str, list]:
match session_id, task_id:
case (None, task_id) if task_id is not None:
return (
diff --git a/agents-api/agents_api/queries/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
index 01460e16b..cdc82d9bd 100644
--- a/agents-api/agents_api/queries/tools/list_tools.py
+++ b/agents-api/agents_api/queries/tools/list_tools.py
@@ -60,7 +60,7 @@ async def list_tools(
offset: int = 0,
sort_by: Literal["created_at", "updated_at"] = "created_at",
direction: Literal["asc", "desc"] = "desc",
-) -> tuple[str, list] | tuple[str, list, str]:
+) -> tuple[str, list]:
developer_id = str(developer_id)
agent_id = str(agent_id)
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index a8adf1fa6..e0a20dc1d 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -54,7 +54,7 @@
@beartype
async def patch_tool(
*, developer_id: UUID, agent_id: UUID, tool_id: UUID, data: PatchToolRequest
-) -> tuple[str, list] | tuple[str, list, str]:
+) -> tuple[str, list]:
"""
Execute the SQL query and return the updated tool record.
Patches the tool information for a given agent and tool ID in the PostgreSQL database.
diff --git a/agents-api/agents_api/queries/tools/update_tool.py b/agents-api/agents_api/queries/tools/update_tool.py
index bb1d8dc87..2b8beb155 100644
--- a/agents-api/agents_api/queries/tools/update_tool.py
+++ b/agents-api/agents_api/queries/tools/update_tool.py
@@ -58,7 +58,7 @@ async def update_tool(
tool_id: UUID,
data: UpdateToolRequest,
**kwargs,
-) -> tuple[str, list] | tuple[str, list, str]:
+) -> tuple[str, list]:
developer_id = str(developer_id)
agent_id = str(agent_id)
tool_id = str(tool_id)
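
The tightened annotations encode the fetch mode in the type: two-element returns use the executor's default fetch, while functions that need another mode keep a third element. An illustrative sketch of the convention (names here are not from the repo); patch 168 below goes a step further and pins the third slot to a Literal of the allowed mode names:

    from typing import Literal

    FetchMethod = Literal["fetch", "fetchmany", "fetchrow"]

    async def get_tool_sketch(tool_id: str) -> tuple[str, list]:
        # Two elements: the executor falls back to its default fetch mode.
        return ("SELECT * FROM tools WHERE tool_id = $1 LIMIT 1", [tool_id])

    async def create_tools_sketch(rows: list[tuple]) -> tuple[str, list, FetchMethod]:
        # Three elements: the Literal restricts the fetch-mode strings.
        return ("INSERT INTO tools (name) VALUES ($1)", rows, "fetchmany")
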
From 60239fd07df6bf12b25d826792fcf50a20343a4a Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Mon, 23 Dec 2024 21:58:36 +0300
Subject: [PATCH 168/274] feat(agents-api): Add routes tests + misc fixes for
queries
---
.../activities/task_steps/__init__.py | 2 +-
.../activities/task_steps/transition_step.py | 2 +-
.../agents_api/queries/agents/get_agent.py | 9 +-
.../queries/executions/create_execution.py | 113 ++++---
.../agents_api/queries/files/get_file.py | 3 +-
.../agents_api/queries/tasks/list_tasks.py | 14 +-
.../agents_api/queries/users/get_user.py | 11 +-
agents-api/agents_api/routers/__init__.py | 14 +-
.../agents_api/routers/agents/__init__.py | 14 +-
.../agents_api/routers/files/__init__.py | 1 +
.../agents_api/routers/files/list_files.py | 32 ++
.../agents_api/routers/tasks/__init__.py | 14 +-
.../routers/tasks/create_task_execution.py | 1 -
.../routers/tasks/get_task_details.py | 19 +-
agents-api/agents_api/web.py | 30 +-
agents-api/tests/test_agent_routes.py | 290 +++++++++---------
agents-api/tests/test_docs_routes.py | 142 ++++-----
agents-api/tests/test_files_routes.py | 141 +++++----
agents-api/tests/test_task_routes.py | 279 +++++++++--------
agents-api/tests/test_user_routes.py | 270 ++++++++--------
20 files changed, 735 insertions(+), 666 deletions(-)
create mode 100644 agents-api/agents_api/routers/files/list_files.py
diff --git a/agents-api/agents_api/activities/task_steps/__init__.py b/agents-api/agents_api/activities/task_steps/__init__.py
index 573884629..cccfb9d35 100644
--- a/agents-api/agents_api/activities/task_steps/__init__.py
+++ b/agents-api/agents_api/activities/task_steps/__init__.py
@@ -1,7 +1,7 @@
# ruff: noqa: F401, F403, F405
from .base_evaluate import base_evaluate
-from .cozo_query_step import cozo_query_step
+# from .cozo_query_step import cozo_query_step
from .evaluate_step import evaluate_step
from .for_each_step import for_each_step
from .get_value_step import get_value_step
diff --git a/agents-api/agents_api/activities/task_steps/transition_step.py b/agents-api/agents_api/activities/task_steps/transition_step.py
index 11c7befb5..57d594ec3 100644
--- a/agents-api/agents_api/activities/task_steps/transition_step.py
+++ b/agents-api/agents_api/activities/task_steps/transition_step.py
@@ -14,7 +14,7 @@
transition_requests_per_minute,
)
from ...exceptions import LastErrorInput, TooManyRequestsError
-from ...models.execution.create_execution_transition import (
+from ...queries.executions.create_execution_transition import (
create_execution_transition_async,
)
from ..utils import RateLimiter
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index 79fa1c4fc..a06bde240 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -3,6 +3,7 @@
It constructs and executes SQL queries to fetch agent details based on agent ID and developer ID.
"""
+from typing import Literal
from uuid import UUID
import asyncpg
@@ -51,12 +52,17 @@
status_code=400,
detail="Invalid data provided. Please check the input values.",
),
+ asyncpg.exceptions.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified agent does not exist.",
+ ),
}
)
@wrap_in_class(Agent, one=True, transform=lambda d: {"id": d["agent_id"], **d})
@pg_query
@beartype
-async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
+async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]:
"""
Constructs the SQL query to retrieve an agent's details.
@@ -71,4 +77,5 @@ async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list]:
return (
agent_query,
[developer_id, agent_id],
+ "fetchrow",
)
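
get_agent now maps asyncpg.exceptions.NoDataFoundError to a 404 and asks the executor for "fetchrow", so a single record (or None) comes back for wrap_in_class(one=True). A hedged sketch of what a rewrap_exceptions/partialclass pair plausibly does, with partialclass assumed to behave like functools.partial over a class constructor; the real helpers live in agents_api.queries.utils and may differ:

    import functools

    from fastapi import HTTPException

    def partialclass(cls, **fixed):
        # A factory with constructor kwargs pre-bound, e.g.
        # partialclass(HTTPException, status_code=404, detail="...").
        return functools.partial(cls, **fixed)

    def rewrap_exceptions(mapping):
        def decorate(fn):
            @functools.wraps(fn)
            async def wrapper(*args, **kwargs):
                try:
                    return await fn(*args, **kwargs)
                except Exception as exc:
                    # Re-raise the first mapped match as its HTTP counterpart.
                    for err_type, factory in mapping.items():
                        if isinstance(exc, err_type):
                            raise factory() from exc
                    raise
            return wrapper
        return decorate
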
diff --git a/agents-api/agents_api/queries/executions/create_execution.py b/agents-api/agents_api/queries/executions/create_execution.py
index 0b93df318..27df9ee69 100644
--- a/agents-api/agents_api/queries/executions/create_execution.py
+++ b/agents-api/agents_api/queries/executions/create_execution.py
@@ -3,20 +3,15 @@
from beartype import beartype
from fastapi import HTTPException
-from pycozo.client import QueryException
from pydantic import ValidationError
from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateExecutionRequest, Execution
-from ...common.utils.cozo import cozo_process_mutate_data
from ...common.utils.types import dict_like
from ...metrics.counters import increase_counter
from ..utils import (
- cozo_query,
partialclass,
rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
wrap_in_class,
)
from .constants import OUTPUT_UNNEST_KEY
@@ -25,22 +20,22 @@
T = TypeVar("T")
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- Execution,
- one=True,
- transform=lambda d: {"id": d["execution_id"], **d},
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("create_execution")
-@beartype
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
+# @wrap_in_class(
+# Execution,
+# one=True,
+# transform=lambda d: {"id": d["execution_id"], **d},
+# _kind="inserted",
+# )
+# @cozo_query
+# @increase_counter("create_execution")
+# @beartype
async def create_execution(
*,
developer_id: UUID,
@@ -50,49 +45,49 @@ async def create_execution(
) -> tuple[list[str], dict]:
execution_id = execution_id or uuid7()
- developer_id = str(developer_id)
- task_id = str(task_id)
- execution_id = str(execution_id)
+ # developer_id = str(developer_id)
+ # task_id = str(task_id)
+ # execution_id = str(execution_id)
- if isinstance(data, CreateExecutionRequest):
- data.metadata = data.metadata or {}
- execution_data = data.model_dump()
- else:
- data["metadata"] = data.get("metadata", {})
- execution_data = data
+ # if isinstance(data, CreateExecutionRequest):
+ # data.metadata = data.metadata or {}
+ # execution_data = data.model_dump()
+ # else:
+ # data["metadata"] = data.get("metadata", {})
+ # execution_data = data
- if execution_data["output"] is not None and not isinstance(
- execution_data["output"], dict
- ):
- execution_data["output"] = {OUTPUT_UNNEST_KEY: execution_data["output"]}
+ # if execution_data["output"] is not None and not isinstance(
+ # execution_data["output"], dict
+ # ):
+ # execution_data["output"] = {OUTPUT_UNNEST_KEY: execution_data["output"]}
- columns, values = cozo_process_mutate_data(
- {
- **execution_data,
- "task_id": task_id,
- "execution_id": execution_id,
- }
- )
+ # columns, values = cozo_process_mutate_data(
+ # {
+ # **execution_data,
+ # "task_id": task_id,
+ # "execution_id": execution_id,
+ # }
+ # )
- insert_query = f"""
- ?[{columns}] <- $values
+ # insert_query = f"""
+ # ?[{columns}] <- $values
- :insert executions {{
- {columns}
- }}
+ # :insert executions {{
+ # {columns}
+ # }}
- :returning
- """
+ # :returning
+ # """
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id,
- "tasks",
- task_id=task_id,
- parents=[("agents", "agent_id")],
- ),
- insert_query,
- ]
+ # queries = [
+ # verify_developer_id_query(developer_id),
+ # verify_developer_owns_resource_query(
+ # developer_id,
+ # "tasks",
+ # task_id=task_id,
+ # parents=[("agents", "agent_id")],
+ # ),
+ # insert_query,
+ # ]
- return (queries, {"values": values})
+ # return (queries, {"values": values})
diff --git a/agents-api/agents_api/queries/files/get_file.py b/agents-api/agents_api/queries/files/get_file.py
index 04ba8ea71..7bfa0623c 100644
--- a/agents-api/agents_api/queries/files/get_file.py
+++ b/agents-api/agents_api/queries/files/get_file.py
@@ -66,7 +66,7 @@ async def get_file(
developer_id: UUID,
owner_type: Literal["user", "agent"] | None = None,
owner_id: UUID | None = None,
-) -> tuple[str, list]:
+) -> tuple[str, list, Literal["fetchrow", "fetchmany", "fetch"]]:
"""
Constructs the SQL query to retrieve a file's details.
Uses composite index on (developer_id, file_id) for efficient lookup.
@@ -83,4 +83,5 @@ async def get_file(
return (
file_query,
[developer_id, file_id, owner_type, owner_id],
+ "fetchrow",
)
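
The new third tuple element tells the pg_query decorator which asyncpg fetch
method to run. A minimal sketch of such a dispatcher, assuming names like
run_query and pool that are illustrative rather than the repository's actual
helpers ("fetchmany" is mapped to executemany here as an assumption):

    import asyncpg

    async def run_query(pool: asyncpg.Pool, sql: str, params: list, mode: str = "fetch"):
        # Dispatch on the fetch mode returned as the third tuple element.
        async with pool.acquire() as conn:
            if mode == "fetchrow":          # single row, e.g. get_file / get_user
                return await conn.fetchrow(sql, *params)
            if mode == "fetchmany":         # batch execution (assumed to mean executemany)
                return await conn.executemany(sql, params)
            return await conn.fetch(sql, *params)   # default: list of records
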
diff --git a/agents-api/agents_api/queries/tasks/list_tasks.py b/agents-api/agents_api/queries/tasks/list_tasks.py
index 5cec7103e..0a6bd90b2 100644
--- a/agents-api/agents_api/queries/tasks/list_tasks.py
+++ b/agents-api/agents_api/queries/tasks/list_tasks.py
@@ -34,14 +34,15 @@
workflows w ON t.developer_id = w.developer_id AND t.task_id = w.task_id AND t.version = w.version
WHERE
t.developer_id = $1
+ AND t.agent_id = $2
{metadata_filter_query}
GROUP BY t.developer_id, t.task_id, t.canonical_name, t.agent_id, t.version
ORDER BY
- CASE WHEN $4 = 'created_at' AND $5 = 'asc' THEN t.created_at END ASC NULLS LAST,
- CASE WHEN $4 = 'created_at' AND $5 = 'desc' THEN t.created_at END DESC NULLS LAST,
- CASE WHEN $4 = 'updated_at' AND $5 = 'asc' THEN t.updated_at END ASC NULLS LAST,
- CASE WHEN $4 = 'updated_at' AND $5 = 'desc' THEN t.updated_at END DESC NULLS LAST
-LIMIT $2 OFFSET $3;
+ CASE WHEN $5 = 'created_at' AND $6 = 'asc' THEN t.created_at END ASC NULLS LAST,
+ CASE WHEN $5 = 'created_at' AND $6 = 'desc' THEN t.created_at END DESC NULLS LAST,
+ CASE WHEN $5 = 'updated_at' AND $6 = 'asc' THEN t.updated_at END ASC NULLS LAST,
+ CASE WHEN $5 = 'updated_at' AND $6 = 'desc' THEN t.updated_at END DESC NULLS LAST
+LIMIT $3 OFFSET $4;
"""
@@ -71,6 +72,7 @@
async def list_tasks(
*,
developer_id: UUID,
+ agent_id: UUID,
limit: int = 100,
offset: int = 0,
sort_by: Literal["created_at", "updated_at"] = "created_at",
@@ -82,6 +84,7 @@ async def list_tasks(
Parameters:
developer_id (UUID): The unique identifier of the developer.
+ agent_id (UUID): The unique identifier of the agent.
limit (int): Maximum number of records to return (default: 100)
offset (int): Number of records to skip (default: 0)
sort_by (str): Field to sort by ("created_at" or "updated_at")
@@ -111,6 +114,7 @@ async def list_tasks(
# Build parameters list
params = [
developer_id,
+ agent_id,
limit,
offset,
sort_by,
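
Because asyncpg placeholders are positional, adding agent_id as $2 shifts every
later placeholder ($2-$5 become $3-$6). A small, purely illustrative sanity
check can catch drift between a query string and its parameter list:

    import re

    def max_placeholder(sql: str) -> int:
        # Highest $n placeholder referenced in the SQL string.
        return max((int(n) for n in re.findall(r"\$(\d+)", sql)), default=0)

    sql = "... WHERE t.developer_id = $1 AND t.agent_id = $2 LIMIT $3 OFFSET $4"
    params = ["<developer_id>", "<agent_id>", 100, 0]
    assert max_placeholder(sql) <= len(params)
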
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index 07a840621..331c4ce1b 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -1,3 +1,4 @@
+from typing import Literal
from uuid import UUID
import asyncpg
@@ -31,12 +32,17 @@
status_code=404,
detail="The specified developer does not exist.",
),
+ asyncpg.NoDataFoundError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified user does not exist.",
+ ),
}
)
@wrap_in_class(User, one=True)
@pg_query
@beartype
-async def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
+async def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list, Literal["fetchrow", "fetchmany", "fetch"]]:
"""
Constructs an optimized SQL query to retrieve a user's details.
Uses the primary key index (developer_id, user_id) for efficient lookup.
@@ -46,10 +52,11 @@ async def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list]:
user_id (UUID): The UUID of the user to retrieve.
Returns:
- tuple[str, list]: SQL query and parameters.
+ tuple[str, list, str]: SQL query, parameters, and fetch mode.
"""
return (
user_query,
[developer_id, user_id],
+ "fetchrow",
)
diff --git a/agents-api/agents_api/routers/__init__.py b/agents-api/agents_api/routers/__init__.py
index 328e1e918..4e2d7b881 100644
--- a/agents-api/agents_api/routers/__init__.py
+++ b/agents-api/agents_api/routers/__init__.py
@@ -18,10 +18,10 @@
# SCRUM-21
from .agents import router as agents_router
-# from .docs import router as docs_router
-# from .files import router as files_router
-# from .internal import router as internal_router
-# from .jobs import router as jobs_router
-# from .sessions import router as sessions_router
-# from .tasks import router as tasks_router
-# from .users import router as users_router
+from .docs import router as docs_router
+from .files import router as files_router
+from .internal import router as internal_router
+from .jobs import router as jobs_router
+from .sessions import router as sessions_router
+from .tasks import router as tasks_router
+from .users import router as users_router
diff --git a/agents-api/agents_api/routers/agents/__init__.py b/agents-api/agents_api/routers/agents/__init__.py
index 484be3363..bd4a40252 100644
--- a/agents-api/agents_api/routers/agents/__init__.py
+++ b/agents-api/agents_api/routers/agents/__init__.py
@@ -2,14 +2,14 @@
from .create_agent import create_agent
# from .create_agent_tool import create_agent_tool
-# from .create_or_update_agent import create_or_update_agent
-# from .delete_agent import delete_agent
+from .create_or_update_agent import create_or_update_agent
+from .delete_agent import delete_agent
# from .delete_agent_tool import delete_agent_tool
-# from .get_agent_details import get_agent_details
+from .get_agent_details import get_agent_details
# from .list_agent_tools import list_agent_tools
-# from .list_agents import list_agents
-# from .patch_agent import patch_agent
+from .list_agents import list_agents
+from .patch_agent import patch_agent
# from .patch_agent_tool import patch_agent_tool
-# from .router import router
-# from .update_agent import update_agent
+from .router import router
+from .update_agent import update_agent
# from .update_agent_tool import update_agent_tool
diff --git a/agents-api/agents_api/routers/files/__init__.py b/agents-api/agents_api/routers/files/__init__.py
index 5e3d5a62c..daddb2bf7 100644
--- a/agents-api/agents_api/routers/files/__init__.py
+++ b/agents-api/agents_api/routers/files/__init__.py
@@ -3,4 +3,5 @@
from .create_file import create_file
from .delete_file import delete_file
from .get_file import get_file
+from .list_files import list_files
from .router import router
diff --git a/agents-api/agents_api/routers/files/list_files.py b/agents-api/agents_api/routers/files/list_files.py
new file mode 100644
index 000000000..f993ce479
--- /dev/null
+++ b/agents-api/agents_api/routers/files/list_files.py
@@ -0,0 +1,32 @@
+import base64
+from typing import Annotated
+from uuid import UUID
+
+from fastapi import Depends
+
+from ...autogen.openapi_model import File
+from ...clients import async_s3
+from ...dependencies.developer_id import get_developer_id
+from ...queries.files.list_files import list_files as list_files_query
+from .router import router
+
+
+async def fetch_file_content(file_id: UUID) -> str:
+ """Fetch file content from blob storage using the file ID as the key"""
+ await async_s3.setup()
+ key = str(file_id)
+ content = await async_s3.get_object(key)
+ return base64.b64encode(content).decode("utf-8")
+
+
+@router.get("/files", tags=["files"])
+async def list_files(
+ x_developer_id: Annotated[UUID, Depends(get_developer_id)]
+) -> list[File]:
+ files = await list_files_query(developer_id=x_developer_id)
+
+ # Fetch the file content from blob storage
+ for file in files:
+ file.content = await fetch_file_content(file.id)
+
+ return files
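
fetch_file_content returns the S3 object re-encoded as base64, which is the
same shape the create-file tests submit. For instance, the payload used in
test_files_routes round-trips like this:

    import base64, hashlib

    content_b64 = "eyJzYW1wbGUiOiAidGVzdCJ9"       # payload used by the tests
    raw = base64.b64decode(content_b64)            # b'{"sample": "test"}'
    print(hashlib.sha256(raw).hexdigest())         # the hash the get-file test asserts
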
diff --git a/agents-api/agents_api/routers/tasks/__init__.py b/agents-api/agents_api/routers/tasks/__init__.py
index 5ada6a04e..37d019941 100644
--- a/agents-api/agents_api/routers/tasks/__init__.py
+++ b/agents-api/agents_api/routers/tasks/__init__.py
@@ -1,13 +1,13 @@
# ruff: noqa: F401, F403, F405
from .create_or_update_task import create_or_update_task
from .create_task import create_task
-from .create_task_execution import create_task_execution
-from .get_execution_details import get_execution_details
+# from .create_task_execution import create_task_execution
+# from .get_execution_details import get_execution_details
from .get_task_details import get_task_details
-from .list_execution_transitions import list_execution_transitions
-from .list_task_executions import list_task_executions
+# from .list_execution_transitions import list_execution_transitions
+# from .list_task_executions import list_task_executions
from .list_tasks import list_tasks
-from .patch_execution import patch_execution
+# from .patch_execution import patch_execution
from .router import router
-from .stream_transitions_events import stream_transitions_events
-from .update_execution import update_execution
+# from .stream_transitions_events import stream_transitions_events
+# from .update_execution import update_execution
diff --git a/agents-api/agents_api/routers/tasks/create_task_execution.py b/agents-api/agents_api/routers/tasks/create_task_execution.py
index 6cc1e3e4f..96c01ea94 100644
--- a/agents-api/agents_api/routers/tasks/create_task_execution.py
+++ b/agents-api/agents_api/routers/tasks/create_task_execution.py
@@ -6,7 +6,6 @@
from fastapi import BackgroundTasks, Depends, HTTPException, status
from jsonschema import validate
from jsonschema.exceptions import ValidationError
-from pycozo.client import QueryException
from starlette.status import HTTP_201_CREATED
from temporalio.client import WorkflowHandle
from uuid_extensions import uuid7
diff --git a/agents-api/agents_api/routers/tasks/get_task_details.py b/agents-api/agents_api/routers/tasks/get_task_details.py
index 452ab961d..01f1d7a35 100644
--- a/agents-api/agents_api/routers/tasks/get_task_details.py
+++ b/agents-api/agents_api/routers/tasks/get_task_details.py
@@ -2,7 +2,6 @@
from uuid import UUID
from fastapi import Depends, HTTPException, status
-from pycozo.client import QueryException
from ...autogen.openapi_model import (
Task,
@@ -17,20 +16,10 @@ async def get_task_details(
task_id: UUID,
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> Task:
- not_found = HTTPException(
- status_code=status.HTTP_404_NOT_FOUND, detail="Task not found"
- )
-
- try:
- task = await get_task_query(developer_id=x_developer_id, task_id=task_id)
- task_data = task.model_dump()
- except AssertionError:
- raise not_found
- except QueryException as e:
- if e.code == "transact::assertion_failure":
- raise not_found
-
- raise
+
+ task = await get_task_query(developer_id=x_developer_id, task_id=task_id)
+ task_data = task.model_dump()
+
for workflow in task_data.get("workflows", []):
if workflow["name"] == "main":
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index 61e6f5ea6..6a0d24036 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -25,13 +25,13 @@
from .exceptions import PromptTooBigError
from .routers import (
agents,
- # docs,
- # files,
- # internal,
- # jobs,
- # sessions,
- # tasks,
- # users,
+ docs,
+ files,
+ internal,
+ jobs,
+ sessions,
+ tasks,
+ users,
)
if not sentry_dsn:
@@ -161,14 +161,14 @@ async def scalar_html():
app.include_router(scalar_router)
# Add other routers with the get_api_key dependency
-app.include_router(agents.router.router, dependencies=[Depends(get_api_key)])
-# app.include_router(sessions.router, dependencies=[Depends(get_api_key)])
-# app.include_router(users.router, dependencies=[Depends(get_api_key)])
-# app.include_router(jobs.router, dependencies=[Depends(get_api_key)])
-# app.include_router(files.router, dependencies=[Depends(get_api_key)])
-# app.include_router(docs.router, dependencies=[Depends(get_api_key)])
-# app.include_router(tasks.router, dependencies=[Depends(get_api_key)])
-# app.include_router(internal.router)
+app.include_router(agents.router, dependencies=[Depends(get_api_key)])
+app.include_router(sessions.router, dependencies=[Depends(get_api_key)])
+app.include_router(users.router, dependencies=[Depends(get_api_key)])
+app.include_router(jobs.router, dependencies=[Depends(get_api_key)])
+app.include_router(files.router, dependencies=[Depends(get_api_key)])
+app.include_router(docs.router, dependencies=[Depends(get_api_key)])
+app.include_router(tasks.router, dependencies=[Depends(get_api_key)])
+app.include_router(internal.router)
# TODO: CORS should be enabled only for JWT auth
#
diff --git a/agents-api/tests/test_agent_routes.py b/agents-api/tests/test_agent_routes.py
index d4e4a3a61..19f48b854 100644
--- a/agents-api/tests/test_agent_routes.py
+++ b/agents-api/tests/test_agent_routes.py
@@ -40,191 +40,191 @@ def _(make_request=make_request):
assert response.status_code == 201
-# @test("route: create agent with instructions")
-# def _(make_request=make_request):
-# data = dict(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# instructions=["test instruction"],
-# )
-
-# response = make_request(
-# method="POST",
-# url="/agents",
-# json=data,
-# )
+@test("route: create agent with instructions")
+def _(make_request=make_request):
+ data = dict(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ )
-# assert response.status_code == 201
-
-
-# @test("route: create or update agent")
-# def _(make_request=make_request):
-# agent_id = str(uuid7())
+ response = make_request(
+ method="POST",
+ url="/agents",
+ json=data,
+ )
-# data = dict(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# instructions=["test instruction"],
-# )
+ assert response.status_code == 201
-# response = make_request(
-# method="POST",
-# url=f"/agents/{agent_id}",
-# json=data,
-# )
-# assert response.status_code == 201
+@test("route: create or update agent")
+def _(make_request=make_request):
+ agent_id = str(uuid7())
+ data = dict(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ )
-# @test("route: get agent not exists")
-# def _(make_request=make_request):
-# agent_id = str(uuid7())
+ response = make_request(
+ method="POST",
+ url=f"/agents/{agent_id}",
+ json=data,
+ )
-# response = make_request(
-# method="GET",
-# url=f"/agents/{agent_id}",
-# )
+ assert response.status_code == 201
-# assert response.status_code == 404
+@test("route: get agent not exists")
+def _(make_request=make_request):
+ agent_id = str(uuid7())
+
+ response = make_request(
+ method="GET",
+ url=f"/agents/{agent_id}",
+ )
-# @test("route: get agent exists")
-# def _(make_request=make_request, agent=test_agent):
-# agent_id = str(agent.id)
+ assert response.status_code == 404
-# response = make_request(
-# method="GET",
-# url=f"/agents/{agent_id}",
-# )
-# assert response.status_code != 404
+@test("route: get agent exists")
+def _(make_request=make_request, agent=test_agent):
+ agent_id = str(agent.id)
+ response = make_request(
+ method="GET",
+ url=f"/agents/{agent_id}",
+ )
+
+ assert response.status_code != 404
+
+
+@test("route: delete agent")
+def _(make_request=make_request):
+ data = dict(
+ name="test agent",
+ about="test agent about",
+ model="gpt-4o-mini",
+ instructions=["test instruction"],
+ )
-# @test("route: delete agent")
-# def _(make_request=make_request):
-# data = dict(
-# name="test agent",
-# about="test agent about",
-# model="gpt-4o-mini",
-# instructions=["test instruction"],
-# )
+ response = make_request(
+ method="POST",
+ url="/agents",
+ json=data,
+ )
+ agent_id = response.json()["id"]
-# response = make_request(
-# method="POST",
-# url="/agents",
-# json=data,
-# )
-# agent_id = response.json()["id"]
+ response = make_request(
+ method="DELETE",
+ url=f"/agents/{agent_id}",
+ )
-# response = make_request(
-# method="DELETE",
-# url=f"/agents/{agent_id}",
-# )
+ assert response.status_code == 202
-# assert response.status_code == 202
+ response = make_request(
+ method="GET",
+ url=f"/agents/{agent_id}",
+ )
-# response = make_request(
-# method="GET",
-# url=f"/agents/{agent_id}",
-# )
+ assert response.status_code == 404
-# assert response.status_code == 404
+@test("route: update agent")
+def _(make_request=make_request, agent=test_agent):
+ data = dict(
+ name="updated agent",
+ about="updated agent about",
+ default_settings={"temperature": 1.0},
+ model="gpt-4o-mini",
+ metadata={"hello": "world"},
+ )
-# @test("route: update agent")
-# def _(make_request=make_request, agent=test_agent):
-# data = dict(
-# name="updated agent",
-# about="updated agent about",
-# default_settings={"temperature": 1.0},
-# model="gpt-4o-mini",
-# metadata={"hello": "world"},
-# )
+ agent_id = str(agent.id)
+ response = make_request(
+ method="PUT",
+ url=f"/agents/{agent_id}",
+ json=data,
+ )
-# agent_id = str(agent.id)
-# response = make_request(
-# method="PUT",
-# url=f"/agents/{agent_id}",
-# json=data,
-# )
+ assert response.status_code == 200
-# assert response.status_code == 200
+ agent_id = response.json()["id"]
-# agent_id = response.json()["id"]
+ response = make_request(
+ method="GET",
+ url=f"/agents/{agent_id}",
+ )
-# response = make_request(
-# method="GET",
-# url=f"/agents/{agent_id}",
-# )
+ assert response.status_code == 200
+ agent = response.json()
-# assert response.status_code == 200
-# agent = response.json()
+ assert "test" not in agent["metadata"]
-# assert "test" not in agent["metadata"]
+@test("route: patch agent")
+def _(make_request=make_request, agent=test_agent):
+ agent_id = str(agent.id)
-# @test("route: patch agent")
-# def _(make_request=make_request, agent=test_agent):
-# agent_id = str(agent.id)
+ data = dict(
+ name="patched agent",
+ about="patched agent about",
+ default_settings={"temperature": 1.0},
+ metadata={"hello": "world"},
+ )
-# data = dict(
-# name="patched agent",
-# about="patched agent about",
-# default_settings={"temperature": 1.0},
-# metadata={"something": "else"},
-# )
+ response = make_request(
+ method="PATCH",
+ url=f"/agents/{agent_id}",
+ json=data,
+ )
-# response = make_request(
-# method="PATCH",
-# url=f"/agents/{agent_id}",
-# json=data,
-# )
+ assert response.status_code == 200
-# assert response.status_code == 200
+ agent_id = response.json()["id"]
-# agent_id = response.json()["id"]
+ response = make_request(
+ method="GET",
+ url=f"/agents/{agent_id}",
+ )
-# response = make_request(
-# method="GET",
-# url=f"/agents/{agent_id}",
-# )
+ assert response.status_code == 200
+ agent = response.json()
-# assert response.status_code == 200
-# agent = response.json()
+ assert "hello" in agent["metadata"]
-# assert "hello" in agent["metadata"]
+@test("route: list agents")
+def _(make_request=make_request):
+ response = make_request(
+ method="GET",
+ url="/agents",
+ )
-# @test("route: list agents")
-# def _(make_request=make_request):
-# response = make_request(
-# method="GET",
-# url="/agents",
-# )
-
-# assert response.status_code == 200
-# response = response.json()
-# agents = response["items"]
+ assert response.status_code == 200
+ response = response.json()
+ agents = response["items"]
-# assert isinstance(agents, list)
-# assert len(agents) > 0
+ assert isinstance(agents, list)
+ assert len(agents) > 0
-# @test("route: list agents with metadata filter")
-# def _(make_request=make_request):
-# response = make_request(
-# method="GET",
-# url="/agents",
-# params={
-# "metadata_filter": {"test": "test"},
-# },
-# )
+@test("route: list agents with metadata filter")
+def _(make_request=make_request):
+ response = make_request(
+ method="GET",
+ url="/agents",
+ params={
+ "metadata_filter": {"test": "test"},
+ },
+ )
-# assert response.status_code == 200
-# response = response.json()
-# agents = response["items"]
+ assert response.status_code == 200
+ response = response.json()
+ agents = response["items"]
-# assert isinstance(agents, list)
-# assert len(agents) > 0
+ assert isinstance(agents, list)
+ assert len(agents) > 0
diff --git a/agents-api/tests/test_docs_routes.py b/agents-api/tests/test_docs_routes.py
index a33f30108..f616ddcd8 100644
--- a/agents-api/tests/test_docs_routes.py
+++ b/agents-api/tests/test_docs_routes.py
@@ -1,16 +1,16 @@
-# import time
+import time
-# from ward import skip, test
+from ward import skip, test
-# from tests.fixtures import (
-# make_request,
-# patch_embed_acompletion,
-# test_agent,
-# test_doc,
-# test_user,
-# test_user_doc,
-# )
-# from tests.utils import patch_testing_temporal
+from tests.fixtures import (
+ make_request,
+ patch_embed_acompletion,
+ test_agent,
+ test_doc,
+ test_user,
+ # test_user_doc,
+)
+from tests.utils import patch_testing_temporal
# @test("route: create user doc")
@@ -106,66 +106,66 @@
# assert response.status_code == 200
-# @test("route: list user docs")
-# def _(make_request=make_request, user=test_user):
-# response = make_request(
-# method="GET",
-# url=f"/users/{user.id}/docs",
-# )
+@test("route: list user docs")
+def _(make_request=make_request, user=test_user):
+ response = make_request(
+ method="GET",
+ url=f"/users/{user.id}/docs",
+ )
-# assert response.status_code == 200
-# response = response.json()
-# docs = response["items"]
+ assert response.status_code == 200
+ response = response.json()
+ docs = response["items"]
-# assert isinstance(docs, list)
+ assert isinstance(docs, list)
-# @test("route: list agent docs")
-# def _(make_request=make_request, agent=test_agent):
-# response = make_request(
-# method="GET",
-# url=f"/agents/{agent.id}/docs",
-# )
+@test("route: list agent docs")
+def _(make_request=make_request, agent=test_agent):
+ response = make_request(
+ method="GET",
+ url=f"/agents/{agent.id}/docs",
+ )
-# assert response.status_code == 200
-# response = response.json()
-# docs = response["items"]
+ assert response.status_code == 200
+ response = response.json()
+ docs = response["items"]
-# assert isinstance(docs, list)
+ assert isinstance(docs, list)
-# @test("route: list user docs with metadata filter")
-# def _(make_request=make_request, user=test_user):
-# response = make_request(
-# method="GET",
-# url=f"/users/{user.id}/docs",
-# params={
-# "metadata_filter": {"test": "test"},
-# },
-# )
+@test("route: list user docs with metadata filter")
+def _(make_request=make_request, user=test_user):
+ response = make_request(
+ method="GET",
+ url=f"/users/{user.id}/docs",
+ params={
+ "metadata_filter": {"test": "test"},
+ },
+ )
-# assert response.status_code == 200
-# response = response.json()
-# docs = response["items"]
+ assert response.status_code == 200
+ response = response.json()
+ docs = response["items"]
-# assert isinstance(docs, list)
+ assert isinstance(docs, list)
-# @test("route: list agent docs with metadata filter")
-# def _(make_request=make_request, agent=test_agent):
-# response = make_request(
-# method="GET",
-# url=f"/agents/{agent.id}/docs",
-# params={
-# "metadata_filter": {"test": "test"},
-# },
-# )
+@test("route: list agent docs with metadata filter")
+def _(make_request=make_request, agent=test_agent):
+ response = make_request(
+ method="GET",
+ url=f"/agents/{agent.id}/docs",
+ params={
+ "metadata_filter": {"test": "test"},
+ },
+ )
-# assert response.status_code == 200
-# response = response.json()
-# docs = response["items"]
+ assert response.status_code == 200
+ response = response.json()
+ docs = response["items"]
-# assert isinstance(docs, list)
+ assert isinstance(docs, list)
# # TODO: Fix this test. It fails sometimes and sometimes not.
@@ -242,20 +242,20 @@
# assert len(docs) >= 1
-# @test("routes: embed route")
-# async def _(
-# make_request=make_request,
-# mocks=patch_embed_acompletion,
-# ):
-# (embed, _) = mocks
+@test("routes: embed route")
+async def _(
+ make_request=make_request,
+ mocks=patch_embed_acompletion,
+):
+ (embed, _) = mocks
-# response = make_request(
-# method="POST",
-# url="/embed",
-# json={"text": "blah blah"},
-# )
+ response = make_request(
+ method="POST",
+ url="/embed",
+ json={"text": "blah blah"},
+ )
-# result = response.json()
-# assert "vectors" in result
+ result = response.json()
+ assert "vectors" in result
-# embed.assert_called()
+ embed.assert_called()
diff --git a/agents-api/tests/test_files_routes.py b/agents-api/tests/test_files_routes.py
index 004cab74c..0ce3c1c61 100644
--- a/agents-api/tests/test_files_routes.py
+++ b/agents-api/tests/test_files_routes.py
@@ -1,88 +1,97 @@
-# import base64
-# import hashlib
+import base64
+import hashlib
-# from ward import test
+from ward import test
-# from tests.fixtures import make_request, s3_client
+from tests.fixtures import make_request, s3_client
-# @test("route: create file")
-# async def _(make_request=make_request, s3_client=s3_client):
-# data = dict(
-# name="Test File",
-# description="This is a test file.",
-# mime_type="text/plain",
-# content="eyJzYW1wbGUiOiAidGVzdCJ9",
-# )
+@test("route: create file")
+async def _(make_request=make_request, s3_client=s3_client):
+ data = dict(
+ name="Test File",
+ description="This is a test file.",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ )
-# response = make_request(
-# method="POST",
-# url="/files",
-# json=data,
-# )
+ response = make_request(
+ method="POST",
+ url="/files",
+ json=data,
+ )
-# assert response.status_code == 201
+ assert response.status_code == 201
-# @test("route: delete file")
-# async def _(make_request=make_request, s3_client=s3_client):
-# data = dict(
-# name="Test File",
-# description="This is a test file.",
-# mime_type="text/plain",
-# content="eyJzYW1wbGUiOiAidGVzdCJ9",
-# )
+@test("route: delete file")
+async def _(make_request=make_request, s3_client=s3_client):
+ data = dict(
+ name="Test File",
+ description="This is a test file.",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ )
-# response = make_request(
-# method="POST",
-# url="/files",
-# json=data,
-# )
+ response = make_request(
+ method="POST",
+ url="/files",
+ json=data,
+ )
-# file_id = response.json()["id"]
+ file_id = response.json()["id"]
-# response = make_request(
-# method="DELETE",
-# url=f"/files/{file_id}",
-# )
+ response = make_request(
+ method="DELETE",
+ url=f"/files/{file_id}",
+ )
-# assert response.status_code == 202
+ assert response.status_code == 202
-# response = make_request(
-# method="GET",
-# url=f"/files/{file_id}",
-# )
+ response = make_request(
+ method="GET",
+ url=f"/files/{file_id}",
+ )
-# assert response.status_code == 404
+ assert response.status_code == 404
-# @test("route: get file")
-# async def _(make_request=make_request, s3_client=s3_client):
-# data = dict(
-# name="Test File",
-# description="This is a test file.",
-# mime_type="text/plain",
-# content="eyJzYW1wbGUiOiAidGVzdCJ9",
-# )
+@test("route: get file")
+async def _(make_request=make_request, s3_client=s3_client):
+ data = dict(
+ name="Test File",
+ description="This is a test file.",
+ mime_type="text/plain",
+ content="eyJzYW1wbGUiOiAidGVzdCJ9",
+ )
-# response = make_request(
-# method="POST",
-# url="/files",
-# json=data,
-# )
+ response = make_request(
+ method="POST",
+ url="/files",
+ json=data,
+ )
-# file_id = response.json()["id"]
-# content_bytes = base64.b64decode(data["content"])
-# expected_hash = hashlib.sha256(content_bytes).hexdigest()
+ file_id = response.json()["id"]
+ content_bytes = base64.b64decode(data["content"])
+ expected_hash = hashlib.sha256(content_bytes).hexdigest()
-# response = make_request(
-# method="GET",
-# url=f"/files/{file_id}",
-# )
+ response = make_request(
+ method="GET",
+ url=f"/files/{file_id}",
+ )
-# assert response.status_code == 200
+ assert response.status_code == 200
-# result = response.json()
+ result = response.json()
-# # Decode base64 content and compute its SHA-256 hash
-# assert result["hash"] == expected_hash
+ # Decode base64 content and compute its SHA-256 hash
+ assert result["hash"] == expected_hash
+
+@test("route: list files")
+async def _(make_request=make_request, s3_client=s3_client):
+ response = make_request(
+ method="GET",
+ url="/files",
+ )
+
+ assert response.status_code == 200
diff --git a/agents-api/tests/test_task_routes.py b/agents-api/tests/test_task_routes.py
index 61ffa6a09..ae36ae353 100644
--- a/agents-api/tests/test_task_routes.py
+++ b/agents-api/tests/test_task_routes.py
@@ -1,62 +1,62 @@
-# # Tests for task routes
-
-# from uuid_extensions import uuid7
-# from ward import test
-
-# from tests.fixtures import (
-# client,
-# make_request,
-# test_agent,
-# test_execution,
-# test_task,
-# )
-# from tests.utils import patch_testing_temporal
-
-
-# @test("route: unauthorized should fail")
-# def _(client=client, agent=test_agent):
-# data = dict(
-# name="test user",
-# main=[
-# {
-# "kind_": "evaluate",
-# "evaluate": {
-# "additionalProp1": "value1",
-# },
-# }
-# ],
-# )
-
-# response = client.request(
-# method="POST",
-# url=f"/agents/{str(agent.id)}/tasks",
-# data=data,
-# )
-
-# assert response.status_code == 403
-
-
-# @test("route: create task")
-# def _(make_request=make_request, agent=test_agent):
-# data = dict(
-# name="test user",
-# main=[
-# {
-# "kind_": "evaluate",
-# "evaluate": {
-# "additionalProp1": "value1",
-# },
-# }
-# ],
-# )
-
-# response = make_request(
-# method="POST",
-# url=f"/agents/{str(agent.id)}/tasks",
-# json=data,
-# )
-
-# assert response.status_code == 201
+# Tests for task routes
+
+from uuid_extensions import uuid7
+from ward import test
+
+from tests.fixtures import (
+ client,
+ make_request,
+ test_agent,
+ # test_execution,
+ test_task,
+)
+from tests.utils import patch_testing_temporal
+
+
+@test("route: unauthorized should fail")
+def _(client=client, agent=test_agent):
+ data = dict(
+ name="test user",
+ main=[
+ {
+ "kind_": "evaluate",
+ "evaluate": {
+ "additionalProp1": "value1",
+ },
+ }
+ ],
+ )
+
+ response = client.request(
+ method="POST",
+ url=f"/agents/{str(agent.id)}/tasks",
+ json=data,
+ )
+
+ assert response.status_code == 403
+
+
+@test("route: create task")
+def _(make_request=make_request, agent=test_agent):
+ data = dict(
+ name="test user",
+ main=[
+ {
+ "kind_": "evaluate",
+ "evaluate": {
+ "additionalProp1": "value1",
+ },
+ }
+ ],
+ )
+
+ response = make_request(
+ method="POST",
+ url=f"/agents/{str(agent.id)}/tasks",
+ json=data,
+ )
+
+ assert response.status_code == 201
# @test("route: create task execution")
@@ -98,42 +98,42 @@
# assert response.status_code == 200
-# @test("route: get task not exists")
-# def _(make_request=make_request):
-# task_id = str(uuid7())
+@test("route: get task not exists")
+def _(make_request=make_request):
+ task_id = str(uuid7())
-# response = make_request(
-# method="GET",
-# url=f"/tasks/{task_id}",
-# )
+ response = make_request(
+ method="GET",
+ url=f"/tasks/{task_id}",
+ )
+
+ assert response.status_code == 404
-# assert response.status_code == 400
+@test("route: get task exists")
+def _(make_request=make_request, task=test_task):
+ response = make_request(
+ method="GET",
+ url=f"/tasks/{str(task.id)}",
+ )
-# @test("route: get task exists")
-# def _(make_request=make_request, task=test_task):
+ assert response.status_code == 200
+
+
+# FIXME: This test is failing
+# @test("route: list execution transitions")
+# def _(make_request=make_request, execution=test_execution, transition=test_transition):
# response = make_request(
# method="GET",
-# url=f"/tasks/{str(task.id)}",
+# url=f"/executions/{str(execution.id)}/transitions",
# )
# assert response.status_code == 200
+# response = response.json()
+# transitions = response["items"]
-
-# # FIXME: This test is failing
-# # @test("route: list execution transitions")
-# # def _(make_request=make_request, execution=test_execution, transition=test_transition):
-# # response = make_request(
-# # method="GET",
-# # url=f"/executions/{str(execution.id)}/transitions",
-# # )
-
-# # assert response.status_code == 200
-# # response = response.json()
-# # transitions = response["items"]
-
-# # assert isinstance(transitions, list)
-# # assert len(transitions) > 0
+# assert isinstance(transitions, list)
+# assert len(transitions) > 0
# @test("route: list task executions")
@@ -151,59 +151,84 @@
# assert len(executions) > 0
-# @test("route: list tasks")
-# def _(make_request=make_request, agent=test_agent):
-# response = make_request(
-# method="GET",
-# url=f"/agents/{str(agent.id)}/tasks",
-# )
+@test("route: list tasks")
+def _(make_request=make_request, agent=test_agent):
+ response = make_request(
+ method="GET",
+ url=f"/agents/{str(agent.id)}/tasks",
+ )
-# assert response.status_code == 200
-# response = response.json()
-# tasks = response["items"]
+ data = dict(
+ name="test user",
+ main=[
+ {
+ "kind_": "evaluate",
+ "evaluate": {
+ "additionalProp1": "value1",
+ },
+ }
+ ],
+ )
-# assert isinstance(tasks, list)
-# assert len(tasks) > 0
+ response = make_request(
+ method="POST",
+ url=f"/agents/{str(agent.id)}/tasks",
+ json=data,
+ )
+ assert response.status_code == 201
-# # FIXME: This test is failing
+ response = make_request(
+ method="GET",
+ url=f"/agents/{str(agent.id)}/tasks",
+ )
-# # @test("route: patch execution")
-# # async def _(make_request=make_request, task=test_task):
-# # data = dict(
-# # input={},
-# # metadata={},
-# # )
+ assert response.status_code == 200
+ response = response.json()
+ tasks = response["items"]
-# # async with patch_testing_temporal():
-# # response = make_request(
-# # method="POST",
-# # url=f"/tasks/{str(task.id)}/executions",
-# # json=data,
-# # )
+ assert isinstance(tasks, list)
+ assert len(tasks) > 0
+
+
+# FIXME: This test is failing
+
+# @test("route: patch execution")
+# async def _(make_request=make_request, task=test_task):
+# data = dict(
+# input={},
+# metadata={},
+# )
+
+# async with patch_testing_temporal():
+# response = make_request(
+# method="POST",
+# url=f"/tasks/{str(task.id)}/executions",
+# json=data,
+# )
-# # execution = response.json()
+# execution = response.json()
-# # data = dict(
-# # status="running",
-# # )
+# data = dict(
+# status="running",
+# )
-# # response = make_request(
-# # method="PATCH",
-# # url=f"/tasks/{str(task.id)}/executions/{str(execution['id'])}",
-# # json=data,
-# # )
+# response = make_request(
+# method="PATCH",
+# url=f"/tasks/{str(task.id)}/executions/{str(execution['id'])}",
+# json=data,
+# )
-# # assert response.status_code == 200
+# assert response.status_code == 200
-# # execution_id = response.json()["id"]
+# execution_id = response.json()["id"]
-# # response = make_request(
-# # method="GET",
-# # url=f"/executions/{execution_id}",
-# # )
+# response = make_request(
+# method="GET",
+# url=f"/executions/{execution_id}",
+# )
-# # assert response.status_code == 200
-# # execution = response.json()
+# assert response.status_code == 200
+# execution = response.json()
-# # assert execution["status"] == "running"
+# assert execution["status"] == "running"
diff --git a/agents-api/tests/test_user_routes.py b/agents-api/tests/test_user_routes.py
index 35f3b8fc7..e6cd82c2a 100644
--- a/agents-api/tests/test_user_routes.py
+++ b/agents-api/tests/test_user_routes.py
@@ -1,185 +1,185 @@
-# # Tests for user routes
+# Tests for user routes
-# from uuid_extensions import uuid7
-# from ward import test
+from uuid_extensions import uuid7
+from ward import test
-# from tests.fixtures import client, make_request, test_user
+from tests.fixtures import client, make_request, test_user
-# @test("route: unauthorized should fail")
-# def _(client=client):
-# data = dict(
-# name="test user",
-# about="test user about",
-# )
+@test("route: unauthorized should fail")
+def _(client=client):
+ data = dict(
+ name="test user",
+ about="test user about",
+ )
-# response = client.request(
-# method="POST",
-# url="/users",
-# data=data,
-# )
+ response = client.request(
+ method="POST",
+ url="/users",
+ json=data,
+ )
-# assert response.status_code == 403
+ assert response.status_code == 403
-# @test("route: create user")
-# def _(make_request=make_request):
-# data = dict(
-# name="test user",
-# about="test user about",
-# )
+@test("route: create user")
+def _(make_request=make_request):
+ data = dict(
+ name="test user",
+ about="test user about",
+ )
-# response = make_request(
-# method="POST",
-# url="/users",
-# json=data,
-# )
+ response = make_request(
+ method="POST",
+ url="/users",
+ json=data,
+ )
-# assert response.status_code == 201
+ assert response.status_code == 201
-# @test("route: get user not exists")
-# def _(make_request=make_request):
-# user_id = str(uuid7())
+@test("route: get user not exists")
+def _(make_request=make_request):
+ user_id = str(uuid7())
-# response = make_request(
-# method="GET",
-# url=f"/users/{user_id}",
-# )
+ response = make_request(
+ method="GET",
+ url=f"/users/{user_id}",
+ )
-# assert response.status_code == 404
+ assert response.status_code == 404
-# @test("route: get user exists")
-# def _(make_request=make_request, user=test_user):
-# user_id = str(user.id)
+@test("route: get user exists")
+def _(make_request=make_request, user=test_user):
+ user_id = str(user.id)
-# response = make_request(
-# method="GET",
-# url=f"/users/{user_id}",
-# )
+ response = make_request(
+ method="GET",
+ url=f"/users/{user_id}",
+ )
-# assert response.status_code != 404
+ assert response.status_code != 404
-# @test("route: delete user")
-# def _(make_request=make_request):
-# data = dict(
-# name="test user",
-# about="test user about",
-# )
+@test("route: delete user")
+def _(make_request=make_request):
+ data = dict(
+ name="test user",
+ about="test user about",
+ )
-# response = make_request(
-# method="POST",
-# url="/users",
-# json=data,
-# )
-# user_id = response.json()["id"]
+ response = make_request(
+ method="POST",
+ url="/users",
+ json=data,
+ )
+ user_id = response.json()["id"]
-# response = make_request(
-# method="DELETE",
-# url=f"/users/{user_id}",
-# )
+ response = make_request(
+ method="DELETE",
+ url=f"/users/{user_id}",
+ )
-# assert response.status_code == 202
+ assert response.status_code == 202
-# response = make_request(
-# method="GET",
-# url=f"/users/{user_id}",
-# )
+ response = make_request(
+ method="GET",
+ url=f"/users/{user_id}",
+ )
-# assert response.status_code == 404
+ assert response.status_code == 404
-# @test("route: update user")
-# def _(make_request=make_request, user=test_user):
-# data = dict(
-# name="updated user",
-# about="updated user about",
-# )
+@test("route: update user")
+def _(make_request=make_request, user=test_user):
+ data = dict(
+ name="updated user",
+ about="updated user about",
+ )
-# user_id = str(user.id)
-# response = make_request(
-# method="PUT",
-# url=f"/users/{user_id}",
-# json=data,
-# )
+ user_id = str(user.id)
+ response = make_request(
+ method="PUT",
+ url=f"/users/{user_id}",
+ json=data,
+ )
-# assert response.status_code == 200
+ assert response.status_code == 200
-# user_id = response.json()["id"]
+ user_id = response.json()["id"]
-# response = make_request(
-# method="GET",
-# url=f"/users/{user_id}",
-# )
+ response = make_request(
+ method="GET",
+ url=f"/users/{user_id}",
+ )
-# assert response.status_code == 200
-# user = response.json()
+ assert response.status_code == 200
+ user = response.json()
-# assert user["name"] == "updated user"
-# assert user["about"] == "updated user about"
+ assert user["name"] == "updated user"
+ assert user["about"] == "updated user about"
-# @test("query: patch user")
-# def _(make_request=make_request, user=test_user):
-# user_id = str(user.id)
+@test("query: patch user")
+def _(make_request=make_request, user=test_user):
+ user_id = str(user.id)
-# data = dict(
-# name="patched user",
-# about="patched user about",
-# )
+ data = dict(
+ name="patched user",
+ about="patched user about",
+ )
-# response = make_request(
-# method="PATCH",
-# url=f"/users/{user_id}",
-# json=data,
-# )
+ response = make_request(
+ method="PATCH",
+ url=f"/users/{user_id}",
+ json=data,
+ )
-# assert response.status_code == 200
+ assert response.status_code == 200
-# user_id = response.json()["id"]
+ user_id = response.json()["id"]
-# response = make_request(
-# method="GET",
-# url=f"/users/{user_id}",
-# )
+ response = make_request(
+ method="GET",
+ url=f"/users/{user_id}",
+ )
-# assert response.status_code == 200
-# user = response.json()
+ assert response.status_code == 200
+ user = response.json()
-# assert user["name"] == "patched user"
-# assert user["about"] == "patched user about"
+ assert user["name"] == "patched user"
+ assert user["about"] == "patched user about"
-# @test("query: list users")
-# def _(make_request=make_request):
-# response = make_request(
-# method="GET",
-# url="/users",
-# )
+@test("query: list users")
+def _(make_request=make_request):
+ response = make_request(
+ method="GET",
+ url="/users",
+ )
-# assert response.status_code == 200
-# response = response.json()
-# users = response["items"]
+ assert response.status_code == 200
+ response = response.json()
+ users = response["items"]
-# assert isinstance(users, list)
-# assert len(users) > 0
+ assert isinstance(users, list)
+ assert len(users) > 0
-# @test("query: list users with right metadata filter")
-# def _(make_request=make_request, user=test_user):
-# response = make_request(
-# method="GET",
-# url="/users",
-# params={
-# "metadata_filter": {"test": "test"},
-# },
-# )
+@test("query: list users with right metadata filter")
+def _(make_request=make_request, user=test_user):
+ response = make_request(
+ method="GET",
+ url="/users",
+ params={
+ "metadata_filter": {"test": "test"},
+ },
+ )
-# assert response.status_code == 200
-# response = response.json()
-# users = response["items"]
+ assert response.status_code == 200
+ response = response.json()
+ users = response["items"]
-# assert isinstance(users, list)
-# assert len(users) > 0
+ assert isinstance(users, list)
+ assert len(users) > 0
From 5f3adc660e2f91bd05acd1906979e077af799e5a Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Mon, 23 Dec 2024 18:59:40 +0000
Subject: [PATCH 169/274] refactor: Lint agents-api (CI)
---
agents-api/agents_api/activities/task_steps/__init__.py | 1 +
agents-api/agents_api/queries/agents/get_agent.py | 4 +++-
agents-api/agents_api/queries/users/get_user.py | 4 +++-
agents-api/agents_api/routers/agents/__init__.py | 4 ++++
agents-api/agents_api/routers/files/list_files.py | 2 +-
agents-api/agents_api/routers/tasks/__init__.py | 3 +++
agents-api/agents_api/routers/tasks/get_task_details.py | 2 --
agents-api/tests/test_docs_routes.py | 1 -
agents-api/tests/test_files_routes.py | 1 +
9 files changed, 16 insertions(+), 6 deletions(-)
diff --git a/agents-api/agents_api/activities/task_steps/__init__.py b/agents-api/agents_api/activities/task_steps/__init__.py
index cccfb9d35..482fc42da 100644
--- a/agents-api/agents_api/activities/task_steps/__init__.py
+++ b/agents-api/agents_api/activities/task_steps/__init__.py
@@ -1,6 +1,7 @@
# ruff: noqa: F401, F403, F405
from .base_evaluate import base_evaluate
+
# from .cozo_query_step import cozo_query_step
from .evaluate_step import evaluate_step
from .for_each_step import for_each_step
diff --git a/agents-api/agents_api/queries/agents/get_agent.py b/agents-api/agents_api/queries/agents/get_agent.py
index a06bde240..19e6ad954 100644
--- a/agents-api/agents_api/queries/agents/get_agent.py
+++ b/agents-api/agents_api/queries/agents/get_agent.py
@@ -62,7 +62,9 @@
@wrap_in_class(Agent, one=True, transform=lambda d: {"id": d["agent_id"], **d})
@pg_query
@beartype
-async def get_agent(*, agent_id: UUID, developer_id: UUID) -> tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]:
+async def get_agent(
+ *, agent_id: UUID, developer_id: UUID
+) -> tuple[str, list, Literal["fetch", "fetchmany", "fetchrow"]]:
"""
Constructs the SQL query to retrieve an agent's details.
diff --git a/agents-api/agents_api/queries/users/get_user.py b/agents-api/agents_api/queries/users/get_user.py
index 331c4ce1b..5657f823a 100644
--- a/agents-api/agents_api/queries/users/get_user.py
+++ b/agents-api/agents_api/queries/users/get_user.py
@@ -42,7 +42,9 @@
@wrap_in_class(User, one=True)
@pg_query
@beartype
-async def get_user(*, developer_id: UUID, user_id: UUID) -> tuple[str, list, Literal["fetchrow", "fetchmany", "fetch"]]:
+async def get_user(
+ *, developer_id: UUID, user_id: UUID
+) -> tuple[str, list, Literal["fetchrow", "fetchmany", "fetch"]]:
"""
Constructs an optimized SQL query to retrieve a user's details.
Uses the primary key index (developer_id, user_id) for efficient lookup.
diff --git a/agents-api/agents_api/routers/agents/__init__.py b/agents-api/agents_api/routers/agents/__init__.py
index bd4a40252..95354363c 100644
--- a/agents-api/agents_api/routers/agents/__init__.py
+++ b/agents-api/agents_api/routers/agents/__init__.py
@@ -1,14 +1,18 @@
# ruff: noqa: F401
from .create_agent import create_agent
+
# from .create_agent_tool import create_agent_tool
from .create_or_update_agent import create_or_update_agent
from .delete_agent import delete_agent
+
# from .delete_agent_tool import delete_agent_tool
from .get_agent_details import get_agent_details
+
# from .list_agent_tools import list_agent_tools
from .list_agents import list_agents
from .patch_agent import patch_agent
+
# from .patch_agent_tool import patch_agent_tool
from .router import router
from .update_agent import update_agent
diff --git a/agents-api/agents_api/routers/files/list_files.py b/agents-api/agents_api/routers/files/list_files.py
index f993ce479..67d436bd5 100644
--- a/agents-api/agents_api/routers/files/list_files.py
+++ b/agents-api/agents_api/routers/files/list_files.py
@@ -21,7 +21,7 @@ async def fetch_file_content(file_id: UUID) -> str:
@router.get("/files", tags=["files"])
async def list_files(
- x_developer_id: Annotated[UUID, Depends(get_developer_id)]
+ x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> list[File]:
files = await list_files_query(developer_id=x_developer_id)
diff --git a/agents-api/agents_api/routers/tasks/__init__.py b/agents-api/agents_api/routers/tasks/__init__.py
index 37d019941..58b9fce54 100644
--- a/agents-api/agents_api/routers/tasks/__init__.py
+++ b/agents-api/agents_api/routers/tasks/__init__.py
@@ -1,12 +1,15 @@
# ruff: noqa: F401, F403, F405
from .create_or_update_task import create_or_update_task
from .create_task import create_task
+
# from .create_task_execution import create_task_execution
# from .get_execution_details import get_execution_details
from .get_task_details import get_task_details
+
# from .list_execution_transitions import list_execution_transitions
# from .list_task_executions import list_task_executions
from .list_tasks import list_tasks
+
# from .patch_execution import patch_execution
from .router import router
# from .stream_transitions_events import stream_transitions_events
diff --git a/agents-api/agents_api/routers/tasks/get_task_details.py b/agents-api/agents_api/routers/tasks/get_task_details.py
index 01f1d7a35..8183ea1df 100644
--- a/agents-api/agents_api/routers/tasks/get_task_details.py
+++ b/agents-api/agents_api/routers/tasks/get_task_details.py
@@ -16,11 +16,9 @@ async def get_task_details(
task_id: UUID,
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
) -> Task:
-
task = await get_task_query(developer_id=x_developer_id, task_id=task_id)
task_data = task.model_dump()
-
for workflow in task_data.get("workflows", []):
if workflow["name"] == "main":
task_data["main"] = workflow.get("steps", [])
diff --git a/agents-api/tests/test_docs_routes.py b/agents-api/tests/test_docs_routes.py
index f616ddcd8..3fc85e8b0 100644
--- a/agents-api/tests/test_docs_routes.py
+++ b/agents-api/tests/test_docs_routes.py
@@ -12,7 +12,6 @@
)
from tests.utils import patch_testing_temporal
-
# @test("route: create user doc")
# async def _(make_request=make_request, user=test_user):
# async with patch_testing_temporal():
diff --git a/agents-api/tests/test_files_routes.py b/agents-api/tests/test_files_routes.py
index 0ce3c1c61..05507a786 100644
--- a/agents-api/tests/test_files_routes.py
+++ b/agents-api/tests/test_files_routes.py
@@ -87,6 +87,7 @@ async def _(make_request=make_request, s3_client=s3_client):
# Decode base64 content and compute its SHA-256 hash
assert result["hash"] == expected_hash
+
@test("route: list files")
async def _(make_request=make_request, s3_client=s3_client):
response = make_request(
From 830206bf83b830fb910a484b3cc8161570303aea Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Mon, 23 Dec 2024 19:59:20 -0500
Subject: [PATCH 170/274] feat(agents-api): added docs hybrid search
---
.../agents_api/queries/docs/__init__.py | 9 +-
.../queries/docs/search_docs_by_embedding.py | 14 +-
.../queries/docs/search_docs_by_text.py | 1 +
.../queries/docs/search_docs_hybrid.py | 239 +++++++-----------
.../agents_api/queries/tools/__init__.py | 10 +
.../agents_api/queries/tools/create_tools.py | 48 ++--
.../agents_api/queries/tools/delete_tool.py | 41 +--
.../agents_api/queries/tools/get_tool.py | 38 +--
.../tools/get_tool_args_from_metadata.py | 22 +-
.../agents_api/queries/tools/list_tools.py | 38 +--
.../agents_api/queries/tools/patch_tool.py | 39 ++-
.../agents_api/queries/tools/update_tool.py | 43 ++--
agents-api/tests/fixtures.py | 6 +-
agents-api/tests/test_docs_queries.py | 36 ++-
.../migrations/000018_doc_search.up.sql | 6 +-
15 files changed, 303 insertions(+), 287 deletions(-)
diff --git a/agents-api/agents_api/queries/docs/__init__.py b/agents-api/agents_api/queries/docs/__init__.py
index 51bab2555..31b44e7b4 100644
--- a/agents-api/agents_api/queries/docs/__init__.py
+++ b/agents-api/agents_api/queries/docs/__init__.py
@@ -9,6 +9,8 @@
- Deleting documents by their unique identifiers.
- Embedding document snippets for retrieval purposes.
- Searching documents by text.
+- Searching documents by hybrid text and embedding.
+- Searching documents by embedding.
The module interacts with other parts of the application, such as the agents and users modules, to provide a comprehensive document management system. Its role is crucial in enabling document search, retrieval, and management features within the context of agents and users.
@@ -22,14 +24,15 @@
from .get_doc import get_doc
from .list_docs import list_docs
-# from .search_docs_by_embedding import search_docs_by_embedding
+from .search_docs_by_embedding import search_docs_by_embedding
from .search_docs_by_text import search_docs_by_text
-
+from .search_docs_hybrid import search_docs_hybrid
__all__ = [
"create_doc",
"delete_doc",
"get_doc",
"list_docs",
- # "search_docs_by_embedding",
+ "search_docs_by_embedding",
"search_docs_by_text",
+ "search_docs_hybrid",
]
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
index 6fb6b82eb..9c8b15955 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
@@ -3,10 +3,12 @@
from beartype import beartype
from fastapi import HTTPException
+import asyncpg
from ...autogen.openapi_model import DocReference
-from ..utils import pg_query, wrap_in_class
+from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+# Raw query for vector search
search_docs_by_embedding_query = """
SELECT * FROM search_by_vector(
$1, -- developer_id
@@ -19,7 +21,15 @@
)
"""
-
+@rewrap_exceptions(
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
@wrap_in_class(
DocReference,
transform=lambda d: {
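
The $3::vector(1024) cast expects a pgvector text literal rather than a raw
Python list, so the embedding is serialized to a string before being passed as
a parameter, as the hybrid query below does. A sketch of that conversion (the
stand-in vector is illustrative):

    embedding = [0.1] * 1024                                # stand-in 1024-dim embedding
    embedding_str = f"[{', '.join(map(str, embedding))}]"   # pgvector literal "[0.1, 0.1, ...]"
    # sent as $3 and cast server-side via $3::vector(1024)
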
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_text.py b/agents-api/agents_api/queries/docs/search_docs_by_text.py
index 86877c752..d2a96e3af 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_text.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_text.py
@@ -8,6 +8,7 @@
from ...autogen.openapi_model import DocReference
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+# Raw query for text search
search_docs_text_query = """
SELECT * FROM search_by_text(
$1, -- developer_id
diff --git a/agents-api/agents_api/queries/docs/search_docs_hybrid.py b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
index 184ba7e8e..8e14f36dd 100644
--- a/agents-api/agents_api/queries/docs/search_docs_hybrid.py
+++ b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
@@ -1,158 +1,113 @@
-from typing import List, Literal
+from typing import List, Any, Literal
from uuid import UUID
from beartype import beartype
-from ...autogen.openapi_model import Doc
-from .search_docs_by_embedding import search_docs_by_embedding
-from .search_docs_by_text import search_docs_by_text
-
-
-def dbsf_normalize(scores: List[float]) -> List[float]:
- """
- Example distribution-based normalization: clamp each score
- from (mean - 3*stddev) to (mean + 3*stddev) and scale to 0..1
- """
- import statistics
-
- if len(scores) < 2:
- return scores
- m = statistics.mean(scores)
- sd = statistics.pstdev(scores) # population std
- if sd == 0:
- return scores
- upper = m + 3 * sd
- lower = m - 3 * sd
-
- def clamp_scale(v):
- c = min(upper, max(lower, v))
- return (c - lower) / (upper - lower)
-
- return [clamp_scale(s) for s in scores]
-
-
-@beartype
-def fuse_results(
- text_docs: List[Doc], embedding_docs: List[Doc], alpha: float
-) -> List[Doc]:
- """
- Merges text search results (descending by text rank) with
- embedding results (descending by closeness or inverse distance).
- alpha ~ how much to weigh the embedding score
- """
- # Suppose we stored each doc's "distance" from the embedding query, and
- # for text search we store a rank or negative distance. We'll unify them:
- # Make up a dictionary of doc_id -> text_score, doc_id -> embed_score
- # For example, text_score = -distance if you want bigger = better
- text_scores = {}
- embed_scores = {}
- for doc in text_docs:
- # If you had "rank", you might store doc.distance = rank
- # For demo, let's assume doc.distance is negative... up to you
- text_scores[doc.id] = float(-doc.distance if doc.distance else 0)
-
- for doc in embedding_docs:
- # Lower distance => better, so we do embed_score = -distance
- embed_scores[doc.id] = float(-doc.distance if doc.distance else 0)
-
- # Normalize them
- text_vals = list(text_scores.values())
- embed_vals = list(embed_scores.values())
- text_vals_norm = dbsf_normalize(text_vals)
- embed_vals_norm = dbsf_normalize(embed_vals)
-
- # Map them back
- t_keys = list(text_scores.keys())
- for i, key in enumerate(t_keys):
- text_scores[key] = text_vals_norm[i]
- e_keys = list(embed_scores.keys())
- for i, key in enumerate(e_keys):
- embed_scores[key] = embed_vals_norm[i]
-
- # Gather all doc IDs
- all_ids = set(text_scores.keys()) | set(embed_scores.keys())
-
- # Weighted sum => combined
- out = []
- for doc_id in all_ids:
- # text and embed might be missing doc_id => 0
- t_score = text_scores.get(doc_id, 0)
- e_score = embed_scores.get(doc_id, 0)
- combined = alpha * e_score + (1 - alpha) * t_score
- # We'll store final "distance" as -(combined) so bigger combined => smaller distance
- out.append((doc_id, combined))
-
- # Sort descending by combined
- out.sort(key=lambda x: x[1], reverse=True)
-
- # Convert to doc objects. We can pick from text_docs or embedding_docs or whichever is found.
- # If present in both, we can merge fields. For simplicity, just pick from text_docs then fallback embedding_docs.
-
- # Create a quick ID->doc map
- text_map = {d.id: d for d in text_docs}
- embed_map = {d.id: d for d in embedding_docs}
-
- final_docs = []
- for doc_id, score in out:
- doc = text_map.get(doc_id) or embed_map.get(doc_id)
- doc = doc.model_copy() # or copy if you are using Pydantic
- doc.distance = float(-score) # so a higher combined => smaller distance
- final_docs.append(doc)
- return final_docs
-
-
+from ...autogen.openapi_model import DocReference
+import asyncpg
+from fastapi import HTTPException
+
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Raw query for hybrid search
+search_docs_hybrid_query = """
+SELECT * FROM search_hybrid(
+ $1, -- developer_id
+ $2, -- text_query
+ $3::vector(1024), -- embedding
+ $4::text[], -- owner_types
+ $UUID_LIST::uuid[], -- owner_ids
+ $5, -- k
+ $6, -- alpha
+ $7, -- confidence
+ $8, -- metadata_filter
+ $9 -- search_language
+)
+"""
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="The specified developer does not exist.",
+ )
+ }
+)
+@wrap_in_class(
+ DocReference,
+ transform=lambda d: {
+ "owner": {
+ "id": d["owner_id"],
+ "role": d["owner_type"],
+ },
+ "metadata": d.get("metadata", {}),
+ **d,
+ },
+)
+@pg_query
@beartype
async def search_docs_hybrid(
developer_id: UUID,
+ owners: list[tuple[Literal["user", "agent"], UUID]],
text_query: str = "",
embedding: List[float] = None,
k: int = 10,
alpha: float = 0.5,
- owner_type: Literal["user", "agent", "org"] | None = None,
- owner_id: UUID | None = None,
-) -> List[Doc]:
+ metadata_filter: dict[str, Any] = {},
+ search_language: str = "english",
+ confidence: float = 0.5,
+) -> tuple[str, list]:
"""
Hybrid text-and-embedding doc search. We get top-K from each approach,
then fuse them client-side. Adjust concurrency or approach as you like.
- """
- # We'll dispatch two queries in parallel
- # (One full-text, one embedding-based) each limited to K
- tasks = []
- if text_query.strip():
- tasks.append(
- search_docs_by_text(
- developer_id=developer_id,
- query=text_query,
- k=k,
- owner_type=owner_type,
- owner_id=owner_id,
- )
- )
- else:
- tasks.append([]) # no text results if query is empty
-
- if embedding and any(embedding):
- tasks.append(
- search_docs_by_embedding(
- developer_id=developer_id,
- query_embedding=embedding,
- k=k,
- owner_type=owner_type,
- owner_id=owner_id,
- )
- )
- else:
- tasks.append([])
- # Run concurrently (or sequentially, if you prefer)
- # If you have a 'run_concurrently' from your old code, you can do:
- # text_results, embed_results = await run_concurrently([task1, task2])
- # Otherwise just do them in parallel with e.g. asyncio.gather:
- from asyncio import gather
-
- text_results, embed_results = await gather(*tasks)
+ Parameters:
+ developer_id (UUID): The unique identifier for the developer.
+ text_query (str): The text query to search for.
+ embedding (List[float]): The embedding to search for.
+ k (int): The number of results to return.
+        alpha (float): The weight given to embedding results over text results.
+        owners (list[tuple[Literal["user", "agent"], UUID]]): The (type, id) pairs of owners to scope the search to.
+        metadata_filter (dict[str, Any]): Metadata that matched documents must contain.
+        search_language (str): The full-text search language configuration.
+        confidence (float): The confidence threshold for embedding matches.
+
+ Returns:
+ tuple[str, list]: The SQL query and parameters for the search.
+ """
- # fuse them
- fused = fuse_results(text_results, embed_results, alpha)
- # Then pick top K overall
- return fused[:k]
+ if k < 1:
+ raise HTTPException(status_code=400, detail="k must be >= 1")
+
+ if not text_query and not embedding:
+ raise HTTPException(status_code=400, detail="Empty query provided")
+
+ if not embedding:
+ raise HTTPException(status_code=400, detail="Empty embedding provided")
+
+ # Convert query_embedding to a string
+ embedding_str = f"[{', '.join(map(str, embedding))}]"
+
+ # Extract owner types and IDs
+ owner_types: list[str] = [owner[0] for owner in owners]
+ owner_ids: list[str] = [str(owner[1]) for owner in owners]
+
+    # NOTE: Manually replace the UUID list because asyncpg isn't sending it correctly
+ owner_ids_pg_str = f"ARRAY['{'\', \''.join(owner_ids)}']"
+ query = search_docs_hybrid_query.replace("$UUID_LIST", owner_ids_pg_str)
+
+ return (
+ query,
+ [
+ developer_id,
+ text_query,
+ embedding_str,
+ owner_types,
+ k,
+ alpha,
+ confidence,
+ metadata_filter,
+ search_language,
+ ],
+ )
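
The fusion logic deleted above (DBSF normalization plus the weighted `alpha * embed_score + (1 - alpha) * text_score` sum) now runs inside the `search_hybrid` SQL function, so the Python side only builds the query and its parameters. A minimal sketch of calling the rewritten function, mirroring the test added later in this series; the `connection_pool` keyword is assumed to be consumed by the `pg_query` decorator:

    # Hedged sketch; argument values are illustrative.
    refs = await search_docs_hybrid(
        developer_id=developer.id,
        owners=[("agent", agent.id)],
        text_query="funny thing",
        embedding=[1.0] * 1024,            # must match the vector(1024) cast
        k=3,
        alpha=0.5,                         # weight of the embedding score
        metadata_filter={"test": "test"},
        connection_pool=pool,              # assumed injected by pg_query
    )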
diff --git a/agents-api/agents_api/queries/tools/__init__.py b/agents-api/agents_api/queries/tools/__init__.py
index b1775f1a9..7afa6d64a 100644
--- a/agents-api/agents_api/queries/tools/__init__.py
+++ b/agents-api/agents_api/queries/tools/__init__.py
@@ -18,3 +18,13 @@
from .list_tools import list_tools
from .patch_tool import patch_tool
from .update_tool import update_tool
+
+__all__ = [
+ "create_tools",
+ "delete_tool",
+ "get_tool",
+ "get_tool_args_from_metadata",
+ "list_tools",
+ "patch_tool",
+ "update_tool",
+]
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index 70b0525a8..b91964a39 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -1,26 +1,26 @@
"""This module contains functions for creating tools in the CozoDB database."""
-from typing import Any, TypeVar
+from typing import Any
from uuid import UUID
-import sqlvalidator
from beartype import beartype
from uuid_extensions import uuid7
+from fastapi import HTTPException
+import asyncpg
+from sqlglot import parse_one
from ...autogen.openapi_model import CreateToolRequest, Tool
-from ...exceptions import InvalidSQLQuery
from ...metrics.counters import increase_counter
+
from ..utils import (
pg_query,
- # rewrap_exceptions,
+ rewrap_exceptions,
wrap_in_class,
+ partialclass,
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-sql_query = """INSERT INTO tools
+# Define the raw SQL query for creating tools
+tools_query = parse_one("""INSERT INTO tools
(
developer_id,
agent_id,
@@ -43,20 +43,23 @@
WHERE (agent_id, name) = ($2, $5)
)
RETURNING *
-"""
-
+""").sql(pretty=True)
-# if not sql_query.is_valid():
-# raise InvalidSQLQuery("create_tools")
-
-# @rewrap_exceptions(
-# {
-# ValidationError: partialclass(HTTPException, status_code=400),
-# TypeError: partialclass(HTTPException, status_code=400),
-# AssertionError: partialclass(HTTPException, status_code=400),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A tool with this name already exists for this agent"
+ ),
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Agent not found",
+ ),
+}
+)
@wrap_in_class(
Tool,
transform=lambda d: {
@@ -106,7 +109,8 @@ async def create_tools(
]
return (
- sql_query,
+ tools_query,
tools_data,
"fetchmany",
)
+
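
Replacing sqlvalidator with sqlglot moves SQL validation to import time: `parse_one` raises on malformed SQL, and `.sql(pretty=True)` re-renders the validated statement. A self-contained sketch of the pattern (table name illustrative):

    from sqlglot import parse_one

    # parse_one raises sqlglot.errors.ParseError if the statement is malformed,
    # so a broken query fails when the module is imported, not per request.
    query = parse_one("""
    SELECT * FROM tools WHERE developer_id = $1 LIMIT 1
    """).sql(pretty=True)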
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index cd666ee42..9a507523d 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -1,22 +1,23 @@
-from typing import Any, TypeVar
+from typing import Any
from uuid import UUID
-import sqlvalidator
+from fastapi import HTTPException
from beartype import beartype
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
-from ...exceptions import InvalidSQLQuery
+from sqlglot import parse_one
+import asyncpg
+
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-sql_query = """
+# Define the raw SQL query for deleting a tool
+tools_query = parse_one("""
DELETE FROM
tools
WHERE
@@ -24,19 +25,19 @@
agent_id = $2 AND
tool_id = $3
RETURNING *
-"""
+""").sql(pretty=True)
-# if not sql_query.is_valid():
-# raise InvalidSQLQuery("delete_tool")
-
-# @rewrap_exceptions(
-# {
-# QueryException: partialclass(HTTPException, status_code=400),
-# ValidationError: partialclass(HTTPException, status_code=400),
-# TypeError: partialclass(HTTPException, status_code=400),
-# }
-# )
+@rewrap_exceptions(
+{
+ # Handle foreign key constraint
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Developer or agent not found",
+ ),
+}
+)
@wrap_in_class(
ResourceDeletedResponse,
one=True,
@@ -55,7 +56,7 @@ async def delete_tool(
tool_id = str(tool_id)
return (
- sql_query,
+ tools_query,
[
developer_id,
agent_id,
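
`partialclass` and `rewrap_exceptions` are imported from `..utils` but never shown in this series; the call sites imply one pre-binds constructor arguments and the other re-raises mapped exception types. A hedged sketch of what such helpers could look like, not the repo's actual implementation:

    from functools import partialmethod, wraps

    def partialclass(cls, **kwargs):
        # Pre-bind keyword arguments, like functools.partial for classes.
        class Bound(cls):
            __init__ = partialmethod(cls.__init__, **kwargs)
        return Bound

    def rewrap_exceptions(mapping):
        def decorator(fn):
            @wraps(fn)
            async def wrapper(*args, **kwargs):
                try:
                    return await fn(*args, **kwargs)
                except BaseException as exc:
                    for err_type, wrapped in mapping.items():
                        if isinstance(exc, err_type):
                            raise wrapped() from exc  # e.g. HTTPException(404, ...)
                    raise
            return wrapper
        return decorator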
diff --git a/agents-api/agents_api/queries/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
index 29a7ae9b6..9f71dec40 100644
--- a/agents-api/agents_api/queries/tools/get_tool.py
+++ b/agents-api/agents_api/queries/tools/get_tool.py
@@ -1,39 +1,39 @@
-from typing import Any, TypeVar
+from typing import Any
from uuid import UUID
-import sqlvalidator
from beartype import beartype
from ...autogen.openapi_model import Tool
-from ...exceptions import InvalidSQLQuery
+from sqlglot import parse_one
+from fastapi import HTTPException
+import asyncpg
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-sql_query = """
+# Define the raw SQL query for getting a tool
+tools_query = parse_one("""
SELECT * FROM tools
WHERE
developer_id = $1 AND
agent_id = $2 AND
tool_id = $3
LIMIT 1
-"""
+""").sql(pretty=True)
-# if not sql_query.is_valid():
-# raise InvalidSQLQuery("get_tool")
-
-
-# @rewrap_exceptions(
-# {
-# QueryException: partialclass(HTTPException, status_code=400),
-# ValidationError: partialclass(HTTPException, status_code=400),
-# TypeError: partialclass(HTTPException, status_code=400),
-# }
-# )
+@rewrap_exceptions(
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=404,
+ detail="Developer or agent not found",
+ ),
+ }
+)
@wrap_in_class(
Tool,
transform=lambda d: {
@@ -56,7 +56,7 @@ async def get_tool(
tool_id = str(tool_id)
return (
- sql_query,
+ tools_query,
[
developer_id,
agent_id,
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index 8d53a4e1b..937442797 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -4,13 +4,17 @@
import sqlvalidator
from beartype import beartype
-from ...exceptions import InvalidSQLQuery
+from sqlglot import parse_one
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass,
)
-tools_args_for_task_query = """SELECT COALESCE(agents_md || tasks_md, agents_md, tasks_md, '{}') as values FROM (
+# Define the raw SQL query for getting tool args from metadata
+tools_args_for_task_query = parse_one("""
+SELECT COALESCE(agents_md || tasks_md, agents_md, tasks_md, '{}') as values FROM (
SELECT
CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
@@ -27,13 +31,10 @@
WHEN $3 = 'x-api_call-setup' then metadata->'x-api_call-setup' END AS tasks_md
FROM tasks
WHERE task_id = $2 AND developer_id = $4 LIMIT 1
-) AS tasks_md"""
+) AS tasks_md""").sql(pretty=True)
-
-# if not tools_args_for_task_query.is_valid():
-# raise InvalidSQLQuery("tools_args_for_task_query")
-
-tool_args_for_session_query = """SELECT COALESCE(agents_md || sessions_md, agents_md, sessions_md, '{}') as values FROM (
+# Define the raw SQL query for getting tool args from metadata for a session
+tool_args_for_session_query = parse_one("""SELECT COALESCE(agents_md || sessions_md, agents_md, sessions_md, '{}') as values FROM (
SELECT
CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
@@ -50,11 +51,8 @@
WHEN $3 = 'x-api_call-setup' then metadata->'x-api_call-setup' END AS tasks_md
FROM sessions
WHERE session_id = $2 AND developer_id = $4 LIMIT 1
-) AS sessions_md"""
-
+) AS sessions_md""").sql(pretty=True)
-# if not tool_args_for_session_query.is_valid():
-# raise InvalidSQLQuery("tool_args_for_session")
# @rewrap_exceptions(
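
The `COALESCE(agents_md || tasks_md, agents_md, tasks_md, '{}')` pattern merges agent- and task-level jsonb metadata, with the right-hand operand winning on key conflicts and the fallbacks covering NULL sides. The same semantics in plain Python dicts, for illustration:

    agents_md = {"api_key": "from-agent", "region": "us-east"}
    tasks_md = {"api_key": "from-task"}   # task-level values win on conflict

    merged = {**(agents_md or {}), **(tasks_md or {})}
    assert merged == {"api_key": "from-task", "region": "us-east"}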
diff --git a/agents-api/agents_api/queries/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
index cdc82d9bd..d85bb9da0 100644
--- a/agents-api/agents_api/queries/tools/list_tools.py
+++ b/agents-api/agents_api/queries/tools/list_tools.py
@@ -1,20 +1,21 @@
-from typing import Any, Literal, TypeVar
+from typing import Literal
from uuid import UUID
-import sqlvalidator
from beartype import beartype
+import asyncpg
+from fastapi import HTTPException
from ...autogen.openapi_model import Tool
-from ...exceptions import InvalidSQLQuery
+from sqlglot import parse_one
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-sql_query = """
+# Define the raw SQL query for listing tools
+tools_query = parse_one("""
SELECT * FROM tools
WHERE
developer_id = $1 AND
@@ -25,19 +26,18 @@
CASE WHEN $5 = 'updated_at' AND $6 = 'desc' THEN tools.updated_at END DESC NULLS LAST,
CASE WHEN $5 = 'updated_at' AND $6 = 'asc' THEN tools.updated_at END ASC NULLS LAST
LIMIT $3 OFFSET $4;
-"""
-
-# if not sql_query.is_valid():
-# raise InvalidSQLQuery("list_tools")
+""").sql(pretty=True)
-# @rewrap_exceptions(
-# {
-# QueryException: partialclass(HTTPException, status_code=400),
-# ValidationError: partialclass(HTTPException, status_code=400),
-# TypeError: partialclass(HTTPException, status_code=400),
-# }
-# )
+@rewrap_exceptions(
+{
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Developer or agent not found",
+ ),
+}
+)
@wrap_in_class(
Tool,
transform=lambda d: {
@@ -65,7 +65,7 @@ async def list_tools(
agent_id = str(agent_id)
return (
- sql_query,
+ tools_query,
[
developer_id,
agent_id,
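
The `CASE WHEN $5 = ... AND $6 = ...` ordering lets one prepared statement serve every sort column/direction pair; arms that don't match yield NULL for all rows, and `NULLS LAST` keeps them from affecting the order. A hedged sketch of the resulting call shape (the surrounding function signature is not shown in this hunk):

    # Hedged sketch; positional arguments follow the $N placeholders above.
    rows = await pool.fetch(
        tools_query,
        developer_id,   # $1
        agent_id,       # $2
        100,            # $3 LIMIT
        0,              # $4 OFFSET
        "updated_at",   # $5 sort column
        "desc",         # $6 sort direction
    )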
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index e0a20dc1d..fb4c680e1 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -1,22 +1,22 @@
-from typing import Any, TypeVar
+from typing import Any
from uuid import UUID
-import sqlvalidator
from beartype import beartype
from ...autogen.openapi_model import PatchToolRequest, ResourceUpdatedResponse
-from ...exceptions import InvalidSQLQuery
+from sqlglot import parse_one
+import asyncpg
+from fastapi import HTTPException
from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-sql_query = """
+# Define the raw SQL query for patching a tool
+tools_query = parse_one("""
WITH updated_tools AS (
UPDATE tools
SET
@@ -31,19 +31,18 @@
RETURNING *
)
SELECT * FROM updated_tools;
-"""
+""").sql(pretty=True)
-# if not sql_query.is_valid():
-# raise InvalidSQLQuery("patch_tool")
-
-# @rewrap_exceptions(
-# {
-# QueryException: partialclass(HTTPException, status_code=400),
-# ValidationError: partialclass(HTTPException, status_code=400),
-# TypeError: partialclass(HTTPException, status_code=400),
-# }
-# )
+@rewrap_exceptions(
+{
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="Developer or agent not found",
+ ),
+}
+)
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
@@ -94,7 +93,7 @@ async def patch_tool(
del patch_data[tool_type]
return (
- sql_query,
+ tools_query,
[
developer_id,
agent_id,
diff --git a/agents-api/agents_api/queries/tools/update_tool.py b/agents-api/agents_api/queries/tools/update_tool.py
index 2b8beb155..18ff44f18 100644
--- a/agents-api/agents_api/queries/tools/update_tool.py
+++ b/agents-api/agents_api/queries/tools/update_tool.py
@@ -1,24 +1,27 @@
from typing import Any, TypeVar
from uuid import UUID
-import sqlvalidator
from beartype import beartype
from ...autogen.openapi_model import (
ResourceUpdatedResponse,
UpdateToolRequest,
)
-from ...exceptions import InvalidSQLQuery
+import asyncpg
+import json
+from fastapi import HTTPException
+
+from sqlglot import parse_one
from ...metrics.counters import increase_counter
from ..utils import (
pg_query,
wrap_in_class,
+ rewrap_exceptions,
+ partialclass
)
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-sql_query = """
+# Define the raw SQL query for updating a tool
+tools_query = parse_one("""
UPDATE tools
SET
type = $4,
@@ -30,19 +33,23 @@
agent_id = $2 AND
tool_id = $3
RETURNING *;
-"""
+""").sql(pretty=True)
-# if not sql_query.is_valid():
-# raise InvalidSQLQuery("update_tool")
-
-# @rewrap_exceptions(
-# {
-# QueryException: partialclass(HTTPException, status_code=400),
-# ValidationError: partialclass(HTTPException, status_code=400),
-# TypeError: partialclass(HTTPException, status_code=400),
-# }
-# )
+@rewrap_exceptions(
+{
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A tool with this name already exists for this agent",
+ ),
+ json.JSONDecodeError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid tool specification format",
+ ),
+}
+)
@wrap_in_class(
ResourceUpdatedResponse,
one=True,
@@ -84,7 +91,7 @@ async def update_tool(
del update_data[tool_type]
return (
- sql_query,
+ tools_query,
[
developer_id,
agent_id,
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index a98fef531..1760209a8 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -18,22 +18,18 @@
from agents_api.clients.pg import create_db_pool
from agents_api.env import api_key, api_key_header_name, multi_tenant_mode
from agents_api.queries.agents.create_agent import create_agent
-from agents_api.queries.agents.delete_agent import delete_agent
from agents_api.queries.developers.create_developer import create_developer
from agents_api.queries.developers.get_developer import get_developer
from agents_api.queries.docs.create_doc import create_doc
-from agents_api.queries.docs.delete_doc import delete_doc
# from agents_api.queries.executions.create_execution import create_execution
# from agents_api.queries.executions.create_execution_transition import create_execution_transition
# from agents_api.queries.executions.create_temporal_lookup import create_temporal_lookup
from agents_api.queries.files.create_file import create_file
-from agents_api.queries.files.delete_file import delete_file
from agents_api.queries.sessions.create_session import create_session
from agents_api.queries.tasks.create_task import create_task
-from agents_api.queries.tasks.delete_task import delete_task
from agents_api.queries.tools.create_tools import create_tools
-from agents_api.queries.tools.delete_tool import delete_tool
+from agents_api.queries.users.create_user import create_user
from agents_api.queries.users.create_user import create_user
from agents_api.web import app
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index 6914b1112..125033276 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -8,10 +8,10 @@
from agents_api.queries.docs.list_docs import list_docs
from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
-
-# from agents_api.queries.docs.search_docs_hybrid import search_docs_hybrid
+from agents_api.queries.docs.search_docs_hybrid import search_docs_hybrid
from tests.fixtures import pg_dsn, test_agent, test_developer, test_doc, test_user
+EMBEDDING_SIZE: int = 1024
@test("query: create user doc")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
@@ -275,3 +275,35 @@ async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
assert len(result) >= 1
assert result[0].metadata is not None
+
+@test("query: search docs by hybrid")
+async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
+ pool = await create_db_pool(dsn=dsn)
+
+ # Create a test document
+ await create_doc(
+ developer_id=developer.id,
+ owner_type="agent",
+ owner_id=agent.id,
+ data=CreateDocRequest(
+ title="Hello",
+ content="The world is a funny little thing",
+ metadata={"test": "test"},
+ embed_instruction="Embed the document",
+ ),
+ connection_pool=pool,
+ )
+
+ # Search using the correct parameter types
+ result = await search_docs_hybrid(
+ developer_id=developer.id,
+ owners=[("agent", agent.id)],
+ text_query="funny thing",
+ embedding=[1.0] * 1024,
+ k=3, # Add k parameter
+ metadata_filter={"test": "test"}, # Add metadata filter
+ connection_pool=pool,
+ )
+
+ assert len(result) >= 1
+ assert result[0].metadata is not None
\ No newline at end of file
diff --git a/memory-store/migrations/000018_doc_search.up.sql b/memory-store/migrations/000018_doc_search.up.sql
index db25e79d2..8fde5e9bb 100644
--- a/memory-store/migrations/000018_doc_search.up.sql
+++ b/memory-store/migrations/000018_doc_search.up.sql
@@ -406,7 +406,7 @@ BEGIN
),
scores AS (
SELECT
- r.developer_id,
+ -- r.developer_id,
r.doc_id,
r.title,
r.content,
@@ -418,8 +418,8 @@ BEGIN
COALESCE(t.distance, 0.0) as text_score,
COALESCE(e.distance, 0.0) as embedding_score
FROM all_results r
- LEFT JOIN text_results t ON r.doc_id = t.doc_id AND r.developer_id = t.developer_id
- LEFT JOIN embedding_results e ON r.doc_id = e.doc_id AND r.developer_id = e.developer_id
+ LEFT JOIN text_results t ON r.doc_id = t.doc_id
+ LEFT JOIN embedding_results e ON r.doc_id = e.doc_id
),
normalized_scores AS (
SELECT
From 5f4aebc19c6958a4901b506e4f9390abb861f1f4 Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Tue, 24 Dec 2024 01:00:21 +0000
Subject: [PATCH 171/274] refactor: Lint agents-api (CI)
---
.../agents_api/queries/docs/__init__.py | 2 +-
.../queries/docs/search_docs_by_embedding.py | 5 ++-
.../queries/docs/search_docs_hybrid.py | 8 ++--
.../agents_api/queries/tools/create_tools.py | 18 ++++-----
.../agents_api/queries/tools/delete_tool.py | 18 +++------
.../agents_api/queries/tools/get_tool.py | 15 +++----
.../tools/get_tool_args_from_metadata.py | 7 ++--
.../agents_api/queries/tools/list_tools.py | 25 +++++-------
.../agents_api/queries/tools/patch_tool.py | 27 +++++--------
.../agents_api/queries/tools/update_tool.py | 40 ++++++++-----------
agents-api/tests/fixtures.py | 1 -
agents-api/tests/test_docs_queries.py | 4 +-
12 files changed, 70 insertions(+), 100 deletions(-)
diff --git a/agents-api/agents_api/queries/docs/__init__.py b/agents-api/agents_api/queries/docs/__init__.py
index 31b44e7b4..3862131bb 100644
--- a/agents-api/agents_api/queries/docs/__init__.py
+++ b/agents-api/agents_api/queries/docs/__init__.py
@@ -23,10 +23,10 @@
from .delete_doc import delete_doc
from .get_doc import get_doc
from .list_docs import list_docs
-
from .search_docs_by_embedding import search_docs_by_embedding
from .search_docs_by_text import search_docs_by_text
from .search_docs_hybrid import search_docs_hybrid
+
__all__ = [
"create_doc",
"delete_doc",
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
index 9c8b15955..d573b4d8f 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
@@ -1,12 +1,12 @@
from typing import Any, List, Literal
from uuid import UUID
+import asyncpg
from beartype import beartype
from fastapi import HTTPException
-import asyncpg
from ...autogen.openapi_model import DocReference
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class, partialclass
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Raw query for vector search
search_docs_by_embedding_query = """
@@ -21,6 +21,7 @@
)
"""
+
@rewrap_exceptions(
{
asyncpg.UniqueViolationError: partialclass(
diff --git a/agents-api/agents_api/queries/docs/search_docs_hybrid.py b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
index 8e14f36dd..aa27ed648 100644
--- a/agents-api/agents_api/queries/docs/search_docs_hybrid.py
+++ b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
@@ -1,12 +1,11 @@
-from typing import List, Any, Literal
+from typing import Any, List, Literal
from uuid import UUID
-from beartype import beartype
-
-from ...autogen.openapi_model import DocReference
import asyncpg
+from beartype import beartype
from fastapi import HTTPException
+from ...autogen.openapi_model import DocReference
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Raw query for hybrid search
@@ -46,7 +45,6 @@
**d,
},
)
-
@pg_query
@beartype
async def search_docs_hybrid(
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index b91964a39..70277ab99 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -3,20 +3,19 @@
from typing import Any
from uuid import UUID
+import asyncpg
from beartype import beartype
-from uuid_extensions import uuid7
from fastapi import HTTPException
-import asyncpg
-from sqlglot import parse_one
+from sqlglot import parse_one
+from uuid_extensions import uuid7
from ...autogen.openapi_model import CreateToolRequest, Tool
from ...metrics.counters import increase_counter
-
from ..utils import (
+ partialclass,
pg_query,
rewrap_exceptions,
wrap_in_class,
- partialclass,
)
# Define the raw SQL query for creating tools
@@ -50,15 +49,15 @@
{
asyncpg.UniqueViolationError: partialclass(
HTTPException,
- status_code=409,
- detail="A tool with this name already exists for this agent"
- ),
+ status_code=409,
+ detail="A tool with this name already exists for this agent",
+ ),
asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
detail="Agent not found",
),
-}
+ }
)
@wrap_in_class(
Tool,
@@ -113,4 +112,3 @@ async def create_tools(
tools_data,
"fetchmany",
)
-
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index 9a507523d..32fca1571 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -1,20 +1,14 @@
from typing import Any
from uuid import UUID
-from fastapi import HTTPException
+import asyncpg
from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
from ...autogen.openapi_model import ResourceDeletedResponse
from ...common.utils.datetime import utcnow
-from sqlglot import parse_one
-import asyncpg
-
-from ..utils import (
- pg_query,
- wrap_in_class,
- rewrap_exceptions,
- partialclass
-)
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for deleting a tool
tools_query = parse_one("""
@@ -29,14 +23,14 @@
@rewrap_exceptions(
-{
+ {
# Handle foreign key constraint
asyncpg.ForeignKeyViolationError: partialclass(
HTTPException,
status_code=404,
detail="Developer or agent not found",
),
-}
+ }
)
@wrap_in_class(
ResourceDeletedResponse,
diff --git a/agents-api/agents_api/queries/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
index 9f71dec40..6f25d3893 100644
--- a/agents-api/agents_api/queries/tools/get_tool.py
+++ b/agents-api/agents_api/queries/tools/get_tool.py
@@ -1,19 +1,13 @@
from typing import Any
from uuid import UUID
+import asyncpg
from beartype import beartype
-
-from ...autogen.openapi_model import Tool
-from sqlglot import parse_one
from fastapi import HTTPException
-import asyncpg
-from ..utils import (
- pg_query,
- wrap_in_class,
- rewrap_exceptions,
- partialclass
-)
+from sqlglot import parse_one
+from ...autogen.openapi_model import Tool
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for getting a tool
tools_query = parse_one("""
@@ -25,6 +19,7 @@
LIMIT 1
""").sql(pretty=True)
+
@rewrap_exceptions(
{
asyncpg.ForeignKeyViolationError: partialclass(
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index 937442797..0171f5093 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -3,13 +3,13 @@
import sqlvalidator
from beartype import beartype
-
from sqlglot import parse_one
+
from ..utils import (
+ partialclass,
pg_query,
- wrap_in_class,
rewrap_exceptions,
- partialclass,
+ wrap_in_class,
)
# Define the raw SQL query for getting tool args from metadata
@@ -54,7 +54,6 @@
) AS sessions_md""").sql(pretty=True)
-
# @rewrap_exceptions(
# {
# QueryException: partialclass(HTTPException, status_code=400),
diff --git a/agents-api/agents_api/queries/tools/list_tools.py b/agents-api/agents_api/queries/tools/list_tools.py
index d85bb9da0..fbd14f8b1 100644
--- a/agents-api/agents_api/queries/tools/list_tools.py
+++ b/agents-api/agents_api/queries/tools/list_tools.py
@@ -1,18 +1,13 @@
from typing import Literal
from uuid import UUID
-from beartype import beartype
import asyncpg
+from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
from ...autogen.openapi_model import Tool
-from sqlglot import parse_one
-from ..utils import (
- pg_query,
- wrap_in_class,
- rewrap_exceptions,
- partialclass
-)
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for listing tools
tools_query = parse_one("""
@@ -30,13 +25,13 @@
@rewrap_exceptions(
-{
- asyncpg.ForeignKeyViolationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Developer or agent not found",
- ),
-}
+ {
+ asyncpg.ForeignKeyViolationError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Developer or agent not found",
+ ),
+ }
)
@wrap_in_class(
Tool,
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index fb4c680e1..b65eca481 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -1,19 +1,14 @@
from typing import Any
from uuid import UUID
+import asyncpg
from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
from ...autogen.openapi_model import PatchToolRequest, ResourceUpdatedResponse
-from sqlglot import parse_one
-import asyncpg
-from fastapi import HTTPException
from ...metrics.counters import increase_counter
-from ..utils import (
- pg_query,
- wrap_in_class,
- rewrap_exceptions,
- partialclass
-)
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for patching a tool
tools_query = parse_one("""
@@ -35,13 +30,13 @@
@rewrap_exceptions(
-{
- asyncpg.UniqueViolationError: partialclass(
- HTTPException,
- status_code=409,
- detail="Developer or agent not found",
- ),
-}
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="Developer or agent not found",
+ ),
+ }
)
@wrap_in_class(
ResourceUpdatedResponse,
diff --git a/agents-api/agents_api/queries/tools/update_tool.py b/agents-api/agents_api/queries/tools/update_tool.py
index 18ff44f18..45c5a022d 100644
--- a/agents-api/agents_api/queries/tools/update_tool.py
+++ b/agents-api/agents_api/queries/tools/update_tool.py
@@ -1,24 +1,18 @@
+import json
from typing import Any, TypeVar
from uuid import UUID
+import asyncpg
from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
from ...autogen.openapi_model import (
ResourceUpdatedResponse,
UpdateToolRequest,
)
-import asyncpg
-import json
-from fastapi import HTTPException
-
-from sqlglot import parse_one
from ...metrics.counters import increase_counter
-from ..utils import (
- pg_query,
- wrap_in_class,
- rewrap_exceptions,
- partialclass
-)
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
# Define the raw SQL query for updating a tool
tools_query = parse_one("""
@@ -37,18 +31,18 @@
@rewrap_exceptions(
-{
- asyncpg.UniqueViolationError: partialclass(
- HTTPException,
- status_code=409,
- detail="A tool with this name already exists for this agent",
- ),
- json.JSONDecodeError: partialclass(
- HTTPException,
- status_code=400,
- detail="Invalid tool specification format",
- ),
-}
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="A tool with this name already exists for this agent",
+ ),
+ json.JSONDecodeError: partialclass(
+ HTTPException,
+ status_code=400,
+ detail="Invalid tool specification format",
+ ),
+ }
)
@wrap_in_class(
ResourceUpdatedResponse,
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 1760209a8..2c43ba9d6 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -30,7 +30,6 @@
from agents_api.queries.tasks.create_task import create_task
from agents_api.queries.tools.create_tools import create_tools
from agents_api.queries.users.create_user import create_user
-from agents_api.queries.users.create_user import create_user
from agents_api.web import app
from .utils import (
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index 125033276..f0070adfe 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -13,6 +13,7 @@
EMBEDDING_SIZE: int = 1024
+
@test("query: create user doc")
async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
pool = await create_db_pool(dsn=dsn)
@@ -276,6 +277,7 @@ async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
assert len(result) >= 1
assert result[0].metadata is not None
+
@test("query: search docs by hybrid")
async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
pool = await create_db_pool(dsn=dsn)
@@ -306,4 +308,4 @@ async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
)
assert len(result) >= 1
- assert result[0].metadata is not None
\ No newline at end of file
+ assert result[0].metadata is not None
From d16a693d0327acd35d424aeb86e257d8d4a14f9f Mon Sep 17 00:00:00 2001
From: vedantsahai18
Date: Tue, 24 Dec 2024 00:02:59 -0500
Subject: [PATCH 172/274] chore: skip search tests + search queries optimized
---
.../queries/docs/search_docs_by_embedding.py | 15 +++----
.../queries/docs/search_docs_by_text.py | 15 +++----
.../queries/docs/search_docs_hybrid.py | 20 +++++-----
agents-api/tests/fixtures.py | 1 +
agents-api/tests/test_docs_queries.py | 40 +++++++++++++------
5 files changed, 49 insertions(+), 42 deletions(-)
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
index d573b4d8f..fd750bc0f 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_embedding.py
@@ -14,10 +14,10 @@
$1, -- developer_id
$2::vector(1024), -- query_embedding
$3::text[], -- owner_types
- $UUID_LIST::uuid[], -- owner_ids
- $4, -- k
- $5, -- confidence
- $6 -- metadata_filter
+ $4::uuid[], -- owner_ids
+ $5, -- k
+ $6, -- confidence
+ $7 -- metadata_filter
)
"""
@@ -80,16 +80,13 @@ async def search_docs_by_embedding(
owner_types: list[str] = [owner[0] for owner in owners]
owner_ids: list[str] = [str(owner[1]) for owner in owners]
-    # NOTE: Manually replace the UUID list because asyncpg isn't sending it correctly
- owner_ids_pg_str = f"ARRAY['{'\', \''.join(owner_ids)}']"
- query = search_docs_by_embedding_query.replace("$UUID_LIST", owner_ids_pg_str)
-
return (
- query,
+ search_docs_by_embedding_query,
[
developer_id,
query_embedding_str,
owner_types,
+ owner_ids,
k,
confidence,
metadata_filter,
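
This patch drops the manual `ARRAY[...]` string splice in favor of binding the list and casting with `$4::uuid[]`; asyncpg encodes a Python list of UUIDs as a Postgres array natively. A standalone sketch (DSN illustrative):

    import asyncio
    from uuid import uuid4

    import asyncpg

    async def main() -> None:
        conn = await asyncpg.connect("postgresql://localhost/postgres")
        ids = [uuid4(), uuid4()]
        # The list is sent as a real uuid[] parameter; no string interpolation.
        rows = await conn.fetch("SELECT unnest($1::uuid[]) AS id", ids)
        assert [r["id"] for r in rows] == ids
        await conn.close()

    asyncio.run(main())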
diff --git a/agents-api/agents_api/queries/docs/search_docs_by_text.py b/agents-api/agents_api/queries/docs/search_docs_by_text.py
index d2a96e3af..787a83651 100644
--- a/agents-api/agents_api/queries/docs/search_docs_by_text.py
+++ b/agents-api/agents_api/queries/docs/search_docs_by_text.py
@@ -14,10 +14,10 @@
$1, -- developer_id
$2, -- query
$3, -- owner_types
- $UUID_LIST::uuid[], -- owner_ids
- $4, -- search_language
- $5, -- k
- $6 -- metadata_filter
+ $4, -- owner_ids
+ $5, -- search_language
+ $6, -- k
+ $7 -- metadata_filter
)
"""
@@ -75,16 +75,13 @@ async def search_docs_by_text(
owner_types: list[str] = [owner[0] for owner in owners]
owner_ids: list[str] = [str(owner[1]) for owner in owners]
-    # NOTE: Manually replace the UUID list because asyncpg isn't sending it correctly
- owner_ids_pg_str = f"ARRAY['{'\', \''.join(owner_ids)}']"
- query = search_docs_text_query.replace("$UUID_LIST", owner_ids_pg_str)
-
return (
- query,
+ search_docs_text_query,
[
developer_id,
query,
owner_types,
+ owner_ids,
search_language,
k,
metadata_filter,
diff --git a/agents-api/agents_api/queries/docs/search_docs_hybrid.py b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
index aa27ed648..e9f62064a 100644
--- a/agents-api/agents_api/queries/docs/search_docs_hybrid.py
+++ b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
@@ -4,6 +4,7 @@
import asyncpg
from beartype import beartype
from fastapi import HTTPException
+from sqlglot import parse_one
from ...autogen.openapi_model import DocReference
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
@@ -15,12 +16,12 @@
$2, -- text_query
$3::vector(1024), -- embedding
$4::text[], -- owner_types
- $UUID_LIST::uuid[], -- owner_ids
- $5, -- k
- $6, -- alpha
- $7, -- confidence
- $8, -- metadata_filter
- $9 -- search_language
+ $5::uuid[], -- owner_ids
+ $6, -- k
+ $7, -- alpha
+ $8, -- confidence
+ $9, -- metadata_filter
+ $10 -- search_language
)
"""
@@ -91,17 +92,14 @@ async def search_docs_hybrid(
owner_types: list[str] = [owner[0] for owner in owners]
owner_ids: list[str] = [str(owner[1]) for owner in owners]
-    # NOTE: Manually replace the UUID list because asyncpg isn't sending it correctly
- owner_ids_pg_str = f"ARRAY['{'\', \''.join(owner_ids)}']"
- query = search_docs_hybrid_query.replace("$UUID_LIST", owner_ids_pg_str)
-
return (
- query,
+ search_docs_hybrid_query,
[
developer_id,
text_query,
embedding_str,
owner_types,
+ owner_ids,
k,
alpha,
confidence,
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 2c43ba9d6..86ee8b815 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -21,6 +21,7 @@
from agents_api.queries.developers.create_developer import create_developer
from agents_api.queries.developers.get_developer import get_developer
from agents_api.queries.docs.create_doc import create_doc
+from agents_api.queries.tools.delete_tool import delete_tool
# from agents_api.queries.executions.create_execution import create_execution
# from agents_api.queries.executions.create_execution_transition import create_execution_transition
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index f0070adfe..4e2006310 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -1,4 +1,5 @@
-from ward import test
+from ward import skip, test
+import asyncio
from agents_api.autogen.openapi_model import CreateDocRequest
from agents_api.clients.pg import create_db_pool
@@ -9,7 +10,13 @@
from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
from agents_api.queries.docs.search_docs_hybrid import search_docs_hybrid
-from tests.fixtures import pg_dsn, test_agent, test_developer, test_doc, test_user
+from tests.fixtures import (
+ pg_dsn,
+ test_agent,
+ test_developer,
+ test_doc,
+ test_user
+)
EMBEDDING_SIZE: int = 1024
@@ -212,13 +219,13 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
)
assert not any(d.id == doc_agent.id for d in docs_list)
-
+@skip("text search: test container not vectorizing")
@test("query: search docs by text")
async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
pool = await create_db_pool(dsn=dsn)
# Create a test document
- await create_doc(
+ doc = await create_doc(
developer_id=developer.id,
owner_type="agent",
owner_id=agent.id,
@@ -231,21 +238,28 @@ async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
connection_pool=pool,
)
- # Search using the correct parameter types
+ # Add a longer delay to ensure the search index is updated
+ await asyncio.sleep(3)
+
+ # Search using simpler terms first
result = await search_docs_by_text(
developer_id=developer.id,
owners=[("agent", agent.id)],
- query="funny thing",
- k=3, # Add k parameter
- search_language="english", # Add language parameter
- metadata_filter={"test": "test"}, # Add metadata filter
+ query="world",
+ k=3,
+ search_language="english",
+ metadata_filter={"test": "test"},
connection_pool=pool,
)
- assert len(result) >= 1
- assert result[0].metadata is not None
-
+ print("\nSearch results:", result)
+
+ # More specific assertions
+ assert len(result) >= 1, "Should find at least one document"
+ assert any(d.id == doc.id for d in result), f"Should find document {doc.id}"
+ assert result[0].metadata == {"test": "test"}, "Metadata should match"
+@skip("embedding search: test container not vectorizing")
@test("query: search docs by embedding")
async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
pool = await create_db_pool(dsn=dsn)
@@ -277,7 +291,7 @@ async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
assert len(result) >= 1
assert result[0].metadata is not None
-
+@skip("hybrid search: test container not vectorizing")
@test("query: search docs by hybrid")
async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
pool = await create_db_pool(dsn=dsn)
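
The fixed `await asyncio.sleep(3)` added above waits for the search index to catch up; a bounded polling loop is a common, less flaky alternative, sketched here under the assumption that the search call is cheap to retry:

    import asyncio

    async def wait_for_results(search, attempts: int = 10, delay: float = 0.5):
        # Retry until the search returns rows or the budget runs out,
        # instead of sleeping a fixed three seconds.
        for _ in range(attempts):
            result = await search()
            if result:
                return result
            await asyncio.sleep(delay)
        return []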
From 23235f4aaf688319a8caa91b73f9ded7bd85c4ab Mon Sep 17 00:00:00 2001
From: Vedantsahai18
Date: Tue, 24 Dec 2024 05:03:51 +0000
Subject: [PATCH 173/274] refactor: Lint agents-api (CI)
---
agents-api/tests/fixtures.py | 2 +-
agents-api/tests/test_docs_queries.py | 16 +++++++---------
2 files changed, 8 insertions(+), 10 deletions(-)
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 86ee8b815..417cab825 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -21,7 +21,6 @@
from agents_api.queries.developers.create_developer import create_developer
from agents_api.queries.developers.get_developer import get_developer
from agents_api.queries.docs.create_doc import create_doc
-from agents_api.queries.tools.delete_tool import delete_tool
# from agents_api.queries.executions.create_execution import create_execution
# from agents_api.queries.executions.create_execution_transition import create_execution_transition
@@ -30,6 +29,7 @@
from agents_api.queries.sessions.create_session import create_session
from agents_api.queries.tasks.create_task import create_task
from agents_api.queries.tools.create_tools import create_tools
+from agents_api.queries.tools.delete_tool import delete_tool
from agents_api.queries.users.create_user import create_user
from agents_api.web import app
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index 4e2006310..7eacaf1dc 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -1,6 +1,7 @@
-from ward import skip, test
import asyncio
+from ward import skip, test
+
from agents_api.autogen.openapi_model import CreateDocRequest
from agents_api.clients.pg import create_db_pool
from agents_api.queries.docs.create_doc import create_doc
@@ -10,13 +11,7 @@
from agents_api.queries.docs.search_docs_by_embedding import search_docs_by_embedding
from agents_api.queries.docs.search_docs_by_text import search_docs_by_text
from agents_api.queries.docs.search_docs_hybrid import search_docs_hybrid
-from tests.fixtures import (
- pg_dsn,
- test_agent,
- test_developer,
- test_doc,
- test_user
-)
+from tests.fixtures import pg_dsn, test_agent, test_developer, test_doc, test_user
EMBEDDING_SIZE: int = 1024
@@ -219,6 +214,7 @@ async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
)
assert not any(d.id == doc_agent.id for d in docs_list)
+
@skip("text search: test container not vectorizing")
@test("query: search docs by text")
async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
@@ -253,12 +249,13 @@ async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
)
print("\nSearch results:", result)
-
+
# More specific assertions
assert len(result) >= 1, "Should find at least one document"
assert any(d.id == doc.id for d in result), f"Should find document {doc.id}"
assert result[0].metadata == {"test": "test"}, "Metadata should match"
+
@skip("embedding search: test container not vectorizing")
@test("query: search docs by embedding")
async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
@@ -291,6 +288,7 @@ async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
assert len(result) >= 1
assert result[0].metadata is not None
+
@skip("hybrid search: test container not vectorizing")
@test("query: search docs by hybrid")
async def _(dsn=pg_dsn, agent=test_agent, developer=test_developer):
From 11734e44022c604b6d943ed57b04208e2fdbd5aa Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Tue, 24 Dec 2024 12:46:50 +0300
Subject: [PATCH 174/274] fix(agents-api): fix merge conflicts errors
---
.../tools/get_tool_args_from_metadata.py | 95 ++++++++++++++++++
.../agents_api/queries/tools/patch_tool.py | 99 +++++++++++++++++++
drafts/cozo | 1 -
3 files changed, 194 insertions(+), 1 deletion(-)
delete mode 160000 drafts/cozo
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index e69de29bb..368607688 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -0,0 +1,95 @@
+from typing import Literal
+from uuid import UUID
+
+import sqlvalidator
+from beartype import beartype
+from sqlglot import parse_one
+
+from ..utils import (
+ partialclass,
+ pg_query,
+ rewrap_exceptions,
+ wrap_in_class,
+)
+
+# Define the raw SQL query for getting tool args from metadata
+tools_args_for_task_query = parse_one("""
+SELECT COALESCE(agents_md || tasks_md, agents_md, tasks_md, '{}') as values FROM (
+ SELECT
+ CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
+ WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
+ WHEN $3 = 'x-integrations-setup' then metadata->'x-integrations-setup'
+ WHEN $3 = 'x-api_call-setup' then metadata->'x-api_call-setup' END AS agents_md
+ FROM agents
+ WHERE agent_id = $1 AND developer_id = $4 LIMIT 1
+) AS agents_md,
+(
+ SELECT
+ CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
+ WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
+ WHEN $3 = 'x-integrations-setup' then metadata->'x-integrations-setup'
+ WHEN $3 = 'x-api_call-setup' then metadata->'x-api_call-setup' END AS tasks_md
+ FROM tasks
+ WHERE task_id = $2 AND developer_id = $4 LIMIT 1
+) AS tasks_md""").sql(pretty=True)
+
+# Define the raw SQL query for getting tool args from metadata for a session
+tool_args_for_session_query = parse_one("""SELECT COALESCE(agents_md || sessions_md, agents_md, sessions_md, '{}') as values FROM (
+ SELECT
+ CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
+ WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
+ WHEN $3 = 'x-integrations-setup' then metadata->'x-integrations-setup'
+ WHEN $3 = 'x-api_call-setup' then metadata->'x-api_call-setup' END AS agents_md
+ FROM agents
+ WHERE agent_id = $1 AND developer_id = $4 LIMIT 1
+) AS agents_md,
+(
+ SELECT
+ CASE WHEN $3 = 'x-integrations-args' then metadata->'x-integrations-args'
+ WHEN $3 = 'x-api_call-args' then metadata->'x-api_call-args'
+ WHEN $3 = 'x-integrations-setup' then metadata->'x-integrations-setup'
+ WHEN $3 = 'x-api_call-setup' then metadata->'x-api_call-setup' END AS tasks_md
+ FROM sessions
+ WHERE session_id = $2 AND developer_id = $4 LIMIT 1
+) AS sessions_md""").sql(pretty=True)
+
+
+# @rewrap_exceptions(
+# {
+# QueryException: partialclass(HTTPException, status_code=400),
+# ValidationError: partialclass(HTTPException, status_code=400),
+# TypeError: partialclass(HTTPException, status_code=400),
+# }
+# )
+@wrap_in_class(dict, transform=lambda x: x["values"], one=True)
+@pg_query
+@beartype
+async def get_tool_args_from_metadata(
+ *,
+ developer_id: UUID,
+ agent_id: UUID,
+ session_id: UUID | None = None,
+ task_id: UUID | None = None,
+ tool_type: Literal["integration", "api_call"] = "integration",
+ arg_type: Literal["args", "setup", "headers"] = "args",
+) -> tuple[str, list]:
+ match session_id, task_id:
+ case (None, task_id) if task_id is not None:
+ return (
+ tools_args_for_task_query,
+ [
+ agent_id,
+ task_id,
+ f"x-{tool_type}-{arg_type}",
+ developer_id,
+ ],
+ )
+
+ case (session_id, None) if session_id is not None:
+ return (
+ tool_args_for_session_query,
+ [agent_id, session_id, f"x-{tool_type}-{arg_type}", developer_id],
+ )
+
+ case (_, _):
+ raise ValueError("Either session_id or task_id must be provided")
\ No newline at end of file
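
The match statement dispatches on whichever of `session_id`/`task_id` is provided and raises otherwise. A hedged usage sketch for the task branch; the `connection_pool` keyword is assumed to be consumed by the `pg_query` decorator:

    # Hedged sketch; returns the merged agents/tasks metadata dict.
    values = await get_tool_args_from_metadata(
        developer_id=developer_id,
        agent_id=agent_id,
        task_id=task_id,               # session_id omitted => task query
        tool_type="integration",
        arg_type="args",
        connection_pool=pool,          # assumed injected by pg_query
    )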
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index e69de29bb..a0ba07b89 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -0,0 +1,99 @@
+from typing import Any
+from uuid import UUID
+
+import asyncpg
+from beartype import beartype
+from fastapi import HTTPException
+from sqlglot import parse_one
+
+from ...autogen.openapi_model import PatchToolRequest, ResourceUpdatedResponse
+from ...metrics.counters import increase_counter
+from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
+
+# Define the raw SQL query for patching a tool
+tools_query = parse_one("""
+WITH updated_tools AS (
+ UPDATE tools
+ SET
+ type = COALESCE($4, type),
+ name = COALESCE($5, name),
+ description = COALESCE($6, description),
+ spec = COALESCE($7, spec)
+ WHERE
+ developer_id = $1 AND
+ agent_id = $2 AND
+ tool_id = $3
+ RETURNING *
+)
+SELECT * FROM updated_tools;
+""").sql(pretty=True)
+
+
+@rewrap_exceptions(
+ {
+ asyncpg.UniqueViolationError: partialclass(
+ HTTPException,
+ status_code=409,
+ detail="Developer or agent not found",
+ ),
+ }
+)
+@wrap_in_class(
+ ResourceUpdatedResponse,
+ one=True,
+ transform=lambda d: {"id": d["tool_id"], "jobs": [], **d},
+)
+@pg_query
+@increase_counter("patch_tool")
+@beartype
+async def patch_tool(
+ *, developer_id: UUID, agent_id: UUID, tool_id: UUID, data: PatchToolRequest
+) -> tuple[str, list]:
+ """
+    Execute the SQL query and return the updated tool data.
+    Updates the tool information for a given agent and tool ID in the PostgreSQL database.
+ Parameters:
+ agent_id (UUID): The unique identifier of the agent.
+ tool_id (UUID): The unique identifier of the tool to be updated.
+ data (PatchToolRequest): The request payload containing the updated tool information.
+ Returns:
+ ResourceUpdatedResponse: The updated tool data.
+ """
+
+ developer_id = str(developer_id)
+ agent_id = str(agent_id)
+ tool_id = str(tool_id)
+
+ # Extract the tool data from the payload
+ patch_data = data.model_dump(exclude_none=True)
+
+ # Assert that only one of the tool type fields is present
+ tool_specs = [
+ (tool_type, patch_data.get(tool_type))
+ for tool_type in ["function", "integration", "system", "api_call"]
+ if patch_data.get(tool_type) is not None
+ ]
+
+ assert len(tool_specs) <= 1, "Invalid tool update"
+ tool_type, tool_spec = tool_specs[0] if tool_specs else (None, None)
+
+ if tool_type is not None:
+ patch_data["type"] = patch_data.get("type", tool_type)
+ assert patch_data["type"] == tool_type, "Invalid tool update"
+
+ tool_spec = tool_spec or {}
+ if tool_spec:
+ del patch_data[tool_type]
+
+ return (
+ tools_query,
+ [
+ developer_id,
+ agent_id,
+ tool_id,
+ tool_type,
+ data.name,
+ data.description,
+ tool_spec,
+ ],
+ )
\ No newline at end of file
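
Every SET column is wrapped in `COALESCE($n, column)`, so binding NULL for an omitted field leaves the stored value untouched; only the fields present on the PatchToolRequest change. A hedged illustration:

    # Hedged sketch: only `description` is updated; name, type and spec keep
    # their stored values because COALESCE(NULL, column) yields the column.
    updated = await patch_tool(
        developer_id=developer_id,
        agent_id=agent_id,
        tool_id=tool_id,
        data=PatchToolRequest(description="Updated description"),
        connection_pool=pool,          # assumed injected by pg_query
    )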
diff --git a/drafts/cozo b/drafts/cozo
deleted file mode 160000
index faf89ef77..000000000
--- a/drafts/cozo
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit faf89ef77e6462460f873e9de618001d968a1a40
From e1be81b6c06ad3a057cd72cc4deebe79ac4c4701 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos
Date: Tue, 24 Dec 2024 09:47:40 +0000
Subject: [PATCH 175/274] refactor: Lint agents-api (CI)
---
.../agents_api/queries/tools/get_tool_args_from_metadata.py | 2 +-
agents-api/agents_api/queries/tools/patch_tool.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index 368607688..0171f5093 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -92,4 +92,4 @@ async def get_tool_args_from_metadata(
)
case (_, _):
- raise ValueError("Either session_id or task_id must be provided")
\ No newline at end of file
+ raise ValueError("Either session_id or task_id must be provided")
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index a0ba07b89..9474a0868 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -96,4 +96,4 @@ async def patch_tool(
data.description,
tool_spec,
],
- )
\ No newline at end of file
+ )
From eadc2916154a9137d57c7edac4646cd5a24a6cd4 Mon Sep 17 00:00:00 2001
From: Diwank Singh Tomer
Date: Tue, 24 Dec 2024 19:23:48 +0530
Subject: [PATCH 176/274] fix(agents-api): Random fixes; make sure
content-length is valid
Signed-off-by: Diwank Singh Tomer
---
agents-api/agents_api/app.py | 49 +++++++++++++++++--
.../agents_api/dependencies/content_length.py | 7 +++
agents-api/agents_api/env.py | 4 ++
.../queries/docs/search_docs_hybrid.py | 1 -
.../queries/executions/create_execution.py | 12 +----
.../agents_api/queries/tasks/list_tasks.py | 2 +-
.../agents_api/queries/tools/create_tools.py | 1 -
.../agents_api/queries/tools/delete_tool.py | 1 -
.../agents_api/queries/tools/get_tool.py | 1 -
.../tools/get_tool_args_from_metadata.py | 3 --
.../agents_api/queries/tools/patch_tool.py | 1 -
.../agents_api/queries/tools/update_tool.py | 1 -
.../agents_api/routers/files/create_file.py | 1 +
.../agents_api/routers/files/get_file.py | 1 +
.../agents_api/routers/files/list_files.py | 1 +
.../routers/tasks/create_task_execution.py | 13 ++---
.../routers/tasks/get_task_details.py | 2 +-
agents-api/agents_api/web.py | 21 +-------
agents-api/tests/fixtures.py | 3 ++
agents-api/tests/test_docs_routes.py | 5 +-
agents-api/tests/test_task_queries.py | 2 +
agents-api/tests/test_task_routes.py | 1 -
memory-store/migrations/000015_entries.up.sql | 16 ++++--
23 files changed, 89 insertions(+), 60 deletions(-)
create mode 100644 agents-api/agents_api/dependencies/content_length.py
diff --git a/agents-api/agents_api/app.py b/agents-api/agents_api/app.py
index e7903f175..baf3e7602 100644
--- a/agents-api/agents_api/app.py
+++ b/agents-api/agents_api/app.py
@@ -1,11 +1,15 @@
import os
from contextlib import asynccontextmanager
+from typing import Any, Callable, Coroutine
-from fastapi import FastAPI
+from fastapi import APIRouter, FastAPI, Request, Response
+from fastapi.params import Depends
from prometheus_fastapi_instrumentator import Instrumentator
+from scalar_fastapi import get_scalar_api_reference
from .clients.pg import create_db_pool
-from .env import api_prefix
+from .dependencies.content_length import valid_content_length
+from .env import api_prefix, hostname, max_payload_size, protocol, public_port
@asynccontextmanager
@@ -33,11 +37,50 @@ async def lifespan(app: FastAPI):
contact={
"name": "Julep",
"url": "https://www.julep.ai",
- "email": "team@julep.ai",
+ "email": "developers@julep.ai",
},
root_path=api_prefix,
lifespan=lifespan,
+ #
+ # Global dependencies
+ dependencies=[Depends(valid_content_length)],
)
# Enable metrics
Instrumentator().instrument(app).expose(app, include_in_schema=False)
+
+
+# Create a new router for the docs
+scalar_router = APIRouter()
+
+
+@scalar_router.get("/docs", include_in_schema=False)
+async def scalar_html():
+ return get_scalar_api_reference(
+ openapi_url=app.openapi_url[1:], # Remove leading '/'
+ title=app.title,
+ servers=[{"url": f"{protocol}://{hostname}:{public_port}{api_prefix}"}],
+ )
+
+
+# Add the docs_router without dependencies
+app.include_router(scalar_router)
+
+
+# content-length validation
+# NOTE: This relies on client reporting the correct content-length header
+# TODO: We should use streaming for large payloads
+@app.middleware("http")
+async def validate_content_length(
+ request: Request,
+ call_next: Callable[[Request], Coroutine[Any, Any, Response]],
+):
+ content_length = request.headers.get("content-length")
+
+ if not content_length:
+ return Response(status_code=411, content="Content-Length header is required")
+
+ if int(content_length) > max_payload_size:
+ return Response(status_code=413, content="Payload too large")
+
+ return await call_next(request)
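
The middleware answers 411 when Content-Length is absent and 413 when the declared size exceeds `max_payload_size`, before the body is read. A hedged probe using only the standard library; the host, port, and route are illustrative:

    import http.client

    conn = http.client.HTTPConnection("localhost", 8080)
    conn.putrequest("POST", "/agents")
    conn.putheader("Content-Length", str(51 * 1024 * 1024))  # over the 50MB default
    conn.endheaders()
    # The middleware short-circuits before reading any body bytes.
    print(conn.getresponse().status)   # expected: 413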
diff --git a/agents-api/agents_api/dependencies/content_length.py b/agents-api/agents_api/dependencies/content_length.py
new file mode 100644
index 000000000..3fe8b6781
--- /dev/null
+++ b/agents-api/agents_api/dependencies/content_length.py
@@ -0,0 +1,7 @@
+from fastapi import Header
+
+from ..env import max_payload_size
+
+
+async def valid_content_length(content_length: int = Header(..., lt=max_payload_size)):
+ return content_length
diff --git a/agents-api/agents_api/env.py b/agents-api/agents_api/env.py
index 7baa24653..54c8a2eee 100644
--- a/agents-api/agents_api/env.py
+++ b/agents-api/agents_api/env.py
@@ -25,6 +25,10 @@
hostname: str = env.str("AGENTS_API_HOSTNAME", default="localhost")
public_port: int = env.int("AGENTS_API_PUBLIC_PORT", default=80)
api_prefix: str = env.str("AGENTS_API_PREFIX", default="")
+max_payload_size: int = env.int(
+ "AGENTS_API_MAX_PAYLOAD_SIZE",
+ default=50 * 1024 * 1024, # 50MB
+)
# Tasks
# -----
diff --git a/agents-api/agents_api/queries/docs/search_docs_hybrid.py b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
index e9f62064a..23eb12318 100644
--- a/agents-api/agents_api/queries/docs/search_docs_hybrid.py
+++ b/agents-api/agents_api/queries/docs/search_docs_hybrid.py
@@ -4,7 +4,6 @@
import asyncpg
from beartype import beartype
from fastapi import HTTPException
-from sqlglot import parse_one
from ...autogen.openapi_model import DocReference
from ..utils import partialclass, pg_query, rewrap_exceptions, wrap_in_class
diff --git a/agents-api/agents_api/queries/executions/create_execution.py b/agents-api/agents_api/queries/executions/create_execution.py
index 27df9ee69..664a07808 100644
--- a/agents-api/agents_api/queries/executions/create_execution.py
+++ b/agents-api/agents_api/queries/executions/create_execution.py
@@ -1,20 +1,10 @@
from typing import Annotated, Any, TypeVar
from uuid import UUID
-from beartype import beartype
-from fastapi import HTTPException
-from pydantic import ValidationError
from uuid_extensions import uuid7
-from ...autogen.openapi_model import CreateExecutionRequest, Execution
+from ...autogen.openapi_model import CreateExecutionRequest
from ...common.utils.types import dict_like
-from ...metrics.counters import increase_counter
-from ..utils import (
- partialclass,
- rewrap_exceptions,
- wrap_in_class,
-)
-from .constants import OUTPUT_UNNEST_KEY
ModelT = TypeVar("ModelT", bound=Any)
T = TypeVar("T")
diff --git a/agents-api/agents_api/queries/tasks/list_tasks.py b/agents-api/agents_api/queries/tasks/list_tasks.py
index 0a6bd90b2..8a284fd2c 100644
--- a/agents-api/agents_api/queries/tasks/list_tasks.py
+++ b/agents-api/agents_api/queries/tasks/list_tasks.py
@@ -108,7 +108,7 @@ async def list_tasks(
# Format query with metadata filter if needed
query = list_tasks_query.format(
- metadata_filter_query="AND metadata @> $6::jsonb" if metadata_filter else ""
+ metadata_filter_query="AND metadata @> $7::jsonb" if metadata_filter else ""
)
# Build parameters list
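The bump from $6 to $7 matters because asyncpg binds query parameters by position: once agent_id joins the parameter list (see the test changes below), every later placeholder shifts by one. An illustrative, hypothetical query showing the correspondence:

import asyncio
from uuid import UUID

import asyncpg


async def main() -> None:
    pool = await asyncpg.create_pool("postgresql://localhost/agents")  # assumed DSN
    rows = await pool.fetch(
        """
        SELECT * FROM tasks
        WHERE developer_id = $1
          AND agent_id = $2          -- newly inserted parameter
          AND metadata @> $3::jsonb  -- every later placeholder shifts by one
        LIMIT $4 OFFSET $5
        """,
        UUID("00000000-0000-0000-0000-000000000000"),
        UUID("00000000-0000-0000-0000-000000000001"),
        '{"env": "prod"}',
        10,
        0,
    )
    print(len(rows))


asyncio.run(main())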
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index 70277ab99..f585c33c9 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -1,6 +1,5 @@
"""This module contains functions for creating tools in the CozoDB database."""
-from typing import Any
from uuid import UUID
import asyncpg
diff --git a/agents-api/agents_api/queries/tools/delete_tool.py b/agents-api/agents_api/queries/tools/delete_tool.py
index 32fca1571..307db4c9b 100644
--- a/agents-api/agents_api/queries/tools/delete_tool.py
+++ b/agents-api/agents_api/queries/tools/delete_tool.py
@@ -1,4 +1,3 @@
-from typing import Any
from uuid import UUID
import asyncpg
diff --git a/agents-api/agents_api/queries/tools/get_tool.py b/agents-api/agents_api/queries/tools/get_tool.py
index 6f25d3893..44ca2ea92 100644
--- a/agents-api/agents_api/queries/tools/get_tool.py
+++ b/agents-api/agents_api/queries/tools/get_tool.py
@@ -1,4 +1,3 @@
-from typing import Any
from uuid import UUID
import asyncpg
diff --git a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
index 0171f5093..ace75bac5 100644
--- a/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
+++ b/agents-api/agents_api/queries/tools/get_tool_args_from_metadata.py
@@ -1,14 +1,11 @@
from typing import Literal
from uuid import UUID
-import sqlvalidator
from beartype import beartype
from sqlglot import parse_one
from ..utils import (
- partialclass,
pg_query,
- rewrap_exceptions,
wrap_in_class,
)
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index 9474a0868..77c33faa8 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -1,4 +1,3 @@
-from typing import Any
from uuid import UUID
import asyncpg
diff --git a/agents-api/agents_api/queries/tools/update_tool.py b/agents-api/agents_api/queries/tools/update_tool.py
index 45c5a022d..9131ecb8e 100644
--- a/agents-api/agents_api/queries/tools/update_tool.py
+++ b/agents-api/agents_api/queries/tools/update_tool.py
@@ -1,5 +1,4 @@
import json
-from typing import Any, TypeVar
from uuid import UUID
import asyncpg
diff --git a/agents-api/agents_api/routers/files/create_file.py b/agents-api/agents_api/routers/files/create_file.py
index 7adc0b74e..7e43dd4ff 100644
--- a/agents-api/agents_api/routers/files/create_file.py
+++ b/agents-api/agents_api/routers/files/create_file.py
@@ -24,6 +24,7 @@ async def upload_file_content(file_id: UUID, content: str) -> None:
await async_s3.add_object(key, content_bytes)
+# TODO: Use streaming for large payloads
@router.post("/files", status_code=HTTP_201_CREATED, tags=["files"])
async def create_file(
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
diff --git a/agents-api/agents_api/routers/files/get_file.py b/agents-api/agents_api/routers/files/get_file.py
index 6473fc570..5c6b3d293 100644
--- a/agents-api/agents_api/routers/files/get_file.py
+++ b/agents-api/agents_api/routers/files/get_file.py
@@ -19,6 +19,7 @@ async def fetch_file_content(file_id: UUID) -> str:
return base64.b64encode(content).decode("utf-8")
+# TODO: Use streaming for large payloads
@router.get("/files/{file_id}", tags=["files"])
async def get_file(
file_id: UUID, x_developer_id: Annotated[UUID, Depends(get_developer_id)]
diff --git a/agents-api/agents_api/routers/files/list_files.py b/agents-api/agents_api/routers/files/list_files.py
index 67d436bd5..9108bce47 100644
--- a/agents-api/agents_api/routers/files/list_files.py
+++ b/agents-api/agents_api/routers/files/list_files.py
@@ -19,6 +19,7 @@ async def fetch_file_content(file_id: UUID) -> str:
return base64.b64encode(content).decode("utf-8")
+# TODO: Use streaming for large payloads
@router.get("/files", tags=["files"])
async def list_files(
x_developer_id: Annotated[UUID, Depends(get_developer_id)],
diff --git a/agents-api/agents_api/routers/tasks/create_task_execution.py b/agents-api/agents_api/routers/tasks/create_task_execution.py
index 96c01ea94..bee043ecc 100644
--- a/agents-api/agents_api/routers/tasks/create_task_execution.py
+++ b/agents-api/agents_api/routers/tasks/create_task_execution.py
@@ -111,13 +111,14 @@ async def create_task_execution(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid request arguments schema",
)
- except QueryException as e:
- if e.code == "transact::assertion_failure":
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND, detail="Task not found"
- )
- raise
+ # except QueryException as e:
+ # if e.code == "transact::assertion_failure":
+ # raise HTTPException(
+ # status_code=status.HTTP_404_NOT_FOUND, detail="Task not found"
+ # )
+
+ # raise
# get developer data
developer: Developer = await get_developer(developer_id=x_developer_id)
diff --git a/agents-api/agents_api/routers/tasks/get_task_details.py b/agents-api/agents_api/routers/tasks/get_task_details.py
index 8183ea1df..c6a70207e 100644
--- a/agents-api/agents_api/routers/tasks/get_task_details.py
+++ b/agents-api/agents_api/routers/tasks/get_task_details.py
@@ -1,7 +1,7 @@
from typing import Annotated
from uuid import UUID
-from fastapi import Depends, HTTPException, status
+from fastapi import Depends
from ...autogen.openapi_model import (
Task,
diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py
index 6a0d24036..195606a19 100644
--- a/agents-api/agents_api/web.py
+++ b/agents-api/agents_api/web.py
@@ -9,19 +9,18 @@
import sentry_sdk
import uvicorn
import uvloop
-from fastapi import APIRouter, Depends, FastAPI, Request, status
+from fastapi import Depends, FastAPI, Request, status
from fastapi.exceptions import HTTPException, RequestValidationError
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from litellm.exceptions import APIError
from pydantic import ValidationError
-from scalar_fastapi import get_scalar_api_reference
from temporalio.service import RPCError
from .app import app
from .common.exceptions import BaseCommonException
from .dependencies.auth import get_api_key
-from .env import api_prefix, hostname, protocol, public_port, sentry_dsn
+from .env import sentry_dsn
from .exceptions import PromptTooBigError
from .routers import (
agents,
@@ -144,22 +143,6 @@ def register_exceptions(app: FastAPI) -> None:
# See: https://fastapi.tiangolo.com/tutorial/bigger-applications/
#
-# Create a new router for the docs
-scalar_router = APIRouter()
-
-
-@scalar_router.get("/docs", include_in_schema=False)
-async def scalar_html():
- return get_scalar_api_reference(
- openapi_url=app.openapi_url[1:], # Remove leading '/'
- title=app.title,
- servers=[{"url": f"{protocol}://{hostname}:{public_port}{api_prefix}"}],
- )
-
-
-# Add the docs_router without dependencies
-app.include_router(scalar_router)
-
# Add other routers with the get_api_key dependency
app.include_router(agents.router, dependencies=[Depends(get_api_key)])
app.include_router(sessions.router, dependencies=[Depends(get_api_key)])
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index b527cc13d..aaf374417 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -1,6 +1,7 @@
import os
import random
import string
+import sys
from uuid import UUID
from fastapi.testclient import TestClient
@@ -399,6 +400,8 @@ def _make_request(method, url, **kwargs):
if multi_tenant_mode:
headers["X-Developer-Id"] = str(developer_id)
+ headers["Content-Length"] = str(sys.getsizeof(kwargs.get("json", {})))
+
return client.request(method, url, headers=headers, **kwargs)
return _make_request
diff --git a/agents-api/tests/test_docs_routes.py b/agents-api/tests/test_docs_routes.py
index 3fc85e8b0..5431e0d1b 100644
--- a/agents-api/tests/test_docs_routes.py
+++ b/agents-api/tests/test_docs_routes.py
@@ -1,16 +1,13 @@
-import time
-from ward import skip, test
+from ward import test
from tests.fixtures import (
make_request,
patch_embed_acompletion,
test_agent,
- test_doc,
test_user,
# test_user_doc,
)
-from tests.utils import patch_testing_temporal
# @test("route: create user doc")
# async def _(make_request=make_request, user=test_user):
diff --git a/agents-api/tests/test_task_queries.py b/agents-api/tests/test_task_queries.py
index 43394d244..0ff364256 100644
--- a/agents-api/tests/test_task_queries.py
+++ b/agents-api/tests/test_task_queries.py
@@ -159,6 +159,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
pool = await create_db_pool(dsn=dsn)
result = await list_tasks(
developer_id=developer_id,
+ agent_id=agent.id,
limit=10,
offset=0,
sort_by="updated_at",
@@ -179,6 +180,7 @@ async def _(dsn=pg_dsn, developer_id=test_developer_id, agent=test_agent):
pool = await create_db_pool(dsn=dsn)
result = await list_tasks(
developer_id=developer_id,
+ agent_id=agent.id,
connection_pool=pool,
)
assert result is not None
diff --git a/agents-api/tests/test_task_routes.py b/agents-api/tests/test_task_routes.py
index ae36ae353..eb3c58a98 100644
--- a/agents-api/tests/test_task_routes.py
+++ b/agents-api/tests/test_task_routes.py
@@ -10,7 +10,6 @@
# test_execution,
test_task,
)
-from tests.utils import patch_testing_temporal
@test("route: unauthorized should fail")
diff --git a/memory-store/migrations/000015_entries.up.sql b/memory-store/migrations/000015_entries.up.sql
index acb601559..10e7693a4 100644
--- a/memory-store/migrations/000015_entries.up.sql
+++ b/memory-store/migrations/000015_entries.up.sql
@@ -106,11 +106,17 @@ BEGIN
END;
$$ LANGUAGE plpgsql;
-CREATE TRIGGER trg_optimized_update_token_count_after
-AFTER INSERT
-OR
-UPDATE ON entries FOR EACH ROW
-EXECUTE FUNCTION optimized_update_token_count_after ();
+-- FIXME: This trigger makes the create_entries query slow
+--
+-- We should consider using a timescale background job to update the token count
+-- instead of a trigger.
+-- https://docs.timescale.com/use-timescale/latest/user-defined-actions/create-and-register/
+--
+-- CREATE TRIGGER trg_optimized_update_token_count_after
+-- AFTER INSERT
+-- OR
+-- UPDATE ON entries FOR EACH ROW
+-- EXECUTE FUNCTION optimized_update_token_count_after ();
-- Add trigger to update parent session's updated_at
CREATE
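On the FIXME above, a hedged sketch of the suggested replacement: a TimescaleDB user-defined action registered with add_job, following the linked docs. The procedure body is a hypothetical stand-in for the real token-count logic in optimized_update_token_count_after():

import asyncio

import asyncpg

SETUP_SQL = """
CREATE OR REPLACE PROCEDURE batch_update_token_counts(job_id int, config jsonb)
LANGUAGE plpgsql AS $$
BEGIN
    -- Hypothetical stand-in for the trigger's per-row work, done in bulk.
    UPDATE entries
    SET token_count = length(content::text) / 4
    WHERE token_count IS NULL;
END;
$$;

-- Run the batch job every five minutes instead of firing per row.
SELECT add_job('batch_update_token_counts', '5 minutes');
"""


async def main() -> None:
    conn = await asyncpg.connect("postgresql://localhost/memory_store")  # assumed DSN
    await conn.execute(SETUP_SQL)
    await conn.close()


asyncio.run(main())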
From 77903efe68f6fb54244c78026288beed9f7aa12d Mon Sep 17 00:00:00 2001
From: creatorrr
Date: Tue, 24 Dec 2024 13:54:58 +0000
Subject: [PATCH 177/274] refactor: Lint agents-api (CI)
---
agents-api/tests/test_docs_routes.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/agents-api/tests/test_docs_routes.py b/agents-api/tests/test_docs_routes.py
index 5431e0d1b..24a5b882c 100644
--- a/agents-api/tests/test_docs_routes.py
+++ b/agents-api/tests/test_docs_routes.py
@@ -1,4 +1,3 @@
-
from ward import test
from tests.fixtures import (
From 2f836d67fdfe301b8d168ebdd5f4b9d75f379a35 Mon Sep 17 00:00:00 2001
From: Ahmad Haidar
Date: Tue, 24 Dec 2024 20:13:26 +0300
Subject: [PATCH 178/274] chore(agents-api): remove cozo related stuff
---
.../agents_api/activities/embed_docs.py | 73 ---
.../activities/task_steps/__init__.py | 2 +-
.../activities/task_steps/cozo_query_step.py | 28 -
.../activities/task_steps/pg_query_step.py | 37 ++
agents-api/agents_api/activities/utils.py | 44 +-
agents-api/agents_api/clients/__init__.py | 2 +-
.../agents_api/common/utils/__init__.py | 2 +-
agents-api/agents_api/common/utils/cozo.py | 26 -
agents-api/agents_api/models/__init__.py | 20 -
.../agents_api/models/agent/__init__.py | 22 -
.../agents_api/models/agent/create_agent.py | 148 -----
.../models/agent/create_or_update_agent.py | 186 ------
.../agents_api/models/agent/delete_agent.py | 134 ----
.../agents_api/models/agent/get_agent.py | 117 ----
.../agents_api/models/agent/list_agents.py | 122 ----
.../agents_api/models/agent/patch_agent.py | 132 ----
.../agents_api/models/agent/update_agent.py | 149 -----
agents-api/agents_api/models/docs/__init__.py | 25 -
.../agents_api/models/docs/create_doc.py | 141 -----
.../agents_api/models/docs/delete_doc.py | 102 ----
.../agents_api/models/docs/embed_snippets.py | 102 ----
agents-api/agents_api/models/docs/get_doc.py | 103 ----
.../agents_api/models/docs/list_docs.py | 141 -----
agents-api/agents_api/models/docs/mmr.py | 109 ----
.../models/docs/search_docs_by_embedding.py | 369 -----------
.../models/docs/search_docs_by_text.py | 206 -------
.../models/docs/search_docs_hybrid.py | 138 -----
.../agents_api/models/entry/__init__.py | 19 -
.../agents_api/models/entry/create_entries.py | 128 ----
.../agents_api/models/entry/delete_entries.py | 153 -----
.../agents_api/models/entry/get_history.py | 150 -----
.../agents_api/models/entry/list_entries.py | 112 ----
.../agents_api/models/execution/__init__.py | 15 -
.../agents_api/models/execution/constants.py | 5 -
.../models/execution/count_executions.py | 61 --
.../models/execution/create_execution.py | 98 ---
.../execution/create_execution_transition.py | 259 --------
.../execution/create_temporal_lookup.py | 72 ---
.../models/execution/get_execution.py | 78 ---
.../execution/get_execution_transition.py | 80 ---
.../execution/get_paused_execution_token.py | 77 ---
.../execution/get_temporal_workflow_data.py | 57 --
.../execution/list_execution_transitions.py | 69 ---
.../models/execution/list_executions.py | 95 ---
.../models/execution/lookup_temporal_data.py | 66 --
.../execution/prepare_execution_input.py | 223 -------
.../models/execution/update_execution.py | 130 ----
.../agents_api/models/files/__init__.py | 3 -
.../agents_api/models/files/create_file.py | 122 ----
.../agents_api/models/files/delete_file.py | 97 ---
.../agents_api/models/files/get_file.py | 116 ----
.../agents_api/models/session/__init__.py | 22 -
.../models/session/count_sessions.py | 64 --
.../session/create_or_update_session.py | 158 -----
.../models/session/create_session.py | 154 -----
.../models/session/delete_session.py | 125 ----
.../agents_api/models/session/get_session.py | 116 ----
.../models/session/list_sessions.py | 131 ----
.../models/session/patch_session.py | 127 ----
.../models/session/prepare_session_data.py | 235 -------
.../models/session/update_session.py | 127 ----
agents-api/agents_api/models/task/__init__.py | 9 -
.../models/task/create_or_update_task.py | 109 ----
.../agents_api/models/task/create_task.py | 118 ----
.../agents_api/models/task/delete_task.py | 91 ---
agents-api/agents_api/models/task/get_task.py | 120 ----
.../agents_api/models/task/list_tasks.py | 130 ----
.../agents_api/models/task/patch_task.py | 133 ----
.../agents_api/models/task/update_task.py | 129 ----
agents-api/agents_api/models/user/__init__.py | 18 -
.../models/user/create_or_update_user.py | 125 ----
.../agents_api/models/user/create_user.py | 116 ----
.../agents_api/models/user/delete_user.py | 116 ----
agents-api/agents_api/models/user/get_user.py | 107 ----
.../agents_api/models/user/list_users.py | 116 ----
.../agents_api/models/user/patch_user.py | 121 ----
.../agents_api/models/user/update_user.py | 118 ----
agents-api/agents_api/models/utils.py | 578 ------------------
agents-api/agents_api/queries/__init__.py | 21 +
.../queries/developers/get_developer.py | 4 +-
.../agents_api/queries/tools/create_tools.py | 5 +-
.../agents_api/queries/tools/patch_tool.py | 3 +-
agents-api/agents_api/worker/worker.py | 4 -
agents-api/agents_api/workflows/embed_docs.py | 27 -
84 files changed, 90 insertions(+), 8452 deletions(-)
delete mode 100644 agents-api/agents_api/activities/embed_docs.py
delete mode 100644 agents-api/agents_api/activities/task_steps/cozo_query_step.py
create mode 100644 agents-api/agents_api/activities/task_steps/pg_query_step.py
delete mode 100644 agents-api/agents_api/common/utils/cozo.py
delete mode 100644 agents-api/agents_api/models/__init__.py
delete mode 100644 agents-api/agents_api/models/agent/__init__.py
delete mode 100644 agents-api/agents_api/models/agent/create_agent.py
delete mode 100644 agents-api/agents_api/models/agent/create_or_update_agent.py
delete mode 100644 agents-api/agents_api/models/agent/delete_agent.py
delete mode 100644 agents-api/agents_api/models/agent/get_agent.py
delete mode 100644 agents-api/agents_api/models/agent/list_agents.py
delete mode 100644 agents-api/agents_api/models/agent/patch_agent.py
delete mode 100644 agents-api/agents_api/models/agent/update_agent.py
delete mode 100644 agents-api/agents_api/models/docs/__init__.py
delete mode 100644 agents-api/agents_api/models/docs/create_doc.py
delete mode 100644 agents-api/agents_api/models/docs/delete_doc.py
delete mode 100644 agents-api/agents_api/models/docs/embed_snippets.py
delete mode 100644 agents-api/agents_api/models/docs/get_doc.py
delete mode 100644 agents-api/agents_api/models/docs/list_docs.py
delete mode 100644 agents-api/agents_api/models/docs/mmr.py
delete mode 100644 agents-api/agents_api/models/docs/search_docs_by_embedding.py
delete mode 100644 agents-api/agents_api/models/docs/search_docs_by_text.py
delete mode 100644 agents-api/agents_api/models/docs/search_docs_hybrid.py
delete mode 100644 agents-api/agents_api/models/entry/__init__.py
delete mode 100644 agents-api/agents_api/models/entry/create_entries.py
delete mode 100644 agents-api/agents_api/models/entry/delete_entries.py
delete mode 100644 agents-api/agents_api/models/entry/get_history.py
delete mode 100644 agents-api/agents_api/models/entry/list_entries.py
delete mode 100644 agents-api/agents_api/models/execution/__init__.py
delete mode 100644 agents-api/agents_api/models/execution/constants.py
delete mode 100644 agents-api/agents_api/models/execution/count_executions.py
delete mode 100644 agents-api/agents_api/models/execution/create_execution.py
delete mode 100644 agents-api/agents_api/models/execution/create_execution_transition.py
delete mode 100644 agents-api/agents_api/models/execution/create_temporal_lookup.py
delete mode 100644 agents-api/agents_api/models/execution/get_execution.py
delete mode 100644 agents-api/agents_api/models/execution/get_execution_transition.py
delete mode 100644 agents-api/agents_api/models/execution/get_paused_execution_token.py
delete mode 100644 agents-api/agents_api/models/execution/get_temporal_workflow_data.py
delete mode 100644 agents-api/agents_api/models/execution/list_execution_transitions.py
delete mode 100644 agents-api/agents_api/models/execution/list_executions.py
delete mode 100644 agents-api/agents_api/models/execution/lookup_temporal_data.py
delete mode 100644 agents-api/agents_api/models/execution/prepare_execution_input.py
delete mode 100644 agents-api/agents_api/models/execution/update_execution.py
delete mode 100644 agents-api/agents_api/models/files/__init__.py
delete mode 100644 agents-api/agents_api/models/files/create_file.py
delete mode 100644 agents-api/agents_api/models/files/delete_file.py
delete mode 100644 agents-api/agents_api/models/files/get_file.py
delete mode 100644 agents-api/agents_api/models/session/__init__.py
delete mode 100644 agents-api/agents_api/models/session/count_sessions.py
delete mode 100644 agents-api/agents_api/models/session/create_or_update_session.py
delete mode 100644 agents-api/agents_api/models/session/create_session.py
delete mode 100644 agents-api/agents_api/models/session/delete_session.py
delete mode 100644 agents-api/agents_api/models/session/get_session.py
delete mode 100644 agents-api/agents_api/models/session/list_sessions.py
delete mode 100644 agents-api/agents_api/models/session/patch_session.py
delete mode 100644 agents-api/agents_api/models/session/prepare_session_data.py
delete mode 100644 agents-api/agents_api/models/session/update_session.py
delete mode 100644 agents-api/agents_api/models/task/__init__.py
delete mode 100644 agents-api/agents_api/models/task/create_or_update_task.py
delete mode 100644 agents-api/agents_api/models/task/create_task.py
delete mode 100644 agents-api/agents_api/models/task/delete_task.py
delete mode 100644 agents-api/agents_api/models/task/get_task.py
delete mode 100644 agents-api/agents_api/models/task/list_tasks.py
delete mode 100644 agents-api/agents_api/models/task/patch_task.py
delete mode 100644 agents-api/agents_api/models/task/update_task.py
delete mode 100644 agents-api/agents_api/models/user/__init__.py
delete mode 100644 agents-api/agents_api/models/user/create_or_update_user.py
delete mode 100644 agents-api/agents_api/models/user/create_user.py
delete mode 100644 agents-api/agents_api/models/user/delete_user.py
delete mode 100644 agents-api/agents_api/models/user/get_user.py
delete mode 100644 agents-api/agents_api/models/user/list_users.py
delete mode 100644 agents-api/agents_api/models/user/patch_user.py
delete mode 100644 agents-api/agents_api/models/user/update_user.py
delete mode 100644 agents-api/agents_api/models/utils.py
create mode 100644 agents-api/agents_api/queries/__init__.py
delete mode 100644 agents-api/agents_api/workflows/embed_docs.py
diff --git a/agents-api/agents_api/activities/embed_docs.py b/agents-api/agents_api/activities/embed_docs.py
deleted file mode 100644
index a9a7cae44..000000000
--- a/agents-api/agents_api/activities/embed_docs.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import asyncio
-import operator
-from functools import reduce
-from itertools import batched
-
-from beartype import beartype
-from temporalio import activity
-
-from ..clients import cozo, litellm
-from ..env import testing
-from ..models.docs.embed_snippets import embed_snippets as embed_snippets_query
-from .types import EmbedDocsPayload
-
-
-@beartype
-async def embed_docs(
- payload: EmbedDocsPayload, cozo_client=None, max_batch_size: int = 100
-) -> None:
- # Create batches of both indices and snippets together
- indexed_snippets = list(enumerate(payload.content))
- # Batch snippets into groups of max_batch_size for parallel processing
- batched_indexed_snippets = list(batched(indexed_snippets, max_batch_size))
- # Get embedding instruction and title from payload, defaulting to empty strings
- embed_instruction: str = payload.embed_instruction or ""
- title: str = payload.title or ""
-
- # Helper function to embed a batch of snippets
- async def embed_batch(indexed_batch):
- # Split indices and snippets for the batch
- batch_indices, batch_snippets = zip(*indexed_batch)
- embeddings = await litellm.aembedding(
- inputs=[
- ((title + "\n\n" + snippet) if title else snippet).strip()
- for snippet in batch_snippets
- ],
- embed_instruction=embed_instruction,
- )
- return list(zip(batch_indices, embeddings))
-
- # Gather embeddings with their corresponding indices
- indexed_embeddings = reduce(
- operator.add,
- await asyncio.gather(
- *[embed_batch(batch) for batch in batched_indexed_snippets]
- ),
- )
-
- # Split indices and embeddings after all batches are processed
- indices, embeddings = zip(*indexed_embeddings)
-
- # Convert to lists since embed_snippets_query expects list types
- indices = list(indices)
- embeddings = list(embeddings)
-
- embed_snippets_query(
- developer_id=payload.developer_id,
- doc_id=payload.doc_id,
- snippet_indices=indices,
- embeddings=embeddings,
- client=cozo_client or cozo.get_cozo_client(),
- )
-
-
-async def mock_embed_docs(
- payload: EmbedDocsPayload, cozo_client=None, max_batch_size=100
-) -> None:
- # Does nothing
- return None
-
-
-embed_docs = activity.defn(name="embed_docs")(
- embed_docs if not testing else mock_embed_docs
-)
diff --git a/agents-api/agents_api/activities/task_steps/__init__.py b/agents-api/agents_api/activities/task_steps/__init__.py
index 573884629..5d02db858 100644
--- a/agents-api/agents_api/activities/task_steps/__init__.py
+++ b/agents-api/agents_api/activities/task_steps/__init__.py
@@ -1,7 +1,7 @@
# ruff: noqa: F401, F403, F405
from .base_evaluate import base_evaluate
-from .cozo_query_step import cozo_query_step
+from .pg_query_step import pg_query_step
from .evaluate_step import evaluate_step
from .for_each_step import for_each_step
from .get_value_step import get_value_step
diff --git a/agents-api/agents_api/activities/task_steps/cozo_query_step.py b/agents-api/agents_api/activities/task_steps/cozo_query_step.py
deleted file mode 100644
index 8d28d83c9..000000000
--- a/agents-api/agents_api/activities/task_steps/cozo_query_step.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from typing import Any
-
-from beartype import beartype
-from temporalio import activity
-
-from ... import models
-from ...env import testing
-
-
-@beartype
-async def cozo_query_step(
- query_name: str,
- values: dict[str, Any],
-) -> Any:
- (module_name, name) = query_name.split(".")
-
- module = getattr(models, module_name)
- query = getattr(module, name)
- return query(**values)
-
-
-# Note: This is here just for clarity. We could have just imported cozo_query_step directly
-# They do the same thing, so we dont need to mock the cozo_query_step function
-mock_cozo_query_step = cozo_query_step
-
-cozo_query_step = activity.defn(name="cozo_query_step")(
- cozo_query_step if not testing else mock_cozo_query_step
-)
diff --git a/agents-api/agents_api/activities/task_steps/pg_query_step.py b/agents-api/agents_api/activities/task_steps/pg_query_step.py
new file mode 100644
index 000000000..bfddc716f
--- /dev/null
+++ b/agents-api/agents_api/activities/task_steps/pg_query_step.py
@@ -0,0 +1,37 @@
+from typing import Any
+
+from async_lru import alru_cache
+from beartype import beartype
+from temporalio import activity
+
+from ... import queries
+from ...clients.pg import create_db_pool
+from ...env import db_dsn, testing
+
+
+@alru_cache(maxsize=1)
+async def get_db_pool(dsn: str):
+ return await create_db_pool(dsn=dsn)
+
+@beartype
+async def pg_query_step(
+ query_name: str,
+ values: dict[str, Any],
+ dsn: str = db_dsn,
+) -> Any:
+ pool = await get_db_pool(dsn=dsn)
+
+ (module_name, name) = query_name.split(".")
+
+ module = getattr(queries, module_name)
+ query = getattr(module, name)
+ return await query(**values, connection_pool=pool)
+
+
+# Note: This is here just for clarity. We could have just imported pg_query_step directly;
+# they do the same thing, so we don't need to mock the pg_query_step function.
+mock_pg_query_step = pg_query_step
+
+pg_query_step = activity.defn(name="pg_query_step")(
+ pg_query_step if not testing else mock_pg_query_step
+)
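A note on the cached pool in pg_query_step: alru_cache memoizes the awaited result per DSN, so all activity invocations in a worker process share one connection pool instead of reconnecting. A minimal standalone demonstration (the DSN is assumed):

import asyncio

import asyncpg
from async_lru import alru_cache


@alru_cache(maxsize=1)
async def get_db_pool(dsn: str) -> asyncpg.Pool:
    return await asyncpg.create_pool(dsn=dsn)


async def main() -> None:
    dsn = "postgresql://localhost/agents"  # hypothetical DSN
    pool_a = await get_db_pool(dsn)
    pool_b = await get_db_pool(dsn)
    assert pool_a is pool_b  # one pool per worker process, reused across calls


asyncio.run(main())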
diff --git a/agents-api/agents_api/activities/utils.py b/agents-api/agents_api/activities/utils.py
index d9ad1840c..9b97f5f71 100644
--- a/agents-api/agents_api/activities/utils.py
+++ b/agents-api/agents_api/activities/utils.py
@@ -296,28 +296,28 @@ def get_handler(system: SystemDef) -> Callable:
The base handler function.
"""
- from ..models.agent.create_agent import create_agent as create_agent_query
- from ..models.agent.delete_agent import delete_agent as delete_agent_query
- from ..models.agent.get_agent import get_agent as get_agent_query
- from ..models.agent.list_agents import list_agents as list_agents_query
- from ..models.agent.update_agent import update_agent as update_agent_query
- from ..models.docs.delete_doc import delete_doc as delete_doc_query
- from ..models.docs.list_docs import list_docs as list_docs_query
- from ..models.session.create_session import create_session as create_session_query
- from ..models.session.delete_session import delete_session as delete_session_query
- from ..models.session.get_session import get_session as get_session_query
- from ..models.session.list_sessions import list_sessions as list_sessions_query
- from ..models.session.update_session import update_session as update_session_query
- from ..models.task.create_task import create_task as create_task_query
- from ..models.task.delete_task import delete_task as delete_task_query
- from ..models.task.get_task import get_task as get_task_query
- from ..models.task.list_tasks import list_tasks as list_tasks_query
- from ..models.task.update_task import update_task as update_task_query
- from ..models.user.create_user import create_user as create_user_query
- from ..models.user.delete_user import delete_user as delete_user_query
- from ..models.user.get_user import get_user as get_user_query
- from ..models.user.list_users import list_users as list_users_query
- from ..models.user.update_user import update_user as update_user_query
+ from ..queries.agents.create_agent import create_agent as create_agent_query
+ from ..queries.agents.delete_agent import delete_agent as delete_agent_query
+ from ..queries.agents.get_agent import get_agent as get_agent_query
+ from ..queries.agents.list_agents import list_agents as list_agents_query
+ from ..queries.agents.update_agent import update_agent as update_agent_query
+ from ..queries.docs.delete_doc import delete_doc as delete_doc_query
+ from ..queries.docs.list_docs import list_docs as list_docs_query
+ from ..queries.sessions.create_session import create_session as create_session_query
+ from ..queries.sessions.delete_session import delete_session as delete_session_query
+ from ..queries.sessions.get_session import get_session as get_session_query
+ from ..queries.sessions.list_sessions import list_sessions as list_sessions_query
+ from ..queries.sessions.update_session import update_session as update_session_query
+ from ..queries.tasks.create_task import create_task as create_task_query
+ from ..queries.tasks.delete_task import delete_task as delete_task_query
+ from ..queries.tasks.get_task import get_task as get_task_query
+ from ..queries.tasks.list_tasks import list_tasks as list_tasks_query
+ from ..queries.tasks.update_task import update_task as update_task_query
+ from ..queries.users.create_user import create_user as create_user_query
+ from ..queries.users.delete_user import delete_user as delete_user_query
+ from ..queries.users.get_user import get_user as get_user_query
+ from ..queries.users.list_users import list_users as list_users_query
+ from ..queries.users.update_user import update_user as update_user_query
from ..routers.docs.create_doc import create_agent_doc, create_user_doc
from ..routers.docs.search_docs import search_agent_docs, search_user_docs
from ..routers.sessions.chat import chat
diff --git a/agents-api/agents_api/clients/__init__.py b/agents-api/agents_api/clients/__init__.py
index 43a17ab08..714cc5294 100644
--- a/agents-api/agents_api/clients/__init__.py
+++ b/agents-api/agents_api/clients/__init__.py
@@ -1,7 +1,7 @@
"""
The `clients` module contains client classes and functions for interacting with various external services and APIs, abstracting the complexity of HTTP requests and API interactions to provide a simplified interface for the rest of the application.
-- `cozo.py`: Handles communication with the Cozo service, facilitating operations such as retrieving product information.
+- `pg.py`: Handles communication with the PostgreSQL database, providing helpers such as create_db_pool for connection pooling and query execution.
- `embed.py`: Manages requests to an Embedding Service for text embedding functionalities.
- `openai.py`: Facilitates interaction with OpenAI's API for natural language processing tasks.
- `temporal.py`: Provides functionality for connecting to Temporal workflows, enabling asynchronous task execution and management.
diff --git a/agents-api/agents_api/common/utils/__init__.py b/agents-api/agents_api/common/utils/__init__.py
index 891594c02..fbe7d490c 100644
--- a/agents-api/agents_api/common/utils/__init__.py
+++ b/agents-api/agents_api/common/utils/__init__.py
@@ -1,7 +1,7 @@
"""
The `utils` module within the `agents-api` project offers a collection of utility functions designed to support various aspects of the application. This includes:
-- `cozo.py`: Utilities for interacting with the Cozo API client, including data mutation processes.
+- `pg.py`: Utilities for interacting with the PostgreSQL client, including data mutation processes.
- `datetime.py`: Functions for handling date and time operations, ensuring consistent use of time zones and formats across the application.
- `json.py`: Custom JSON utilities, including a custom JSON encoder for handling specific object types like UUIDs, and a utility function for JSON serialization with support for default values for None objects.
diff --git a/agents-api/agents_api/common/utils/cozo.py b/agents-api/agents_api/common/utils/cozo.py
deleted file mode 100644
index f342ba617..000000000
--- a/agents-api/agents_api/common/utils/cozo.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python3
-
-"""This module provides utility functions for interacting with the Cozo API client, including data mutation processes."""
-
-from types import SimpleNamespace
-from uuid import UUID
-
-from beartype import beartype
-from pycozo import Client
-
-# Define a mock client for testing purposes, simulating Cozo API client behavior.
-_fake_client: SimpleNamespace = SimpleNamespace()
-# Lambda function to process and mutate data dictionaries using the Cozo client's internal method. This is a workaround to access protected member functions for testing.
-_fake_client._process_mutate_data_dict = lambda data: (
- Client._process_mutate_data_dict(_fake_client, data)
-)
-
-# Public interface to process and mutate data using the Cozo client. It wraps the client's internal processing method for external use.
-cozo_process_mutate_data = _fake_client._process_mutate_data = lambda data: (
- Client._process_mutate_data(_fake_client, data)
-)
-
-
-@beartype
-def uuid_int_list_to_uuid(data: list[int]) -> UUID:
- return UUID(bytes=b"".join([i.to_bytes(1, "big") for i in data]))
diff --git a/agents-api/agents_api/models/__init__.py b/agents-api/agents_api/models/__init__.py
deleted file mode 100644
index e59b5b01c..000000000
--- a/agents-api/agents_api/models/__init__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
-The `models` module of the agents API is designed to encapsulate all data interactions with the CozoDB database. It provides a structured way to perform CRUD (Create, Read, Update, Delete) operations and other specific data manipulations across various entities such as agents, documents, entries, sessions, tools, and users.
-
-Each sub-module within this module corresponds to a specific entity and contains functions and classes that implement datalog queries for interacting with the database. These interactions include creating new records, updating existing ones, retrieving data for specific conditions, and deleting records. The operations are crucial for the functionality of the agents API, enabling it to manage and process data effectively for each entity.
-
-This module also integrates with the `common` module for exception handling and utility functions, ensuring robust error management and providing reusable components for data processing and query construction.
-"""
-
-# ruff: noqa: F401, F403, F405
-
-from . import agent as agent
-from . import developer as developer
-from . import docs as docs
-from . import entry as entry
-from . import execution as execution
-from . import files as files
-from . import session as session
-from . import task as task
-from . import tools as tools
-from . import user as user
diff --git a/agents-api/agents_api/models/agent/__init__.py b/agents-api/agents_api/models/agent/__init__.py
deleted file mode 100644
index 2beaf8166..000000000
--- a/agents-api/agents_api/models/agent/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""
-The `agent` module within the `agents-api` package provides a comprehensive suite of functionalities for managing agents in the CozoDB database. This includes:
-
-- Creating new agents and their associated tools.
-- Updating existing agents and their settings.
-- Retrieving details about specific agents or a list of agents.
-- Deleting agents from the database.
-
-Additionally, the module supports operations related to agent tools, including creating, updating, and patching tools associated with agents.
-
-This module serves as the backbone for agent management within the CozoDB ecosystem, facilitating a wide range of operations necessary for the effective handling of agent data.
-"""
-
-# ruff: noqa: F401, F403, F405
-
-from .create_agent import create_agent
-from .create_or_update_agent import create_or_update_agent
-from .delete_agent import delete_agent
-from .get_agent import get_agent
-from .list_agents import list_agents
-from .patch_agent import patch_agent
-from .update_agent import update_agent
diff --git a/agents-api/agents_api/models/agent/create_agent.py b/agents-api/agents_api/models/agent/create_agent.py
deleted file mode 100644
index 1872a6f36..000000000
--- a/agents-api/agents_api/models/agent/create_agent.py
+++ /dev/null
@@ -1,148 +0,0 @@
-"""
-This module contains the functionality for creating agents in the CozoDB database.
-It includes functions to construct and execute datalog queries for inserting new agent records.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import Agent, CreateAgentRequest
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- lambda e: isinstance(e, QueryException)
- and "asserted to return some results, but returned none"
- in str(e): lambda *_: HTTPException(
- detail="Developer not found. Please ensure the provided auth token (which refers to your developer_id) is valid and the developer has the necessary permissions to create an agent.",
- status_code=403,
- ),
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(
- Agent,
- one=True,
- transform=lambda d: {"id": UUID(d.pop("agent_id")), **d},
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("create_agent")
-@beartype
-def create_agent(
- *,
- developer_id: UUID,
- agent_id: UUID | None = None,
- data: CreateAgentRequest,
-) -> tuple[list[str], dict]:
- """
- Constructs and executes a datalog query to create a new agent in the database.
-
- Parameters:
- agent_id (UUID | None): The unique identifier for the agent.
- developer_id (UUID): The unique identifier for the developer creating the agent.
- data (CreateAgentRequest): The data for the new agent.
-
- Returns:
- Agent: The newly created agent record.
- """
-
- agent_id = agent_id or uuid7()
-
- # Extract the agent data from the payload
- data.metadata = data.metadata or {}
- data.default_settings = data.default_settings or {}
-
- data.instructions = (
- data.instructions
- if isinstance(data.instructions, list)
- else [data.instructions]
- )
-
- agent_data = data.model_dump()
- default_settings = agent_data.pop("default_settings")
-
- settings_cols, settings_vals = cozo_process_mutate_data(
- {
- **default_settings,
- "agent_id": str(agent_id),
- }
- )
-
- # Create default agent settings
- # Construct a query to insert default settings for the new agent
- default_settings_query = f"""
- ?[{settings_cols}] <- $settings_vals
-
- :insert agent_default_settings {{
- {settings_cols}
- }}
- """
- # create the agent
- # Construct a query to insert the new agent record into the agents table
- agent_query = """
- ?[agent_id, developer_id, model, name, about, metadata, instructions, created_at, updated_at] <- [
- [$agent_id, $developer_id, $model, $name, $about, $metadata, $instructions, now(), now()]
- ]
-
- :insert agents {
- developer_id,
- agent_id =>
- model,
- name,
- about,
- metadata,
- instructions,
- created_at,
- updated_at,
- }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- default_settings_query,
- agent_query,
- ]
-
- return (
- queries,
- {
- "settings_vals": settings_vals,
- "agent_id": str(agent_id),
- "developer_id": str(developer_id),
- **agent_data,
- },
- )
diff --git a/agents-api/agents_api/models/agent/create_or_update_agent.py b/agents-api/agents_api/models/agent/create_or_update_agent.py
deleted file mode 100644
index 9a1feb717..000000000
--- a/agents-api/agents_api/models/agent/create_or_update_agent.py
+++ /dev/null
@@ -1,186 +0,0 @@
-"""
-This module contains the functionality for creating agents in the CozoDB database.
-It includes functions to construct and execute datalog queries for inserting new agent records.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Agent, CreateOrUpdateAgentRequest
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(
- Agent, one=True, transform=lambda d: {"id": UUID(d.pop("agent_id")), **d}
-)
-@cozo_query
-@increase_counter("create_or_update_agent")
-@beartype
-def create_or_update_agent(
- *,
- developer_id: UUID,
- agent_id: UUID,
- data: CreateOrUpdateAgentRequest,
-) -> tuple[list[str | None], dict]:
- """
- Constructs and executes a datalog query to create a new agent in the database.
-
- Parameters:
- agent_id (UUID): The unique identifier for the agent.
- developer_id (UUID): The unique identifier for the developer creating the agent.
- name (str): The name of the agent.
- about (str): A description of the agent.
- instructions (list[str], optional): A list of instructions for using the agent. Defaults to an empty list.
- model (str, optional): The model identifier for the agent. Defaults to "gpt-4o".
- metadata (dict, optional): A dictionary of metadata for the agent. Defaults to an empty dict.
- default_settings (dict, optional): A dictionary of default settings for the agent. Defaults to an empty dict.
- client (CozoClient, optional): The CozoDB client instance to use for the query. Defaults to a preconfigured client instance.
-
- Returns:
- Agent: The newly created agent record.
- """
-
- # Extract the agent data from the payload
- data.metadata = data.metadata or {}
- data.instructions = (
- data.instructions
- if isinstance(data.instructions, list)
- else [data.instructions]
- )
- data.default_settings = data.default_settings or {}
-
- agent_data = data.model_dump()
- default_settings = (
- data.default_settings.model_dump(exclude_none=True)
- if data.default_settings
- else {}
- )
-
- settings_cols, settings_vals = cozo_process_mutate_data(
- {
- **default_settings,
- "agent_id": str(agent_id),
- }
- )
-
- # TODO: remove this
- ### # Create default agent settings
- ### # Construct a query to insert default settings for the new agent
- ### default_settings_query = f"""
- ### %if {{
- ### len[count(agent_id)] :=
- ### *agent_default_settings{{agent_id}},
- ### agent_id = to_uuid($agent_id)
-
- ### ?[should_create] := len[count], count > 0
- ### }}
- ### %then {{
- ### ?[{settings_cols}] <- $settings_vals
-
- ### :put agent_default_settings {{
- ### {settings_cols}
- ### }}
- ### }}
- ### """
-
- # FIXME: This create or update query will overwrite the settings
- # Need to find a way to only run the insert query if the agent_default_settings
-
- # Create default agent settings
- # Construct a query to insert default settings for the new agent
- default_settings_query = f"""
- ?[{settings_cols}] <- $settings_vals
-
- :put agent_default_settings {{
- {settings_cols}
- }}
- """
-
- # create the agent
- # Construct a query to insert the new agent record into the agents table
- agent_query = """
- input[agent_id, developer_id, model, name, about, metadata, instructions, updated_at] <- [
- [$agent_id, $developer_id, $model, $name, $about, $metadata, $instructions, now()]
- ]
-
- ?[agent_id, developer_id, model, name, about, metadata, instructions, created_at, updated_at] :=
- input[_agent_id, developer_id, model, name, about, metadata, instructions, updated_at],
- *agents{
- agent_id,
- developer_id,
- created_at,
- },
- agent_id = to_uuid(_agent_id),
-
- ?[agent_id, developer_id, model, name, about, metadata, instructions, created_at, updated_at] :=
- input[_agent_id, developer_id, model, name, about, metadata, instructions, updated_at],
- not *agents{
- agent_id,
- developer_id,
- }, created_at = now(),
- agent_id = to_uuid(_agent_id),
-
- :put agents {
- developer_id,
- agent_id =>
- model,
- name,
- about,
- metadata,
- instructions,
- created_at,
- updated_at,
- }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- default_settings_query,
- agent_query,
- ]
-
- return (
- queries,
- {
- "settings_vals": settings_vals,
- "agent_id": str(agent_id),
- "developer_id": str(developer_id),
- **agent_data,
- },
- )
diff --git a/agents-api/agents_api/models/agent/delete_agent.py b/agents-api/agents_api/models/agent/delete_agent.py
deleted file mode 100644
index 60de66292..000000000
--- a/agents-api/agents_api/models/agent/delete_agent.py
+++ /dev/null
@@ -1,134 +0,0 @@
-"""
-This module contains the implementation of the delete_agent_query function, which is responsible for deleting an agent and its related default settings from the CozoDB database.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- lambda e: isinstance(e, QueryException)
- and "Developer does not own resource"
- in e.resp["display"]: lambda *_: HTTPException(
- detail="The specified developer does not own the requested resource. Please verify the ownership or check if the developer ID is correct.",
- status_code=404,
- ),
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(
- ResourceDeletedResponse,
- one=True,
- transform=lambda d: {
- "id": UUID(d.pop("agent_id")),
- "deleted_at": utcnow(),
- "jobs": [],
- },
- _kind="deleted",
-)
-@cozo_query
-@beartype
-def delete_agent(*, developer_id: UUID, agent_id: UUID) -> tuple[list[str], dict]:
- """
- Constructs and returns a datalog query for deleting an agent and its default settings from the database.
-
- Parameters:
- developer_id (UUID): The UUID of the developer owning the agent.
- agent_id (UUID): The UUID of the agent to be deleted.
- client (CozoClient, optional): An instance of the CozoClient to execute the query.
-
- Returns:
- ResourceDeletedResponse: The response indicating the deletion of the agent.
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- """
- # Delete docs
- ?[owner_id, owner_type, doc_id] :=
- *docs{
- owner_type,
- owner_id,
- doc_id,
- },
- owner_id = to_uuid($agent_id),
- owner_type = "agent"
-
- :delete docs {
- owner_type,
- owner_id,
- doc_id
- }
- :returning
- """,
- """
- # Delete tools
- ?[agent_id, tool_id] :=
- *tools{
- agent_id,
- tool_id,
- }, agent_id = to_uuid($agent_id)
-
- :delete tools {
- agent_id,
- tool_id
- }
- :returning
- """,
- """
- # Delete default agent settings
- ?[agent_id] <- [[$agent_id]]
-
- :delete agent_default_settings {
- agent_id
- }
- :returning
- """,
- """
- # Delete the agent
- ?[agent_id, developer_id] <- [[$agent_id, $developer_id]]
-
- :delete agents {
- developer_id,
- agent_id
- }
- :returning
- """,
- ]
-
- return (queries, {"agent_id": str(agent_id), "developer_id": str(developer_id)})
diff --git a/agents-api/agents_api/models/agent/get_agent.py b/agents-api/agents_api/models/agent/get_agent.py
deleted file mode 100644
index 008e39454..000000000
--- a/agents-api/agents_api/models/agent/get_agent.py
+++ /dev/null
@@ -1,117 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Agent
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- lambda e: isinstance(e, QueryException)
- and "Developer not found" in str(e): lambda *_: HTTPException(
- detail="Developer does not exist", status_code=403
- ),
- lambda e: isinstance(e, QueryException)
- and "Developer does not own resource"
- in e.resp["display"]: lambda *_: HTTPException(
- detail="Developer does not own resource", status_code=404
- ),
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(Agent, one=True)
-@cozo_query
-@beartype
-def get_agent(*, developer_id: UUID, agent_id: UUID) -> tuple[list[str], dict]:
- """
- Fetches agent details and default settings from the database.
-
- This function constructs and executes a datalog query to retrieve information about a specific agent, including its default settings, based on the provided agent_id and developer_id.
-
- Parameters:
- developer_id (UUID): The unique identifier for the developer.
- agent_id (UUID): The unique identifier for the agent.
- client (CozoClient, optional): The database client used to execute the query.
-
- Returns:
- Agent
- """
- # Constructing a datalog query to retrieve agent details and default settings.
- # The query uses input parameters for agent_id and developer_id to filter the results.
- # It joins the 'agents' and 'agent_default_settings' relations to fetch comprehensive details.
- get_query = """
- input[agent_id] <- [[to_uuid($agent_id)]]
-
- ?[
- id,
- model,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- default_settings,
- instructions,
- ] := input[id],
- *agents {
- developer_id: to_uuid($developer_id),
- agent_id: id,
- model,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- instructions,
- },
- *agent_default_settings {
- agent_id: id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- min_p,
- preset,
- },
- default_settings = {
- "frequency_penalty": frequency_penalty,
- "presence_penalty": presence_penalty,
- "length_penalty": length_penalty,
- "repetition_penalty": repetition_penalty,
- "top_p": top_p,
- "temperature": temperature,
- "min_p": min_p,
- "preset": preset,
- }
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- get_query,
- ]
-
- # Execute the constructed datalog query using the provided CozoClient.
- # The result is returned as a pandas DataFrame.
- return (queries, {"agent_id": str(agent_id), "developer_id": str(developer_id)})
diff --git a/agents-api/agents_api/models/agent/list_agents.py b/agents-api/agents_api/models/agent/list_agents.py
deleted file mode 100644
index 882b6c8c6..000000000
--- a/agents-api/agents_api/models/agent/list_agents.py
+++ /dev/null
@@ -1,122 +0,0 @@
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Agent
-from ...common.utils import json
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(Agent)
-@cozo_query
-@beartype
-def list_agents(
- *,
- developer_id: UUID,
- limit: int = 100,
- offset: int = 0,
- sort_by: Literal["created_at", "updated_at"] = "created_at",
- direction: Literal["asc", "desc"] = "desc",
- metadata_filter: dict[str, Any] = {},
-) -> tuple[list[str], dict]:
- """
- Constructs and executes a datalog query to list agents from the 'cozodb' database.
-
- Parameters:
- developer_id: UUID of the developer.
- limit: Maximum number of agents to return.
- offset: Number of agents to skip before starting to collect the result set.
- metadata_filter: Dictionary to filter agents based on metadata.
- client: Instance of CozoClient to execute the query.
- """
- # Transforms the metadata_filter dictionary into a string representation for the datalog query.
- metadata_filter_str = ", ".join(
- [
- f"metadata->{json.dumps(k)} == {json.dumps(v)}"
- for k, v in metadata_filter.items()
- ]
- )
-
- sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
-
- # Datalog query to retrieve agent information based on filters, sorted by creation date in descending order.
- queries = [
- verify_developer_id_query(developer_id),
- f"""
- input[developer_id] <- [[to_uuid($developer_id)]]
-
- ?[
- id,
- model,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- default_settings,
- instructions,
- ] := input[developer_id],
- *agents {{
- developer_id,
- agent_id: id,
- model,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- instructions,
- }},
- *agent_default_settings {{
- agent_id: id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- min_p,
- preset,
- }},
- default_settings = {{
- "frequency_penalty": frequency_penalty,
- "presence_penalty": presence_penalty,
- "length_penalty": length_penalty,
- "repetition_penalty": repetition_penalty,
- "top_p": top_p,
- "temperature": temperature,
- "min_p": min_p,
- "preset": preset,
- }},
- {metadata_filter_str}
-
- :limit $limit
- :offset $offset
- :sort {sort}
- """,
- ]
-
- return (
- queries,
- {"developer_id": str(developer_id), "limit": limit, "offset": offset},
- )
diff --git a/agents-api/agents_api/models/agent/patch_agent.py b/agents-api/agents_api/models/agent/patch_agent.py
deleted file mode 100644
index 99d4e3553..000000000
--- a/agents-api/agents_api/models/agent/patch_agent.py
+++ /dev/null
@@ -1,132 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...common.utils.datetime import utcnow
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {"id": d["agent_id"], "jobs": [], **d},
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("patch_agent")
-@beartype
-def patch_agent(
- *,
- agent_id: UUID,
- developer_id: UUID,
- data: PatchAgentRequest,
-) -> tuple[list[str], dict]:
- """Patches agent data based on provided updates.
-
- Parameters:
- agent_id (UUID): The unique identifier for the agent.
- developer_id (UUID): The unique identifier for the developer.
- default_settings (dict, optional): Default settings to apply to the agent.
- **update_data: Arbitrary keyword arguments representing data to update.
-
- Returns:
- ResourceUpdatedResponse: The updated agent data.
- """
- update_data = data.model_dump(exclude_unset=True)
-
- # Construct the query for updating agent information in the database.
- # Agent update query
- metadata = update_data.pop("metadata", {}) or {}
- default_settings = update_data.pop("default_settings", {}) or {}
- agent_update_cols, agent_update_vals = cozo_process_mutate_data(
- {
- **{k: v for k, v in update_data.items() if v is not None},
- "agent_id": str(agent_id),
- "developer_id": str(developer_id),
- "updated_at": utcnow().timestamp(),
- }
- )
-
- update_query = f"""
- # update the agent
- input[{agent_update_cols}] <- $agent_update_vals
-
- ?[{agent_update_cols}, metadata] :=
- input[{agent_update_cols}],
- *agents {{
- agent_id: to_uuid($agent_id),
- metadata: md,
- }},
- metadata = concat(md, $metadata)
-
- :update agents {{
- {agent_update_cols},
- metadata,
- }}
- :returning
- """
-
- # Construct the query for updating agent's default settings in the database.
- # Settings update query
- settings_cols, settings_vals = cozo_process_mutate_data(
- {
- **default_settings,
- "agent_id": str(agent_id),
- }
- )
-
- settings_update_query = f"""
- # update the agent settings
- ?[{settings_cols}] <- $settings_vals
-
- :update agent_default_settings {{
- {settings_cols}
- }}
- """
-
- # Combine agent and settings update queries if default settings are provided.
- # Combine the queries
- queries = [update_query]
-
- if len(default_settings) != 0:
- queries.insert(0, settings_update_query)
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- *queries,
- ]
-
- return (
- queries,
- {
- "agent_update_vals": agent_update_vals,
- "settings_vals": settings_vals,
- "metadata": metadata,
- "agent_id": str(agent_id),
- },
- )
diff --git a/agents-api/agents_api/models/agent/update_agent.py b/agents-api/agents_api/models/agent/update_agent.py
deleted file mode 100644
index b36f687eb..000000000
--- a/agents-api/agents_api/models/agent/update_agent.py
+++ /dev/null
@@ -1,149 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {"id": d["agent_id"], "jobs": [], **d},
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("update_agent")
-@beartype
-def update_agent(
- *,
- agent_id: UUID,
- developer_id: UUID,
- data: UpdateAgentRequest,
-) -> tuple[list[str], dict]:
- """
- Constructs and executes a datalog query to update an agent and its default settings in the 'cozodb' database.
-
- Parameters:
- agent_id (UUID): The unique identifier of the agent to be updated.
- developer_id (UUID): The unique identifier of the developer associated with the agent.
- data (UpdateAgentRequest): The request payload containing the updated agent data.
-
- Returns:
- ResourceUpdatedResponse: The updated agent data.
- """
- default_settings = (
- data.default_settings.model_dump(exclude_none=True)
- if data.default_settings
- else {}
- )
- update_data = data.model_dump()
-
- # Remove default settings from the agent update data
- update_data.pop("default_settings", None)
-
- agent_id = str(agent_id)
- developer_id = str(developer_id)
- update_data["instructions"] = update_data.get("instructions", [])
- update_data["instructions"] = (
- update_data["instructions"]
- if isinstance(update_data["instructions"], list)
- else [update_data["instructions"]]
- )
-
- # Construct the agent update part of the query with dynamic columns and values based on `update_data`.
- # Agent update query
- agent_update_cols, agent_update_vals = cozo_process_mutate_data(
- {
- **{k: v for k, v in update_data.items() if v is not None},
- "agent_id": agent_id,
- "developer_id": developer_id,
- }
- )
-
- update_query = f"""
- # update the agent
- input[{agent_update_cols}] <- $agent_update_vals
- original[created_at] := *agents{{
- developer_id: to_uuid($developer_id),
- agent_id: to_uuid($agent_id),
- created_at,
- }},
-
- ?[created_at, updated_at, {agent_update_cols}] :=
- input[{agent_update_cols}],
- original[created_at],
- updated_at = now(),
-
- :put agents {{
- created_at,
- updated_at,
- {agent_update_cols}
- }}
- :returning
- """
-
- # Construct the settings update part of the query if `default_settings` are provided.
- # Settings update query
- settings_cols, settings_vals = cozo_process_mutate_data(
- {
- **default_settings,
- "agent_id": agent_id,
- }
- )
-
- settings_update_query = f"""
- # update the agent settings
- ?[{settings_cols}] <- $settings_vals
-
- :put agent_default_settings {{
- {settings_cols}
- }}
- """
-
- # Combine agent and settings update queries into a single query string.
- # Combine the queries
- queries = [update_query]
-
- if len(default_settings) != 0:
- queries.insert(0, settings_update_query)
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- *queries,
- ]
-
- return (
- queries,
- {
- "agent_update_vals": agent_update_vals,
- "settings_vals": settings_vals,
- "agent_id": agent_id,
- "developer_id": developer_id,
- },
- )
diff --git a/agents-api/agents_api/models/docs/__init__.py b/agents-api/agents_api/models/docs/__init__.py
deleted file mode 100644
index 0ba3db0d4..000000000
--- a/agents-api/agents_api/models/docs/__init__.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""
-Module: agents_api/models/docs
-
-This module is responsible for managing document-related operations within the application, particularly for agents and users. It serves as a core component of the document management system, enabling features such as document creation, listing, deletion, and embedding of snippets for enhanced search and retrieval capabilities.
-
-Main functionalities include:
-- Creating new documents and associating them with agents or users.
-- Listing documents based on various criteria, including ownership and metadata filters.
-- Deleting documents by their unique identifiers.
-- Embedding document snippets for retrieval purposes.
-
-The module interacts with other parts of the application, such as the agents and users modules, to provide a comprehensive document management system. Its role is crucial in enabling document search, retrieval, and management features within the context of agents and users.
-
-This documentation aims to provide clear, concise, and sufficient context for new developers or contributors to understand the module's role without needing to dive deep into the code immediately.
-"""
-
-# ruff: noqa: F401, F403, F405
-
-from .create_doc import create_doc
-from .delete_doc import delete_doc
-from .embed_snippets import embed_snippets
-from .get_doc import get_doc
-from .list_docs import list_docs
-from .search_docs_by_embedding import search_docs_by_embedding
-from .search_docs_by_text import search_docs_by_text
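-
-# Illustrative flow (a hypothetical sketch, not part of the public API surface;
-# dev_id, agent_id, req, and vec are placeholder values, and vec must match the
-# configured embedding size):
-#
-#     doc = create_doc(developer_id=dev_id, owner_type="agent", owner_id=agent_id, data=req)
-#     embed_snippets(developer_id=dev_id, doc_id=doc.id, snippet_indices=[0], embeddings=[vec])
-#     hits = search_docs_by_embedding(developer_id=dev_id, owners=[("agent", agent_id)], query_embedding=vec)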
diff --git a/agents-api/agents_api/models/docs/create_doc.py b/agents-api/agents_api/models/docs/create_doc.py
deleted file mode 100644
index ceb8b5fe0..000000000
--- a/agents-api/agents_api/models/docs/create_doc.py
+++ /dev/null
@@ -1,141 +0,0 @@
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import CreateDocRequest, Doc
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- Doc,
- one=True,
- transform=lambda d: {
- "id": UUID(d["doc_id"]),
- **d,
- },
-)
-@cozo_query
-@increase_counter("create_doc")
-@beartype
-def create_doc(
- *,
- developer_id: UUID,
- owner_type: Literal["user", "agent"],
- owner_id: UUID,
- doc_id: UUID | None = None,
- data: CreateDocRequest,
-) -> tuple[list[str], dict]:
- """
- Constructs and executes a datalog query to create a new document and its associated snippets in the 'cozodb' database.
-
-    Parameters:
-        developer_id (UUID): The UUID of the developer who owns the document.
-        owner_type (Literal["user", "agent"]): The type of the owner of the document.
-        owner_id (UUID): The UUID of the document owner.
-        doc_id (UUID | None): The UUID of the document to be created; generated if not provided.
-        data (CreateDocRequest): The content of the document.
- """
-
- doc_id = str(doc_id or uuid7())
- owner_id = str(owner_id)
-
- if isinstance(data.content, str):
- data.content = [data.content]
-
- data.metadata = data.metadata or {}
-
- doc_data = data.model_dump()
- doc_data.pop("embed_instruction", None)
- content = doc_data.pop("content")
-
- doc_data["owner_type"] = owner_type
- doc_data["owner_id"] = owner_id
- doc_data["doc_id"] = doc_id
-
- doc_cols, doc_rows = cozo_process_mutate_data(doc_data)
-
- snippet_cols, snippet_rows = "", []
-
- # Process each content snippet and prepare data for the datalog query.
- for snippet_idx, snippet in enumerate(content):
- snippet_cols, new_snippet_rows = cozo_process_mutate_data(
- dict(
- doc_id=doc_id,
- index=snippet_idx,
- content=snippet,
- )
- )
-
- snippet_rows += new_snippet_rows
-
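-    # Illustrative (hypothetical input): content ["foo", "bar"] yields one row
-    # per snippet, with indices 0 and 1, all sharing this document's doc_id
-    # (row shape as produced by cozo_process_mutate_data).
-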
- create_snippets_query = f"""
- ?[{snippet_cols}] <- $snippet_rows
-
- :create _snippets {{ {snippet_cols} }}
- }} {{
- ?[{snippet_cols}] <- $snippet_rows
- :insert snippets {{ {snippet_cols} }}
- :returning
- """
-
- # Construct the datalog query for creating the document and its snippets.
- create_doc_query = f"""
- ?[{doc_cols}] <- $doc_rows
-
- :create _docs {{ {doc_cols} }}
- }} {{
- ?[{doc_cols}] <- $doc_rows
- :insert docs {{ {doc_cols} }}
- :returning
- }} {{
- snippet_rows[collect(content)] :=
- *_snippets {{
- content
- }}
-
- ?[{doc_cols}, content, created_at] :=
- *_docs {{ {doc_cols} }},
- snippet_rows[content],
- created_at = now()
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, f"{owner_type}s", **{f"{owner_type}_id": owner_id}
- ),
- create_snippets_query,
- create_doc_query,
- ]
-
-    # Return the constructed queries along with their parameters; execution is handled by the @cozo_query decorator.
- return (
- queries,
- {
- "doc_rows": doc_rows,
- "snippet_rows": snippet_rows,
- },
- )
diff --git a/agents-api/agents_api/models/docs/delete_doc.py b/agents-api/agents_api/models/docs/delete_doc.py
deleted file mode 100644
index c02705756..000000000
--- a/agents-api/agents_api/models/docs/delete_doc.py
+++ /dev/null
@@ -1,102 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceDeletedResponse,
- one=True,
- transform=lambda d: {
- "id": UUID(d.pop("doc_id")),
- "deleted_at": utcnow(),
- "jobs": [],
- },
- _kind="deleted",
-)
-@cozo_query
-@beartype
-def delete_doc(
- *,
- developer_id: UUID,
- owner_id: UUID,
- owner_type: str,
- doc_id: UUID,
-) -> tuple[list[str], dict]:
- """Constructs and returns a datalog query for deleting documents and associated information snippets.
-
- This function targets the 'cozodb' database, allowing for the removal of documents and their related information snippets based on the provided document ID and owner (user or agent).
-
-    Parameters:
-        developer_id (UUID): The UUID of the developer who owns the document.
-        owner_id (UUID): The UUID of the document owner (user or agent).
-        owner_type (str): The type of the owner ("user" or "agent").
-        doc_id (UUID): The UUID of the document to be deleted.
-
-    Returns:
-        ResourceDeletedResponse: Confirmation that the document and its snippets were deleted.
-    """
- # Convert UUID parameters to string format for use in the datalog query
- doc_id = str(doc_id)
- owner_id = str(owner_id)
-
- # The following query is divided into two main parts:
- # 1. Deleting information snippets associated with the document
- # 2. Deleting the document itself
- delete_snippets_query = """
- # This section constructs the subquery for identifying and deleting all information snippets associated with the given document ID.
- # Delete snippets
- input[doc_id] <- [[to_uuid($doc_id)]]
- ?[doc_id, index] :=
- input[doc_id],
- *snippets {
- doc_id,
- index,
- }
-
- :delete snippets {
- doc_id,
- index
- }
- """
-
- delete_doc_query = """
- # Delete the docs
- ?[doc_id, owner_type, owner_id] <- [[ to_uuid($doc_id), $owner_type, to_uuid($owner_id) ]]
-
- :delete docs { doc_id, owner_type, owner_id }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, f"{owner_type}s", **{f"{owner_type}_id": owner_id}
- ),
- delete_snippets_query,
- delete_doc_query,
- ]
-
- return (queries, {"doc_id": doc_id, "owner_type": owner_type, "owner_id": owner_id})
diff --git a/agents-api/agents_api/models/docs/embed_snippets.py b/agents-api/agents_api/models/docs/embed_snippets.py
deleted file mode 100644
index 8d8ae1e62..000000000
--- a/agents-api/agents_api/models/docs/embed_snippets.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""Module for embedding documents in the cozodb database. Contains functions to update document embeddings."""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceUpdatedResponse
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...common.utils.datetime import utcnow
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {"id": d["doc_id"], "updated_at": utcnow(), "jobs": []},
- _kind="inserted",
-)
-@cozo_query
-@beartype
-def embed_snippets(
- *,
- developer_id: UUID,
- doc_id: UUID,
- snippet_indices: list[int] | tuple[int, ...],
- embeddings: list[list[float]],
- embedding_size: int = 1024,
-) -> tuple[list[str], dict]:
- """Embeds document snippets in the cozodb database.
-
- Parameters:
- doc_id (UUID): The unique identifier for the document.
- snippet_indices (list[int]): Indices of the snippets in the document.
- embeddings (list[list[float]]): Embedding vectors for the snippets.
- """
-
- doc_id = str(doc_id)
-
- # Ensure the number of snippet indices matches the number of embeddings.
- assert len(snippet_indices) == len(embeddings)
- assert all(len(embedding) == embedding_size for embedding in embeddings)
- assert min(snippet_indices) >= 0
-
- # Ensure all embeddings are non-zero.
- assert all(sum(embedding) for embedding in embeddings)
-
- # Create a list of records to update the document snippet embeddings in the database.
- records = [
- {"doc_id": doc_id, "index": snippet_idx, "embedding": embedding}
- for snippet_idx, embedding in zip(snippet_indices, embeddings)
- ]
-
- cols, vals = cozo_process_mutate_data(records)
-
-    # Ensure the given indices cover every snippet stored for the document
-    # (no stored index may exceed the largest provided one).
- check_indices_query = f"""
- ?[index] :=
- *snippets {{
- doc_id: $doc_id,
- index,
- }},
- index > {max(snippet_indices)}
-
- :limit 1
- :assert none
- """
-
- # Define the datalog query for updating document snippet embeddings in the database.
- embed_query = f"""
- ?[{cols}] <- $vals
-
- :update snippets {{ {cols} }}
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- check_indices_query,
- embed_query,
- ]
-
- return (queries, {"vals": vals, "doc_id": doc_id})
diff --git a/agents-api/agents_api/models/docs/get_doc.py b/agents-api/agents_api/models/docs/get_doc.py
deleted file mode 100644
index d47cc80a8..000000000
--- a/agents-api/agents_api/models/docs/get_doc.py
+++ /dev/null
@@ -1,103 +0,0 @@
-"""Module for retrieving document snippets from the CozoDB based on document IDs."""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Doc
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- lambda e: isinstance(e, AssertionError)
- and "Expected one result" in repr(e): partialclass(
- HTTPException, status_code=404
- ),
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- Doc,
- one=True,
- transform=lambda d: {
- "content": [s[1] for s in sorted(d["snippet_data"], key=lambda x: x[0])],
- "embeddings": [
- s[2]
- for s in sorted(d["snippet_data"], key=lambda x: x[0])
- if s[2] is not None
- ],
- **d,
- },
-)
-@cozo_query
-@beartype
-def get_doc(
- *,
- developer_id: UUID,
- doc_id: UUID,
-) -> tuple[list[str], dict]:
- """
- Retrieves snippets of documents by their ID from the CozoDB.
-
-    Parameters:
-        developer_id (UUID): The unique identifier of the developer.
-        doc_id (UUID): The unique identifier of the document.
-
-    Returns:
-        Doc: The document along with its snippets and related metadata.
-    """
-
- doc_id = str(doc_id)
-
- get_query = """
- input[doc_id] <- [[to_uuid($doc_id)]]
- snippets[collect(snippet_data)] :=
- input[doc_id],
- *snippets {
- doc_id,
- index,
- content,
- embedding,
- },
- snippet_data = [index, content, embedding]
-
- ?[
- id,
- title,
- snippet_data,
- created_at,
- metadata,
- ] := input[id],
- *docs {
- doc_id: id,
- title,
- created_at,
- metadata,
- },
- snippets[snippet_data]
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- get_query,
- ]
-
- return (queries, {"doc_id": doc_id})
diff --git a/agents-api/agents_api/models/docs/list_docs.py b/agents-api/agents_api/models/docs/list_docs.py
deleted file mode 100644
index dd389d58c..000000000
--- a/agents-api/agents_api/models/docs/list_docs.py
+++ /dev/null
@@ -1,141 +0,0 @@
-"""This module contains functions for querying document-related data from the 'cozodb' database using datalog queries."""
-
-import json
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Doc
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- Doc,
- transform=lambda d: {
- "content": [s[1] for s in sorted(d["snippet_data"], key=lambda x: x[0])],
- "embeddings": [
- s[2]
- for s in sorted(d["snippet_data"], key=lambda x: x[0])
- if s[2] is not None
- ],
- **d,
- },
-)
-@cozo_query
-@beartype
-def list_docs(
- *,
- developer_id: UUID,
- owner_type: Literal["user", "agent"],
- owner_id: UUID,
- limit: int = 100,
- offset: int = 0,
- sort_by: Literal["created_at"] = "created_at",
- direction: Literal["asc", "desc"] = "desc",
- metadata_filter: dict[str, Any] = {},
- include_without_embeddings: bool = False,
-) -> tuple[list[str], dict]:
- """
- Constructs and returns a datalog query for listing documents and their associated information snippets.
-
- Parameters:
- developer_id (UUID): The unique identifier of the developer associated with the documents.
- owner_id (UUID): The unique identifier of the owner (user or agent) associated with the documents.
- owner_type (Literal["user", "agent"]): The type of owner associated with the documents.
- limit (int): The maximum number of documents to return.
- offset (int): The number of documents to skip before returning the results.
- sort_by (Literal["created_at"]): The field to sort the documents by.
- direction (Literal["asc", "desc"]): The direction to sort the documents in.
- metadata_filter (dict): A dictionary of metadata filters to apply to the documents.
- include_without_embeddings (bool): Whether to include documents without embeddings in the results.
-
-    Returns:
-        list[Doc]: The matching documents with their snippets.
- """
-
- # Transforms the metadata_filter dictionary into a string representation for the datalog query.
- metadata_filter_str = ", ".join(
- [
- f"metadata->{json.dumps(k)} == {json.dumps(v)}"
- for k, v in metadata_filter.items()
- ]
- )
-
- owner_id = str(owner_id)
- sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
-
- get_query = f"""
- snippets[id, collect(snippet_data)] :=
- *snippets {{
- doc_id: id,
- index,
- content,
- embedding,
- }},
- {"" if include_without_embeddings else "not is_null(embedding),"}
- snippet_data = [index, content, embedding]
-
- ?[
- owner_type,
- id,
- title,
- snippet_data,
- created_at,
- metadata,
- ] :=
- owner_type = $owner_type,
- owner_id = to_uuid($owner_id),
- *docs {{
- owner_type,
- owner_id,
- doc_id: id,
- title,
- created_at,
- metadata,
- }},
- snippets[id, snippet_data],
- {metadata_filter_str}
-
- :limit $limit
- :offset $offset
- :sort {sort}
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, f"{owner_type}s", **{f"{owner_type}_id": owner_id}
- ),
- get_query,
- ]
-
- return (
- queries,
- {
- "owner_id": owner_id,
- "owner_type": owner_type,
- "limit": limit,
- "offset": offset,
- },
- )
diff --git a/agents-api/agents_api/models/docs/mmr.py b/agents-api/agents_api/models/docs/mmr.py
deleted file mode 100644
index d214e8c04..000000000
--- a/agents-api/agents_api/models/docs/mmr.py
+++ /dev/null
@@ -1,109 +0,0 @@
-from __future__ import annotations
-
-import logging
-from typing import Union
-
-import numpy as np
-
-Matrix = Union[list[list[float]], list[np.ndarray], np.ndarray]
-
-logger = logging.getLogger(__name__)
-
-
-def _cosine_similarity(x: Matrix, y: Matrix) -> np.ndarray:
- """Row-wise cosine similarity between two equal-width matrices.
-
- Args:
- x: A matrix of shape (n, m).
- y: A matrix of shape (k, m).
-
- Returns:
- A matrix of shape (n, k) where each element (i, j) is the cosine similarity
- between the ith row of X and the jth row of Y.
-
-    Raises:
-        ValueError: If the number of columns in X and Y are not the same.
- """
-
- if len(x) == 0 or len(y) == 0:
- return np.array([])
-
- x = [xx for xx in x if xx is not None]
- y = [yy for yy in y if yy is not None]
-
- x = np.array(x)
- y = np.array(y)
- if x.shape[1] != y.shape[1]:
- msg = (
- f"Number of columns in X and Y must be the same. X has shape {x.shape} "
- f"and Y has shape {y.shape}."
- )
- raise ValueError(msg)
- try:
- import simsimd as simd # type: ignore
-
- x = np.array(x, dtype=np.float32)
- y = np.array(y, dtype=np.float32)
- z = 1 - np.array(simd.cdist(x, y, metric="cosine"))
- return z
- except ImportError:
- logger.debug(
- "Unable to import simsimd, defaulting to NumPy implementation. If you want "
- "to use simsimd please install with `pip install simsimd`."
- )
- x_norm = np.linalg.norm(x, axis=1)
- y_norm = np.linalg.norm(y, axis=1)
- # Ignore divide by zero errors run time warnings as those are handled below.
- with np.errstate(divide="ignore", invalid="ignore"):
- similarity = np.dot(x, y.T) / np.outer(x_norm, y_norm)
- similarity[np.isnan(similarity) | np.isinf(similarity)] = 0.0
- return similarity
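-
-# Illustrative check (hypothetical values): for x = [[1.0, 0.0]] and
-# y = [[1.0, 0.0], [0.0, 1.0]], _cosine_similarity(x, y) -> [[1.0, 0.0]].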
-
-
-def maximal_marginal_relevance(
- query_embedding: np.ndarray,
- embedding_list: list,
- lambda_mult: float = 0.5,
- k: int = 4,
-) -> list[int]:
- """Calculate maximal marginal relevance.
-
- Args:
- query_embedding: The query embedding.
- embedding_list: A list of embeddings.
- lambda_mult: The lambda parameter for MMR. Default is 0.5.
- k: The number of embeddings to return. Default is 4.
-
- Returns:
- A list of indices of the embeddings to return.
- """
-
- if min(k, len(embedding_list)) <= 0:
- return []
- if query_embedding.ndim == 1:
- query_embedding = np.expand_dims(query_embedding, axis=0)
- similarity_to_query = _cosine_similarity(query_embedding, embedding_list)[0]
- most_similar = int(np.argmax(similarity_to_query))
- idxs = [most_similar]
- selected = np.array([embedding_list[most_similar]])
- while len(idxs) < min(k, len(embedding_list)):
- best_score = -np.inf
- idx_to_add = -1
- similarity_to_selected = _cosine_similarity(embedding_list, selected)
- for i, query_score in enumerate(similarity_to_query):
- if i in idxs:
- continue
- redundant_score = max(similarity_to_selected[i])
- equation_score = (
- lambda_mult * query_score - (1 - lambda_mult) * redundant_score
- )
- if equation_score > best_score:
- best_score = equation_score
- idx_to_add = i
- idxs.append(idx_to_add)
- selected = np.append(selected, [embedding_list[idx_to_add]], axis=0)
- return idxs
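-
-# Illustrative usage (hypothetical values): with lambda_mult = 0.4 the second
-# pick favors diversity over redundancy:
-#
-#     import numpy as np
-#     query = np.array([1.0, 0.0])
-#     docs = [np.array([1.0, 0.0]), np.array([0.95, 0.05]), np.array([0.0, 1.0])]
-#     maximal_marginal_relevance(query, docs, lambda_mult=0.4, k=2)  # -> [0, 2]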
diff --git a/agents-api/agents_api/models/docs/search_docs_by_embedding.py b/agents-api/agents_api/models/docs/search_docs_by_embedding.py
deleted file mode 100644
index 992e12f9d..000000000
--- a/agents-api/agents_api/models/docs/search_docs_by_embedding.py
+++ /dev/null
@@ -1,369 +0,0 @@
-"""This module contains functions for searching documents in the CozoDB based on embedding queries."""
-
-import json
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import DocReference
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- DocReference,
- transform=lambda d: {
- "owner": {
- "id": d["owner_id"],
- "role": d["owner_type"],
- },
- "metadata": d.get("metadata", {}),
- **d,
- },
-)
-@cozo_query
-@beartype
-def search_docs_by_embedding(
- *,
- developer_id: UUID,
- owners: list[tuple[Literal["user", "agent"], UUID]],
- query_embedding: list[float],
- k: int = 3,
- confidence: float = 0.5,
- ef: int = 50,
- embedding_size: int = 1024,
- ann_threshold: int = 1_000_000,
- metadata_filter: dict[str, Any] = {},
-) -> tuple[str, dict]:
- """
- Searches for document snippets in CozoDB by embedding query.
-
-    Parameters:
-        developer_id (UUID): The unique identifier of the developer.
-        owners (list[tuple[Literal["user", "agent"], UUID]]): The owners (type, id) whose documents to search.
-        query_embedding (list[float]): The embedding vector of the query.
-        k (int, optional): The number of nearest neighbors to retrieve. Defaults to 3.
-        confidence (float, optional): The confidence threshold for filtering results. Defaults to 0.5.
-        ef (int, optional): The search expansion factor for the ANN index. Defaults to 50.
-        embedding_size (int): Embedding vector length.
-        ann_threshold (int, optional): Document count above which ANN search is used instead of an exact scan.
-        metadata_filter (dict[str, Any]): Dictionary to filter documents based on metadata.
-    """
-
- assert len(query_embedding) == embedding_size
- assert sum(query_embedding)
-
- metadata_filter_str = ", ".join(
- [
- f"metadata->{json.dumps(k)} == {json.dumps(v)}"
- for k, v in metadata_filter.items()
- ]
- )
-
- owners: list[list[str]] = [
- [owner_type, str(owner_id)] for owner_type, owner_id in owners
- ]
-
- # Calculate the search radius based on confidence level
- radius: float = 1.0 - confidence
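-    # e.g. confidence = 0.9 -> radius = 0.1: only snippets whose cosine
-    # distance to the query is at most 0.1 are returned.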
-
- determine_knn_ann_query = f"""
- owners[owner_type, owner_id] <- $owners
- snippet_counter[count(item)] :=
- owners[owner_type, owner_id_str],
- owner_id = to_uuid(owner_id_str),
- *docs {{
- owner_type,
- owner_id,
- doc_id: item,
- metadata,
- }}
- {', ' + metadata_filter_str if metadata_filter_str.strip() else ''}
-
- ?[use_ann] :=
- snippet_counter[count],
- count > {ann_threshold},
- use_ann = true
-
- :limit 1
- :create _determine_knn_ann {{
- use_ann
- }}
- """
-
- # Construct the datalog query for searching document snippets
- search_query = f"""
- # %debug _determine_knn_ann
- %if {{
- ?[use_ann] := *_determine_knn_ann{{ use_ann }}
- }}
-
- %then {{
- owners[owner_type, owner_id] <- $owners
- input[
- owner_type,
- owner_id,
- query_embedding,
- ] :=
- owners[owner_type, owner_id_str],
- owner_id = to_uuid(owner_id_str),
- query_embedding = vec($query_embedding)
-
- # Search for documents by owner
- ?[
- doc_id,
- index,
- title,
- content,
- distance,
- embedding,
- metadata,
- ] :=
- # Get input values
- input[owner_type, owner_id, query],
-
- # Restrict the search to all documents that match the owner
- *docs {{
- owner_type,
- owner_id,
- doc_id,
- title,
- metadata,
- }},
-
- # Search for snippets in the embedding space
- ~snippets:embedding_space {{
- doc_id,
- index,
- content
- |
- query: query,
- k: {k},
- ef: {ef},
- radius: {radius},
- bind_distance: distance,
- bind_vector: embedding,
- }}
-
- :sort distance
- :limit {k}
-
- :create _search_result {{
- doc_id,
- index,
- title,
- content,
- distance,
- embedding,
- metadata,
- }}
- }}
-
- %else {{
- owners[owner_type, owner_id] <- $owners
- input[
- owner_type,
- owner_id,
- query_embedding,
- ] :=
- owners[owner_type, owner_id_str],
- owner_id = to_uuid(owner_id_str),
- query_embedding = vec($query_embedding)
-
- # Search for documents by owner
- ?[
- doc_id,
- index,
- title,
- content,
- distance,
- embedding,
- metadata,
- ] :=
- # Get input values
- input[owner_type, owner_id, query],
-
- # Restrict the search to all documents that match the owner
- *docs {{
- owner_type,
- owner_id,
- doc_id,
- title,
- metadata,
- }},
-
- # Search for snippets in the embedding space
- *snippets {{
- doc_id,
- index,
- content,
- embedding,
- }},
- !is_null(embedding),
- distance = cos_dist(query, embedding),
- distance <= {radius}
-
- :sort distance
- :limit {k}
-
- :create _search_result {{
- doc_id,
- index,
- title,
- content,
- distance,
- embedding,
- metadata,
- }}
- }}
- %end
- """
-
- normal_interim_query = f"""
- owners[owner_type, owner_id] <- $owners
-
- ?[
- owner_type,
- owner_id,
- doc_id,
- snippet_data,
- distance,
- title,
- embedding,
- metadata,
- ] :=
- owners[owner_type, owner_id_str],
- owner_id = to_uuid(owner_id_str),
- *_search_result{{ doc_id, index, title, content, distance, embedding, metadata }},
- snippet_data = [index, content]
-
- :sort distance
- :limit {k}
-
- :create _interim {{
- owner_type,
- owner_id,
- doc_id,
- snippet_data,
- distance,
- title,
- embedding,
- metadata,
- }}
- """
-
- collect_query = """
- n[
- doc_id,
- owner_type,
- owner_id,
- unique(snippet_data),
- distance,
- title,
- embedding,
- metadata,
- ] :=
- *_interim {
- owner_type,
- owner_id,
- doc_id,
- snippet_data,
- distance,
- title,
- embedding,
- metadata,
- }
-
- m[
- doc_id,
- owner_type,
- owner_id,
- snippet,
- distance,
- title,
- metadata,
- ] :=
- n[
- doc_id,
- owner_type,
- owner_id,
- snippet_data,
- distance,
- title,
- embedding,
- metadata,
- ],
- snippet = {
- "index": snippet_datum->0,
- "content": snippet_datum->1,
- "embedding": embedding,
- },
- snippet_datum in snippet_data
-
- ?[
- id,
- owner_type,
- owner_id,
- snippet,
- distance,
- title,
- metadata,
- ] := m[
- id,
- owner_type,
- owner_id,
- snippet,
- distance,
- title,
- metadata,
- ]
-
- :sort distance
- """
-
- verify_query = "}\n\n{".join(
- [
- verify_developer_id_query(developer_id),
- *[
- verify_developer_owns_resource_query(
- developer_id, f"{owner_type}s", **{f"{owner_type}_id": owner_id}
- )
- for owner_type, owner_id in owners
- ],
- ]
- )
-
- query = f"""
- {{ {verify_query} }}
- {{ {determine_knn_ann_query} }}
- {search_query}
- {{ {normal_interim_query} }}
- {{ {collect_query} }}
- """
-
- return (
- query,
- {
- "owners": owners,
- "query_embedding": query_embedding,
- },
- )
diff --git a/agents-api/agents_api/models/docs/search_docs_by_text.py b/agents-api/agents_api/models/docs/search_docs_by_text.py
deleted file mode 100644
index ac1a9f54f..000000000
--- a/agents-api/agents_api/models/docs/search_docs_by_text.py
+++ /dev/null
@@ -1,206 +0,0 @@
-"""This module contains functions for searching documents in the CozoDB based on embedding queries."""
-
-import json
-import re
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import DocReference
-from ...common.nlp import paragraph_to_custom_queries
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- DocReference,
- transform=lambda d: {
- "owner": {
- "id": d["owner_id"],
- "role": d["owner_type"],
- },
- "metadata": d.get("metadata", {}),
- **d,
- },
-)
-@cozo_query
-@beartype
-def search_docs_by_text(
- *,
- developer_id: UUID,
- owners: list[tuple[Literal["user", "agent"], UUID]],
- query: str,
- k: int = 3,
- metadata_filter: dict[str, Any] = {},
-) -> tuple[list[str], dict]:
- """
- Searches for document snippets in CozoDB by embedding query.
-
- Parameters:
- owners (list[tuple[Literal["user", "agent"], UUID]]): The type of the owner of the documents.
- query (str): The query string.
- k (int, optional): The number of nearest neighbors to retrieve. Defaults to 3.
- metadata_filter (dict[str, Any]): Dictionary to filter agents based on metadata.
- """
- metadata_filter_str = ", ".join(
- [
- f"metadata->{json.dumps(k)} == {json.dumps(v)}"
- for k, v in metadata_filter.items()
- ]
- )
-
- owners: list[list[str]] = [
- [owner_type, str(owner_id)] for owner_type, owner_id in owners
- ]
-
- # See: https://docs.cozodb.org/en/latest/vector.html#full-text-search-fts
- fts_queries = paragraph_to_custom_queries(query) or [
- re.sub(r"[^\w\s\-_]+", "", query)
- ]
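-    # Illustrative (hypothetical input): if paragraph_to_custom_queries yields
-    # nothing, a query like 'what is "semantic search"?' falls back to the
-    # sanitized string 'what is semantic search'.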
-
- # Construct the datalog query for searching document snippets
- search_query = f"""
- owners[owner_type, owner_id] <- $owners
- input[
- owner_type,
- owner_id,
- ] :=
- owners[owner_type, owner_id_str],
- owner_id = to_uuid(owner_id_str)
-
- candidate[doc_id] :=
- input[owner_type, owner_id],
- *docs {{
- owner_type,
- owner_id,
- doc_id,
- metadata,
- }}
- {', ' + metadata_filter_str if metadata_filter_str.strip() else ''}
-
- # search_result[
- # doc_id,
- # snippet_data,
- # distance,
- # ] :=
- # candidate[doc_id],
- # ~snippets:lsh {{
- # doc_id,
- # index,
- # content
- # |
- # query: $query,
- # k: {k},
- # }},
- # distance = 10000000, # Very large distance to depict no valid distance
- # snippet_data = [index, content]
-
- search_result[
- doc_id,
- snippet_data,
- distance,
- ] :=
- candidate[doc_id],
- ~snippets:fts {{
- doc_id,
- index,
- content
- |
- query: query,
- k: {k},
- score_kind: 'tf_idf',
- bind_score: score,
- }},
- query in $fts_queries,
- distance = -score,
- snippet_data = [index, content]
-
- m[
- doc_id,
- snippet,
- distance,
- title,
- owner_type,
- owner_id,
- metadata,
- ] :=
- candidate[doc_id],
- *docs {{
- owner_type,
- owner_id,
- doc_id,
- title,
- metadata,
- }},
- search_result [
- doc_id,
- snippet_data,
- distance,
- ],
- snippet = {{
- "index": snippet_data->0,
- "content": snippet_data->1,
- }}
-
-
- ?[
- id,
- owner_type,
- owner_id,
- snippet,
- distance,
- title,
- metadata,
- ] :=
- candidate[id],
- input[owner_type, owner_id],
- m[
- id,
- snippet,
- distance,
- title,
- owner_type,
- owner_id,
- metadata,
- ]
-
- # Sort the results by distance to find the closest matches
- :sort distance
- :limit {k}
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- *[
- verify_developer_owns_resource_query(
- developer_id, f"{owner_type}s", **{f"{owner_type}_id": owner_id}
- )
- for owner_type, owner_id in owners
- ],
- search_query,
- ]
-
- return (
- queries,
- {"owners": owners, "query": query, "fts_queries": fts_queries},
- )
diff --git a/agents-api/agents_api/models/docs/search_docs_hybrid.py b/agents-api/agents_api/models/docs/search_docs_hybrid.py
deleted file mode 100644
index c43f8c97b..000000000
--- a/agents-api/agents_api/models/docs/search_docs_hybrid.py
+++ /dev/null
@@ -1,138 +0,0 @@
-"""This module contains functions for searching documents in the CozoDB based on embedding queries."""
-
-from statistics import mean, stdev
-from typing import Any, Literal
-from uuid import UUID
-
-from beartype import beartype
-
-from ...autogen.openapi_model import DocReference
-from ..utils import run_concurrently
-from .search_docs_by_embedding import search_docs_by_embedding
-from .search_docs_by_text import search_docs_by_text
-
-
-# Distribution based score normalization
-# https://medium.com/plain-simple-software/distribution-based-score-fusion-dbsf-a-new-approach-to-vector-search-ranking-f87c37488b18
-def dbsf_normalize(scores: list[float]) -> list[float]:
- """
- Scores scaled using minmax scaler with our custom feature range
- (extremes indicated as 3 standard deviations from the mean)
- """
- if len(scores) < 2:
- return scores
-
- sd = stdev(scores)
- if sd == 0:
- return scores
-
- m = mean(scores)
- m3d = 3 * sd + m
- m_3d = m - 3 * sd
-
- return [(s - m_3d) / (m3d - m_3d) for s in scores]
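-
-# Illustrative worked example (hypothetical values): for scores [0.0, 0.5, 1.0],
-# mean = 0.5 and stdev = 0.5, so the feature range is [-1.0, 2.0] and each
-# score maps to (s + 1.0) / 3.0, giving [0.33, 0.5, 0.67].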
-
-
-def dbsf_fuse(
- text_results: list[DocReference],
- embedding_results: list[DocReference],
- alpha: float = 0.7, # Weight of the embedding search results (this is a good default)
-) -> list[DocReference]:
- """
- Weighted reciprocal-rank fusion of text and embedding search results
- """
- all_docs = {doc.id: doc for doc in text_results + embedding_results}
-
- text_scores: dict[UUID, float] = {
- doc.id: -(doc.distance or 0.0) for doc in text_results
- }
-
- # Because these are cosine distances, we need to invert them
- embedding_scores: dict[UUID, float] = {
- doc.id: 1.0 - doc.distance for doc in embedding_results
- }
-
- # normalize the scores
- text_scores_normalized = dbsf_normalize(list(text_scores.values()))
- text_scores = {
- doc_id: score
- for doc_id, score in zip(text_scores.keys(), text_scores_normalized)
- }
-
- embedding_scores_normalized = dbsf_normalize(list(embedding_scores.values()))
- embedding_scores = {
- doc_id: score
- for doc_id, score in zip(embedding_scores.keys(), embedding_scores_normalized)
- }
-
- # Combine the scores
- text_weight: float = 1 - alpha
- embedding_weight: float = alpha
-
- combined_scores = []
-
- for id in all_docs.keys():
- text_score = text_weight * text_scores.get(id, 0)
- embedding_score = embedding_weight * embedding_scores.get(id, 0)
-
- combined_scores.append((id, text_score + embedding_score))
-
- # Sort by the combined score
- combined_scores = sorted(combined_scores, key=lambda x: x[1], reverse=True)
-
- # Rank the results
- ranked_results = []
- for id, score in combined_scores:
- doc = all_docs[id].model_copy()
- doc.distance = 1.0 - score
- ranked_results.append(doc)
-
- return ranked_results
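-
-# Illustrative (hypothetical values): with alpha = 0.7, a doc found only by
-# text search keeps 30% of its normalized score, one found only by embedding
-# search keeps 70%, and a doc found by both sums the two weighted scores
-# before re-ranking.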
-
-
-@beartype
-def search_docs_hybrid(
- *,
- developer_id: UUID,
- owners: list[tuple[Literal["user", "agent"], UUID]],
- query: str,
- query_embedding: list[float],
- k: int = 3,
- alpha: float = 0.7, # Weight of the embedding search results (this is a good default)
- embed_search_options: dict = {},
- text_search_options: dict = {},
- metadata_filter: dict[str, Any] = {},
-) -> list[DocReference]:
- # Parallelize the text and embedding search queries
- fns = [
- search_docs_by_text if bool(query.strip()) else lambda: [],
- search_docs_by_embedding if bool(sum(query_embedding)) else lambda: [],
- ]
-
- kwargs_list = [
- {
- "developer_id": developer_id,
- "owners": owners,
- "query": query,
- "k": k,
- "metadata_filter": metadata_filter,
- **text_search_options,
- }
- if bool(query.strip())
- else {},
- {
- "developer_id": developer_id,
- "owners": owners,
- "query_embedding": query_embedding,
- "k": k,
- "metadata_filter": metadata_filter,
- **embed_search_options,
- }
- if bool(sum(query_embedding))
- else {},
- ]
-
- results = run_concurrently(fns, kwargs_list=kwargs_list)
- text_results, embedding_results = results
-
- return dbsf_fuse(text_results, embedding_results, alpha)[:k]
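-
-# Illustrative usage (hypothetical sketch; dev_id, agent_id, and vec are
-# placeholder values, and vec must match the stored embedding size):
-#
-#     refs = search_docs_hybrid(
-#         developer_id=dev_id,
-#         owners=[("agent", agent_id)],
-#         query="vacation policy",
-#         query_embedding=vec,
-#         k=3,
-#         alpha=0.7,  # 0 = text only, 1 = embeddings only
-#     )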
diff --git a/agents-api/agents_api/models/entry/__init__.py b/agents-api/agents_api/models/entry/__init__.py
deleted file mode 100644
index 32231c364..000000000
--- a/agents-api/agents_api/models/entry/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-"""
-The `entry` module is responsible for managing entries related to agents' activities and interactions within the 'cozodb' database. It provides a comprehensive set of functionalities for adding, deleting, summarizing, and retrieving entries, as well as processing them to retrieve memory context based on embeddings.
-
-Key functionalities include:
-- Adding entries to the database.
-- Deleting entries from the database based on session IDs.
-- Summarizing entries and managing their relationships.
-- Retrieving entries from the database, including top-level entries and entries based on session IDs.
-- Processing entries to retrieve memory context based on embeddings.
-
-The module utilizes pandas DataFrames for handling query results and integrates with the CozoClient for database operations, ensuring efficient and effective management of entries.
-"""
-
-# ruff: noqa: F401, F403, F405
-
-from .create_entries import create_entries
-from .delete_entries import delete_entries
-from .get_history import get_history
-from .list_entries import list_entries
diff --git a/agents-api/agents_api/models/entry/create_entries.py b/agents-api/agents_api/models/entry/create_entries.py
deleted file mode 100644
index 140a5696b..000000000
--- a/agents-api/agents_api/models/entry/create_entries.py
+++ /dev/null
@@ -1,128 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import CreateEntryRequest, Entry, Relation
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...common.utils.datetime import utcnow
-from ...common.utils.messages import content_to_json
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- mark_session_updated_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- Entry,
- transform=lambda d: {
- "id": UUID(d.pop("entry_id")),
- **d,
- },
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("create_entries")
-@beartype
-def create_entries(
- *,
- developer_id: UUID,
- session_id: UUID,
- data: list[CreateEntryRequest],
- mark_session_as_updated: bool = True,
-) -> tuple[list[str], dict]:
- developer_id = str(developer_id)
- session_id = str(session_id)
-
- data_dicts = [item.model_dump(mode="json") for item in data]
-
- for item in data_dicts:
- item["content"] = content_to_json(item["content"] or [])
- item["session_id"] = session_id
- item["entry_id"] = item.pop("id", None) or str(uuid7())
- item["created_at"] = (item.get("created_at") or utcnow()).timestamp()
-
- cols, rows = cozo_process_mutate_data(data_dicts)
-
- # Construct a datalog query to insert the processed entries into the 'cozodb' database.
- # Refer to the schema for the 'entries' relation in the README.md for column names and types.
- create_query = f"""
- ?[{cols}] <- $rows
-
- :insert entries {{
- {cols}
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- mark_session_updated_query(developer_id, session_id)
- if mark_session_as_updated
- else "",
- create_query,
- ]
-
- return (queries, {"rows": rows})
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(Relation, _kind="inserted")
-@cozo_query
-@beartype
-def add_entry_relations(
- *,
- developer_id: UUID,
- data: list[Relation],
-) -> tuple[list[str], dict]:
- developer_id = str(developer_id)
-
- data_dicts = [item.model_dump(mode="json") for item in data]
- cols, rows = cozo_process_mutate_data(data_dicts)
-
- create_query = f"""
- ?[{cols}] <- $rows
-
- :insert relations {{
- {cols}
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- create_query,
- ]
-
- return (queries, {"rows": rows})
diff --git a/agents-api/agents_api/models/entry/delete_entries.py b/agents-api/agents_api/models/entry/delete_entries.py
deleted file mode 100644
index c98b6c7d2..000000000
--- a/agents-api/agents_api/models/entry/delete_entries.py
+++ /dev/null
@@ -1,153 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
-from ..utils import (
- cozo_query,
- mark_session_updated_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- IndexError: partialclass(HTTPException, status_code=404),
- }
-)
-@wrap_in_class(
- ResourceDeletedResponse,
- one=True,
- transform=lambda d: {
- "id": UUID(d.pop("session_id")), # Only return session cleared
- "deleted_at": utcnow(),
- "jobs": [],
- },
- _kind="deleted",
-)
-@cozo_query
-@beartype
-def delete_entries_for_session(
- *, developer_id: UUID, session_id: UUID, mark_session_as_updated: bool = True
-) -> tuple[list[str], dict]:
- """
- Constructs and returns a datalog query for deleting entries associated with a given session ID from the 'cozodb' database.
-
- Parameters:
- session_id (UUID): The unique identifier of the session whose entries are to be deleted.
- """
-
- delete_query = """
- input[session_id] <- [[
- to_uuid($session_id),
- ]]
-
- ?[
- session_id,
- entry_id,
- source,
- role,
- ] := input[session_id],
- *entries{
- session_id,
- entry_id,
- source,
- role,
- }
-
- :delete entries {
- session_id,
- entry_id,
- source,
- role,
- }
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- mark_session_updated_query(developer_id, session_id)
- if mark_session_as_updated
- else "",
- delete_query,
- ]
-
- return (queries, {"session_id": str(session_id)})
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceDeletedResponse,
- transform=lambda d: {
- "id": UUID(d.pop("entry_id")),
- "deleted_at": utcnow(),
- "jobs": [],
- },
-)
-@cozo_query
-@beartype
-def delete_entries(
- *, developer_id: UUID, session_id: UUID, entry_ids: list[UUID]
-) -> tuple[list[str], dict]:
- delete_query = """
- input[entry_id_str] <- $entry_ids
-
- ?[
- entry_id,
- session_id,
- source,
- role,
- ] :=
- input[entry_id_str],
- entry_id = to_uuid(entry_id_str),
- *entries {
- session_id,
- entry_id,
- source,
- role,
- }
-
- :delete entries {
- session_id,
- entry_id,
- source,
- role,
- }
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- delete_query,
- ]
-
- return (queries, {"entry_ids": [[str(id)] for id in entry_ids]})
diff --git a/agents-api/agents_api/models/entry/get_history.py b/agents-api/agents_api/models/entry/get_history.py
deleted file mode 100644
index bb12b1c5b..000000000
--- a/agents-api/agents_api/models/entry/get_history.py
+++ /dev/null
@@ -1,150 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import History
-from ...common.utils.cozo import uuid_int_list_to_uuid as fix_uuid
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- History,
- one=True,
- transform=lambda d: {
- "relations": [
- {
- # This is needed because cozo has a bug:
- # https://github.com/cozodb/cozo/issues/269
- "head": fix_uuid(r["head"]),
- "relation": r["relation"],
- "tail": fix_uuid(r["tail"]),
- }
- for r in d.pop("relations")
- ],
- # TODO: Remove this once we sort the entries in the cozo query
- # Sort entries by created_at
- "entries": sorted(d.pop("entries"), key=lambda entry: entry["created_at"]),
- **d,
- },
-)
-@cozo_query
-@beartype
-def get_history(
- *,
- developer_id: UUID,
- session_id: UUID,
- allowed_sources: list[str] = ["api_request", "api_response"],
-) -> tuple[list[str], dict]:
- developer_id = str(developer_id)
- session_id = str(session_id)
-
- history_query = """
- session_entries[collect(entry)] :=
- *entries {
- session_id,
- entry_id,
- role,
- name,
- content,
- source,
- token_count,
- tokenizer,
- created_at,
- tool_calls,
- timestamp,
- tool_call_id,
- },
- source in $allowed_sources,
- session_id = to_uuid($session_id),
- entry = {
- "session_id": session_id,
- "id": entry_id,
- "role": role,
- "name": name,
- "content": content,
- "source": source,
- "token_count": token_count,
- "tokenizer": tokenizer,
- "created_at": created_at,
- "timestamp": timestamp,
- "tool_calls": tool_calls,
- "tool_call_id": tool_call_id,
- }
-
- session_relations[unique(item)] :=
- session_id = to_uuid($session_id),
- *entries {
- session_id,
- entry_id: head
- },
-
- *relations {
- head,
- relation,
- tail
- },
-
- item = {
- "head": head,
- "relation": relation,
- "tail": tail
- }
-
- session_relations[unique(item)] :=
- session_id = to_uuid($session_id),
- *entries {
- session_id,
- entry_id: tail
- },
-
- *relations {
- head,
- relation,
- tail
- },
-
- item = {
- "head": head,
- "relation": relation,
- "tail": tail
- }
-
- ?[entries, relations, session_id, created_at] :=
- session_entries[entries],
- session_relations[relations],
- session_id = to_uuid($session_id),
- created_at = now()
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- history_query,
- ]
-
- return (queries, {"session_id": session_id, "allowed_sources": allowed_sources})
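The transform above reshapes each raw result row before History validates it; note the only ordering guarantee comes from the explicit Python-side sort, not the query. A small sketch of that step with made-up entry dicts:

entries = [
    {"id": "b", "created_at": 2.0},
    {"id": "a", "created_at": 1.0},
]
# Mirrors the transform: order by created_at since the query itself doesn't sort.
entries = sorted(entries, key=lambda entry: entry["created_at"])
assert [e["id"] for e in entries] == ["a", "b"]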
diff --git a/agents-api/agents_api/models/entry/list_entries.py b/agents-api/agents_api/models/entry/list_entries.py
deleted file mode 100644
index d3081a9b0..000000000
--- a/agents-api/agents_api/models/entry/list_entries.py
+++ /dev/null
@@ -1,112 +0,0 @@
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Entry
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(Entry)
-@cozo_query
-@beartype
-def list_entries(
- *,
- developer_id: UUID,
- session_id: UUID,
- allowed_sources: list[str] = ["api_request", "api_response"],
- limit: int = -1,
- offset: int = 0,
- sort_by: Literal["created_at", "timestamp"] = "timestamp",
- direction: Literal["asc", "desc"] = "asc",
- exclude_relations: list[str] = [],
-) -> tuple[list[str], dict]:
- """
- Constructs and executes a query to retrieve entries from the 'cozodb' database.
- """
-
- developer_id = str(developer_id)
- session_id = str(session_id)
-
- sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
-
- exclude_relations_query = """
- not *relations {
- relation,
- tail: id,
- },
- relation in $exclude_relations,
- # !is_in(relation, $exclude_relations),
- """
-
- list_query = f"""
- ?[
- session_id,
- id,
- role,
- name,
- content,
- source,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- ] := *entries {{
- session_id,
- entry_id: id,
- role,
- name,
- content,
- source,
- token_count,
- tokenizer,
- created_at,
- timestamp,
- }},
- {exclude_relations_query if exclude_relations else ''}
- source in $allowed_sources,
- session_id = to_uuid($session_id),
-
- :sort {sort}
- """
-
- if limit > 0:
- list_query += f"\n:limit {limit}"
- list_query += f"\n:offset {offset}"
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- list_query,
- ]
-
- return (
- queries,
- {
- "session_id": session_id,
- "allowed_sources": allowed_sources,
- "exclude_relations": exclude_relations,
- },
- )
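A quick worked example of how the sort/limit/offset tail above is assembled (the values are illustrative):

direction, sort_by, limit, offset = "desc", "timestamp", 50, 10
sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
assert sort == "-timestamp"

tail = f":sort {sort}"
if limit > 0:
    tail += f"\n:limit {limit}"
tail += f"\n:offset {offset}"
# tail == ":sort -timestamp\n:limit 50\n:offset 10"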
diff --git a/agents-api/agents_api/models/execution/__init__.py b/agents-api/agents_api/models/execution/__init__.py
deleted file mode 100644
index abd3c7e47..000000000
--- a/agents-api/agents_api/models/execution/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# ruff: noqa: F401, F403, F405
-
-from .count_executions import count_executions
-from .create_execution import create_execution
-from .create_execution_transition import (
- create_execution_transition,
- create_execution_transition_async,
-)
-from .get_execution import get_execution
-from .get_execution_transition import get_execution_transition
-from .list_execution_transitions import list_execution_transitions
-from .list_executions import list_executions
-from .lookup_temporal_data import lookup_temporal_data
-from .prepare_execution_input import prepare_execution_input
-from .update_execution import update_execution
diff --git a/agents-api/agents_api/models/execution/constants.py b/agents-api/agents_api/models/execution/constants.py
deleted file mode 100644
index 8d4568ba2..000000000
--- a/agents-api/agents_api/models/execution/constants.py
+++ /dev/null
@@ -1,5 +0,0 @@
-##########
-# Consts #
-##########
-
-OUTPUT_UNNEST_KEY = "$$e7w_unnest$$"
diff --git a/agents-api/agents_api/models/execution/count_executions.py b/agents-api/agents_api/models/execution/count_executions.py
deleted file mode 100644
index d130f0359..000000000
--- a/agents-api/agents_api/models/execution/count_executions.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(dict, one=True)
-@cozo_query
-@beartype
-def count_executions(
- *,
- developer_id: UUID,
- task_id: UUID,
-) -> tuple[list[str], dict]:
- count_query = """
- input[task_id] <- [[to_uuid($task_id)]]
-
- counter[count(id)] :=
- input[task_id],
- *executions:task_id_execution_id_idx {
- task_id,
- execution_id: id,
- }
-
- ?[count] := counter[count]
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id,
- "tasks",
- task_id=task_id,
- parents=[("agents", "agent_id")],
- ),
- count_query,
- ]
-
- return (queries, {"task_id": str(task_id)})
diff --git a/agents-api/agents_api/models/execution/create_execution.py b/agents-api/agents_api/models/execution/create_execution.py
deleted file mode 100644
index 59efd7ac3..000000000
--- a/agents-api/agents_api/models/execution/create_execution.py
+++ /dev/null
@@ -1,98 +0,0 @@
-from typing import Annotated, Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import CreateExecutionRequest, Execution
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...common.utils.types import dict_like
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-from .constants import OUTPUT_UNNEST_KEY
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- Execution,
- one=True,
- transform=lambda d: {"id": d["execution_id"], **d},
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("create_execution")
-@beartype
-def create_execution(
- *,
- developer_id: UUID,
- task_id: UUID,
- execution_id: UUID | None = None,
- data: Annotated[CreateExecutionRequest | dict, dict_like(CreateExecutionRequest)],
-) -> tuple[list[str], dict]:
- execution_id = execution_id or uuid7()
-
- developer_id = str(developer_id)
- task_id = str(task_id)
- execution_id = str(execution_id)
-
- if isinstance(data, CreateExecutionRequest):
- data.metadata = data.metadata or {}
- execution_data = data.model_dump()
- else:
- data["metadata"] = data.get("metadata", {})
- execution_data = data
-
- if execution_data["output"] is not None and not isinstance(
- execution_data["output"], dict
- ):
- execution_data["output"] = {OUTPUT_UNNEST_KEY: execution_data["output"]}
-
- columns, values = cozo_process_mutate_data(
- {
- **execution_data,
- "task_id": task_id,
- "execution_id": execution_id,
- }
- )
-
- insert_query = f"""
- ?[{columns}] <- $values
-
- :insert executions {{
- {columns}
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id,
- "tasks",
- task_id=task_id,
- parents=[("agents", "agent_id")],
- ),
- insert_query,
- ]
-
- return (queries, {"values": values})
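The OUTPUT_UNNEST_KEY boxing above has a mirror-image unwrap in get_execution and list_executions below. A self-contained sketch of the round trip:

OUTPUT_UNNEST_KEY = "$$e7w_unnest$$"  # from execution/constants.py

def wrap_output(output):
    # Box non-dict outputs so they fit in a dict-typed column.
    if output is not None and not isinstance(output, dict):
        return {OUTPUT_UNNEST_KEY: output}
    return output

def unwrap_output(output):
    # Mirrors the transform used when reading executions back.
    if isinstance(output, dict) and OUTPUT_UNNEST_KEY in output:
        return output[OUTPUT_UNNEST_KEY]
    return output

assert unwrap_output(wrap_output([1, 2, 3])) == [1, 2, 3]
assert unwrap_output(wrap_output({"a": 1})) == {"a": 1}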
diff --git a/agents-api/agents_api/models/execution/create_execution_transition.py b/agents-api/agents_api/models/execution/create_execution_transition.py
deleted file mode 100644
index 5cbcb97bc..000000000
--- a/agents-api/agents_api/models/execution/create_execution_transition.py
+++ /dev/null
@@ -1,259 +0,0 @@
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import (
- CreateTransitionRequest,
- Transition,
- UpdateExecutionRequest,
-)
-from ...common.protocol.tasks import transition_to_execution_status, valid_transitions
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- cozo_query_async,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-from .update_execution import update_execution
-
-
-@beartype
-def _create_execution_transition(
- *,
- developer_id: UUID,
- execution_id: UUID,
- data: CreateTransitionRequest,
- # Only one of these needed
- transition_id: UUID | None = None,
- task_token: str | None = None,
- # Only required for updating the execution status as well
- update_execution_status: bool = False,
- task_id: UUID | None = None,
-) -> tuple[list[str | None], dict]:
- transition_id = transition_id or uuid7()
- data.metadata = data.metadata or {}
- data.execution_id = execution_id
-
- # Dump to json
- if isinstance(data.output, list):
- data.output = [
- item.model_dump(mode="json") if hasattr(item, "model_dump") else item
- for item in data.output
- ]
-
- elif hasattr(data.output, "model_dump"):
- data.output = data.output.model_dump(mode="json")
-
- # TODO: This is a hack to make sure the transition is valid
- # (parallel transitions are whack, we should do something better)
- is_parallel = data.current.workflow.startswith("PAR:")
-
- # Prepare the transition data
- transition_data = data.model_dump(exclude_unset=True, exclude={"id"})
-
- # Parse the current and next targets
- validate_transition_targets(data)
- current_target = transition_data.pop("current")
- next_target = transition_data.pop("next")
-
- transition_data["current"] = (current_target["workflow"], current_target["step"])
- transition_data["next"] = next_target and (
- next_target["workflow"],
- next_target["step"],
- )
-
- columns, transition_values = cozo_process_mutate_data(
- {
- **transition_data,
- "task_token": str(task_token), # Converting to str for JSON serialisation
- "transition_id": str(transition_id),
- "execution_id": str(execution_id),
- }
- )
-
- # Make sure the transition is valid
- check_last_transition_query = f"""
- valid_transition[start, end] <- [
- {", ".join(f'["{start}", "{end}"]' for start, ends in valid_transitions.items() for end in ends)}
- ]
-
- last_transition_type[min_cost(type_created_at)] :=
- *transitions:execution_id_type_created_at_idx {{
- execution_id: to_uuid("{str(execution_id)}"),
- type,
- created_at,
- }},
- type_created_at = [type, -created_at]
-
- matched[collect(last_type)] :=
- last_transition_type[data],
- last_type_data = first(data),
- last_type = if(is_null(last_type_data), "init", last_type_data),
- valid_transition[last_type, $next_type]
-
- ?[valid] :=
- matched[prev_transitions],
- found = length(prev_transitions),
- valid = if($next_type == "init", found == 0, found > 0),
- assert(valid, "Invalid transition"),
-
- :limit 1
- """
-
- # Prepare the insert query
- insert_query = f"""
- ?[{columns}] <- $transition_values
-
- :insert transitions {{
- {columns}
- }}
-
- :returning
- """
-
- validate_status_query, update_execution_query, update_execution_params = (
- "",
- "",
- {},
- )
-
- if update_execution_status:
- assert (
- task_id is not None
- ), "task_id is required for updating the execution status"
-
- # Prepare the execution update query
- [*_, validate_status_query, update_execution_query], update_execution_params = (
- update_execution.__wrapped__(
- developer_id=developer_id,
- task_id=task_id,
- execution_id=execution_id,
- data=UpdateExecutionRequest(
- status=transition_to_execution_status[data.type]
- ),
- output=data.output if data.type != "error" else None,
- error=str(data.output)
- if data.type == "error" and data.output
- else None,
- )
- )
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id,
- "executions",
- execution_id=execution_id,
- parents=[("agents", "agent_id"), ("tasks", "task_id")],
- ),
- validate_status_query if not is_parallel else None,
- update_execution_query if not is_parallel else None,
- check_last_transition_query if not is_parallel else None,
- insert_query,
- ]
-
- return (
- queries,
- {
- "transition_values": transition_values,
- "next_type": data.type,
- "valid_transitions": valid_transitions,
- **update_execution_params,
- },
- )
-
-
-def validate_transition_targets(data: CreateTransitionRequest) -> None:
- # Make sure the current/next targets are valid
- match data.type:
- case "finish_branch":
- pass # TODO: Implement
- case "finish" | "error" | "cancelled":
- pass
-
- ### FIXME: HACK: Fix this and uncomment
-
- ### assert (
- ### data.next is None
- ### ), "Next target must be None for finish/finish_branch/error/cancelled"
-
- case "init_branch" | "init":
- assert (
- data.next and data.current.step == data.next.step == 0
- ), "Next target must be same as current for init_branch/init and step 0"
-
- case "wait":
- assert data.next is None, "Next target must be None for wait"
-
- case "resume" | "step":
- assert data.next is not None, "Next target must be provided for resume/step"
-
- if data.next.workflow == data.current.workflow:
- assert (
- data.next.step > data.current.step
- ), "Next step must be greater than current"
-
- case _:
- raise ValueError(f"Invalid transition type: {data.type}")
-
-
-create_execution_transition = rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)(
- wrap_in_class(
- Transition,
- transform=lambda d: {
- **d,
- "id": d["transition_id"],
- "current": {"workflow": d["current"][0], "step": d["current"][1]},
- "next": d["next"] and {"workflow": d["next"][0], "step": d["next"][1]},
- },
- one=True,
- _kind="inserted",
- )(
- cozo_query(
- increase_counter("create_execution_transition")(
- _create_execution_transition
- )
- )
- )
-)
-
-create_execution_transition_async = rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)(
- wrap_in_class(
- Transition,
- transform=lambda d: {
- **d,
- "id": d["transition_id"],
- "current": {"workflow": d["current"][0], "step": d["current"][1]},
- "next": d["next"] and {"workflow": d["next"][0], "step": d["next"][1]},
- },
- one=True,
- _kind="inserted",
- )(
- cozo_query_async(
- increase_counter("create_execution_transition_async")(
- _create_execution_transition
- )
- )
- )
-)
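Transition targets are packed into (workflow, step) tuples for storage and rebuilt into dicts by the transforms above. A tiny round-trip sketch:

current = {"workflow": "main", "step": 0}
packed = (current["workflow"], current["step"])        # stored form
unpacked = {"workflow": packed[0], "step": packed[1]}  # transform output
assert unpacked == current

# next may be None (e.g. for "wait"), which `next_target and (...)` preserves.
next_target = None
assert (next_target and (next_target["workflow"], next_target["step"])) is None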
diff --git a/agents-api/agents_api/models/execution/create_temporal_lookup.py b/agents-api/agents_api/models/execution/create_temporal_lookup.py
deleted file mode 100644
index e47a505db..000000000
--- a/agents-api/agents_api/models/execution/create_temporal_lookup.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from typing import TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-from temporalio.client import WorkflowHandle
-
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
-)
-
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- AssertionError: partialclass(HTTPException, status_code=404),
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@cozo_query
-@increase_counter("create_temporal_lookup")
-@beartype
-def create_temporal_lookup(
- *,
- developer_id: UUID,
- execution_id: UUID,
- workflow_handle: WorkflowHandle,
-) -> tuple[list[str], dict]:
- developer_id = str(developer_id)
- execution_id = str(execution_id)
-
- temporal_columns, temporal_values = cozo_process_mutate_data(
- {
- "execution_id": execution_id,
- "id": workflow_handle.id,
- "run_id": workflow_handle.run_id,
- "first_execution_run_id": workflow_handle.first_execution_run_id,
- "result_run_id": workflow_handle.result_run_id,
- }
- )
-
- temporal_executions_lookup_query = f"""
- ?[{temporal_columns}] <- $temporal_values
-
- :insert temporal_executions_lookup {{
- {temporal_columns}
- }}
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id,
- "executions",
- execution_id=execution_id,
- parents=[("agents", "agent_id"), ("tasks", "task_id")],
- ),
- temporal_executions_lookup_query,
- ]
-
- return (queries, {"temporal_values": temporal_values})
diff --git a/agents-api/agents_api/models/execution/get_execution.py b/agents-api/agents_api/models/execution/get_execution.py
deleted file mode 100644
index db0279b1f..000000000
--- a/agents-api/agents_api/models/execution/get_execution.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Execution
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- wrap_in_class,
-)
-from .constants import OUTPUT_UNNEST_KEY
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- AssertionError: partialclass(HTTPException, status_code=404),
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- Execution,
- one=True,
- transform=lambda d: {
- **d,
- "output": d["output"][OUTPUT_UNNEST_KEY]
- if isinstance(d["output"], dict) and OUTPUT_UNNEST_KEY in d["output"]
- else d["output"],
- },
-)
-@cozo_query
-@beartype
-def get_execution(
- *,
- execution_id: UUID,
-) -> tuple[str, dict]:
- # Executions are allowed direct GET access if they have execution_id
-
- # NOTE: Do not remove outer curly braces
- query = """
- {
- input[execution_id] <- [[to_uuid($execution_id)]]
-
- ?[id, task_id, status, input, output, error, session_id, metadata, created_at, updated_at] :=
- input[execution_id],
- *executions {
- task_id,
- execution_id,
- status,
- input,
- output,
- error,
- session_id,
- metadata,
- created_at,
- updated_at,
- },
- id = execution_id
-
- :limit 1
- }
- """
-
- return (
- query,
- {
- "execution_id": str(execution_id),
- },
- )
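The "do not remove outer curly braces" note matters because prepare_execution_input (below) splices this query into a larger program by stripping them. A sketch of that splice:

query = """
{
    ?[x] := x = 1
}
"""
inner = query.strip()[1:-1]  # drop the outer braces, keep the body
assert "?[x] := x = 1" in inner and "{" not in inner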
diff --git a/agents-api/agents_api/models/execution/get_execution_transition.py b/agents-api/agents_api/models/execution/get_execution_transition.py
deleted file mode 100644
index e2b38789a..000000000
--- a/agents-api/agents_api/models/execution/get_execution_transition.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Transition
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- AssertionError: partialclass(HTTPException, status_code=500),
- }
-)
-@wrap_in_class(Transition, one=True)
-@cozo_query
-@beartype
-def get_execution_transition(
- *,
- developer_id: UUID,
- transition_id: UUID | None = None,
- task_token: str | None = None,
-) -> tuple[list[str], dict]:
- # At least one of `transition_id` or `task_token` must be provided
- assert (
- transition_id or task_token
- ), "At least one of `transition_id` or `task_token` must be provided."
-
- if transition_id:
- transition_id = str(transition_id)
- filter = "id = to_uuid($transition_id)"
-
- else:
- filter = "task_token = $task_token"
-
- get_query = """
- ?[id, type, current, next, output, metadata, updated_at, created_at] :=
- *transitions {
- transition_id: id,
- type,
- current: current_tuple,
- next: next_tuple,
- output,
- metadata,
- updated_at,
- created_at,
- },
- current = {"workflow": current_tuple->0, "step": current_tuple->1},
- next = if(
- is_null(next_tuple),
- null,
- {"workflow": next_tuple->0, "step": next_tuple->1},
- )
-
- :limit 1
- """
-
- get_query += filter
-
- queries = [
- verify_developer_id_query(developer_id),
- get_query,
- ]
-
- return (queries, {"task_token": task_token, "transition_id": transition_id})
diff --git a/agents-api/agents_api/models/execution/get_paused_execution_token.py b/agents-api/agents_api/models/execution/get_paused_execution_token.py
deleted file mode 100644
index 6c32c7692..000000000
--- a/agents-api/agents_api/models/execution/get_paused_execution_token.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- AssertionError: partialclass(HTTPException, status_code=500),
- }
-)
-@wrap_in_class(dict, one=True)
-@cozo_query
-@beartype
-def get_paused_execution_token(
- *,
- developer_id: UUID,
- execution_id: UUID,
-) -> tuple[list[str], dict]:
- execution_id = str(execution_id)
-
- check_status_query = """
- ?[execution_id, status] :=
- *executions:execution_id_status_idx {
- execution_id,
- status,
- },
- execution_id = to_uuid($execution_id),
- status = "awaiting_input"
-
- :limit 1
- :assert some
- """
-
- get_query = """
- ?[task_token, created_at, metadata] :=
- execution_id = to_uuid($execution_id),
- *executions {
- execution_id,
- },
- *transitions {
- execution_id,
- created_at,
- task_token,
- type,
- metadata,
- },
- type = "wait"
-
- :sort -created_at
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- check_status_query,
- get_query,
- ]
-
- return (queries, {"execution_id": execution_id})
diff --git a/agents-api/agents_api/models/execution/get_temporal_workflow_data.py b/agents-api/agents_api/models/execution/get_temporal_workflow_data.py
deleted file mode 100644
index 8b1bf4604..000000000
--- a/agents-api/agents_api/models/execution/get_temporal_workflow_data.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(dict, one=True)
-@cozo_query
-@beartype
-def get_temporal_workflow_data(
- *,
- execution_id: UUID,
-) -> tuple[str, dict]:
- # Executions are allowed direct GET access if they have execution_id
-
- query = """
- input[execution_id] <- [[to_uuid($execution_id)]]
-
- ?[id, run_id, result_run_id, first_execution_run_id] :=
- input[execution_id],
- *temporal_executions_lookup {
- execution_id,
- id,
- run_id,
- result_run_id,
- first_execution_run_id,
- }
-
- :limit 1
- """
-
- return (
- query,
- {
- "execution_id": str(execution_id),
- },
- )
diff --git a/agents-api/agents_api/models/execution/list_execution_transitions.py b/agents-api/agents_api/models/execution/list_execution_transitions.py
deleted file mode 100644
index 8931676f6..000000000
--- a/agents-api/agents_api/models/execution/list_execution_transitions.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Transition
-from ..utils import cozo_query, partialclass, rewrap_exceptions, wrap_in_class
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(Transition)
-@cozo_query
-@beartype
-def list_execution_transitions(
- *,
- execution_id: UUID,
- limit: int = 100,
- offset: int = 0,
- sort_by: Literal["created_at", "updated_at"] = "created_at",
- direction: Literal["asc", "desc"] = "desc",
-) -> tuple[str, dict]:
- sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
-
- query = f"""
- ?[id, execution_id, type, current, next, output, metadata, updated_at, created_at] :=
- *transitions {{
- execution_id,
- transition_id: id,
- type,
- current: current_tuple,
- next: next_tuple,
- output,
- metadata,
- updated_at,
- created_at,
- }},
- current = {{"workflow": current_tuple->0, "step": current_tuple->1}},
- next = if(
- is_null(next_tuple),
- null,
- {{"workflow": next_tuple->0, "step": next_tuple->1}},
- ),
- execution_id = to_uuid($execution_id)
-
- :limit $limit
- :offset $offset
- :sort {sort}
- """
-
- return (
- query,
- {
- "execution_id": str(execution_id),
- "limit": limit,
- "offset": offset,
- },
- )
diff --git a/agents-api/agents_api/models/execution/list_executions.py b/agents-api/agents_api/models/execution/list_executions.py
deleted file mode 100644
index 64add074f..000000000
--- a/agents-api/agents_api/models/execution/list_executions.py
+++ /dev/null
@@ -1,95 +0,0 @@
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import Execution
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-from .constants import OUTPUT_UNNEST_KEY
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- Execution,
- transform=lambda d: {
- **d,
- "output": d["output"][OUTPUT_UNNEST_KEY]
- if isinstance(d.get("output"), dict) and OUTPUT_UNNEST_KEY in d["output"]
- else d.get("output"),
- },
-)
-@cozo_query
-@beartype
-def list_executions(
- *,
- developer_id: UUID,
- task_id: UUID,
- limit: int = 100,
- offset: int = 0,
- sort_by: Literal["created_at", "updated_at"] = "created_at",
- direction: Literal["asc", "desc"] = "desc",
-) -> tuple[list[str], dict]:
- sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
-
- list_query = f"""
- input[task_id] <- [[to_uuid($task_id)]]
-
- ?[
- id,
- task_id,
- status,
- input,
- output,
- session_id,
- metadata,
- created_at,
- updated_at,
- ] := input[task_id],
- *executions {{
- task_id,
- execution_id: id,
- status,
- input,
- output,
- session_id,
- metadata,
- created_at,
- updated_at,
- }}
-
- :limit {limit}
- :offset {offset}
- :sort {sort}
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id,
- "tasks",
- task_id=task_id,
- parents=[("agents", "agent_id")],
- ),
- list_query,
- ]
-
- return (queries, {"task_id": str(task_id), "limit": limit, "offset": offset})
diff --git a/agents-api/agents_api/models/execution/lookup_temporal_data.py b/agents-api/agents_api/models/execution/lookup_temporal_data.py
deleted file mode 100644
index 35f09129b..000000000
--- a/agents-api/agents_api/models/execution/lookup_temporal_data.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(dict, one=True)
-@cozo_query
-@beartype
-def lookup_temporal_data(
- *,
- developer_id: UUID,
- execution_id: UUID,
-) -> tuple[list[str], dict]:
- developer_id = str(developer_id)
- execution_id = str(execution_id)
-
- temporal_query = """
- ?[id] :=
- execution_id = to_uuid($execution_id),
- *temporal_executions_lookup {
- id, execution_id, run_id, first_execution_run_id, result_run_id
- }
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id,
- "executions",
- execution_id=execution_id,
- parents=[("agents", "agent_id"), ("tasks", "task_id")],
- ),
- temporal_query,
- ]
-
- return (
- queries,
- {
- "execution_id": str(execution_id),
- },
- )
diff --git a/agents-api/agents_api/models/execution/prepare_execution_input.py b/agents-api/agents_api/models/execution/prepare_execution_input.py
deleted file mode 100644
index 5e841b9f2..000000000
--- a/agents-api/agents_api/models/execution/prepare_execution_input.py
+++ /dev/null
@@ -1,223 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...common.protocol.tasks import ExecutionInput
-from ..agent.get_agent import get_agent
-from ..task.get_task import get_task
-from ..tools.list_tools import list_tools
-from ..utils import (
- cozo_query,
- fix_uuid_if_present,
- make_cozo_json_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-from .get_execution import get_execution
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- AssertionError: lambda e: HTTPException(
- status_code=429,
- detail=str(e),
- headers={"x-should-retry": "true"},
- ),
- }
-)
-@wrap_in_class(
- ExecutionInput,
- one=True,
- transform=lambda d: {
- **d,
- "task": {
- "tools": [*map(fix_uuid_if_present, d["task"].pop("tools"))],
- **d["task"],
- },
- "agent_tools": [
- {tool["type"]: tool.pop("spec"), **tool}
- for tool in map(fix_uuid_if_present, d["tools"])
- ],
- },
-)
-@cozo_query
-@beartype
-def prepare_execution_input(
- *,
- developer_id: UUID,
- task_id: UUID,
- execution_id: UUID,
-) -> tuple[list[str], dict]:
- execution_query, execution_params = get_execution.__wrapped__(
- execution_id=execution_id
- )
-
- # Remove the outer curly braces
- execution_query = execution_query.strip()[1:-1]
-
- execution_fields = (
- "id",
- "task_id",
- "status",
- "input",
- "session_id",
- "metadata",
- "created_at",
- "updated_at",
- )
- execution_query += f"""
- :create _execution {{
- {", ".join(execution_fields)}
- }}
- """
-
- task_query, task_params = get_task.__wrapped__(
- developer_id=developer_id, task_id=task_id
- )
-
- # Remove the outer curly braces
- task_query = task_query[-1].strip()
-
- task_fields = (
- "id",
- "agent_id",
- "name",
- "description",
- "input_schema",
- "tools",
- "inherit_tools",
- "workflows",
- "created_at",
- "updated_at",
- "metadata",
- )
- task_query += f"""
- :create _task {{
- {", ".join(task_fields)}
- }}
- """
-
- dummy_agent_id = UUID(int=0)
-
- [*_, agent_query], agent_params = get_agent.__wrapped__(
- developer_id=developer_id,
- agent_id=dummy_agent_id, # We will replace this with value from the query
- )
- agent_params.pop("agent_id")
- agent_query = agent_query.replace(
- "<- [[to_uuid($agent_id)]]", ":= *_task { agent_id }"
- )
-
- agent_fields = (
- "id",
- "name",
- "model",
- "about",
- "metadata",
- "default_settings",
- "instructions",
- "created_at",
- "updated_at",
- )
-
- agent_query += f"""
- :create _agent {{
- {", ".join(agent_fields)}
- }}
- """
-
- [*_, tools_query], tools_params = list_tools.__wrapped__(
- developer_id=developer_id,
- agent_id=dummy_agent_id, # We will replace this with value from the query
- )
- tools_params.pop("agent_id")
- tools_query = tools_query.replace(
- "<- [[to_uuid($agent_id)]]", ":= *_task { agent_id }"
- )
-
- tools_fields = (
- "id",
- "agent_id",
- "name",
- "type",
- "spec",
- "description",
- "created_at",
- "updated_at",
- )
- tools_query += f"""
- :create _tools {{
- {", ".join(tools_fields)}
- }}
- """
-
- combine_query = f"""
- collected_tools[collect(tool)] :=
- *_tools {{ {', '.join(tools_fields)} }},
- tool = {{ {make_cozo_json_query(tools_fields)} }}
-
- agent_json[agent] :=
- *_agent {{ {', '.join(agent_fields)} }},
- agent = {{ {make_cozo_json_query(agent_fields)} }}
-
- task_json[task] :=
- *_task {{ {', '.join(task_fields)} }},
- task = {{ {make_cozo_json_query(task_fields)} }}
-
- execution_json[execution] :=
- *_execution {{ {', '.join(execution_fields)} }},
- execution = {{ {make_cozo_json_query(execution_fields)} }}
-
- ?[developer_id, execution, task, agent, user, session, tools, arguments] :=
- developer_id = to_uuid($developer_id),
-
- agent_json[agent],
- task_json[task],
- execution_json[execution],
- collected_tools[tools],
-
- # TODO: Enable these later
- user = null,
- session = null,
- arguments = execution->"input"
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "tasks", task_id=task_id, parents=[("agents", "agent_id")]
- ),
- execution_query,
- task_query,
- agent_query,
- tools_query,
- combine_query,
- ]
-
- return (
- queries,
- {
- "developer_id": str(developer_id),
- "task_id": str(task_id),
- "execution_id": str(execution_id),
- **execution_params,
- **task_params,
- **agent_params,
- **tools_params,
- },
- )
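The .replace() rewrites above re-point a parameterized sub-query at a temporary relation instead of a bound parameter. The relation and parameter names below are taken from the file; the rule text is illustrative:

agent_query = "input[agent_id] <- [[to_uuid($agent_id)]]"
rewritten = agent_query.replace(
    "<- [[to_uuid($agent_id)]]", ":= *_task { agent_id }"
)
assert rewritten == "input[agent_id] := *_task { agent_id }"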
diff --git a/agents-api/agents_api/models/execution/update_execution.py b/agents-api/agents_api/models/execution/update_execution.py
deleted file mode 100644
index f33368412..000000000
--- a/agents-api/agents_api/models/execution/update_execution.py
+++ /dev/null
@@ -1,130 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import (
- ResourceUpdatedResponse,
- UpdateExecutionRequest,
-)
-from ...common.protocol.tasks import (
- valid_previous_statuses as valid_previous_statuses_map,
-)
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-from .constants import OUTPUT_UNNEST_KEY
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {"id": d["execution_id"], **d},
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("update_execution")
-@beartype
-def update_execution(
- *,
- developer_id: UUID,
- task_id: UUID,
- execution_id: UUID,
- data: UpdateExecutionRequest,
- output: dict | Any | None = None,
- error: str | None = None,
-) -> tuple[list[str], dict]:
- developer_id = str(developer_id)
- task_id = str(task_id)
- execution_id = str(execution_id)
-
- valid_previous_statuses: list[str] | None = valid_previous_statuses_map.get(
- data.status, None
- )
-
- execution_data: dict = data.model_dump(exclude_none=True)
-
- if output is not None and not isinstance(output, dict):
- output: dict = {OUTPUT_UNNEST_KEY: output}
-
- columns, values = cozo_process_mutate_data(
- {
- **execution_data,
- "task_id": task_id,
- "execution_id": execution_id,
- "output": output,
- "error": error,
- }
- )
-
- validate_status_query = """
- valid_status[count(status)] :=
- *executions {
- status,
- execution_id: to_uuid($execution_id),
- task_id: to_uuid($task_id),
- },
- status in $valid_previous_statuses
-
- ?[num] :=
- valid_status[num],
- assert(num > 0, 'Invalid status')
-
- :limit 1
- """
-
- update_query = f"""
- input[{columns}] <- $values
- ?[{columns}, updated_at] :=
- input[{columns}],
- updated_at = now()
-
- :update executions {{
- updated_at,
- {columns}
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id,
- "executions",
- execution_id=execution_id,
- parents=[("agents", "agent_id"), ("tasks", "task_id")],
- ),
- validate_status_query if valid_previous_statuses is not None else "",
- update_query,
- ]
-
- return (
- queries,
- {
- "values": values,
- "valid_previous_statuses": valid_previous_statuses,
- "execution_id": str(execution_id),
- "task_id": task_id,
- },
- )
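update_execution.__wrapped__ (used by create_execution_transition above) works because the decorators keep a handle to the undecorated function. A minimal sketch, assuming cozo_query uses functools.wraps (which sets __wrapped__); the decorator body here is an assumption, not the real implementation:

import functools

def cozo_query(fn):
    @functools.wraps(fn)  # exposes fn as wrapper.__wrapped__
    def wrapper(*args, **kwargs):
        queries, params = fn(*args, **kwargs)
        return queries, params  # a real client would execute here
    return wrapper

@cozo_query
def some_query(*, execution_id: str) -> tuple[list[str], dict]:
    return (["?[x] := x = 1"], {"execution_id": execution_id})

# Borrow the raw (queries, params) without triggering the execution wrappers.
queries, params = some_query.__wrapped__(execution_id="...")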
diff --git a/agents-api/agents_api/models/files/__init__.py b/agents-api/agents_api/models/files/__init__.py
deleted file mode 100644
index 444c0a6eb..000000000
--- a/agents-api/agents_api/models/files/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .create_file import create_file as create_file
-from .delete_file import delete_file as delete_file
-from .get_file import get_file as get_file
diff --git a/agents-api/agents_api/models/files/create_file.py b/agents-api/agents_api/models/files/create_file.py
deleted file mode 100644
index 58948038b..000000000
--- a/agents-api/agents_api/models/files/create_file.py
+++ /dev/null
@@ -1,122 +0,0 @@
-"""
-This module contains the functionality for creating a new file in the CozoDB database.
-It defines a query for inserting file data into the 'files' relation.
-"""
-
-import base64
-import hashlib
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import CreateFileRequest, File
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- lambda e: isinstance(e, QueryException)
- and "asserted to return some results, but returned none"
- in str(e): lambda *_: HTTPException(
-            detail="Developer not found. Please ensure the provided auth token (which refers to your developer_id) is valid and the developer has the necessary permissions to create a file.",
- status_code=403,
- ),
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(
- File,
- one=True,
- transform=lambda d: {
- **d,
- "id": d["file_id"],
- "content": "DUMMY: NEED TO FETCH CONTENT FROM BLOB STORAGE",
- },
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("create_file")
-@beartype
-def create_file(
- *,
- developer_id: UUID,
- file_id: UUID | None = None,
- data: CreateFileRequest,
-) -> tuple[list[str], dict]:
- """
- Constructs and executes a datalog query to create a new file in the CozoDB database.
-
- Parameters:
-        developer_id (UUID): The unique identifier for the developer creating the file.
-        file_id (UUID | None): The unique identifier for the new file; generated when not provided.
-        data (CreateFileRequest): The file payload, including base64-encoded content.
- """
-
- file_id = file_id or uuid7()
- file_data = data.model_dump(exclude={"content"})
-
- content_bytes = base64.b64decode(data.content)
- size = len(content_bytes)
- hash = hashlib.sha256(content_bytes).hexdigest()
-
- create_query = """
- # Then create the file
- ?[file_id, developer_id, name, description, mime_type, size, hash] <- [
- [to_uuid($file_id), to_uuid($developer_id), $name, $description, $mime_type, $size, $hash]
- ]
-
- :insert files {
- developer_id,
- file_id =>
- name,
- description,
- mime_type,
- size,
- hash,
- }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- create_query,
- ]
-
- return (
- queries,
- {
- "file_id": str(file_id),
- "developer_id": str(developer_id),
- "size": size,
- "hash": hash,
- **file_data,
- },
- )
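The size/hash bookkeeping above is plain standard-library work; a self-contained sketch of the same steps:

import base64
import hashlib

content_b64 = base64.b64encode(b"hello world").decode()  # as sent by the client
content_bytes = base64.b64decode(content_b64)

size = len(content_bytes)                          # 11
digest = hashlib.sha256(content_bytes).hexdigest()

assert size == 11
assert len(digest) == 64  # sha256 hex digest length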
diff --git a/agents-api/agents_api/models/files/delete_file.py b/agents-api/agents_api/models/files/delete_file.py
deleted file mode 100644
index 053402e2f..000000000
--- a/agents-api/agents_api/models/files/delete_file.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-This module contains the implementation of the delete_file function, which is responsible for deleting a file from the CozoDB database.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- lambda e: isinstance(e, QueryException)
- and "Developer does not exist" in str(e): lambda *_: HTTPException(
- detail="The specified developer does not exist.",
- status_code=403,
- ),
- lambda e: isinstance(e, QueryException)
- and "Developer does not own resource"
- in e.resp["display"]: lambda *_: HTTPException(
- detail="The specified developer does not own the requested resource. Please verify the ownership or check if the developer ID is correct.",
- status_code=404,
- ),
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(
- ResourceDeletedResponse,
- one=True,
- transform=lambda d: {
- "id": UUID(d.pop("file_id")),
- "deleted_at": utcnow(),
- "jobs": [],
- },
- _kind="deleted",
-)
-@cozo_query
-@beartype
-def delete_file(*, developer_id: UUID, file_id: UUID) -> tuple[list[str], dict]:
- """
-    Constructs and returns a datalog query for deleting a file from the database.
-
-    Parameters:
-    developer_id (UUID): The UUID of the developer owning the file.
-    file_id (UUID): The UUID of the file to be deleted.
-
-    Returns:
-    ResourceDeletedResponse: The response indicating the deletion of the file.
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "files", file_id=file_id),
- """
- ?[file_id, developer_id] <- [[$file_id, $developer_id]]
-
- :delete files {
- developer_id,
- file_id
- }
- :returning
- """,
- ]
-
- return (queries, {"file_id": str(file_id), "developer_id": str(developer_id)})
diff --git a/agents-api/agents_api/models/files/get_file.py b/agents-api/agents_api/models/files/get_file.py
deleted file mode 100644
index f3b85c2f7..000000000
--- a/agents-api/agents_api/models/files/get_file.py
+++ /dev/null
@@ -1,116 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import File
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- lambda e: isinstance(e, QueryException)
- and "Developer does not exist" in str(e): lambda *_: HTTPException(
- detail="The specified developer does not exist.",
- status_code=403,
- ),
- lambda e: isinstance(e, QueryException)
- and "Developer does not own resource"
- in e.resp["display"]: lambda *_: HTTPException(
- detail="The specified developer does not own the requested resource. Please verify the ownership or check if the developer ID is correct.",
- status_code=404,
- ),
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(
- File,
- one=True,
- transform=lambda d: {
- **d,
- "content": "DUMMY: NEED TO FETCH CONTENT FROM BLOB STORAGE",
- },
-)
-@cozo_query
-@beartype
-def get_file(
- *,
- developer_id: UUID,
- file_id: UUID,
-) -> tuple[list[str], dict]:
- """
-    Retrieves a file by its unique identifier.
-
- Parameters:
- developer_id (UUID): The unique identifier of the developer associated with the file.
- file_id (UUID): The unique identifier of the file to retrieve.
-
- Returns:
- File: The retrieved file.
- """
-
- # Convert UUIDs to strings for query compatibility.
- file_id = str(file_id)
- developer_id = str(developer_id)
-
- get_query = """
- input[developer_id, file_id] <- [[to_uuid($developer_id), to_uuid($file_id)]]
-
- ?[
- id,
- name,
- description,
- mime_type,
- size,
- hash,
- created_at,
- ] := input[developer_id, id],
- *files {
- file_id: id,
- developer_id,
- name,
- description,
- mime_type,
- size,
- hash,
- created_at,
- }
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "files", file_id=file_id),
- get_query,
- ]
-
- return (queries, {"developer_id": developer_id, "file_id": file_id})
diff --git a/agents-api/agents_api/models/session/__init__.py b/agents-api/agents_api/models/session/__init__.py
deleted file mode 100644
index bf80c9f4b..000000000
--- a/agents-api/agents_api/models/session/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""The session module is responsible for managing session data in the 'cozodb' database. It provides functionalities to create, retrieve, list, update, and delete session information. This module utilizes the `CozoClient` for database interactions, ensuring that sessions are uniquely identified and managed through UUIDs.
-
-Key functionalities include:
-- Creating new sessions with specific metadata.
-- Retrieving session information based on developer and session IDs.
-- Listing all sessions with optional filters for pagination and metadata.
-- Updating session data, including situation, summary, and metadata.
-- Deleting sessions and their associated data from the database.
-
-This module plays a crucial role in the application by facilitating the management of session data, which is essential for tracking and analyzing user interactions and behaviors within the system."""
-
-# ruff: noqa: F401, F403, F405
-
-from .count_sessions import count_sessions
-from .create_or_update_session import create_or_update_session
-from .create_session import create_session
-from .delete_session import delete_session
-from .get_session import get_session
-from .list_sessions import list_sessions
-from .patch_session import patch_session
-from .prepare_session_data import prepare_session_data
-from .update_session import update_session
diff --git a/agents-api/agents_api/models/session/count_sessions.py b/agents-api/agents_api/models/session/count_sessions.py
deleted file mode 100644
index 3599cc2fb..000000000
--- a/agents-api/agents_api/models/session/count_sessions.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""This module contains functions for querying session data from the 'cozodb' database."""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(dict, one=True)
-@cozo_query
-@beartype
-def count_sessions(
- *,
- developer_id: UUID,
-) -> tuple[list[str], dict]:
- """
- Counts sessions from the 'cozodb' database.
-
- Parameters:
- developer_id (UUID): The developer's ID to filter sessions by.
- """
-
- count_query = """
- input[developer_id] <- [[
- to_uuid($developer_id),
- ]]
-
- counter[count(id)] :=
- input[developer_id],
- *sessions{
- developer_id,
- session_id: id,
- }
-
- ?[count] := counter[count]
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- count_query,
- ]
-
- return (queries, {"developer_id": str(developer_id)})
diff --git a/agents-api/agents_api/models/session/create_or_update_session.py b/agents-api/agents_api/models/session/create_or_update_session.py
deleted file mode 100644
index e34a63ca5..000000000
--- a/agents-api/agents_api/models/session/create_or_update_session.py
+++ /dev/null
@@ -1,158 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import (
- CreateOrUpdateSessionRequest,
- ResourceUpdatedResponse,
-)
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- AssertionError: partialclass(HTTPException, status_code=400),
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {
- "id": d["session_id"],
- "updated_at": d.pop("updated_at")[0],
- "jobs": [],
- **d,
- },
-)
-@cozo_query
-@increase_counter("create_or_update_session")
-@beartype
-def create_or_update_session(
- *,
- session_id: UUID,
- developer_id: UUID,
- data: CreateOrUpdateSessionRequest,
-) -> tuple[list[str], dict]:
- data.metadata = data.metadata or {}
- session_data = data.model_dump(exclude={"auto_run_tools", "disable_cache"})
-
- user = session_data.pop("user")
- agent = session_data.pop("agent")
- users = session_data.pop("users")
- agents = session_data.pop("agents")
-
- # Only one of agent or agents should be provided.
- if agent and agents:
- raise ValueError("Only one of 'agent' or 'agents' should be provided.")
-
- agents = agents or ([agent] if agent else [])
- assert len(agents) > 0, "At least one agent must be provided."
-
- # Users are zero or more, so we default to an empty list if not provided.
- if not (user or users):
- users = []
-
- else:
- users = users or [user]
-
- participants = [
- *[("user", str(user)) for user in users],
- *[("agent", str(agent)) for agent in agents],
- ]
-
- # Construct the datalog query for creating a new session and its lookup.
- clear_lookup_query = """
- input[session_id] <- [[$session_id]]
- ?[session_id, participant_id, participant_type] :=
- input[session_id],
- *session_lookup {
- session_id,
- participant_type,
- participant_id,
- },
-
- :delete session_lookup {
- session_id,
- participant_type,
- participant_id,
- }
- """
-
- lookup_query = """
- # This section creates a new session lookup to ensure uniqueness and manage session metadata.
- session[session_id] <- [[$session_id]]
- participants[participant_type, participant_id] <- $participants
- ?[session_id, participant_id, participant_type] :=
- session[session_id],
- participants[participant_type, participant_id],
-
- :put session_lookup {
- session_id,
- participant_id,
- participant_type,
- }
- """
-
- session_update_cols, session_update_vals = cozo_process_mutate_data(
- {k: v for k, v in session_data.items() if v is not None}
- )
-
- # Construct the datalog query for creating or updating session information.
- update_query = f"""
- input[{session_update_cols}] <- $session_update_vals
- ids[session_id, developer_id] <- [[to_uuid($session_id), to_uuid($developer_id)]]
-
- ?[{session_update_cols}, session_id, developer_id] :=
- input[{session_update_cols}],
- ids[session_id, developer_id],
-
- :put sessions {{
- {session_update_cols}, session_id, developer_id
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- *[
- verify_developer_owns_resource_query(
- developer_id,
- f"{participant_type}s",
- **{f"{participant_type}_id": participant_id},
- )
- for participant_type, participant_id in participants
- ],
- clear_lookup_query,
- lookup_query,
- update_query,
- ]
-
- return (
- queries,
- {
- "session_update_vals": session_update_vals,
- "session_id": str(session_id),
- "developer_id": str(developer_id),
- "participants": participants,
- },
- )
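
Before the removed create_session.py below, a note on the shape of $participants: create_or_update_session collapses the user/users and agent/agents request fields into a single list of (type, id) pairs. A minimal standalone sketch of that normalization, mirroring the rules in the removed code above (illustrative only):

from uuid import UUID

def normalize_participants(
    user: UUID | None,
    users: list[UUID] | None,
    agent: UUID | None,
    agents: list[UUID] | None,
) -> list[tuple[str, str]]:
    # Exactly one of `agent` / `agents` may be supplied.
    if agent and agents:
        raise ValueError("Only one of 'agent' or 'agents' should be provided.")

    agents = agents or ([agent] if agent else [])
    assert len(agents) > 0, "At least one agent must be provided."

    # Users are zero or more, so default to an empty list.
    users = users or ([user] if user else [])

    # Shape matches the $participants parameter fed to the lookup query.
    return [
        *[("user", str(u)) for u in users],
        *[("agent", str(a)) for a in agents],
    ]
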
diff --git a/agents-api/agents_api/models/session/create_session.py b/agents-api/agents_api/models/session/create_session.py
deleted file mode 100644
index a08059961..000000000
--- a/agents-api/agents_api/models/session/create_session.py
+++ /dev/null
@@ -1,154 +0,0 @@
-"""
-This module contains the functionality for creating a new session in the 'cozodb' database.
-It constructs and executes a datalog query to insert session data.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import CreateSessionRequest, Session
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- AssertionError: partialclass(HTTPException, status_code=400),
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- Session,
- one=True,
- transform=lambda d: {
- "id": UUID(d.pop("session_id")),
- "updated_at": (d.pop("updated_at")[0]),
- **d,
- },
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("create_session")
-@beartype
-def create_session(
- *,
- developer_id: UUID,
- session_id: UUID | None = None,
- data: CreateSessionRequest,
-) -> tuple[list[str], dict]:
- """
- Constructs and executes a datalog query to create a new session in the database.
- """
-
- session_id = session_id or uuid7()
-
- data.metadata = data.metadata or {}
- session_data = data.model_dump(exclude={"auto_run_tools", "disable_cache"})
-
- user = session_data.pop("user")
- agent = session_data.pop("agent")
- users = session_data.pop("users")
- agents = session_data.pop("agents")
-
- # Only one of agent or agents should be provided.
- if agent and agents:
- raise ValueError("Only one of 'agent' or 'agents' should be provided.")
-
- agents = agents or ([agent] if agent else [])
- assert len(agents) > 0, "At least one agent must be provided."
-
- # Users are zero or more, so we default to an empty list if not provided.
- if not (user or users):
- users = []
-
- else:
- users = users or [user]
-
- participants = [
- *[("user", str(user)) for user in users],
- *[("agent", str(agent)) for agent in agents],
- ]
-
- # Construct the datalog query for creating a new session and its lookup.
- lookup_query = """
- # This section creates a new session lookup to ensure uniqueness and manage session metadata.
- session[session_id] <- [[$session_id]]
- participants[participant_type, participant_id] <- $participants
- ?[session_id, participant_id, participant_type] :=
- session[session_id],
- participants[participant_type, participant_id],
-
- :insert session_lookup {
- session_id,
- participant_id,
- participant_type,
- }
- """
-
- create_query = """
- # Insert the new session data into the 'session' table with the specified columns.
- ?[session_id, developer_id, situation, metadata, render_templates, token_budget, context_overflow] <- [[
- $session_id,
- $developer_id,
- $situation,
- $metadata,
- $render_templates,
- $token_budget,
- $context_overflow,
- ]]
-
- :insert sessions {
- developer_id,
- session_id,
- situation,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- }
- # Specify the data to return after the query execution, typically the newly created session's ID.
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- *[
- verify_developer_owns_resource_query(
- developer_id,
- f"{participant_type}s",
- **{f"{participant_type}_id": participant_id},
- )
- for participant_type, participant_id in participants
- ],
- lookup_query,
- create_query,
- ]
-
- # Execute the constructed query with the provided parameters and return the result.
- return (
- queries,
- {
- "session_id": str(session_id),
- "developer_id": str(developer_id),
- "participants": participants,
- **session_data,
- },
- )
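
The `input[{cols}] <- $vals` pattern used throughout these modules depends on cozo_process_mutate_data splitting a dict into a comma-joined column string plus a parameter payload. A plausible minimal sketch of that helper, assuming the real one (in common/utils/cozo) joins columns without spaces, since callers later split on ",":

def process_mutate_data(data: dict) -> tuple[str, list[list]]:
    # One column order, one row of values, ready for `input[cols] <- $vals`.
    columns = ",".join(data.keys())
    values = [list(data.values())]
    return columns, values

# Example: cols, vals = process_mutate_data({"situation": "...", "metadata": {}})
# then the query template becomes f"input[{cols}] <- $session_update_vals".
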
diff --git a/agents-api/agents_api/models/session/delete_session.py b/agents-api/agents_api/models/session/delete_session.py
deleted file mode 100644
index 81f8e1f7c..000000000
--- a/agents-api/agents_api/models/session/delete_session.py
+++ /dev/null
@@ -1,125 +0,0 @@
-"""This module contains the implementation for deleting sessions from the 'cozodb' database using datalog queries."""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceDeletedResponse,
- one=True,
- transform=lambda d: {
- "id": UUID(d.pop("session_id")),
- "deleted_at": utcnow(),
- "jobs": [],
- },
- _kind="deleted",
-)
-@cozo_query
-@beartype
-def delete_session(
- *,
- developer_id: UUID,
- session_id: UUID,
-) -> tuple[list[str], dict]:
- """
- Deletes a session and its related data from the 'cozodb' database.
-
- Parameters:
- developer_id (UUID): The unique identifier for the developer.
- session_id (UUID): The unique identifier for the session to be deleted.
-
- Returns:
- ResourceDeletedResponse: The response indicating the deletion of the session.
- """
- session_id = str(session_id)
- developer_id = str(developer_id)
-
- # Constructs and executes a datalog query to delete the specified session and its associated data based on the session_id and developer_id.
- delete_lookup_query = """
- # Convert session_id to UUID format
- input[session_id] <- [[
- to_uuid($session_id),
- ]]
-
- # Select sessions based on the session_id provided
- ?[
- session_id,
- participant_id,
- participant_type,
- ] :=
- input[session_id],
- *session_lookup{
- session_id,
- participant_id,
- participant_type,
- }
-
- # Delete entries from session_lookup table matching the criteria
- :delete session_lookup {
- session_id,
- participant_id,
- participant_type,
- }
- """
-
- delete_query = """
- # Convert developer_id and session_id to UUID format
- input[developer_id, session_id] <- [[
- to_uuid($developer_id),
- to_uuid($session_id),
- ]]
-
- # Select sessions based on the developer_id and session_id provided
- ?[developer_id, session_id, updated_at] :=
- input[developer_id, session_id],
- *sessions {
- developer_id,
- session_id,
- updated_at,
- }
-
- # Delete entries from sessions table matching the criteria
- :delete sessions {
- developer_id,
- session_id,
- updated_at,
- }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- delete_lookup_query,
- delete_query,
- ]
-
- return (queries, {"session_id": session_id, "developer_id": developer_id})
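
delete_session hands back an ordered list of statements (verification first, then the two deletes) and leaves execution to the cozo_query decorator. Assuming pycozo's Client.run and CozoScript's braced multi-statement transactions, the execution step could look roughly like:

from pycozo.client import Client  # embedded client; connection details assumed

def run_atomically(client: Client, queries: list[str], params: dict):
    # Each statement wrapped in braces runs as one multi-statement
    # transaction, so a failed ownership check aborts the deletes after it.
    script = "\n".join("{" + q + "}" for q in queries)
    return client.run(script, params)
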
diff --git a/agents-api/agents_api/models/session/get_session.py b/agents-api/agents_api/models/session/get_session.py
deleted file mode 100644
index f99f2524c..000000000
--- a/agents-api/agents_api/models/session/get_session.py
+++ /dev/null
@@ -1,116 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...common.protocol.sessions import make_session
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(make_session, one=True)
-@cozo_query
-@beartype
-def get_session(
- *,
- developer_id: UUID,
- session_id: UUID,
-) -> tuple[list[str], dict]:
- """
- Constructs and executes a datalog query to retrieve session information from the 'cozodb' database.
-
- Parameters:
- developer_id (UUID): The developer's unique identifier.
- session_id (UUID): The session's unique identifier.
- """
- session_id = str(session_id)
- developer_id = str(developer_id)
-
-    # This query retrieves session information, using the `input` relation to pass parameters.
- get_query = """
- input[developer_id, session_id] <- [[
- to_uuid($developer_id),
- to_uuid($session_id),
- ]]
-
- participants[collect(participant_id), participant_type] :=
- input[_, session_id],
- *session_lookup{
- session_id,
- participant_id,
- participant_type,
- }
-
- # We have to do this dance because users can be zero or more
- users_p[users] :=
- participants[users, "user"]
-
- users_p[users] :=
- not participants[_, "user"],
- users = []
-
- ?[
- agents,
- users,
- id,
- situation,
- summary,
- updated_at,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- recall_options,
- forward_tool_calls,
- ] := input[developer_id, id],
- users_p[users],
- participants[agents, "agent"],
- *sessions{
- developer_id,
- session_id: id,
- situation,
- summary,
- created_at,
- updated_at: validity,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- recall_options,
- forward_tool_calls,
- @ "END"
- },
- updated_at = to_int(validity)
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- get_query,
- ]
-
- return (queries, {"session_id": session_id, "developer_id": developer_id})
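
The pair of users_p rules above is the datalog idiom for an optional group: take the collected user IDs when any exist, otherwise bind an empty list. The Python equivalent of that defaulting, for comparison:

# participants maps participant_type -> collected ids, as in the query.
participants: dict[str, list[str]] = {"agent": ["agent-uuid"]}

# users_p[users] := participants[users, "user"]
# users_p[users] := not participants[_, "user"], users = []
users = participants.get("user", [])
agents = participants["agent"]  # sessions always have at least one agent
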
diff --git a/agents-api/agents_api/models/session/list_sessions.py b/agents-api/agents_api/models/session/list_sessions.py
deleted file mode 100644
index 4adb84a6c..000000000
--- a/agents-api/agents_api/models/session/list_sessions.py
+++ /dev/null
@@ -1,131 +0,0 @@
-"""This module contains functions for querying session data from the 'cozodb' database."""
-
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...common.protocol.sessions import make_session
-from ...common.utils import json
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(make_session)
-@cozo_query
-@beartype
-def list_sessions(
- *,
- developer_id: UUID,
- limit: int = 100,
- offset: int = 0,
- sort_by: Literal["created_at", "updated_at"] = "created_at",
- direction: Literal["asc", "desc"] = "desc",
- metadata_filter: dict[str, Any] = {},
-) -> tuple[list[str], dict]:
- """
- Lists sessions from the 'cozodb' database based on the provided filters.
-
- Parameters:
- developer_id (UUID): The developer's ID to filter sessions by.
- limit (int): The maximum number of sessions to return.
- offset (int): The offset from which to start listing sessions.
- metadata_filter (dict[str, Any]): A dictionary of metadata fields to filter sessions by.
- """
- metadata_filter_str = ", ".join(
- [
- f"metadata->{json.dumps(k)} == {json.dumps(v)}"
- for k, v in metadata_filter.items()
- ]
- )
-
- sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
-
- list_query = f"""
- input[developer_id] <- [[
- to_uuid($developer_id),
- ]]
-
- participants[collect(participant_id), participant_type, session_id] :=
- *session_lookup{{
- session_id,
- participant_id,
- participant_type,
- }}
-
- # We have to do this dance because users can be zero or more
- users_p[users, session_id] :=
- participants[users, "user", session_id]
-
- users_p[users, session_id] :=
- not participants[_, "user", session_id],
- users = []
-
- ?[
- agents,
- users,
- id,
- situation,
- summary,
- updated_at,
- created_at,
- metadata,
- token_budget,
- context_overflow,
- recall_options,
- forward_tool_calls,
- ] :=
- input[developer_id],
- *sessions{{
- developer_id,
- session_id: id,
- situation,
- summary,
- created_at,
- updated_at: validity,
- metadata,
- token_budget,
- context_overflow,
- recall_options,
- forward_tool_calls,
- @ "END"
- }},
- users_p[users, id],
- participants[agents, "agent", id],
- updated_at = to_int(validity),
- {metadata_filter_str}
-
- :limit $limit
- :offset $offset
- :sort {sort}
- """
-
-    # Datalog query to retrieve session information based on the filters, sorted by the requested field and direction.
- queries = [
- verify_developer_id_query(developer_id),
- list_query,
- ]
-
-    # Return the composed queries and their parameters; the cozo_query decorator executes them.
- return (
- queries,
- {"developer_id": str(developer_id), "limit": limit, "offset": offset},
- )
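
list_sessions splices the metadata filter into the query source rather than binding it as a parameter, JSON-encoding both key and value. Pulled out on its own (same caveat as the original: this is string interpolation, kept safe only by json.dumps on both sides):

import json
from typing import Any

def build_metadata_filter(metadata_filter: dict[str, Any]) -> str:
    # Each entry renders as e.g.: metadata->"tier" == "pro"
    return ", ".join(
        f"metadata->{json.dumps(k)} == {json.dumps(v)}"
        for k, v in metadata_filter.items()
    )

# build_metadata_filter({"tier": "pro"}) -> 'metadata->"tier" == "pro"'
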
diff --git a/agents-api/agents_api/models/session/patch_session.py b/agents-api/agents_api/models/session/patch_session.py
deleted file mode 100644
index 4a119a684..000000000
--- a/agents-api/agents_api/models/session/patch_session.py
+++ /dev/null
@@ -1,127 +0,0 @@
-"""This module contains functions for patching session data in the 'cozodb' database using datalog queries."""
-
-from typing import Any, List, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import PatchSessionRequest, ResourceUpdatedResponse
-from ...common.utils.cozo import cozo_process_mutate_data
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-_fields: List[str] = [
- "situation",
- "summary",
- "created_at",
- "session_id",
- "developer_id",
-]
-
-
-# TODO: Add support for updating `render_templates` field
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {
- "id": d["session_id"],
- "updated_at": d.pop("updated_at")[0],
- "jobs": [],
- **d,
- },
- _kind="inserted",
-)
-@cozo_query
-@beartype
-def patch_session(
- *,
- session_id: UUID,
- developer_id: UUID,
- data: PatchSessionRequest,
-) -> tuple[list[str], dict]:
- """
- Patch session data in the 'cozodb' database.
-
- Parameters:
- session_id (UUID): The unique identifier for the session to be updated.
- developer_id (UUID): The unique identifier for the developer making the update.
- data (PatchSessionRequest): The request payload containing the updates to apply.
- """
-
- update_data = data.model_dump(exclude_unset=True)
- metadata = update_data.pop("metadata", {}) or {}
-
- session_update_cols, session_update_vals = cozo_process_mutate_data(
- {k: v for k, v in update_data.items() if v is not None}
- )
-
- # Prepare lists of columns for the query.
- session_update_cols_lst = session_update_cols.split(",")
- all_fields_lst = list(set(session_update_cols_lst).union(set(_fields)))
- all_fields = ", ".join(all_fields_lst)
- rest_fields = ", ".join(
- list(
- set(all_fields_lst)
- - set([k for k, v in update_data.items() if v is not None])
- )
- )
-
- # Construct the datalog query for updating session information.
- update_query = f"""
- input[{session_update_cols}] <- $session_update_vals
- ids[session_id, developer_id] <- [[to_uuid($session_id), to_uuid($developer_id)]]
-
- ?[{all_fields}, metadata, updated_at] :=
- input[{session_update_cols}],
- ids[session_id, developer_id],
- *sessions{{
- {rest_fields}, metadata: md, @ "END"
- }},
- updated_at = 'ASSERT',
- metadata = concat(md, $metadata),
-
- :put sessions {{
- {all_fields}, metadata, updated_at
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- update_query,
- ]
-
- return (
- queries,
- {
- "session_update_vals": session_update_vals,
- "session_id": str(session_id),
- "developer_id": str(developer_id),
- "metadata": metadata,
- },
- )
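
patch_session's column bookkeeping is easier to follow outside the query template: columns present in the request are written from `input`, everything else is carried over from the stored row. The same set arithmetic as a sketch (sorted here for determinism; the original keeps raw set order):

_fields = ["situation", "summary", "created_at", "session_id", "developer_id"]

def split_columns(update_data: dict) -> tuple[str, str]:
    updated = [k for k, v in update_data.items() if v is not None]
    all_fields = sorted(set(updated) | set(_fields))
    # Columns not being updated are read back from the existing row.
    rest_fields = sorted(set(all_fields) - set(updated))
    return ", ".join(all_fields), ", ".join(rest_fields)
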
diff --git a/agents-api/agents_api/models/session/prepare_session_data.py b/agents-api/agents_api/models/session/prepare_session_data.py
deleted file mode 100644
index 83ee0c219..000000000
--- a/agents-api/agents_api/models/session/prepare_session_data.py
+++ /dev/null
@@ -1,235 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...common.protocol.sessions import SessionData, make_session
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- SessionData,
- one=True,
- transform=lambda d: {
- "session": make_session(
- **d["session"],
- agents=[a["id"] for a in d["agents"]],
- users=[u["id"] for u in d["users"]],
- ),
- },
-)
-@cozo_query
-@beartype
-def prepare_session_data(
- *,
- developer_id: UUID,
- session_id: UUID,
-) -> tuple[list[str], dict]:
- """Constructs and executes a datalog query to retrieve session data from the 'cozodb' database.
-
- Parameters:
- developer_id (UUID): The developer's unique identifier.
- session_id (UUID): The session's unique identifier.
- """
- session_id = str(session_id)
- developer_id = str(developer_id)
-
-    # This query retrieves session information, using the `input` relation to pass parameters.
- get_query = """
- input[session_id, developer_id] <- [[
- to_uuid($session_id),
- to_uuid($developer_id),
- ]]
-
- participants[collect(participant_id), participant_type] :=
- input[session_id, developer_id],
- *session_lookup{
- session_id,
- participant_id,
- participant_type,
- }
-
- agents[agent_ids] := participants[agent_ids, "agent"]
-
- # We have to do this dance because users can be zero or more
- users[user_ids] :=
- participants[user_ids, "user"]
-
- users[user_ids] :=
- not participants[_, "user"],
- user_ids = []
-
- settings_data[agent_id, settings] :=
- *agent_default_settings {
- agent_id,
- frequency_penalty,
- presence_penalty,
- length_penalty,
- repetition_penalty,
- top_p,
- temperature,
- min_p,
- preset,
- },
- settings = {
- "frequency_penalty": frequency_penalty,
- "presence_penalty": presence_penalty,
- "length_penalty": length_penalty,
- "repetition_penalty": repetition_penalty,
- "top_p": top_p,
- "temperature": temperature,
- "min_p": min_p,
- "preset": preset,
- }
-
- agent_data[collect(record)] :=
- input[session_id, developer_id],
- agents[agent_ids],
- agent_id in agent_ids,
- *agents{
- developer_id,
- agent_id,
- model,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- instructions,
- },
- settings_data[agent_id, default_settings],
- record = {
- "id": agent_id,
- "name": name,
- "model": model,
- "about": about,
- "created_at": created_at,
- "updated_at": updated_at,
- "metadata": metadata,
- "default_settings": default_settings,
- "instructions": instructions,
- }
-
- # Version where we don't have default settings
- agent_data[collect(record)] :=
- input[session_id, developer_id],
- agents[agent_ids],
- agent_id in agent_ids,
- *agents{
- developer_id,
- agent_id,
- model,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- instructions,
- },
- not settings_data[agent_id, _],
- record = {
- "id": agent_id,
- "name": name,
- "model": model,
- "about": about,
- "created_at": created_at,
- "updated_at": updated_at,
- "metadata": metadata,
- "default_settings": {},
- "instructions": instructions,
- }
-
- user_data[collect(record)] :=
- input[session_id, developer_id],
- users[user_ids],
- user_id in user_ids,
- *users{
- developer_id,
- user_id,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- },
- record = {
- "id": user_id,
- "name": name,
- "about": about,
- "created_at": created_at,
- "updated_at": updated_at,
- "metadata": metadata,
- }
-
- session_data[record] :=
- input[session_id, developer_id],
- *sessions{
- developer_id,
- session_id,
- situation,
- summary,
- created_at,
- updated_at: validity,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- @ "END"
- },
- updated_at = to_int(validity),
- record = {
- "id": session_id,
- "situation": situation,
- "summary": summary,
- "created_at": created_at,
- "updated_at": updated_at,
- "metadata": metadata,
- "render_templates": render_templates,
- "token_budget": token_budget,
- "context_overflow": context_overflow,
- }
-
- ?[
- agents,
- users,
- session,
- ] :=
- session_data[session],
- user_data[users],
- agent_data[agents]
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- get_query,
- ]
-
- return (
- queries,
- {"developer_id": developer_id, "session_id": session_id},
- )
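
The wrap_in_class transform above is where the three collected relations become one SessionData. Flattened out of the decorator, with make_session stubbed for illustration (the real helper lives in common/protocol/sessions), the reshaping amounts to:

from typing import Any

def make_session(**kwargs: Any) -> dict:
    # Stub for illustration; the real make_session builds a Session model.
    return kwargs

def to_session_data_kwargs(d: dict) -> dict:
    # `d` is one result row: {"session": {...}, "agents": [...], "users": [...]}
    return {
        "session": make_session(
            **d["session"],
            agents=[a["id"] for a in d["agents"]],
            users=[u["id"] for u in d["users"]],
        ),
    }
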
diff --git a/agents-api/agents_api/models/session/update_session.py b/agents-api/agents_api/models/session/update_session.py
deleted file mode 100644
index cc8b61f16..000000000
--- a/agents-api/agents_api/models/session/update_session.py
+++ /dev/null
@@ -1,127 +0,0 @@
-from typing import Any, List, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateSessionRequest
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-_fields: List[str] = [
- "situation",
- "summary",
- "metadata",
- "created_at",
- "session_id",
- "developer_id",
-]
-
-# TODO: Add support for updating `render_templates` field
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {
- "id": d["session_id"],
- "updated_at": d.pop("updated_at")[0],
- "jobs": [],
- **d,
- },
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("update_session")
-@beartype
-def update_session(
- *,
- session_id: UUID,
- developer_id: UUID,
- data: UpdateSessionRequest,
-) -> tuple[list[str], dict]:
- """
- Updates a session with the provided data.
-
- Parameters:
- session_id (UUID): The unique identifier of the session to update.
- developer_id (UUID): The unique identifier of the developer associated with the session.
- data (UpdateSessionRequest): The data to update the session with.
-
- Returns:
- ResourceUpdatedResponse: The updated session.
- """
-
- update_data = data.model_dump(exclude_unset=True)
-
- session_update_cols, session_update_vals = cozo_process_mutate_data(
- {k: v for k, v in update_data.items() if v is not None}
- )
-
- # Prepare lists of columns for the query.
- session_update_cols_lst = session_update_cols.split(",")
- all_fields_lst = list(set(session_update_cols_lst).union(set(_fields)))
- all_fields = ", ".join(all_fields_lst)
- rest_fields = ", ".join(
- list(
- set(all_fields_lst)
- - set([k for k, v in update_data.items() if v is not None])
- )
- )
-
- # Construct the datalog query for updating session information.
- update_query = f"""
- input[{session_update_cols}] <- $session_update_vals
- ids[session_id, developer_id] <- [[to_uuid($session_id), to_uuid($developer_id)]]
-
- ?[{all_fields}, updated_at] :=
- input[{session_update_cols}],
- ids[session_id, developer_id],
- *sessions{{
- {rest_fields}, @ "END"
- }},
- updated_at = 'ASSERT'
-
- :put sessions {{
- {all_fields}, updated_at
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "sessions", session_id=session_id
- ),
- update_query,
- ]
-
- return (
- queries,
- {
- "session_update_vals": session_update_vals,
- "session_id": str(session_id),
- "developer_id": str(developer_id),
- },
- )
diff --git a/agents-api/agents_api/models/task/__init__.py b/agents-api/agents_api/models/task/__init__.py
deleted file mode 100644
index 2eaff3ab3..000000000
--- a/agents-api/agents_api/models/task/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# ruff: noqa: F401, F403, F405
-
-from .create_or_update_task import create_or_update_task
-from .create_task import create_task
-from .delete_task import delete_task
-from .get_task import get_task
-from .list_tasks import list_tasks
-from .patch_task import patch_task
-from .update_task import update_task
diff --git a/agents-api/agents_api/models/task/create_or_update_task.py b/agents-api/agents_api/models/task/create_or_update_task.py
deleted file mode 100644
index 1f615a3ad..000000000
--- a/agents-api/agents_api/models/task/create_or_update_task.py
+++ /dev/null
@@ -1,109 +0,0 @@
-"""
-This module contains the functionality for creating or updating a Task in the 'cozodb' database.
-It constructs and executes a datalog query to upsert Task data.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import (
- CreateOrUpdateTaskRequest,
- ResourceUpdatedResponse,
-)
-from ...common.protocol.tasks import task_to_spec
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...common.utils.datetime import utcnow
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {
- "id": d["task_id"],
- "jobs": [],
- "updated_at": d["updated_at_ms"][0] / 1000,
- **d,
- },
-)
-@cozo_query
-@increase_counter("create_or_update_task")
-@beartype
-def create_or_update_task(
- *,
- developer_id: UUID,
- agent_id: UUID,
- task_id: UUID,
- data: CreateOrUpdateTaskRequest,
-) -> tuple[list[str], dict]:
- developer_id = str(developer_id)
- agent_id = str(agent_id)
- task_id = str(task_id)
-
- data.metadata = data.metadata or {}
- data.input_schema = data.input_schema or {}
-
- task_data = task_to_spec(data).model_dump(exclude_none=True, mode="json")
- task_data.pop("task_id", None)
- task_data["created_at"] = utcnow().timestamp()
-
-    # Return the composed queries and parameters; execution happens in the cozo_query decorator.
-
- update_query = f"""
- input[{columns}] <- $values
- ids[agent_id, task_id] :=
- agent_id = to_uuid($agent_id),
- task_id = to_uuid($task_id)
-
- ?[updated_at_ms, agent_id, task_id, {columns}] :=
- ids[agent_id, task_id],
- input[{columns}],
- updated_at_ms = [floor(now() * 1000), true]
-
- :put tasks {{
- agent_id,
- task_id,
- updated_at_ms,
- {columns},
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- update_query,
- ]
-
- return (
- queries,
- {
- "values": values,
- "agent_id": agent_id,
- "task_id": task_id,
- },
- )
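
The tasks relation stores updated_at_ms as a Cozo validity pair, [epoch milliseconds, asserted flag], which is why the query writes `[floor(now() * 1000), true]` and every response transform divides the first element by 1000. The round trip in isolation:

import time

def new_validity() -> list:
    # Mirrors `updated_at_ms = [floor(now() * 1000), true]` in the query.
    return [int(time.time() * 1000), True]

def validity_to_seconds(updated_at_ms: list) -> float:
    # Mirrors the response transform: d["updated_at_ms"][0] / 1000
    return updated_at_ms[0] / 1000
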
diff --git a/agents-api/agents_api/models/task/create_task.py b/agents-api/agents_api/models/task/create_task.py
deleted file mode 100644
index 7cd1e8f4a..000000000
--- a/agents-api/agents_api/models/task/create_task.py
+++ /dev/null
@@ -1,118 +0,0 @@
-"""
-This module contains the functionality for creating a new Task in the 'cozodb' database.
-It constructs and executes a datalog query to insert Task data.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import (
- CreateTaskRequest,
- ResourceCreatedResponse,
-)
-from ...common.protocol.tasks import task_to_spec
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceCreatedResponse,
- one=True,
- transform=lambda d: {
- "id": d["task_id"],
- "jobs": [],
- "created_at": d["created_at"],
- **d,
- },
-)
-@cozo_query
-@increase_counter("create_task")
-@beartype
-def create_task(
- *,
- developer_id: UUID,
- agent_id: UUID,
- task_id: UUID | None = None,
- data: CreateTaskRequest,
-) -> tuple[list[str], dict]:
- """
- Creates a new task.
-
- Parameters:
- developer_id (UUID): The unique identifier of the developer associated with the task.
- agent_id (UUID): The unique identifier of the agent associated with the task.
- task_id (UUID | None): The unique identifier of the task. If not provided, a new UUID will be generated.
- data (CreateTaskRequest): The data to create the task with.
-
- Returns:
- ResourceCreatedResponse: The created task.
- """
-
- data.metadata = data.metadata or {}
- data.input_schema = data.input_schema or {}
-
- task_id = task_id or uuid7()
- task_spec = task_to_spec(data)
-
-    # Prepare the insert data by filtering out None values and adding task_id and agent_id.
- columns, values = cozo_process_mutate_data(
- {
- **task_spec.model_dump(exclude_none=True, mode="json"),
- "task_id": str(task_id),
- "agent_id": str(agent_id),
- }
- )
-
- create_query = f"""
- input[{columns}] <- $values
- ?[{columns}, updated_at_ms, created_at] :=
- input[{columns}],
- updated_at_ms = [floor(now() * 1000), true],
- created_at = now(),
-
- :insert tasks {{
- {columns},
- updated_at_ms,
- created_at,
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- create_query,
- ]
-
- return (
- queries,
- {
- "agent_id": str(agent_id),
- "values": values,
- },
- )
diff --git a/agents-api/agents_api/models/task/delete_task.py b/agents-api/agents_api/models/task/delete_task.py
deleted file mode 100644
index 10c377a25..000000000
--- a/agents-api/agents_api/models/task/delete_task.py
+++ /dev/null
@@ -1,91 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceDeletedResponse,
- one=True,
- transform=lambda d: {
- "id": UUID(d.pop("task_id")),
- "jobs": [],
- "deleted_at": utcnow(),
- **d,
- },
- _kind="deleted",
-)
-@cozo_query
-@beartype
-def delete_task(
- *,
- developer_id: UUID,
- agent_id: UUID,
- task_id: UUID,
-) -> tuple[list[str], dict]:
- """
- Deletes a task.
-
- Parameters:
- developer_id (UUID): The unique identifier of the developer associated with the task.
- agent_id (UUID): The unique identifier of the agent associated with the task.
- task_id (UUID): The unique identifier of the task to delete.
-
- Returns:
- ResourceDeletedResponse: The deleted task.
- """
-
- delete_query = """
- input[agent_id, task_id] <- [[
- to_uuid($agent_id),
- to_uuid($task_id),
- ]]
-
- ?[agent_id, task_id, updated_at_ms] :=
- input[agent_id, task_id],
- *tasks{
- agent_id,
- task_id,
- updated_at_ms,
- }
-
- :delete tasks {
- agent_id,
- task_id,
- updated_at_ms,
- }
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- delete_query,
- ]
-
- return (queries, {"agent_id": str(agent_id), "task_id": str(task_id)})
diff --git a/agents-api/agents_api/models/task/get_task.py b/agents-api/agents_api/models/task/get_task.py
deleted file mode 100644
index 460fdc38b..000000000
--- a/agents-api/agents_api/models/task/get_task.py
+++ /dev/null
@@ -1,120 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...common.protocol.tasks import spec_to_task
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(spec_to_task, one=True)
-@cozo_query
-@beartype
-def get_task(
- *,
- developer_id: UUID,
- task_id: UUID,
-) -> tuple[list[str], dict]:
- """
- Retrieves a task by its unique identifier.
-
- Parameters:
- developer_id (UUID): The unique identifier of the developer associated with the task.
- task_id (UUID): The unique identifier of the task to retrieve.
-
- Returns:
- Task | CreateTaskRequest: The retrieved task.
- """
-
- get_query = """
- input[task_id] <- [[to_uuid($task_id)]]
-
- task_data[
- task_id,
- agent_id,
- name,
- description,
- input_schema,
- tools,
- inherit_tools,
- workflows,
- created_at,
- updated_at,
- metadata,
- ] :=
- input[task_id],
- *tasks {
- agent_id,
- task_id,
- updated_at_ms,
- name,
- description,
- input_schema,
- tools,
- inherit_tools,
- workflows,
- created_at,
- metadata,
- @ 'END'
- },
- updated_at = to_int(updated_at_ms) / 1000
-
- ?[
- id,
- agent_id,
- name,
- description,
- input_schema,
- tools,
- inherit_tools,
- workflows,
- created_at,
- updated_at,
- metadata,
- ] :=
- task_data[
- id,
- agent_id,
- name,
- description,
- input_schema,
- tools,
- inherit_tools,
- workflows,
- created_at,
- updated_at,
- metadata,
- ]
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(
- developer_id, "tasks", task_id=task_id, parents=[("agents", "agent_id")]
- ),
- get_query,
- ]
-
- return (queries, {"task_id": str(task_id)})
diff --git a/agents-api/agents_api/models/task/list_tasks.py b/agents-api/agents_api/models/task/list_tasks.py
deleted file mode 100644
index d873e817e..000000000
--- a/agents-api/agents_api/models/task/list_tasks.py
+++ /dev/null
@@ -1,130 +0,0 @@
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...common.protocol.tasks import spec_to_task
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(spec_to_task)
-@cozo_query
-@beartype
-def list_tasks(
- *,
- developer_id: UUID,
- agent_id: UUID,
- limit: int = 100,
- offset: int = 0,
- sort_by: Literal["created_at", "updated_at"] = "created_at",
- direction: Literal["asc", "desc"] = "desc",
-) -> tuple[list[str], dict]:
- """
- Lists tasks for a given agent.
-
- Parameters:
- developer_id (UUID): The unique identifier of the developer associated with the tasks.
- agent_id (UUID): The unique identifier of the agent associated with the tasks.
- limit (int): The maximum number of tasks to return.
- offset (int): The number of tasks to skip before returning the results.
- sort_by (Literal["created_at", "updated_at"]): The field to sort the tasks by.
- direction (Literal["asc", "desc"]): The direction to sort the tasks in.
-
- Returns:
- Task[] | CreateTaskRequest[]: The list of tasks.
- """
-
- sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
-
- list_query = f"""
- input[agent_id] <- [[to_uuid($agent_id)]]
-
- task_data[
- task_id,
- agent_id,
- name,
- description,
- input_schema,
- tools,
- inherit_tools,
- workflows,
- created_at,
- updated_at,
- metadata,
- ] :=
- input[agent_id],
- *tasks {{
- agent_id,
- task_id,
- updated_at_ms,
- name,
- description,
- input_schema,
- tools,
- inherit_tools,
- workflows,
- created_at,
- metadata,
- @ 'END'
- }},
- updated_at = to_int(updated_at_ms) / 1000
-
- ?[
- task_id,
- agent_id,
- name,
- description,
- input_schema,
- tools,
- inherit_tools,
- workflows,
- created_at,
- updated_at,
- metadata,
- ] :=
- task_data[
- task_id,
- agent_id,
- name,
- description,
- input_schema,
- tools,
- inherit_tools,
- workflows,
- created_at,
- updated_at,
- metadata,
- ]
-
- :limit $limit
- :offset $offset
- :sort {sort}
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- list_query,
- ]
-
- return (queries, {"agent_id": str(agent_id), "limit": limit, "offset": offset})
diff --git a/agents-api/agents_api/models/task/patch_task.py b/agents-api/agents_api/models/task/patch_task.py
deleted file mode 100644
index 178b9daa3..000000000
--- a/agents-api/agents_api/models/task/patch_task.py
+++ /dev/null
@@ -1,133 +0,0 @@
-"""
-This module contains the functionality for patching an existing Task in the 'cozodb' database.
-It constructs and executes a datalog query to partially update Task data.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import PatchTaskRequest, ResourceUpdatedResponse, TaskSpec
-from ...common.protocol.tasks import task_to_spec
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {
- "id": d["task_id"],
- "jobs": [],
- "updated_at": d["updated_at_ms"][0] / 1000,
- **d,
- },
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("patch_task")
-@beartype
-def patch_task(
- *,
- developer_id: UUID,
- agent_id: UUID,
- task_id: UUID,
- data: PatchTaskRequest,
-) -> tuple[list[str], dict]:
- developer_id = str(developer_id)
- agent_id = str(agent_id)
- task_id = str(task_id)
-
- data.input_schema = data.input_schema or {}
- task_data = task_to_spec(data, exclude_none=True, exclude_unset=True).model_dump(
- exclude_none=True, exclude_unset=True
- )
- task_data.pop("task_id", None)
-
- assert len(task_data), "No data provided to update task"
- metadata = task_data.pop("metadata", {})
- columns, values = cozo_process_mutate_data(task_data)
-
- all_columns = list(TaskSpec.model_fields.keys())
- all_columns.remove("id")
- all_columns.remove("main")
-
- missing_columns = (
- set(all_columns)
- - set(columns.split(","))
- - {"metadata", "created_at", "updated_at"}
- )
- missing_columns_str = ",".join(missing_columns)
-
- patch_query = f"""
- input[{columns}] <- $values
- ids[agent_id, task_id] :=
- agent_id = to_uuid($agent_id),
- task_id = to_uuid($task_id)
-
- original[created_at, metadata, {missing_columns_str}] :=
- ids[agent_id, task_id],
- *tasks{{
- agent_id,
- task_id,
- created_at,
- metadata,
- {missing_columns_str},
- }}
-
- ?[created_at, updated_at_ms, agent_id, task_id, metadata, {columns}, {missing_columns_str}] :=
- ids[agent_id, task_id],
- input[{columns}],
- original[created_at, _metadata, {missing_columns_str}],
- updated_at_ms = [floor(now() * 1000), true],
- metadata = _metadata ++ $metadata
-
- :put tasks {{
- agent_id,
- task_id,
- created_at,
- updated_at_ms,
- metadata,
- {columns}, {missing_columns_str}
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- patch_query,
- ]
-
- return (
- queries,
- {
- "values": values,
- "agent_id": agent_id,
- "task_id": task_id,
- "metadata": metadata,
- },
- )
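
patch_task derives its pass-through column list from the TaskSpec schema instead of a hand-maintained list. Separated from the query template, with a small stand-in model (the real TaskSpec is autogenerated and has more fields):

from pydantic import BaseModel

class TaskSpec(BaseModel):  # stand-in for the autogenerated model
    id: str
    main: list
    name: str
    description: str
    inherit_tools: bool
    metadata: dict

def missing_columns_str(columns: str) -> str:
    all_columns = list(TaskSpec.model_fields.keys())
    all_columns.remove("id")    # not stored as a tasks column
    all_columns.remove("main")  # workflows are stored separately
    missing = (
        set(all_columns)
        - set(columns.split(","))
        - {"metadata", "created_at", "updated_at"}
    )
    return ",".join(missing)
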
diff --git a/agents-api/agents_api/models/task/update_task.py b/agents-api/agents_api/models/task/update_task.py
deleted file mode 100644
index cd98d85d5..000000000
--- a/agents-api/agents_api/models/task/update_task.py
+++ /dev/null
@@ -1,129 +0,0 @@
-"""
-This module contains the functionality for updating a Task in the 'cozodb' database.
-It constructs and executes a datalog query to update Task data.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateTaskRequest
-from ...common.protocol.tasks import task_to_spec
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(HTTPException, status_code=400),
- ValidationError: partialclass(HTTPException, status_code=400),
- TypeError: partialclass(HTTPException, status_code=400),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {
- "id": d["task_id"],
- "jobs": [],
- "updated_at": d["updated_at_ms"][0] / 1000,
- **d,
- },
-)
-@cozo_query
-@increase_counter("update_task")
-@beartype
-def update_task(
- *,
- developer_id: UUID,
- agent_id: UUID,
- task_id: UUID,
- data: UpdateTaskRequest,
-) -> tuple[list[str], dict]:
- """
- Updates a task.
-
- Parameters:
- developer_id (UUID): The unique identifier of the developer associated with the task.
- agent_id (UUID): The unique identifier of the agent associated with the task.
- task_id (UUID): The unique identifier of the task to update.
- data (UpdateTaskRequest): The data to update the task with.
-
- Returns:
- ResourceUpdatedResponse: The updated task.
- """
-
- developer_id = str(developer_id)
- agent_id = str(agent_id)
- task_id = str(task_id)
-
- data.metadata = data.metadata or {}
- data.input_schema = data.input_schema or {}
-
- task_data = task_to_spec(data, exclude_none=True, exclude_unset=True).model_dump(
- exclude_none=True, exclude_unset=True
- )
- task_data.pop("task_id", None)
-
- columns, values = cozo_process_mutate_data(task_data)
-
- update_query = f"""
- input[{columns}] <- $values
- ids[agent_id, task_id] :=
- agent_id = to_uuid($agent_id),
- task_id = to_uuid($task_id)
-
- original[created_at] :=
- ids[agent_id, task_id],
- *tasks{{
- agent_id,
- task_id,
- created_at,
- }}
-
- ?[created_at, updated_at_ms, agent_id, task_id, {columns}] :=
- ids[agent_id, task_id],
- input[{columns}],
- original[created_at],
- updated_at_ms = [floor(now() * 1000), true]
-
- :put tasks {{
- agent_id,
- task_id,
- created_at,
- updated_at_ms,
- {columns},
- }}
-
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id),
- update_query,
- ]
-
- return (
- queries,
- {
- "values": values,
- "agent_id": agent_id,
- "task_id": task_id,
- },
- )
diff --git a/agents-api/agents_api/models/user/__init__.py b/agents-api/agents_api/models/user/__init__.py
deleted file mode 100644
index 5ae76865f..000000000
--- a/agents-api/agents_api/models/user/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""
-This module is responsible for managing user data in the CozoDB database. It provides functionality to create, retrieve, list, and update user information.
-
-Functions:
-- create_user: Creates a new user in the CozoDB database, accepting parameters such as user ID, developer ID, name, about, and optional metadata.
-- get_user: Retrieves a user's information from the CozoDB database by their user ID and developer ID.
-- list_users: Lists users associated with a specific developer, with support for pagination and metadata-based filtering.
-- patch_user: Updates a user's information in the CozoDB database, allowing for changes to fields such as name, about, and metadata.
-"""
-
-# ruff: noqa: F401, F403, F405
-
-from .create_or_update_user import create_or_update_user
-from .create_user import create_user
-from .get_user import get_user
-from .list_users import list_users
-from .patch_user import patch_user
-from .update_user import update_user
diff --git a/agents-api/agents_api/models/user/create_or_update_user.py b/agents-api/agents_api/models/user/create_or_update_user.py
deleted file mode 100644
index 3e9b1f3a6..000000000
--- a/agents-api/agents_api/models/user/create_or_update_user.py
+++ /dev/null
@@ -1,125 +0,0 @@
-"""
-This module contains the functionality for creating or updating users in the CozoDB database.
-It includes functions to construct and execute datalog queries for upserting user records.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import CreateOrUpdateUserRequest, User
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(User, one=True, transform=lambda d: {"id": UUID(d.pop("user_id")), **d})
-@cozo_query
-@increase_counter("create_or_update_user")
-@beartype
-def create_or_update_user(
- *,
- developer_id: UUID,
- user_id: UUID,
- data: CreateOrUpdateUserRequest,
-) -> tuple[list[str], dict]:
- """
-    Constructs and executes a datalog query to create or update a user in the database.
-
- Parameters:
- user_id (UUID): The unique identifier for the user.
- developer_id (UUID): The unique identifier for the developer creating the user.
- name (str): The name of the user.
- about (str): A description of the user.
- metadata (dict, optional): A dictionary of metadata for the user. Defaults to an empty dict.
- client (CozoClient, optional): The CozoDB client instance to use for the query. Defaults to a preconfigured client instance.
-
- Returns:
-        User: The newly created or updated user record.
- """
-
- # Extract the user data from the payload
- data.metadata = data.metadata or {}
-
- user_data = data.model_dump()
-
-    # Create or update the user
-    # Construct a query to upsert the user record into the users table
- user_query = """
- input[user_id, developer_id, name, about, metadata, updated_at] <- [
- [$user_id, $developer_id, $name, $about, $metadata, now()]
- ]
-
- ?[user_id, developer_id, name, about, metadata, created_at, updated_at] :=
- input[_user_id, developer_id, name, about, metadata, updated_at],
- *users{
- developer_id,
- user_id,
- created_at,
- },
- user_id = to_uuid(_user_id),
-
- ?[user_id, developer_id, name, about, metadata, created_at, updated_at] :=
- input[_user_id, developer_id, name, about, metadata, updated_at],
- not *users{
- developer_id,
- user_id,
- }, created_at = now(),
- user_id = to_uuid(_user_id),
-
- :put users {
- developer_id,
- user_id =>
- name,
- about,
- metadata,
- created_at,
- updated_at,
- }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- user_query,
- ]
-
- return (
- queries,
- {
- "user_id": str(user_id),
- "developer_id": str(developer_id),
- **user_data,
- },
- )
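
The :put in create_or_update_user gets its upsert behavior from the two alternative head rules: one fires when the row already exists and keeps its created_at, the other fires when it does not and stamps created_at = now(). The same branch written as plain Python over an in-memory store, for comparison:

import time

def upsert_user(store: dict, key: tuple, fields: dict) -> None:
    existing = store.get(key)
    # Keep the original creation time on update; stamp it on insert.
    created_at = existing["created_at"] if existing else time.time()
    store[key] = {**fields, "created_at": created_at, "updated_at": time.time()}
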
diff --git a/agents-api/agents_api/models/user/create_user.py b/agents-api/agents_api/models/user/create_user.py
deleted file mode 100644
index 62975a6d4..000000000
--- a/agents-api/agents_api/models/user/create_user.py
+++ /dev/null
@@ -1,116 +0,0 @@
-"""
-This module contains the functionality for creating a new user in the CozoDB database.
-It defines a query for inserting user data into the 'users' relation.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-from uuid_extensions import uuid7
-
-from ...autogen.openapi_model import CreateUserRequest, User
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- lambda e: isinstance(e, QueryException)
- and "asserted to return some results, but returned none"
- in str(e): lambda *_: HTTPException(
- detail="Developer not found. Please ensure the provided auth token (which refers to your developer_id) is valid and the developer has the necessary permissions to create an agent.",
- status_code=403,
- ),
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(
- User,
- one=True,
- transform=lambda d: {"id": UUID(d.pop("user_id")), **d},
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("create_user")
-@beartype
-def create_user(
- *,
- developer_id: UUID,
- user_id: UUID | None = None,
- data: CreateUserRequest,
-) -> tuple[list[str], dict]:
- """
- Constructs and executes a datalog query to create a new user in the CozoDB database.
-
- Parameters:
- user_id (UUID): The unique identifier for the user.
- developer_id (UUID): The unique identifier for the developer creating the user.
- name (str): The name of the user.
- about (str): A brief description about the user.
- metadata (dict, optional): Additional metadata about the user. Defaults to an empty dict.
- client (CozoClient, optional): The CozoDB client instance to run the query. Defaults to a pre-configured client instance.
-
- Returns:
-        User: The newly created user record.
- """
-
- user_id = user_id or uuid7()
- data.metadata = data.metadata or {}
- user_data = data.model_dump()
-
- create_query = """
-    # Create the user (developer verification runs first)
- ?[user_id, developer_id, name, about, metadata] <- [
- [to_uuid($user_id), to_uuid($developer_id), $name, $about, $metadata]
- ]
-
- :insert users {
- developer_id,
- user_id =>
- name,
- about,
- metadata,
- }
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- create_query,
- ]
-
- return (
- queries,
- {
- "user_id": str(user_id),
- "developer_id": str(developer_id),
- **user_data,
- },
- )
diff --git a/agents-api/agents_api/models/user/delete_user.py b/agents-api/agents_api/models/user/delete_user.py
deleted file mode 100644
index 7f08316be..000000000
--- a/agents-api/agents_api/models/user/delete_user.py
+++ /dev/null
@@ -1,116 +0,0 @@
-"""
-This module contains the implementation of the delete_user function, which is responsible for deleting a user and their associated docs from the CozoDB database.
-"""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceDeletedResponse
-from ...common.utils.datetime import utcnow
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- lambda e: isinstance(e, QueryException)
- and "Developer does not exist" in str(e): lambda *_: HTTPException(
- detail="The specified developer does not exist.",
- status_code=403,
- ),
- lambda e: isinstance(e, QueryException)
- and "Developer does not own resource"
- in e.resp["display"]: lambda *_: HTTPException(
- detail="The specified developer does not own the requested resource. Please verify the ownership or check if the developer ID is correct.",
- status_code=404,
- ),
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(
- ResourceDeletedResponse,
- one=True,
- transform=lambda d: {
- "id": UUID(d.pop("user_id")),
- "deleted_at": utcnow(),
- "jobs": [],
- },
- _kind="deleted",
-)
-@cozo_query
-@beartype
-def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[list[str], dict]:
- """
-    Constructs and returns datalog queries for deleting a user and their related documents from the database.
-
-    Parameters:
-        developer_id (UUID): The UUID of the developer owning the user.
-        user_id (UUID): The UUID of the user to be deleted.
-
-    Returns:
-        tuple[list[str], dict]: The constructed datalog queries and their parameters.
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "users", user_id=user_id),
- """
- # Delete docs
- ?[owner_type, owner_id, doc_id] :=
- *docs{
- owner_id,
- owner_type,
- doc_id,
- },
- owner_id = to_uuid($user_id),
- owner_type = "user"
-
- :delete docs {
- owner_type,
- owner_id,
- doc_id
- }
- :returning
- """,
- """
- # Delete the user
- ?[user_id, developer_id] <- [[$user_id, $developer_id]]
-
- :delete users {
- developer_id,
- user_id
- }
- :returning
- """,
- ]
-
- return (queries, {"user_id": str(user_id), "developer_id": str(developer_id)})
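
The two verification helpers at the head of `queries` are guard blocks.
Based on their definitions in `models/utils.py` (removed later in this
patch), `verify_developer_owns_resource_query(developer_id, "users",
user_id=user_id)` renders to roughly the following CozoScript, with the
placeholder UUIDs standing in for the real values:

    found[count(user_id)] :=
        developer_id = to_uuid("<developer_id>"),
        user_id = to_uuid("<user_id>"),
        *users{
            developer_id,
            user_id,
        },

    ?[exists] :=
        found[num],
        exists = num > 0,
        assert(exists, "Developer does not own resource users with user_id <user_id>")

    :limit 1

Since all blocks run in one transaction, a failed assert here aborts the
`:delete` blocks that follow it.
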
diff --git a/agents-api/agents_api/models/user/get_user.py b/agents-api/agents_api/models/user/get_user.py
deleted file mode 100644
index 69b3da883..000000000
--- a/agents-api/agents_api/models/user/get_user.py
+++ /dev/null
@@ -1,107 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import User
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- lambda e: isinstance(e, QueryException)
- and "Developer does not exist" in str(e): lambda *_: HTTPException(
- detail="The specified developer does not exist.",
- status_code=403,
- ),
- lambda e: isinstance(e, QueryException)
- and "Developer does not own resource"
- in e.resp["display"]: lambda *_: HTTPException(
- detail="The specified developer does not own the requested resource. Please verify the ownership or check if the developer ID is correct.",
- status_code=404,
- ),
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(User, one=True)
-@cozo_query
-@beartype
-def get_user(
- *,
- developer_id: UUID,
- user_id: UUID,
-) -> tuple[list[str], dict]:
- """
-    Retrieves a user by their unique identifier.
-
-    Parameters:
-        developer_id (UUID): The unique identifier of the developer associated with the user.
-        user_id (UUID): The unique identifier of the user to retrieve.
-
-    Returns:
-        tuple[list[str], dict]: The constructed datalog queries and their parameters.
- """
-
- # Convert UUIDs to strings for query compatibility.
- user_id = str(user_id)
- developer_id = str(developer_id)
-
- get_query = """
- input[developer_id, user_id] <- [[to_uuid($developer_id), to_uuid($user_id)]]
-
- ?[
- id,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- ] := input[developer_id, id],
- *users {
- user_id: id,
- developer_id,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- }
-
- :limit 1
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "users", user_id=user_id),
- get_query,
- ]
-
- return (queries, {"developer_id": developer_id, "user_id": user_id})
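
The `get_query` above follows the pattern used throughout these modules: an
`input` relation seeds the join, and `$developer_id` / `$user_id` in the
query text are bound from the params dict returned at the end. Because the
decorators forward `__wrapped__`, the undecorated tuple is still reachable,
which is how the consumption step can be sketched (assuming a pycozo client
object and hypothetical `dev_id` / `uid` values):

    # Sketch: how the decorator layer runs the returned tuple.
    queries, params = get_user.__wrapped__(developer_id=dev_id, user_id=uid)
    program = "{ " + "}\n\n{\n".join(str(q) for q in queries if q) + " }"
    result = client.run(program, params)  # a pandas DataFrame
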
diff --git a/agents-api/agents_api/models/user/list_users.py b/agents-api/agents_api/models/user/list_users.py
deleted file mode 100644
index f1e06adf4..000000000
--- a/agents-api/agents_api/models/user/list_users.py
+++ /dev/null
@@ -1,116 +0,0 @@
-from typing import Any, Literal, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import User
-from ...common.utils import json
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(User)
-@cozo_query
-@beartype
-def list_users(
- *,
- developer_id: UUID,
- limit: int = 100,
- offset: int = 0,
- sort_by: Literal["created_at", "updated_at"] = "created_at",
- direction: Literal["asc", "desc"] = "desc",
- metadata_filter: dict[str, Any] = {},
-) -> tuple[list[str], dict]:
- """
- Queries the 'cozodb' database to list users associated with a specific developer.
-
- Parameters:
- developer_id (UUID): The unique identifier of the developer.
- limit (int): The maximum number of users to return. Defaults to 100.
- offset (int): The number of users to skip before starting to collect the result set. Defaults to 0.
- sort_by (Literal["created_at", "updated_at"]): The field to sort the users by. Defaults to "created_at".
- direction (Literal["asc", "desc"]): The direction to sort the users in. Defaults to "desc".
- metadata_filter (dict[str, Any]): A dictionary representing filters to apply on user metadata.
-
-    Returns:
-        tuple[list[str], dict]: The constructed datalog queries and their parameters.
- """
- # Construct a filter string for the metadata based on the provided dictionary.
- metadata_filter_str = ", ".join(
- [
- f"metadata->{json.dumps(k)} == {json.dumps(v)}"
- for k, v in metadata_filter.items()
- ]
- )
-
- sort = f"{'-' if direction == 'desc' else ''}{sort_by}"
-
- # Define the datalog query for retrieving user information based on the specified filters and sorting them by creation date in descending order.
- list_query = f"""
- input[developer_id] <- [[to_uuid($developer_id)]]
-
- ?[
- id,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- ] :=
- input[developer_id],
- *users {{
- user_id: id,
- developer_id,
- name,
- about,
- created_at,
- updated_at,
- metadata,
- }},
- {metadata_filter_str}
-
- :limit $limit
- :offset $offset
- :sort {sort}
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- list_query,
- ]
-
-    # Return the datalog queries along with their parameters; execution is handled by the cozo_query decorator.
- return (
- queries,
- {"developer_id": str(developer_id), "limit": limit, "offset": offset},
- )
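
For illustration, the metadata filter builder above turns each key/value
pair into one CozoScript predicate. With the stdlib `json` module standing
in for the project's `common.utils.json` wrapper (an assumption for the
sketch), the rendering looks like:

    import json  # stand-in for ...common.utils.json

    metadata_filter = {"tier": "pro", "beta": True}
    metadata_filter_str = ", ".join(
        f"metadata->{json.dumps(k)} == {json.dumps(v)}"
        for k, v in metadata_filter.items()
    )
    # -> metadata->"tier" == "pro", metadata->"beta" == true
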
diff --git a/agents-api/agents_api/models/user/patch_user.py b/agents-api/agents_api/models/user/patch_user.py
deleted file mode 100644
index bd3fc0246..000000000
--- a/agents-api/agents_api/models/user/patch_user.py
+++ /dev/null
@@ -1,121 +0,0 @@
-"""Module for generating datalog queries to update user information in the 'cozodb' database."""
-
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import PatchUserRequest, ResourceUpdatedResponse
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...common.utils.datetime import utcnow
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {"id": d["user_id"], "jobs": [], **d},
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("patch_user")
-@beartype
-def patch_user(
- *,
- developer_id: UUID,
- user_id: UUID,
- data: PatchUserRequest,
-) -> tuple[list[str], dict]:
- """
- Generates a datalog query for updating a user's information.
-
- Parameters:
- developer_id (UUID): The UUID of the developer.
- user_id (UUID): The UUID of the user to be updated.
-        data (PatchUserRequest): The fields to update; unset fields are left unchanged.
-
-    Returns:
-        tuple[list[str], dict]: The constructed datalog queries and their parameters.
- """
-
- update_data = data.model_dump(exclude_unset=True)
-
- # Prepare data for mutation by filtering out None values and adding system-generated fields.
- metadata = update_data.pop("metadata", {}) or {}
- user_update_cols, user_update_vals = cozo_process_mutate_data(
- {
- **{k: v for k, v in update_data.items() if v is not None},
- "user_id": str(user_id),
- "developer_id": str(developer_id),
- "updated_at": utcnow().timestamp(),
- }
- )
-
- # Construct the datalog query for updating user information.
- update_query = f"""
- # update the user
- input[{user_update_cols}] <- $user_update_vals
-
- ?[{user_update_cols}, metadata] :=
- input[{user_update_cols}],
- *users:developer_id_metadata_user_id_idx {{
- developer_id: to_uuid($developer_id),
- user_id: to_uuid($user_id),
- metadata: md,
- }},
- metadata = concat(md, $metadata)
-
- :update users {{
- {user_update_cols}, metadata
- }}
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "users", user_id=user_id),
- update_query,
- ]
-
- return (
- queries,
- {
- "user_update_vals": user_update_vals,
- "metadata": metadata,
- "user_id": str(user_id),
- "developer_id": str(developer_id),
- },
- )
diff --git a/agents-api/agents_api/models/user/update_user.py b/agents-api/agents_api/models/user/update_user.py
deleted file mode 100644
index 68e6e6c25..000000000
--- a/agents-api/agents_api/models/user/update_user.py
+++ /dev/null
@@ -1,118 +0,0 @@
-from typing import Any, TypeVar
-from uuid import UUID
-
-from beartype import beartype
-from fastapi import HTTPException
-from pycozo.client import QueryException
-from pydantic import ValidationError
-
-from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateUserRequest
-from ...common.utils.cozo import cozo_process_mutate_data
-from ...metrics.counters import increase_counter
-from ..utils import (
- cozo_query,
- partialclass,
- rewrap_exceptions,
- verify_developer_id_query,
- verify_developer_owns_resource_query,
- wrap_in_class,
-)
-
-ModelT = TypeVar("ModelT", bound=Any)
-T = TypeVar("T")
-
-
-@rewrap_exceptions(
- {
- QueryException: partialclass(
- HTTPException,
- status_code=400,
- detail="A database query failed to return the expected results. This might occur if the requested resource doesn't exist or your query parameters are incorrect.",
- ),
- ValidationError: partialclass(
- HTTPException,
- status_code=400,
- detail="Input validation failed. Please check the provided data for missing or incorrect fields, and ensure it matches the required format.",
- ),
- TypeError: partialclass(
- HTTPException,
- status_code=400,
- detail="A type mismatch occurred. This likely means the data provided is of an incorrect type (e.g., string instead of integer). Please review the input and try again.",
- ),
- }
-)
-@wrap_in_class(
- ResourceUpdatedResponse,
- one=True,
- transform=lambda d: {"id": d["user_id"], "jobs": [], **d},
- _kind="inserted",
-)
-@cozo_query
-@increase_counter("update_user")
-@beartype
-def update_user(
- *, developer_id: UUID, user_id: UUID, data: UpdateUserRequest
-) -> tuple[list[str], dict]:
- """
- Updates user information in the 'cozodb' database.
-
- Parameters:
- developer_id (UUID): The developer's unique identifier.
- user_id (UUID): The user's unique identifier.
-        data (UpdateUserRequest): The full replacement payload for the user.
-
-    Returns:
-        tuple[list[str], dict]: The constructed datalog queries and their parameters.
- """
- user_id = str(user_id)
- developer_id = str(developer_id)
- update_data = data.model_dump()
-
- # Prepares the update data by filtering out None values and adding user_id and developer_id.
- user_update_cols, user_update_vals = cozo_process_mutate_data(
- {
- **{k: v for k, v in update_data.items() if v is not None},
- "user_id": user_id,
- "developer_id": developer_id,
- }
- )
-
- # Constructs the update operation for the user, setting new values and updating 'updated_at'.
- update_query = f"""
- # update the user
- # This line updates the user's information based on the provided columns and values.
- input[{user_update_cols}] <- $user_update_vals
- original[created_at] := *users{{
- developer_id: to_uuid($developer_id),
- user_id: to_uuid($user_id),
- created_at,
- }},
-
- ?[created_at, updated_at, {user_update_cols}] :=
- input[{user_update_cols}],
- original[created_at],
- updated_at = now(),
-
- :put users {{
- created_at,
- updated_at,
- {user_update_cols}
- }}
- :returning
- """
-
- queries = [
- verify_developer_id_query(developer_id),
- verify_developer_owns_resource_query(developer_id, "users", user_id=user_id),
- update_query,
- ]
-
- return (
- queries,
- {
- "user_update_vals": user_update_vals,
- "developer_id": developer_id,
- "user_id": user_id,
- },
- )
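
`cozo_process_mutate_data` is imported from `common.utils.cozo` and is not
shown in this patch. From its usage in `patch_user` and `update_user`, it
evidently splits a mutation dict into a comma-joined column list (spliced
into the `input[...]` header and the `:put`/`:update` clauses) and a row
payload bound as `$user_update_vals`. A hypothetical illustration of that
contract, not the actual implementation:

    # Hypothetical shapes, inferred from the call sites above.
    user_update_cols = "about, developer_id, name, user_id"
    user_update_vals = [["An updated bio", "<developer_id>", "Alice", "<user_id>"]]
    # ...so the rendered rule header becomes:
    # input[about, developer_id, name, user_id] <- $user_update_vals
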
diff --git a/agents-api/agents_api/models/utils.py b/agents-api/agents_api/models/utils.py
deleted file mode 100644
index 08006d1c7..000000000
--- a/agents-api/agents_api/models/utils.py
+++ /dev/null
@@ -1,578 +0,0 @@
-import concurrent.futures
-import inspect
-import re
-import time
-from functools import partialmethod, wraps
-from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeVar
-from uuid import UUID
-
-import pandas as pd
-from asyncpg import Record
-from fastapi import HTTPException
-from httpcore import ConnectError, NetworkError, TimeoutException
-from httpx import ConnectError as HttpxConnectError
-from httpx import RequestError
-from pydantic import BaseModel
-from requests.exceptions import ConnectionError, Timeout
-
-from ..common.utils.cozo import uuid_int_list_to_uuid
-from ..env import do_verify_developer, do_verify_developer_owns_resource
-
-P = ParamSpec("P")
-T = TypeVar("T")
-ModelT = TypeVar("ModelT", bound=BaseModel)
-
-
-def fix_uuid(
- item: dict[str, Any], attr_regex: str = r"^(?:id|.*_id)$"
-) -> dict[str, Any]:
- # find the attributes that are ids
- id_attrs = [
- attr for attr in item.keys() if re.match(attr_regex, attr) and item[attr]
- ]
-
- if not id_attrs:
- return item
-
- fixed = {
- **item,
- **{
- attr: uuid_int_list_to_uuid(item[attr])
- for attr in id_attrs
- if isinstance(item[attr], list)
- },
- }
-
- return fixed
-
-
-def fix_uuid_list(
- items: list[dict[str, Any]], attr_regex: str = r"^(?:id|.*_id)$"
-) -> list[dict[str, Any]]:
- fixed = list(map(lambda item: fix_uuid(item, attr_regex), items))
- return fixed
-
-
-def fix_uuid_if_present(item: Any, attr_regex: str = r"^(?:id|.*_id)$") -> Any:
- match item:
- case [dict(), *_]:
- return fix_uuid_list(item, attr_regex)
-
- case dict():
- return fix_uuid(item, attr_regex)
-
- case _:
- return item
-
-
-def partialclass(cls, *args, **kwargs):
- cls_signature = inspect.signature(cls)
- bound = cls_signature.bind_partial(*args, **kwargs)
-
- # The `updated=()` argument is necessary to avoid a TypeError when using @wraps for a class
- @wraps(cls, updated=())
- class NewCls(cls):
- __init__ = partialmethod(cls.__init__, *bound.args, **bound.kwargs)
-
- return NewCls
-
-
-def mark_session_updated_query(developer_id: UUID | str, session_id: UUID | str) -> str:
- return f"""
- input[developer_id, session_id] <- [[
- to_uuid("{str(developer_id)}"),
- to_uuid("{str(session_id)}"),
- ]]
-
- ?[
- developer_id,
- session_id,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- updated_at,
- ] :=
- input[developer_id, session_id],
- *sessions {{
- session_id,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- @ 'END'
- }},
- updated_at = [floor(now()), true]
-
- :put sessions {{
- developer_id,
- session_id,
- situation,
- summary,
- created_at,
- metadata,
- render_templates,
- token_budget,
- context_overflow,
- updated_at,
- }}
- """
-
-
-def verify_developer_id_query(developer_id: UUID | str) -> str:
- if not do_verify_developer:
- return "?[exists] := exists = true"
-
- return f"""
- matched[count(developer_id)] :=
- *developers{{
- developer_id,
- }}, developer_id = to_uuid("{str(developer_id)}")
-
- ?[exists] :=
- matched[num],
- exists = num > 0,
- assert(exists, "Developer does not exist")
-
- :limit 1
- """
-
-
-def verify_developer_owns_resource_query(
- developer_id: UUID | str,
- resource: str,
- parents: list[tuple[str, str]] | None = None,
- **resource_id,
-) -> str:
- if not do_verify_developer_owns_resource:
- return "?[exists] := exists = true"
-
- parents = parents or []
- resource_id_key, resource_id_value = next(iter(resource_id.items()))
-
- parents.append((resource, resource_id_key))
- parent_keys = ["developer_id", *map(lambda x: x[1], parents)]
-
- rule_head = f"""
- found[count({resource_id_key})] :=
- developer_id = to_uuid("{str(developer_id)}"),
- {resource_id_key} = to_uuid("{str(resource_id_value)}"),
- """
-
- rule_body = ""
- for parent_key, (relation, key) in zip(parent_keys, parents):
- rule_body += f"""
- *{relation}{{
- {parent_key},
- {key},
- }},
- """
-
- assertion = f"""
- ?[exists] :=
- found[num],
- exists = num > 0,
- assert(exists, "Developer does not own resource {resource} with {resource_id_key} {resource_id_value}")
-
- :limit 1
- """
-
- rule = rule_head + rule_body + assertion
- return rule
-
-
-def make_cozo_json_query(fields):
- return ", ".join(f'"{field}": {field}' for field in fields).strip()
-
-
-def cozo_query(
- func: Callable[P, tuple[str | list[str | None], dict]] | None = None,
- debug: bool | None = None,
- only_on_error: bool = False,
- timeit: bool = False,
-):
- def cozo_query_dec(func: Callable[P, tuple[str | list[Any], dict]]):
- """
- Decorator that wraps a function that takes arbitrary arguments, and
- returns a (query string, variables) tuple.
-
- The wrapped function should additionally take a client keyword argument
- and then run the query using the client, returning a DataFrame.
- """
-
- from pprint import pprint
-
- from tenacity import (
- retry,
- retry_if_exception,
- stop_after_attempt,
- wait_exponential,
- )
-
- def is_resource_busy(e: Exception) -> bool:
- return (
- isinstance(e, HTTPException)
- and e.status_code == 429
- and not getattr(e, "cozo_offline", False)
- )
-
- @retry(
- stop=stop_after_attempt(4),
- wait=wait_exponential(multiplier=1, min=4, max=10),
- retry=retry_if_exception(is_resource_busy),
- )
- @wraps(func)
- def wrapper(*args: P.args, client=None, **kwargs: P.kwargs) -> pd.DataFrame:
- queries, variables = func(*args, **kwargs)
-
- if isinstance(queries, str):
- query = queries
- else:
- queries = [str(query) for query in queries if query]
- query = "}\n\n{\n".join(queries)
- query = f"{{ {query} }}"
-
- not only_on_error and debug and print(query)
- not only_on_error and debug and pprint(
- dict(
- variables=variables,
- )
- )
-
- # Run the query
- from ..clients import cozo
-
- try:
- client = client or cozo.get_cozo_client()
-
- start = timeit and time.perf_counter()
- result = client.run(query, variables)
- end = timeit and time.perf_counter()
-
- timeit and print(f"Cozo query time: {end - start:.2f} seconds")
-
- except Exception as e:
- if only_on_error and debug:
- print(query)
- pprint(variables)
-
- debug and print(repr(e))
-
- pretty_error = repr(e).lower()
- cozo_busy = ("busy" in pretty_error) or (
- "when executing against relation '_" in pretty_error
- )
- cozo_offline = isinstance(e, ConnectionError) and (
- ("connection refused" in pretty_error)
- or ("name or service not known" in pretty_error)
- )
- connection_error = isinstance(
- e,
- (
- ConnectionError,
- Timeout,
- TimeoutException,
- NetworkError,
- RequestError,
- ),
- )
-
- if cozo_busy or connection_error or cozo_offline:
- exc = HTTPException(
- status_code=429, detail="Resource busy. Please try again later."
- )
- exc.cozo_offline = cozo_offline
- raise exc from e
-
- raise
-
- # Need to fix the UUIDs in the result
- result = result.map(fix_uuid_if_present)
-
- not only_on_error and debug and pprint(
- dict(
- result=result.to_dict(orient="records"),
- )
- )
-
- return result
-
-        # Set the wrapped function as an attribute of the wrapper,
-        # forwarding the __wrapped__ attribute if it exists.
- setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
-
- return wrapper
-
- if func is not None and callable(func):
- return cozo_query_dec(func)
-
- return cozo_query_dec
-
-
-def cozo_query_async(
- func: Callable[
- P,
- tuple[str | list[str | None], dict]
- | Awaitable[tuple[str | list[str | None], dict]],
- ]
- | None = None,
- debug: bool | None = None,
- only_on_error: bool = False,
- timeit: bool = False,
-):
- def cozo_query_dec(
- func: Callable[
- P, tuple[str | list[Any], dict] | Awaitable[tuple[str | list[Any], dict]]
- ],
- ):
- """
- Decorator that wraps a function that takes arbitrary arguments, and
- returns a (query string, variables) tuple.
-
- The wrapped function should additionally take a client keyword argument
- and then run the query using the client, returning a DataFrame.
- """
-
- from pprint import pprint
-
- from tenacity import (
- retry,
- retry_if_exception,
- stop_after_attempt,
- wait_exponential,
- )
-
- def is_resource_busy(e: Exception) -> bool:
- return (
- isinstance(e, HTTPException)
- and e.status_code == 429
- and not getattr(e, "cozo_offline", False)
- )
-
- @retry(
- stop=stop_after_attempt(6),
- wait=wait_exponential(multiplier=1.2, min=3, max=10),
- retry=retry_if_exception(is_resource_busy),
- reraise=True,
- )
- @wraps(func)
- async def wrapper(
- *args: P.args, client=None, **kwargs: P.kwargs
- ) -> pd.DataFrame:
- if inspect.iscoroutinefunction(func):
- queries, variables = await func(*args, **kwargs)
- else:
- queries, variables = func(*args, **kwargs)
-
- if isinstance(queries, str):
- query = queries
- else:
- queries = [str(query) for query in queries if query]
- query = "}\n\n{\n".join(queries)
- query = f"{{ {query} }}"
-
- not only_on_error and debug and print(query)
- not only_on_error and debug and pprint(
- dict(
- variables=variables,
- )
- )
-
- # Run the query
- from ..clients import cozo
-
- try:
- client = client or cozo.get_async_cozo_client()
-
- start = timeit and time.perf_counter()
- result = await client.run(query, variables)
- end = timeit and time.perf_counter()
-
- timeit and print(f"Cozo query time: {end - start:.2f} seconds")
-
- except Exception as e:
- if only_on_error and debug:
- print(query)
- pprint(variables)
-
- debug and print(repr(e))
-
- pretty_error = repr(e).lower()
- cozo_busy = ("busy" in pretty_error) or (
- "when executing against relation '_" in pretty_error
- )
-                cozo_offline = (
-                    isinstance(e, (ConnectError, HttpxConnectError))
-                    and (
-                        ("all connection attempts failed" in pretty_error)
-                        or ("name or service not known" in pretty_error)
-                    )
-                )
- connection_error = isinstance(
- e,
- (
- ConnectError,
- HttpxConnectError,
- TimeoutException,
- NetworkError,
- RequestError,
- ),
- )
-
- if cozo_busy or connection_error or cozo_offline:
- exc = HTTPException(
- status_code=429, detail="Resource busy. Please try again later."
- )
- exc.cozo_offline = cozo_offline
- raise exc from e
-
- raise
-
- # Need to fix the UUIDs in the result
- result = result.map(fix_uuid_if_present)
-
- not only_on_error and debug and pprint(
- dict(
- result=result.to_dict(orient="records"),
- )
- )
-
- return result
-
-        # Set the wrapped function as an attribute of the wrapper,
-        # forwarding the __wrapped__ attribute if it exists.
- setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
-
- return wrapper
-
- if func is not None and callable(func):
- return cozo_query_dec(func)
-
- return cozo_query_dec
-
-
-def wrap_in_class(
- cls: Type[ModelT] | Callable[..., ModelT],
- one: bool = False,
- transform: Callable[[dict], dict] | None = None,
- _kind: str | None = None,
-):
- def _return_data(rec: Record):
- # Convert df to list of dicts
- # if _kind:
- # rec = rec[rec["_kind"] == _kind]
-
- data = list(rec.items())
-
- nonlocal transform
- transform = transform or (lambda x: x)
-
- if one:
- assert len(data) >= 1, "Expected one result, got none"
- obj: ModelT = cls(**transform(data[0]))
- return obj
-
- objs: list[ModelT] = [cls(**item) for item in map(transform, data)]
- return objs
-
- def decorator(func: Callable[P, pd.DataFrame | Awaitable[pd.DataFrame]]):
- @wraps(func)
- def wrapper(*args: P.args, **kwargs: P.kwargs) -> ModelT | list[ModelT]:
- return _return_data(func(*args, **kwargs))
-
- @wraps(func)
- async def async_wrapper(
- *args: P.args, **kwargs: P.kwargs
- ) -> ModelT | list[ModelT]:
- return _return_data(await func(*args, **kwargs))
-
-        # Set the wrapped function as an attribute of the wrapper,
-        # forwarding the __wrapped__ attribute if it exists.
- setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
- setattr(async_wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
-
- return async_wrapper if inspect.iscoroutinefunction(func) else wrapper
-
- return decorator
-
-
-def rewrap_exceptions(
- mapping: dict[
- Type[BaseException] | Callable[[BaseException], bool],
- Type[BaseException] | Callable[[BaseException], BaseException],
- ],
- /,
-):
- def _check_error(error):
- nonlocal mapping
-
- for check, transform in mapping.items():
- should_catch = (
- isinstance(error, check) if isinstance(check, type) else check(error)
- )
-
- if should_catch:
- new_error = (
- transform(str(error))
- if isinstance(transform, type)
- else transform(error)
- )
-
- setattr(new_error, "__cause__", error)
-
- raise new_error from error
-
- def decorator(func: Callable[P, T | Awaitable[T]]):
- @wraps(func)
- async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
- try:
- result: T = await func(*args, **kwargs)
- except BaseException as error:
- _check_error(error)
- raise
-
- return result
-
- @wraps(func)
- def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
- try:
- result: T = func(*args, **kwargs)
- except BaseException as error:
- _check_error(error)
- raise
-
- return result
-
-        # Set the wrapped function as an attribute of the wrapper,
-        # forwarding the __wrapped__ attribute if it exists.
- setattr(wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
- setattr(async_wrapper, "__wrapped__", getattr(func, "__wrapped__", func))
-
- return async_wrapper if inspect.iscoroutinefunction(func) else wrapper
-
- return decorator
-
-
-def run_concurrently(
- fns: list[Callable[..., Any]],
- *,
- args_list: list[tuple] = [],
- kwargs_list: list[dict] = [],
-) -> list[Any]:
- args_list = args_list or [tuple()] * len(fns)
- kwargs_list = kwargs_list or [dict()] * len(fns)
-
- with concurrent.futures.ThreadPoolExecutor() as executor:
- futures = [
- executor.submit(fn, *args, **kwargs)
- for fn, args, kwargs in zip(fns, args_list, kwargs_list)
- ]
-
- return [future.result() for future in concurrent.futures.as_completed(futures)]
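
Two usage notes on the helpers deleted above. First, `run_concurrently`
collects results with `as_completed()`, so they come back in completion
order, not in the order of `fns`. Second, a minimal, self-contained sketch
(not from this patch) of the `rewrap_exceptions` contract, using the
predicate/factory form that the user modules above rely on; it assumes the
decorator is importable from the old `models/utils.py`:

    from fastapi import HTTPException

    @rewrap_exceptions({
        # key: predicate deciding whether to catch; value: factory for
        # the replacement exception, chained to the original via __cause__.
        lambda e: isinstance(e, KeyError): lambda e: HTTPException(
            status_code=404, detail=f"missing key: {e}"
        ),
    })
    def lookup(d: dict, key: str) -> str:
        return d[key]

    lookup({}, "name")  # raises HTTPException(404), chained to the KeyError
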
diff --git a/agents-api/agents_api/queries/__init__.py b/agents-api/agents_api/queries/__init__.py
new file mode 100644
index 000000000..eabb352e5
--- /dev/null
+++ b/agents-api/agents_api/queries/__init__.py
@@ -0,0 +1,21 @@
+"""
+The `queries` module of the agents API is designed to encapsulate all data interactions with the PostgreSQL database. It provides a structured way to perform CRUD (Create, Read, Update, Delete) operations and other specific data manipulations across various entities such as agents, documents, entries, sessions, tools, and users.
+
+Each sub-module within this module corresponds to a specific entity and contains functions and classes that implement SQL queries for interacting with the database. These interactions include creating new records, updating existing ones, retrieving data for specific conditions, and deleting records. The operations are crucial for the functionality of the agents API, enabling it to manage and process data effectively for each entity.
+
+This module also integrates with the `common` module for exception handling and utility functions, ensuring robust error management and providing reusable components for data processing and query construction.
+"""
+
+# ruff: noqa: F401, F403, F405
+
+from . import agents as agents
+from . import developers as developers
+from . import docs as docs
+from . import entries as entries
+from . import executions as executions
+from . import files as files
+from . import sessions as sessions
+from . import tasks as tasks
+from . import tools as tools
+from . import users as users
+
diff --git a/agents-api/agents_api/queries/developers/get_developer.py b/agents-api/agents_api/queries/developers/get_developer.py
index b164bad81..a02a8f914 100644
--- a/agents-api/agents_api/queries/developers/get_developer.py
+++ b/agents-api/agents_api/queries/developers/get_developer.py
@@ -1,4 +1,6 @@
-"""Module for retrieving document snippets from the CozoDB based on document IDs."""
+"""
+Module for retrieving developer information from the PostgreSQL database.
+"""
from uuid import UUID
diff --git a/agents-api/agents_api/queries/tools/create_tools.py b/agents-api/agents_api/queries/tools/create_tools.py
index 70277ab99..4f47ee099 100644
--- a/agents-api/agents_api/queries/tools/create_tools.py
+++ b/agents-api/agents_api/queries/tools/create_tools.py
@@ -1,4 +1,4 @@
-"""This module contains functions for creating tools in the CozoDB database."""
+"""This module contains functions for creating tools in the PostgreSQL database."""
from typing import Any
from uuid import UUID
@@ -78,9 +78,10 @@ async def create_tools(
ignore_existing: bool = False, # TODO: what to do with this flag?
) -> tuple[str, list, str]:
"""
- Constructs a datalog query for inserting tool records into the 'agent_functions' relation in the CozoDB.
+ Constructs an SQL query for inserting tool records into the 'tools' relation in the PostgreSQL database.
Parameters:
+ developer_id (UUID): The unique identifier for the developer.
agent_id (UUID): The unique identifier for the agent.
data (list[CreateToolRequest]): A list of function definitions to be inserted.
diff --git a/agents-api/agents_api/queries/tools/patch_tool.py b/agents-api/agents_api/queries/tools/patch_tool.py
index b65eca481..c41d89b4e 100644
--- a/agents-api/agents_api/queries/tools/patch_tool.py
+++ b/agents-api/agents_api/queries/tools/patch_tool.py
@@ -50,8 +50,7 @@ async def patch_tool(
*, developer_id: UUID, agent_id: UUID, tool_id: UUID, data: PatchToolRequest
) -> tuple[str, list]:
"""
- Execute the datalog query and return the results as a DataFrame
- Updates the tool information for a given agent and tool ID in the 'cozodb' database.
+    Updates the tool information for a given agent and tool ID in the PostgreSQL database.
Parameters:
agent_id (UUID): The unique identifier of the agent.
diff --git a/agents-api/agents_api/worker/worker.py b/agents-api/agents_api/worker/worker.py
index 39eff2b54..c88fdb72b 100644
--- a/agents-api/agents_api/worker/worker.py
+++ b/agents-api/agents_api/worker/worker.py
@@ -21,7 +21,6 @@ def create_worker(client: Client) -> Any:
from ..activities import task_steps
from ..activities.demo import demo_activity
- from ..activities.embed_docs import embed_docs
from ..activities.excecute_api_call import execute_api_call
from ..activities.execute_integration import execute_integration
from ..activities.execute_system import execute_system
@@ -35,7 +34,6 @@ def create_worker(client: Client) -> Any:
temporal_task_queue,
)
from ..workflows.demo import DemoWorkflow
- from ..workflows.embed_docs import EmbedDocsWorkflow
from ..workflows.mem_mgmt import MemMgmtWorkflow
from ..workflows.mem_rating import MemRatingWorkflow
from ..workflows.summarization import SummarizationWorkflow
@@ -54,14 +52,12 @@ def create_worker(client: Client) -> Any:
SummarizationWorkflow,
MemMgmtWorkflow,
MemRatingWorkflow,
- EmbedDocsWorkflow,
TaskExecutionWorkflow,
TruncationWorkflow,
],
activities=[
*task_activities,
demo_activity,
- embed_docs,
execute_integration,
execute_system,
execute_api_call,
diff --git a/agents-api/agents_api/workflows/embed_docs.py b/agents-api/agents_api/workflows/embed_docs.py
deleted file mode 100644
index 9e7b43d79..000000000
--- a/agents-api/agents_api/workflows/embed_docs.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python3
-
-
-from datetime import timedelta
-
-from temporalio import workflow
-
-with workflow.unsafe.imports_passed_through():
- from ..activities.embed_docs import embed_docs
- from ..activities.types import EmbedDocsPayload
- from ..common.retry_policies import DEFAULT_RETRY_POLICY
- from ..env import temporal_heartbeat_timeout, temporal_schedule_to_close_timeout
-
-
-@workflow.defn
-class EmbedDocsWorkflow:
- @workflow.run
- async def run(self, embed_payload: EmbedDocsPayload) -> None:
- await workflow.execute_activity(
- embed_docs,
- embed_payload,
- schedule_to_close_timeout=timedelta(
- seconds=temporal_schedule_to_close_timeout
- ),
- retry_policy=DEFAULT_RETRY_POLICY,
- heartbeat_timeout=timedelta(seconds=temporal_heartbeat_timeout),
- )
From c53319e710a9ecefae7fdac2b323765eee07fb48 Mon Sep 17 00:00:00 2001
From: Ahmad-mtos