From 5a04e7050d3bf40a73657419ca56c8e7bb426dab Mon Sep 17 00:00:00 2001
From: Dev-Khant
Date: Tue, 22 Oct 2024 11:41:21 +0530
Subject: [PATCH] Remove unnecessary tools

---
 mem0/llms/utils/tools.py  | 57 ----------------------------------------------
 tests/llms/test_ollama.py | 34 ----------------------------------
 2 files changed, 91 deletions(-)
 delete mode 100644 mem0/llms/utils/tools.py

diff --git a/mem0/llms/utils/tools.py b/mem0/llms/utils/tools.py
deleted file mode 100644
index 6857294f40..0000000000
--- a/mem0/llms/utils/tools.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# TODO: Remove these tools if no issues are found for new memory addition logic
-
-ADD_MEMORY_TOOL = {
-    "type": "function",
-    "function": {
-        "name": "add_memory",
-        "description": "Add a memory",
-        "parameters": {
-            "type": "object",
-            "properties": {"data": {"type": "string", "description": "Data to add to memory"}},
-            "required": ["data"],
-            "additionalProperties": False,
-        },
-    },
-}
-
-UPDATE_MEMORY_TOOL = {
-    "type": "function",
-    "function": {
-        "name": "update_memory",
-        "description": "Update memory provided ID and data",
-        "parameters": {
-            "type": "object",
-            "properties": {
-                "memory_id": {
-                    "type": "string",
-                    "description": "memory_id of the memory to update",
-                },
-                "data": {
-                    "type": "string",
-                    "description": "Updated data for the memory",
-                },
-            },
-            "required": ["memory_id", "data"],
-            "additionalProperties": False,
-        },
-    },
-}
-
-DELETE_MEMORY_TOOL = {
-    "type": "function",
-    "function": {
-        "name": "delete_memory",
-        "description": "Delete memory by memory_id",
-        "parameters": {
-            "type": "object",
-            "properties": {
-                "memory_id": {
-                    "type": "string",
-                    "description": "memory_id of the memory to delete",
-                }
-            },
-            "required": ["memory_id"],
-            "additionalProperties": False,
-        },
-    },
-}
diff --git a/tests/llms/test_ollama.py b/tests/llms/test_ollama.py
index f815833515..0b797bfae9 100644
--- a/tests/llms/test_ollama.py
+++ b/tests/llms/test_ollama.py
@@ -4,7 +4,6 @@
 
 from mem0.configs.llms.base import BaseLlmConfig
 from mem0.llms.ollama import OllamaLLM
-from mem0.llms.utils.tools import ADD_MEMORY_TOOL
 
 
 @pytest.fixture
@@ -33,36 +32,3 @@ def test_generate_response_without_tools(mock_ollama_client):
         model="llama3.1:70b", messages=messages, options={"temperature": 0.7, "num_predict": 100, "top_p": 1.0}
     )
     assert response == "I'm doing well, thank you for asking!"
-
-
-def test_generate_response_with_tools(mock_ollama_client):
-    config = BaseLlmConfig(model="llama3.1:70b", temperature=0.7, max_tokens=100, top_p=1.0)
-    llm = OllamaLLM(config)
-    messages = [
-        {"role": "system", "content": "You are a helpful assistant."},
-        {"role": "user", "content": "Add a new memory: Today is a sunny day."},
-    ]
-    tools = [ADD_MEMORY_TOOL]
-
-    mock_response = {
-        "message": {
-            "content": "I've added the memory for you.",
-            "tool_calls": [{"function": {"name": "add_memory", "arguments": {"data": "Today is a sunny day."}}}],
-        }
-    }
-
-    mock_ollama_client.chat.return_value = mock_response
-
-    response = llm.generate_response(messages, tools=tools)
-
-    mock_ollama_client.chat.assert_called_once_with(
-        model="llama3.1:70b",
-        messages=messages,
-        options={"temperature": 0.7, "num_predict": 100, "top_p": 1.0},
-        tools=tools,
-    )
-
-    assert response["content"] == "I've added the memory for you."
-    assert len(response["tool_calls"]) == 1
-    assert response["tool_calls"][0]["name"] == "add_memory"
-    assert response["tool_calls"][0]["arguments"] == {"data": "Today is a sunny day."}