docs: update usage of claude-3-5-sonnet-20240620 to claude-3-5-sonnet-20241022 #2289

Open · wants to merge 1 commit into base: main
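Every hunk below makes the same one-line substitution: wherever the docs construct a ChatAnthropic model with the older claude-3-5-sonnet-20240620 snapshot, the identifier is bumped to claude-3-5-sonnet-20241022. As a quick smoke test of the new identifier, a minimal sketch (assuming langchain-anthropic is installed and ANTHROPIC_API_KEY is set):

```python
from langchain_anthropic import ChatAnthropic

# The newer model snapshot that every file in this diff switches to.
llm = ChatAnthropic(model="claude-3-5-sonnet-20241022", temperature=0)

print(llm.invoke("Say hello in one short sentence.").content)
```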
2 changes: 1 addition & 1 deletion README.md
@@ -91,7 +91,7 @@ tools = [search]

tool_node = ToolNode(tools)

model = ChatAnthropic(model="claude-3-5-sonnet-20240620", temperature=0).bind_tools(tools)
model = ChatAnthropic(model="claude-3-5-sonnet-20241022", temperature=0).bind_tools(tools)

# Define the function that determines whether to continue or not
def should_continue(state: MessagesState) -> Literal["tools", END]:
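For context, the README snippet around this hunk wires the model into a tool-calling loop, and the should_continue router visible at the bottom of the hunk follows the standard LangGraph pattern. A sketch of that router (reconstructed from the usual pattern, not copied verbatim from the README):

```python
from typing import Literal

from langgraph.graph import END, MessagesState


def should_continue(state: MessagesState) -> Literal["tools", END]:
    # Route to the "tools" node when the last AI message requested a tool
    # call; otherwise finish the run.
    last_message = state["messages"][-1]
    if last_message.tool_calls:
        return "tools"
    return END
```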
4 changes: 2 additions & 2 deletions docs/docs/cloud/quick_start.md
@@ -61,7 +61,7 @@ First, let's create all of the necessary files for our LangGraph application
from langchain_community.tools.tavily_search import TavilySearchResults
from langgraph.prebuilt import create_react_agent

model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
model = ChatAnthropic(model="claude-3-5-sonnet-20241022")

tools = [TavilySearchResults(max_results=2)]

@@ -78,7 +78,7 @@ First, let's create all of the necessary files for our LangGraph application
import { createReactAgent } from "@langchain/langgraph/prebuilt";

const model = new ChatAnthropic({
model: "claude-3-5-sonnet-20240620",
model: "claude-3-5-sonnet-20241022",
});

const tools = [
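The quick-start snippet builds a prebuilt ReAct agent around the model; end to end, the Python variant amounts to roughly the following sketch (assuming ANTHROPIC_API_KEY and TAVILY_API_KEY are set in the environment):

```python
from langchain_anthropic import ChatAnthropic
from langchain_community.tools.tavily_search import TavilySearchResults
from langgraph.prebuilt import create_react_agent

model = ChatAnthropic(model="claude-3-5-sonnet-20241022")
tools = [TavilySearchResults(max_results=2)]

# create_react_agent compiles a ready-to-run tool-calling graph.
graph = create_react_agent(model, tools)

result = graph.invoke({"messages": [("user", "What is LangGraph?")]})
print(result["messages"][-1].content)
```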
2 changes: 1 addition & 1 deletion docs/docs/how-tos/cross-thread-persistence.ipynb
@@ -155,7 +155,7 @@
"from langgraph.store.base import BaseStore\n",
"\n",
"\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"\n",
"\n",
"# NOTE: we're passing the Store param to the node --\n",
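The truncated comment above refers to LangGraph's store injection: a node that declares a store parameter receives the store the graph was compiled with, which is what lets memories persist across threads. A rough sketch of that node shape (the user_id config key and the namespace are assumptions for illustration, not the notebook's exact code):

```python
from langchain_anthropic import ChatAnthropic
from langchain_core.runnables import RunnableConfig
from langgraph.graph import MessagesState
from langgraph.store.base import BaseStore

model = ChatAnthropic(model="claude-3-5-sonnet-20241022")


def call_model(state: MessagesState, config: RunnableConfig, *, store: BaseStore):
    # LangGraph injects the compiled graph's store because the parameter is
    # declared; that is what makes the memory shared across threads.
    user_id = config["configurable"]["user_id"]  # assumed config key
    memories = store.search(("memories", user_id))  # assumed namespace
    # ...fold `memories` into the prompt here...
    return {"messages": [model.invoke(state["messages"])]}
```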
2 changes: 1 addition & 1 deletion docs/docs/how-tos/human_in_the_loop/breakpoints.ipynb
@@ -288,7 +288,7 @@
"\n",
"# Set up the model\n",
"\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"model = model.bind_tools(tools)\n",
"\n",
"\n",
2 changes: 1 addition & 1 deletion docs/docs/how-tos/human_in_the_loop/edit-graph-state.ipynb
@@ -297,7 +297,7 @@
"\n",
"# Set up the model\n",
"\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"model = model.bind_tools(tools)\n",
"\n",
"\n",
@@ -136,7 +136,7 @@
" return \"Sunny!\"\n",
"\n",
"\n",
"model = ChatAnthropic(model_name=\"claude-3-5-sonnet-20240620\").bind_tools(\n",
"model = ChatAnthropic(model_name=\"claude-3-5-sonnet-20241022\").bind_tools(\n",
" [weather_search]\n",
")\n",
"\n",
2 changes: 1 addition & 1 deletion docs/docs/how-tos/human_in_the_loop/wait-user-input.ipynb
@@ -377,7 +377,7 @@
"from langchain_anthropic import ChatAnthropic\n",
"from langchain_openai import ChatOpenAI\n",
"\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"model = ChatOpenAI(model=\"gpt-4o\")\n",
"\n",
"from pydantic import BaseModel\n",
2 changes: 1 addition & 1 deletion docs/docs/how-tos/map-reduce.ipynb
@@ -166,7 +166,7 @@
" id: int = Field(description=\"Index of the best joke, starting with 0\", ge=0)\n",
"\n",
"\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"\n",
"# Graph components: define the components that will make up the graph\n",
"\n",
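The Field shown in this hunk belongs to a structured-output schema the notebook uses so the model picks the best joke by index. The surrounding setup is roughly as follows (the BestJoke class name here is an assumption for illustration):

```python
from langchain_anthropic import ChatAnthropic
from pydantic import BaseModel, Field


class BestJoke(BaseModel):  # assumed class name
    id: int = Field(description="Index of the best joke, starting with 0", ge=0)


model = ChatAnthropic(model="claude-3-5-sonnet-20241022")
# Constrain the model's answer to the schema above.
best_joke_picker = model.with_structured_output(BestJoke)
```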
2 changes: 1 addition & 1 deletion docs/docs/how-tos/persistence.ipynb
@@ -142,7 +142,7 @@
"source": [
"from langchain_anthropic import ChatAnthropic\n",
"\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")"
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")"
]
},
{
2 changes: 1 addition & 1 deletion docs/docs/how-tos/tool-calling-errors.ipynb
@@ -414,7 +414,7 @@
"model = ChatAnthropic(model=\"claude-3-haiku-20240307\", temperature=0)\n",
"model_with_tools = model.bind_tools([master_haiku_generator])\n",
"\n",
"better_model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\", temperature=0)\n",
"better_model = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\", temperature=0)\n",
"better_model_with_tools = better_model.bind_tools([master_haiku_generator])\n",
"\n",
"\n",
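This hunk sets up a cheaper Haiku model plus a stronger Sonnet model bound to the same tool. One way to express that retry relationship is with a runnable fallback, sketched below; the notebook itself may wire the retry differently, and master_haiku_generator is stubbed out here so the snippet stands alone:

```python
from langchain_anthropic import ChatAnthropic
from langchain_core.tools import tool


@tool
def master_haiku_generator(topic: str) -> str:
    """Stub for the notebook's haiku tool."""
    return f"A haiku about {topic}."


model = ChatAnthropic(model="claude-3-haiku-20240307", temperature=0)
better_model = ChatAnthropic(model="claude-3-5-sonnet-20241022", temperature=0)

# If the Haiku call raises an error, retry the same request with Sonnet.
model_with_fallback = model.bind_tools([master_haiku_generator]).with_fallbacks(
    [better_model.bind_tools([master_haiku_generator])]
)
```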
28 changes: 14 additions & 14 deletions docs/docs/tutorials/introduction.ipynb
@@ -154,7 +154,7 @@
"source": [
"from langchain_anthropic import ChatAnthropic\n",
"\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"\n",
"\n",
"def chatbot(state: State):\n",
@@ -340,7 +340,7 @@
"graph_builder = StateGraph(State)\n",
"\n",
"\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"\n",
"\n",
"def chatbot(state: State):\n",
@@ -475,7 +475,7 @@
"graph_builder = StateGraph(State)\n",
"\n",
"\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"# Modification: tell the LLM which tools it can call\n",
"llm_with_tools = llm.bind_tools(tools)\n",
"\n",
@@ -753,7 +753,7 @@
"\n",
"tool = TavilySearchResults(max_results=2)\n",
"tools = [tool]\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"llm_with_tools = llm.bind_tools(tools)\n",
"\n",
"\n",
@@ -846,7 +846,7 @@
"\n",
"tool = TavilySearchResults(max_results=2)\n",
"tools = [tool]\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"llm_with_tools = llm.bind_tools(tools)\n",
"\n",
"\n",
@@ -1076,7 +1076,7 @@
{
"data": {
"text/plain": [
"StateSnapshot(values={'messages': [HumanMessage(content='Hi there! My name is Will.', additional_kwargs={}, response_metadata={}, id='8c1ca919-c553-4ebf-95d4-b59a2d61e078'), AIMessage(content=\"Hello Will! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to know or discuss?\", additional_kwargs={}, response_metadata={'id': 'msg_01WTQebPhNwmMrmmWojJ9KXJ', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 405, 'output_tokens': 32}}, id='run-58587b77-8c82-41e6-8a90-d62c444a261d-0', usage_metadata={'input_tokens': 405, 'output_tokens': 32, 'total_tokens': 437}), HumanMessage(content='Remember my name?', additional_kwargs={}, response_metadata={}, id='daba7df6-ad75-4d6b-8057-745881cea1ca'), AIMessage(content=\"Of course, I remember your name, Will. I always try to pay attention to important details that users share with me. Is there anything else you'd like to talk about or any questions you have? I'm here to help with a wide range of topics or tasks.\", additional_kwargs={}, response_metadata={'id': 'msg_01E41KitY74HpENRgXx94vag', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 444, 'output_tokens': 58}}, id='run-ffeaae5c-4d2d-4ddb-bd59-5d5cbf2a5af8-0', usage_metadata={'input_tokens': 444, 'output_tokens': 58, 'total_tokens': 502})]}, next=(), config={'configurable': {'thread_id': '1', 'checkpoint_ns': '', 'checkpoint_id': '1ef7d06e-93e0-6acc-8004-f2ac846575d2'}}, metadata={'source': 'loop', 'writes': {'chatbot': {'messages': [AIMessage(content=\"Of course, I remember your name, Will. I always try to pay attention to important details that users share with me. Is there anything else you'd like to talk about or any questions you have? I'm here to help with a wide range of topics or tasks.\", additional_kwargs={}, response_metadata={'id': 'msg_01E41KitY74HpENRgXx94vag', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 444, 'output_tokens': 58}}, id='run-ffeaae5c-4d2d-4ddb-bd59-5d5cbf2a5af8-0', usage_metadata={'input_tokens': 444, 'output_tokens': 58, 'total_tokens': 502})]}}, 'step': 4, 'parents': {}}, created_at='2024-09-27T19:30:10.820758+00:00', parent_config={'configurable': {'thread_id': '1', 'checkpoint_ns': '', 'checkpoint_id': '1ef7d06e-859f-6206-8003-e1bd3c264b8f'}}, tasks=())"
"StateSnapshot(values={'messages': [HumanMessage(content='Hi there! My name is Will.', additional_kwargs={}, response_metadata={}, id='8c1ca919-c553-4ebf-95d4-b59a2d61e078'), AIMessage(content=\"Hello Will! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to know or discuss?\", additional_kwargs={}, response_metadata={'id': 'msg_01WTQebPhNwmMrmmWojJ9KXJ', 'model': 'claude-3-5-sonnet-20241022', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 405, 'output_tokens': 32}}, id='run-58587b77-8c82-41e6-8a90-d62c444a261d-0', usage_metadata={'input_tokens': 405, 'output_tokens': 32, 'total_tokens': 437}), HumanMessage(content='Remember my name?', additional_kwargs={}, response_metadata={}, id='daba7df6-ad75-4d6b-8057-745881cea1ca'), AIMessage(content=\"Of course, I remember your name, Will. I always try to pay attention to important details that users share with me. Is there anything else you'd like to talk about or any questions you have? I'm here to help with a wide range of topics or tasks.\", additional_kwargs={}, response_metadata={'id': 'msg_01E41KitY74HpENRgXx94vag', 'model': 'claude-3-5-sonnet-20241022', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 444, 'output_tokens': 58}}, id='run-ffeaae5c-4d2d-4ddb-bd59-5d5cbf2a5af8-0', usage_metadata={'input_tokens': 444, 'output_tokens': 58, 'total_tokens': 502})]}, next=(), config={'configurable': {'thread_id': '1', 'checkpoint_ns': '', 'checkpoint_id': '1ef7d06e-93e0-6acc-8004-f2ac846575d2'}}, metadata={'source': 'loop', 'writes': {'chatbot': {'messages': [AIMessage(content=\"Of course, I remember your name, Will. I always try to pay attention to important details that users share with me. Is there anything else you'd like to talk about or any questions you have? I'm here to help with a wide range of topics or tasks.\", additional_kwargs={}, response_metadata={'id': 'msg_01E41KitY74HpENRgXx94vag', 'model': 'claude-3-5-sonnet-20241022', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 444, 'output_tokens': 58}}, id='run-ffeaae5c-4d2d-4ddb-bd59-5d5cbf2a5af8-0', usage_metadata={'input_tokens': 444, 'output_tokens': 58, 'total_tokens': 502})]}}, 'step': 4, 'parents': {}}, created_at='2024-09-27T19:30:10.820758+00:00', parent_config={'configurable': {'thread_id': '1', 'checkpoint_ns': '', 'checkpoint_id': '1ef7d06e-859f-6206-8003-e1bd3c264b8f'}}, tasks=())"
]
},
"execution_count": 26,
@@ -1150,7 +1150,7 @@
"\n",
"tool = TavilySearchResults(max_results=2)\n",
"tools = [tool]\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"llm_with_tools = llm.bind_tools(tools)\n",
"\n",
"\n",
@@ -1220,7 +1220,7 @@
"\n",
"tool = TavilySearchResults(max_results=2)\n",
"tools = [tool]\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"llm_with_tools = llm.bind_tools(tools)\n",
"\n",
"\n",
@@ -1471,7 +1471,7 @@
"\n",
"tool = TavilySearchResults(max_results=2)\n",
"tools = [tool]\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"llm_with_tools = llm.bind_tools(tools)\n",
"\n",
"\n",
@@ -1546,7 +1546,7 @@
"\n",
"tool = TavilySearchResults(max_results=2)\n",
"tools = [tool]\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"llm_with_tools = llm.bind_tools(tools)\n",
"\n",
"\n",
@@ -2135,7 +2135,7 @@
"source": [
"tool = TavilySearchResults(max_results=2)\n",
"tools = [tool]\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"# We can bind the llm to a tool definition, a pydantic model, or a json schema\n",
"llm_with_tools = llm.bind_tools(tools + [RequestAssistance])\n",
"\n",
@@ -2445,7 +2445,7 @@
"data": {
"text/plain": [
"[HumanMessage(content='I need some expert guidance for building this AI agent. Could you request assistance for me?', additional_kwargs={}, response_metadata={}, id='3f28f959-9ab7-489a-9c58-7ed1b49cedf3'),\n",
" AIMessage(content=[{'text': \"Certainly! I understand that you need expert guidance for building an AI agent. I'll use the RequestAssistance function to escalate your request to an expert who can provide you with the specialized knowledge and support you need. Let me do that for you right away.\", 'type': 'text'}, {'id': 'toolu_01Mo3N2c1byuSZwT1vyJWRia', 'input': {'request': 'The user needs expert guidance for building an AI agent. They require specialized knowledge and support in AI development and implementation.'}, 'name': 'RequestAssistance', 'type': 'tool_use'}], additional_kwargs={}, response_metadata={'id': 'msg_01VRnZvVbgsVRbQaQuvsziDx', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 516, 'output_tokens': 130}}, id='run-4e3f7906-5887-40d9-9267-5beefe7b3b76-0', tool_calls=[{'name': 'RequestAssistance', 'args': {'request': 'The user needs expert guidance for building an AI agent. They require specialized knowledge and support in AI development and implementation.'}, 'id': 'toolu_01Mo3N2c1byuSZwT1vyJWRia', 'type': 'tool_call'}], usage_metadata={'input_tokens': 516, 'output_tokens': 130, 'total_tokens': 646}),\n",
" AIMessage(content=[{'text': \"Certainly! I understand that you need expert guidance for building an AI agent. I'll use the RequestAssistance function to escalate your request to an expert who can provide you with the specialized knowledge and support you need. Let me do that for you right away.\", 'type': 'text'}, {'id': 'toolu_01Mo3N2c1byuSZwT1vyJWRia', 'input': {'request': 'The user needs expert guidance for building an AI agent. They require specialized knowledge and support in AI development and implementation.'}, 'name': 'RequestAssistance', 'type': 'tool_use'}], additional_kwargs={}, response_metadata={'id': 'msg_01VRnZvVbgsVRbQaQuvsziDx', 'model': 'claude-3-5-sonnet-20241022', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 516, 'output_tokens': 130}}, id='run-4e3f7906-5887-40d9-9267-5beefe7b3b76-0', tool_calls=[{'name': 'RequestAssistance', 'args': {'request': 'The user needs expert guidance for building an AI agent. They require specialized knowledge and support in AI development and implementation.'}, 'id': 'toolu_01Mo3N2c1byuSZwT1vyJWRia', 'type': 'tool_call'}], usage_metadata={'input_tokens': 516, 'output_tokens': 130, 'total_tokens': 646}),\n",
" ToolMessage(content=\"We, the experts are here to help! We'd recommend you check out LangGraph to build your agent. It's much more reliable and extensible than simple autonomous agents.\", id='8583b899-d898-4051-9f36-f5e5d11e9a37', tool_call_id='toolu_01Mo3N2c1byuSZwT1vyJWRia')]"
]
},
@@ -2565,7 +2565,7 @@
"\n",
"tool = TavilySearchResults(max_results=2)\n",
"tools = [tool]\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"# We can bind the llm to a tool definition, a pydantic model, or a json schema\n",
"llm_with_tools = llm.bind_tools(tools + [RequestAssistance])\n",
"\n",
@@ -2697,7 +2697,7 @@
"\n",
"tool = TavilySearchResults(max_results=2)\n",
"tools = [tool]\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"# We can bind the llm to a tool definition, a pydantic model, or a json schema\n",
"llm_with_tools = llm.bind_tools(tools + [RequestAssistance])\n",
"\n",
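Several of the introduction hunks repeat the comment about binding the LLM to "a tool definition, a pydantic model, or a json schema". Concretely, that means mixing plain tools and Pydantic classes in a single bind_tools call; the sketch below mirrors the tutorial's RequestAssistance escalation schema (the docstring and field are paraphrased, not exact):

```python
from langchain_anthropic import ChatAnthropic
from langchain_community.tools.tavily_search import TavilySearchResults
from pydantic import BaseModel


class RequestAssistance(BaseModel):
    """Escalate the conversation to a human expert."""

    request: str


tool = TavilySearchResults(max_results=2)
llm = ChatAnthropic(model="claude-3-5-sonnet-20241022")

# The model can now either call the search tool or emit a structured
# RequestAssistance payload for a human-in-the-loop node to act on.
llm_with_tools = llm.bind_tools([tool, RequestAssistance])
```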
2 changes: 1 addition & 1 deletion docs/docs/tutorials/reflexion/reflexion.ipynb
@@ -95,7 +95,7 @@
"source": [
"from langchain_anthropic import ChatAnthropic\n",
"\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"llm = ChatAnthropic(model=\"claude-3-5-sonnet-20241022\")\n",
"# You could also use OpenAI or another provider\n",
"# from langchain_openai import ChatOpenAI\n",
"\n",
2 changes: 1 addition & 1 deletion libs/cli/js-examples/src/agent/graph.ts
@@ -29,7 +29,7 @@ const callModel = async (
* ```ts
* import { ChatAnthropic } from "@langchain/anthropic";
* const model = new ChatAnthropic({
* model: "claude-3-5-sonnet-20240620",
* model: "claude-3-5-sonnet-20241022",
* apiKey: process.env.ANTHROPIC_API_KEY,
* });
* const res = await model.invoke(state.messages);
2 changes: 1 addition & 1 deletion libs/langgraph/README.md
@@ -91,7 +91,7 @@ tools = [search]

tool_node = ToolNode(tools)

model = ChatAnthropic(model="claude-3-5-sonnet-20240620", temperature=0).bind_tools(tools)
model = ChatAnthropic(model="claude-3-5-sonnet-20241022", temperature=0).bind_tools(tools)

# Define the function that determines whether to continue or not
def should_continue(state: MessagesState) -> Literal["tools", END]: