diff --git a/libs/langchain-anthropic/src/tests/chat_models.int.test.ts b/libs/langchain-anthropic/src/tests/chat_models.int.test.ts
index a8fa91d6f48f..20618a14139c 100644
--- a/libs/langchain-anthropic/src/tests/chat_models.int.test.ts
+++ b/libs/langchain-anthropic/src/tests/chat_models.int.test.ts
@@ -2,6 +2,7 @@
 /* eslint-disable @typescript-eslint/no-explicit-any */
 
 import { expect, test } from "@jest/globals";
+import * as fs from "fs/promises";
 import {
   AIMessageChunk,
   HumanMessage,
@@ -834,3 +835,36 @@ test("human message caching", async () => {
     0
   );
 });
+
+test("Can accept PDF documents", async () => {
+  const model = new ChatAnthropic({
+    model: "claude-3-5-sonnet-latest",
+  });
+
+  const pdfPath =
+    "../langchain-community/src/document_loaders/tests/example_data/Jacob_Lee_Resume_2023.pdf";
+  const pdfBase64 = await fs.readFile(pdfPath, "base64");
+
+  const response = await model.invoke([
+    ["system", "Use the provided documents to answer the question"],
+    [
+      "user",
+      [
+        {
+          type: "document",
+          source: {
+            type: "base64",
+            media_type: "application/pdf",
+            data: pdfBase64,
+          },
+        },
+        {
+          type: "text",
+          text: "Summarize the contents of this PDF",
+        },
+      ],
+    ],
+  ]);
+
+  expect(response.content.length).toBeGreaterThan(10);
+});