diff --git a/libs/providers/langchain-anthropic/README.md b/libs/providers/langchain-anthropic/README.md
index fd4f0f6b9a58..d54dabbe2572 100644
--- a/libs/providers/langchain-anthropic/README.md
+++ b/libs/providers/langchain-anthropic/README.md
@@ -75,6 +75,65 @@ const response = await model.stream({
 });
 ```
 
+## Tools
+
+This package provides LangChain-compatible wrappers for Anthropic's built-in tools. These tools can be bound to `ChatAnthropic` using `bindTools()`, or passed to any [`ReactAgent`](https://docs.langchain.com/oss/javascript/langchain/agents).
+
+### Memory Tool
+
+The memory tool (`memory_20250818`) enables Claude to store and retrieve information across conversations through a memory file directory. Claude can create, read, update, and delete files that persist between sessions, allowing it to build knowledge over time without keeping everything in the context window.
+
+```typescript
+import { ChatAnthropic, memory_20250818 } from "@langchain/anthropic";
+
+// Create a simple in-memory file store (or use your own persistence layer)
+const files = new Map<string, string>();
+
+const memory = memory_20250818({
+  execute: async (command) => {
+    switch (command.command) {
+      case "view":
+        if (!command.path || command.path === "/") {
+          return Array.from(files.keys()).join("\n") || "Directory is empty.";
+        }
+        return (
+          files.get(command.path) ?? `Error: File not found: ${command.path}`
+        );
+      case "create":
+        files.set(command.path!, command.file_text ?? "");
+        return `Successfully created file: ${command.path}`;
+      case "str_replace":
+        if (files.has(command.path!) && command.old_str) {
+          const current = files.get(command.path!)!;
+          files.set(
+            command.path!,
+            current.replace(command.old_str, command.new_str ?? "")
+          );
+        }
+        return `Successfully replaced text in: ${command.path}`;
+      case "delete":
+        files.delete(command.path!);
+        return `Successfully deleted: ${command.path}`;
+      // Handle other commands: insert, rename
+      default:
+        return `Unknown command`;
+    }
+  },
+});
+
+const llm = new ChatAnthropic({
+  model: "claude-sonnet-4-5-20250929",
+});
+
+const llmWithMemory = llm.bindTools([memory]);
+
+const response = await llmWithMemory.invoke(
+  "Remember that my favorite programming language is TypeScript"
+);
+```
+
+For more information, see [Anthropic's Memory Tool documentation](https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/memory-tool).
+
 ## Development
 
 To develop the Anthropic package, you'll need to follow these instructions:
@@ -103,8 +162,8 @@ Test files should live within a `tests/` file in the `src/` folder. Unit tests s
 end in `.int.test.ts`:
 
 ```bash
-$ pnpm test
-$ pnpm test:int
+pnpm test
+pnpm test:int
 ```
 
 ### Lint & Format
 
@@ -124,5 +183,5 @@ If you add a new file to be exported, either import & re-export from `src/index.
 After running `pnpm build`, publish a new version with:
 
 ```bash
-$ npm publish
+npm publish
 ```
diff --git a/libs/providers/langchain-anthropic/package.json b/libs/providers/langchain-anthropic/package.json
index 19368f02acc5..11e1e6b09d4e 100644
--- a/libs/providers/langchain-anthropic/package.json
+++ b/libs/providers/langchain-anthropic/package.json
@@ -74,6 +74,7 @@
     "vectorstores"
   ],
   "main": "./dist/index.cjs",
+  "module": "./dist/index.js",
   "types": "./dist/index.d.cts",
   "exports": {
     ".": {
@@ -94,6 +95,5 @@
     "CHANGELOG.md",
     "README.md",
     "LICENSE"
-  ],
-  "module": "./dist/index.js"
+  ]
 }
\ No newline at end of file
diff --git a/libs/providers/langchain-anthropic/src/index.ts b/libs/providers/langchain-anthropic/src/index.ts
index 8d6d08229f4b..5f7419559c43 100644
--- a/libs/providers/langchain-anthropic/src/index.ts
+++ b/libs/providers/langchain-anthropic/src/index.ts
@@ -1,3 +1,4 @@
 export * from "./chat_models.js";
 export { convertPromptToAnthropic } from "./utils/prompts.js";
 export { type ChatAnthropicContentBlock } from "./types.js";
+export * from "./tools/index.js";
diff --git a/libs/providers/langchain-anthropic/src/tools/index.ts b/libs/providers/langchain-anthropic/src/tools/index.ts
new file mode 100644
index 000000000000..cbf63775cb92
--- /dev/null
+++ b/libs/providers/langchain-anthropic/src/tools/index.ts
@@ -0,0 +1,2 @@
+export { memory_20250818 } from "./memory.js";
+export type * from "./types.js";
diff --git a/libs/providers/langchain-anthropic/src/tools/memory.ts b/libs/providers/langchain-anthropic/src/tools/memory.ts
new file mode 100644
index 000000000000..796900c61d8b
--- /dev/null
+++ b/libs/providers/langchain-anthropic/src/tools/memory.ts
@@ -0,0 +1,78 @@
+import { tool } from "@langchain/core/tools";
+import type { DynamicStructuredTool, ToolRuntime } from "@langchain/core/tools";
+
+import type { MemoryTool20250818Options } from "./types.js";
+
+/**
+ * Creates an Anthropic memory tool that can be used with ChatAnthropic.
+ *
+ * The memory tool enables Claude to store and retrieve information across conversations
+ * through a memory file directory. Claude can create, read, update, and delete files that
+ * persist between sessions, allowing it to build knowledge over time without keeping
+ * everything in the context window.
+ *
+ * @example
+ * ```typescript
+ * import { ChatAnthropic, memory_20250818 } from "@langchain/anthropic";
+ *
+ * const llm = new ChatAnthropic({
+ *   model: "claude-sonnet-4-5-20250929"
+ * });
+ *
+ * const memory = memory_20250818({
+ *   execute: async (args) => {
+ *     // handle memory command execution
+ *     // ...
+ *   },
+ * });
+ * const llmWithMemory = llm.bindTools([memory]);
+ *
+ * const response = await llmWithMemory.invoke("Remember that I like Python");
+ * ```
+ *
+ * @param options - Configuration options for the memory tool
+ * @param options.execute - Execute function that handles memory command execution and returns the result string.
+ * @returns The memory tool object that can be passed to `bindTools`
+ *
+ * @see https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/memory-tool
+ */
+export function memory_20250818(
+  options?: MemoryTool20250818Options
+): DynamicStructuredTool {
+  const memoryTool = tool(
+    options?.execute as (
+      input: unknown,
+      runtime: ToolRuntime
+    ) => string | Promise<string>,
+    {
+      name: "memory",
+      schema: {
+        type: "object",
+        properties: {
+          command: {
+            type: "string",
+            enum: [
+              "view",
+              "create",
+              "str_replace",
+              "insert",
+              "delete",
+              "rename",
+            ],
+          },
+        },
+        required: ["command"],
+      },
+    }
+  );
+
+  memoryTool.extras = {
+    ...(memoryTool.extras ?? {}),
+    providerToolDefinition: {
+      type: "memory_20250818",
+      name: "memory",
+    },
+  };
+
+  return memoryTool;
+}
diff --git a/libs/providers/langchain-anthropic/src/tools/tests/memory.int.test.ts b/libs/providers/langchain-anthropic/src/tools/tests/memory.int.test.ts
new file mode 100644
index 000000000000..80caf33ed462
--- /dev/null
+++ b/libs/providers/langchain-anthropic/src/tools/tests/memory.int.test.ts
@@ -0,0 +1,477 @@
+import { expect, test, describe } from "vitest";
+import {
+  AIMessage,
+  AIMessageChunk,
+  HumanMessage,
+  ToolMessage,
+} from "@langchain/core/messages";
+import { concat } from "@langchain/core/utils/stream";
+
+import { ChatAnthropic } from "../../chat_models.js";
+import { memory_20250818 } from "../memory.js";
+import type { Memory20250818Command } from "../types.js";
+
+/**
+ * Simple in-memory file system for testing memory tool operations.
+ * Normalizes paths by removing leading slashes to handle both "/file.txt" and "file.txt".
+ */
+class MockMemoryFileSystem {
+  private files: Map<string, string> = new Map();
+
+  private normalizePath(path?: string): string {
+    // Handle undefined/null path - default to root
+    if (!path) {
+      return "";
+    }
+    // Remove leading slash and normalize
+    return path.replace(/^\/+/, "");
+  }
+
+  view(path?: string): string {
+    const normalizedPath = this.normalizePath(path);
+    if (normalizedPath === "" || path === "/" || !path) {
+      const entries = Array.from(this.files.keys());
+      if (entries.length === 0) {
+        return "Directory is empty.";
+      }
+      return entries.join("\n");
+    }
+    const content = this.files.get(normalizedPath);
+    if (content === undefined) {
+      return `Error: File not found: ${path}`;
+    }
+    return content;
+  }
+
+  create(path?: string, fileText?: string): string {
+    if (!path) {
+      return "Error: Path is required for create command";
+    }
+    const normalizedPath = this.normalizePath(path);
+    if (this.files.has(normalizedPath)) {
+      return `Error: File already exists: ${path}`;
+    }
+    this.files.set(normalizedPath, fileText ?? "");
+    return `Successfully created file: ${path}`;
+  }
+
+  strReplace(path?: string, oldStr?: string, newStr?: string): string {
+    if (!path) {
+      return "Error: Path is required for str_replace command";
+    }
+    const normalizedPath = this.normalizePath(path);
+    const content = this.files.get(normalizedPath);
+    if (content === undefined) {
+      return `Error: File not found: ${path}`;
+    }
+    if (!oldStr || !content.includes(oldStr)) {
+      return `Error: String not found in file: ${oldStr}`;
+    }
+    this.files.set(normalizedPath, content.replace(oldStr, newStr ?? ""));
"")); + return `Successfully replaced text in: ${path}`; + } + + insert(path?: string, insertLine?: number, insertText?: string): string { + if (!path) { + return "Error: Path is required for insert command"; + } + const normalizedPath = this.normalizePath(path); + const content = this.files.get(normalizedPath); + if (content === undefined) { + return `Error: File not found: ${path}`; + } + const lines = content.split("\n"); + lines.splice(insertLine ?? 0, 0, insertText ?? ""); + this.files.set(normalizedPath, lines.join("\n")); + return `Successfully inserted text at line ${insertLine ?? 0} in: ${path}`; + } + + delete(path?: string): string { + if (!path) { + return "Error: Path is required for delete command"; + } + const normalizedPath = this.normalizePath(path); + if (!this.files.has(normalizedPath)) { + return `Error: File not found: ${path}`; + } + this.files.delete(normalizedPath); + return `Successfully deleted: ${path}`; + } + + rename(oldPath?: string, newPath?: string): string { + if (!oldPath || !newPath) { + return "Error: Both old_path and new_path are required for rename command"; + } + const normalizedOldPath = this.normalizePath(oldPath); + const normalizedNewPath = this.normalizePath(newPath); + const content = this.files.get(normalizedOldPath); + if (content === undefined) { + return `Error: File not found: ${oldPath}`; + } + if (this.files.has(normalizedNewPath)) { + return `Error: Destination already exists: ${newPath}`; + } + this.files.delete(normalizedOldPath); + this.files.set(normalizedNewPath, content); + return `Successfully renamed ${oldPath} to ${newPath}`; + } + + /** Check if any file contains the given text */ + hasFileContaining(text: string): boolean { + for (const content of this.files.values()) { + if (content.includes(text)) { + return true; + } + } + return false; + } + + /** Check if any file with the given name fragment exists */ + hasFileNamed(nameFragment: string): boolean { + for (const path of this.files.keys()) { + if (path.includes(nameFragment)) { + return true; + } + } + return false; + } + + /** Get all file contents for debugging */ + getAllFiles(): Record { + return Object.fromEntries(this.files); + } + + executeCommand(command: Memory20250818Command): string { + switch (command.command) { + case "view": + return this.view(command.path); + case "create": + return this.create(command.path, command.file_text); + case "str_replace": + return this.strReplace(command.path, command.old_str, command.new_str); + case "insert": + return this.insert( + command.path, + command.insert_line, + command.insert_text + ); + case "delete": + return this.delete(command.path); + case "rename": + return this.rename(command.old_path, command.new_path); + default: + return `Error: Unknown command: ${JSON.stringify(command)}`; + } + } +} + +const createModel = () => + new ChatAnthropic({ + model: "claude-sonnet-4-5-20250929", + }); + +describe("Anthropic Memory Tool Integration Tests", () => { + test("memory_20250818 creates a valid tool with correct providerToolDefinition", () => { + const memoryFs = new MockMemoryFileSystem(); + const memory = memory_20250818({ + execute: async (action) => memoryFs.executeCommand(action), + }); + + expect(memory.name).toBe("memory"); + expect(memory.extras?.providerToolDefinition).toEqual({ + type: "memory_20250818", + name: "memory", + }); + }); + + test("memory tool can be bound to ChatAnthropic and triggers tool call", async () => { + const memoryFs = new MockMemoryFileSystem(); + const memory = memory_20250818({ + 
+      execute: async (action) => memoryFs.executeCommand(action),
+    });
+
+    const llm = createModel();
+    const llmWithMemory = llm.bindTools([memory]);
+
+    const response = await llmWithMemory.invoke(
+      "Please remember that my favorite programming language is TypeScript. Store this in a file called preferences.md"
+    );
+
+    expect(response).toBeInstanceOf(AIMessage);
+    expect(response.tool_calls).toBeDefined();
+    expect(response.tool_calls?.length).toBeGreaterThan(0);
+    expect(response.tool_calls?.[0].name).toBe("memory");
+  });
+
+  test("memory tool can be used with native Anthropic tool definition", async () => {
+    const llm = createModel();
+    const llmWithMemory = llm.bindTools([
+      { type: "memory_20250818", name: "memory" },
+    ]);
+
+    const response = await llmWithMemory.invoke("What do you know about me?");
+
+    expect(response).toBeInstanceOf(AIMessage);
+    expect(response.tool_calls).toBeDefined();
+    expect(response.tool_calls?.[0].name).toBe("memory");
+  });
+
+  test("agentic loop with memory tool - create and view", async () => {
+    const memoryFs = new MockMemoryFileSystem();
+    const memory = memory_20250818({
+      execute: async (action) => memoryFs.executeCommand(action),
+    });
+
+    const llm = createModel();
+    const llmWithMemory = llm.bindTools([memory]);
+    const messages: (HumanMessage | AIMessage | ToolMessage)[] = [];
+
+    // Step 1: Ask to remember something
+    const userMessage = new HumanMessage(
+      "Please remember that I like Python and JavaScript. Store this in a file called languages.txt"
+    );
+    messages.push(userMessage);
+
+    let response = await llmWithMemory.invoke(messages);
+    messages.push(response);
+
+    // Process tool calls until the model stops calling tools
+    while (response.tool_calls && response.tool_calls.length > 0) {
+      for (const toolCall of response.tool_calls) {
+        if (toolCall.name === "memory") {
+          const result = memoryFs.executeCommand(
+            toolCall.args as Memory20250818Command
+          );
+          messages.push(
+            new ToolMessage({
+              tool_call_id: toolCall.id ?? "",
+              content: result,
+            })
+          );
+        }
+      }
+
+      response = await llmWithMemory.invoke(messages);
+      messages.push(response);
+    }
+
+    // Step 2: Ask to recall
+    const recallMessage = new HumanMessage(
+      "What programming languages did I say I like? Check your memory."
+    );
+    messages.push(recallMessage);
+
+    response = await llmWithMemory.invoke(messages);
+    messages.push(response);
+
+    // Process any tool calls for recall
+    while (response.tool_calls && response.tool_calls.length > 0) {
+      for (const toolCall of response.tool_calls) {
+        if (toolCall.name === "memory") {
+          const result = memoryFs.executeCommand(
+            toolCall.args as Memory20250818Command
+          );
+          messages.push(
+            new ToolMessage({
+              tool_call_id: toolCall.id ?? "",
+              content: result,
+            })
+          );
+        }
+      }
+
+      response = await llmWithMemory.invoke(messages);
+      messages.push(response);
+    }
+
+    // The final response should mention Python and/or JavaScript
+    const finalContent =
+      typeof response.content === "string"
+        ? response.content
+        : JSON.stringify(response.content);
+
+    expect(
+      finalContent.toLowerCase().includes("python") ||
+        finalContent.toLowerCase().includes("javascript")
+    ).toBe(true);
+  });
+
+  test("memory tool streaming works correctly", async () => {
+    const memoryFs = new MockMemoryFileSystem();
+    const memory = memory_20250818({
+      execute: async (action) => memoryFs.executeCommand(action),
+    });
+
+    const llm = createModel();
+    const llmWithMemory = llm.bindTools([memory]);
+
+    const stream = await llmWithMemory.stream(
+      "Remember that my name is Alice. Save this to user_info.txt"
+    );
+
+    let finalChunk: AIMessageChunk | undefined;
+    for await (const chunk of stream) {
+      if (!finalChunk) {
+        finalChunk = chunk;
+      } else {
+        finalChunk = concat(finalChunk, chunk);
+      }
+    }
+
+    expect(finalChunk).toBeDefined();
+    expect(finalChunk).toBeInstanceOf(AIMessageChunk);
+    expect(finalChunk?.tool_calls?.length).toBeGreaterThan(0);
+    expect(finalChunk?.tool_calls?.[0].name).toBe("memory");
+  });
+
+  test("memory tool with str_replace command", async () => {
+    const memoryFs = new MockMemoryFileSystem();
+    // Pre-populate with a file (use normalized path without leading slash)
+    memoryFs.create("notes.txt", "My favorite color is blue.");
+
+    const memory = memory_20250818({
+      execute: async (action) => memoryFs.executeCommand(action),
+    });
+
+    const llm = createModel();
+    const llmWithMemory = llm.bindTools([memory]);
+    const messages: (HumanMessage | AIMessage | ToolMessage)[] = [];
+
+    // First, tell the model about the existing file by having it view the directory
+    const setupMessage = new HumanMessage(
+      "First, view the root directory to see what files exist in memory."
+    );
+    messages.push(setupMessage);
+
+    let response = await llmWithMemory.invoke(messages);
+    messages.push(response);
+
+    // Process tool calls for viewing
+    while (response.tool_calls && response.tool_calls.length > 0) {
+      for (const toolCall of response.tool_calls) {
+        if (toolCall.name === "memory") {
+          const result = memoryFs.executeCommand(
+            toolCall.args as Memory20250818Command
+          );
+          messages.push(
+            new ToolMessage({
+              tool_call_id: toolCall.id ?? "",
+              content: result,
+            })
+          );
+        }
+      }
+
+      response = await llmWithMemory.invoke(messages);
+      messages.push(response);
+    }
+
+    // Now ask to update the file
+    const userMessage = new HumanMessage(
+      "I changed my mind, my favorite color is now green. Please update the notes.txt file to reflect this change by replacing 'blue' with 'green'."
+    );
+    messages.push(userMessage);
+
+    response = await llmWithMemory.invoke(messages);
+    messages.push(response);
+
+    // Process tool calls for str_replace
+    while (response.tool_calls && response.tool_calls.length > 0) {
+      for (const toolCall of response.tool_calls) {
+        if (toolCall.name === "memory") {
+          const result = memoryFs.executeCommand(
+            toolCall.args as Memory20250818Command
+          );
+          messages.push(
+            new ToolMessage({
+              tool_call_id: toolCall.id ?? "",
"", + content: result, + }) + ); + } + } + + response = await llmWithMemory.invoke(messages); + messages.push(response); + } + + // Verify the file was updated - check using normalized path + expect(memoryFs.hasFileContaining("green")).toBe(true); + expect(memoryFs.hasFileContaining("blue")).toBe(false); + }); + + test("memory tool with delete command", async () => { + const memoryFs = new MockMemoryFileSystem(); + // Pre-populate with a file (use normalized path without leading slash) + memoryFs.create("temp.txt", "Temporary content"); + + const memory = memory_20250818({ + execute: async (action) => memoryFs.executeCommand(action), + }); + + const llm = createModel(); + const llmWithMemory = llm.bindTools([memory]); + const messages: (HumanMessage | AIMessage | ToolMessage)[] = []; + + // First, tell the model about the existing file by having it view the directory + const setupMessage = new HumanMessage( + "First, view the root directory to see what files exist in memory." + ); + messages.push(setupMessage); + + let response = await llmWithMemory.invoke(messages); + messages.push(response); + + // Process tool calls for viewing + while (response.tool_calls && response.tool_calls.length > 0) { + for (const toolCall of response.tool_calls) { + if (toolCall.name === "memory") { + const result = memoryFs.executeCommand( + toolCall.args as Memory20250818Command + ); + messages.push( + new ToolMessage({ + tool_call_id: toolCall.id ?? "", + content: result, + }) + ); + } + } + + response = await llmWithMemory.invoke(messages); + messages.push(response); + } + + // Now ask to delete the file + const userMessage = new HumanMessage( + "Please delete the file called temp.txt from memory." + ); + messages.push(userMessage); + + response = await llmWithMemory.invoke(messages); + messages.push(response); + + // Process tool calls for delete + while (response.tool_calls && response.tool_calls.length > 0) { + for (const toolCall of response.tool_calls) { + if (toolCall.name === "memory") { + const result = memoryFs.executeCommand( + toolCall.args as Memory20250818Command + ); + messages.push( + new ToolMessage({ + tool_call_id: toolCall.id ?? "", + content: result, + }) + ); + } + } + + response = await llmWithMemory.invoke(messages); + messages.push(response); + } + + // Verify the file was deleted - check that no file with "temp" exists + expect(memoryFs.hasFileNamed("temp")).toBe(false); + }); +}); diff --git a/libs/providers/langchain-anthropic/src/tools/types.ts b/libs/providers/langchain-anthropic/src/tools/types.ts new file mode 100644 index 000000000000..a3247e64e1db --- /dev/null +++ b/libs/providers/langchain-anthropic/src/tools/types.ts @@ -0,0 +1,45 @@ +import Anthropic from "@anthropic-ai/sdk"; + +/** + * Memory tool command types as defined by Anthropic's memory tool API. 
+ * @beta
+ * @see https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/memory-tool
+ */
+export type Memory20250818Command =
+  | Memory20250818ViewCommand
+  | Memory20250818CreateCommand
+  | Memory20250818StrReplaceCommand
+  | Memory20250818InsertCommand
+  | Memory20250818DeleteCommand
+  | Memory20250818RenameCommand;
+
+export type Memory20250818ViewCommand =
+  Anthropic.Beta.BetaMemoryTool20250818ViewCommand;
+export type Memory20250818CreateCommand =
+  Anthropic.Beta.BetaMemoryTool20250818CreateCommand;
+export type Memory20250818StrReplaceCommand =
+  Anthropic.Beta.BetaMemoryTool20250818StrReplaceCommand;
+export type Memory20250818InsertCommand =
+  Anthropic.Beta.BetaMemoryTool20250818InsertCommand;
+export type Memory20250818DeleteCommand =
+  Anthropic.Beta.BetaMemoryTool20250818DeleteCommand;
+export type Memory20250818RenameCommand =
+  Anthropic.Beta.BetaMemoryTool20250818RenameCommand;
+
+/**
+ * Options for creating a memory tool.
+ */
+export interface MemoryTool20250818Options {
+  /**
+   * Execute function that handles memory command execution.
+   * It receives the memory command issued by the model, applies it to the
+   * underlying storage backend, and returns the result string that is sent
+   * back to the model as the tool result.
+   */
+  execute: (action: Memory20250818Command) => Promise<string> | string;
+}
+
+/**
+ * Memory tool type definition.
+ */
+export type MemoryTool20250818 = Anthropic.Beta.BetaMemoryTool20250818;
diff --git a/libs/providers/langchain-anthropic/src/utils/tools.ts b/libs/providers/langchain-anthropic/src/utils/tools.ts
index 4af2b0691028..dbda31bd7367 100644
--- a/libs/providers/langchain-anthropic/src/utils/tools.ts
+++ b/libs/providers/langchain-anthropic/src/utils/tools.ts
@@ -53,4 +53,5 @@ export const AnthropicToolExtrasSchema = z.object({
 export const ANTHROPIC_TOOL_BETAS: Record<string, string> = {
   tool_search_tool_regex_20251119: "advanced-tool-use-2025-11-20",
   tool_search_tool_bm25_20251119: "advanced-tool-use-2025-11-20",
+  memory_20250818: "context-management-2025-06-27",
 };
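Usage sketch (illustrative only, not part of the diff above): the README example keeps memory in an in-memory `Map`, while real deployments need persistence. The sketch below shows one way a disk-backed `execute` handler might look, using the `memory_20250818` and `Memory20250818Command` exports introduced by this change. The `MEMORY_ROOT` directory, the path-escape check, and the error strings are assumptions for illustration; `insert` and `rename` are omitted, as in the README example.

```typescript
// Hypothetical disk-backed handler for the memory tool (sketch only).
import fs from "node:fs/promises";
import path from "node:path";
import { ChatAnthropic, memory_20250818 } from "@langchain/anthropic";
import type { Memory20250818Command } from "@langchain/anthropic";

// Assumed local directory backing memory paths such as "/memories/notes.txt".
const MEMORY_ROOT = path.resolve("./memory");

// Map a memory path onto MEMORY_ROOT and refuse anything that escapes it.
const resolveMemoryPath = (p?: string): string => {
  const resolved = path.resolve(MEMORY_ROOT, (p ?? "/").replace(/^\/+/, ""));
  if (!resolved.startsWith(MEMORY_ROOT)) {
    throw new Error(`Invalid memory path: ${p}`);
  }
  return resolved;
};

const memory = memory_20250818({
  execute: async (command: Memory20250818Command): Promise<string> => {
    switch (command.command) {
      case "view": {
        const target = resolveMemoryPath(command.path);
        const stat = await fs.stat(target).catch(() => undefined);
        if (!stat) return `Error: Not found: ${command.path}`;
        if (stat.isDirectory()) {
          const entries = await fs.readdir(target);
          return entries.join("\n") || "Directory is empty.";
        }
        return fs.readFile(target, "utf8");
      }
      case "create": {
        const target = resolveMemoryPath(command.path);
        await fs.mkdir(path.dirname(target), { recursive: true });
        await fs.writeFile(target, command.file_text ?? "", "utf8");
        return `Successfully created file: ${command.path}`;
      }
      case "str_replace": {
        const target = resolveMemoryPath(command.path);
        const content = await fs.readFile(target, "utf8");
        await fs.writeFile(
          target,
          content.replace(command.old_str ?? "", command.new_str ?? ""),
          "utf8"
        );
        return `Successfully replaced text in: ${command.path}`;
      }
      case "delete": {
        await fs.rm(resolveMemoryPath(command.path), { recursive: true });
        return `Successfully deleted: ${command.path}`;
      }
      // insert and rename would be handled analogously to the mock in the tests above.
      default:
        return `Error: Unsupported command: ${command.command}`;
    }
  },
});

const llm = new ChatAnthropic({ model: "claude-sonnet-4-5-20250929" });
const llmWithMemory = llm.bindTools([memory]);
```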