anthropic[patch]: Populate message id (#6122)
* anthropic[patch]: Populate message id

* chore: lint files
bracesproul authored Jul 18, 2024
1 parent be5f191 commit e6fe232
Showing 2 changed files with 25 additions and 1 deletion.
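
In effect, the AIMessage and AIMessageChunk values returned by ChatAnthropic now carry the provider-assigned message id. A minimal usage sketch, not part of the commit itself (assumes ANTHROPIC_API_KEY is set in the environment; the "msg_..." id format is Anthropic's convention):

import { ChatAnthropic } from "@langchain/anthropic";

// Sketch of the behavior this commit enables: the message returned by
// invoke() now exposes the Anthropic-assigned message id instead of
// leaving `id` unpopulated.
const model = new ChatAnthropic();
const result = await model.invoke("Hello");
console.log(result.id); // e.g. an id of the form "msg_..."
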
5 changes: 5 additions & 0 deletions libs/langchain-anthropic/src/chat_models.ts
@@ -125,6 +125,7 @@ function anthropicResponseToChatMessages(
additional_kwargs: additionalKwargs,
usage_metadata: usageMetadata,
response_metadata: additionalKwargs,
+ id: additionalKwargs.id as string,
}),
},
];
@@ -140,6 +141,7 @@ function anthropicResponseToChatMessages(
tool_calls: toolCalls,
usage_metadata: usageMetadata,
response_metadata: additionalKwargs,
+ id: additionalKwargs.id as string,
}),
},
];
@@ -184,11 +186,13 @@ function _makeMessageChunkFromAnthropicEvent(
total_tokens: usage.input_tokens + usage.output_tokens,
};
}
+ console.log("data.message", data.message);
return {
chunk: new AIMessageChunk({
content: fields.coerceContentToString ? "" : [],
additional_kwargs: filteredAdditionalKwargs,
usage_metadata: usageMetadata,
+ id: data.message.id,
}),
usageData: usageDataCopy,
};
@@ -954,6 +958,7 @@ export class ChatAnthropicMessages<
tool_call_chunks: newToolCallChunk ? [newToolCallChunk] : undefined,
usage_metadata: chunk.usage_metadata,
response_metadata: chunk.response_metadata,
+ id: chunk.id,
}),
text: token ?? "",
});
21 changes: 20 additions & 1 deletion libs/langchain-anthropic/src/tests/chat_models.int.test.ts
@@ -11,6 +11,7 @@ import {
SystemMessagePromptTemplate,
} from "@langchain/core/prompts";
import { CallbackManager } from "@langchain/core/callbacks/manager";
+ import { concat } from "@langchain/core/utils/stream";
import { ChatAnthropic } from "../chat_models.js";

test("Test ChatAnthropic", async () => {
@@ -238,7 +239,6 @@ test("Test ChatAnthropic stream method", async () => {
const stream = await model.stream("Print hello world.");
const chunks = [];
for await (const chunk of stream) {
- console.log(chunk);
chunks.push(chunk);
}
expect(chunks.length).toBeGreaterThan(1);
@@ -346,3 +346,22 @@ test("Stream tokens", async () => {
res.usage_metadata.input_tokens + res.usage_metadata.output_tokens
);
});

+ test("id is supplied when invoking", async () => {
+   const model = new ChatAnthropic();
+   const result = await model.invoke("Hello");
+   expect(result.id).toBeDefined();
+   expect(result.id).not.toEqual("");
+ });
+
+ test("id is supplied when streaming", async () => {
+   const model = new ChatAnthropic();
+   let finalChunk: AIMessageChunk | undefined;
+   for await (const chunk of await model.stream("Hello")) {
+     finalChunk = !finalChunk ? chunk : concat(finalChunk, chunk);
+   }
+   expect(finalChunk).toBeDefined();
+   if (!finalChunk) return;
+   expect(finalChunk.id).toBeDefined();
+   expect(finalChunk.id).not.toEqual("");
+ });
