
Commit 82a239e

FilipZmijewski, jacoblee93, Jacky3003, madmed88, and bploetz authored
feature: chat deployment implementation (#39)
* Rename auth method in docs
* fix(core): Fix trim messages mutation bug (langchain-ai#7547)
* release(core): 0.3.31 (langchain-ai#7548)
* fix(community): Updated Embeddings URL (langchain-ai#7545)
* fix(community): make sure guardrailConfig can be added even with anthropic models (langchain-ai#7542)
* docs: Fix PGVectorStore import in install dependencies (TypeScript) example (langchain-ai#7533)
* fix(community): Airtable url (langchain-ai#7532)
* docs: Fix typo in OpenAIModerationChain example (langchain-ai#7528)
* docs: Resolves langchain-ai#7483, resolves langchain-ai#7274 (langchain-ai#7505)
  Co-authored-by: jacoblee93 <[email protected]>
* docs: Rename auth method in IBM docs (langchain-ai#7524)
* docs: correct misspelling (langchain-ai#7522)
  Co-authored-by: jacoblee93 <[email protected]>
* release(community): 0.3.25 (langchain-ai#7549)
* feat(azure-cosmosdb): add session context for a user mongodb (langchain-ai#7436)
  Co-authored-by: jacoblee93 <[email protected]>
* release(azure-cosmosdb): 0.2.7 (langchain-ai#7550)
* fix(ci): Fix build (langchain-ai#7551)
* feat(anthropic): Add Anthropic PDF support (document type) in invoke (langchain-ai#7496)
  Co-authored-by: jacoblee93 <[email protected]>
* release(anthropic): 0.3.12 (langchain-ai#7552)
* chore(core,langchain,community): Relax langsmith deps (langchain-ai#7556)
* release(community): 0.3.26 (langchain-ai#7557)
* release(core): 0.3.32 (langchain-ai#7558)
* Release 0.3.12 (langchain-ai#7559)
* Add deployment chat to chat class
* Update Watsonx sdk
* Rework interfaces in llms as well
* Bump watsonx-ai sdk version
* Remove unused code
* Add fake auth

---------

Co-authored-by: Jacob Lee <[email protected]>
Co-authored-by: Jacky Chen <[email protected]>
Co-authored-by: Mohamed Belhadj <[email protected]>
Co-authored-by: Brian Ploetz <[email protected]>
Co-authored-by: Eduard-Constantin Ibinceanu <[email protected]>
Co-authored-by: Jonathan V <[email protected]>
Co-authored-by: ucev <[email protected]>
Co-authored-by: crisjy <[email protected]>
Co-authored-by: Adham Badr <[email protected]>
1 parent 464e56b commit 82a239e
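The headline change, deployment-based chat for watsonx.ai, lives in files outside the excerpt below. As rough orientation only, here is a hedged sketch of how a deployment-backed chat model is typically constructed with the community watsonx integration; the import path and the `version`, `serviceUrl`, and `idOrName` fields are assumptions based on the existing WatsonxLLM deployment support, not confirmed by the files shown in this view.

```typescript
// Sketch only: names below are assumptions, not taken from this commit's visible diff.
import { ChatWatsonx } from "@langchain/community/chat_models/ibm";

const chat = new ChatWatsonx({
  version: "2024-05-31", // assumed API version date
  serviceUrl: "https://us-south.ml.cloud.ibm.com", // assumed region endpoint
  idOrName: "<your-deployment-id>", // assumed: deployment id in place of projectId/spaceId
});

const res = await chat.invoke([
  { role: "user", content: "Hello from a watsonx deployment" },
]);
console.log(res.content);
```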

File tree

33 files changed, +657 -179 lines


docs/core_docs/docs/how_to/tool_calling_parallel.ipynb (+2, -2)

@@ -56,7 +56,7 @@
 "});\n",
 "\n",
 "const multiplyTool = tool(async ({ a, b }) => {\n",
-" return a + b;\n",
+" return a * b;\n",
 "}, {\n",
 " name: \"multiply\",\n",
 " description: \"Multiplies a and b\",\n",
@@ -220,4 +220,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 4
-}
+}

docs/core_docs/docs/how_to/tool_results_pass_to_model.ipynb (+2, -2)

@@ -18,7 +18,7 @@
 ":::\n",
 "```\n",
 "\n",
-"Some models are capable of [**tool calling**](/docs/concepts/tool_calling) - generating arguments that conform to a specific user-provided schema. This guide will demonstrate how to use those tool cals to actually call a function and properly pass the results back to the model.\n",
+"Some models are capable of [**tool calling**](/docs/concepts/tool_calling) - generating arguments that conform to a specific user-provided schema. This guide will demonstrate how to use those tool calls to actually call a function and properly pass the results back to the model.\n",
 "\n",
 "![](../../static/img/tool_invocation.png)\n",
 "\n",
@@ -367,4 +367,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 4
-}
+}

docs/core_docs/docs/integrations/tools/google_calendar.mdx (+1, -1)

@@ -25,7 +25,7 @@ import IntegrationInstallTooltip from "@mdx_components/integration_install_toolt
 <IntegrationInstallTooltip></IntegrationInstallTooltip>
 
 ```bash npm2yarn
-npm install @langchain/openai @langchain/core
+npm install @langchain/openai @langchain/core @langchain/community @langchain/langgraph
 ```
 
 <CodeBlock language="typescript">{ToolExample}</CodeBlock>

docs/core_docs/src/theme/VectorStoreTabs.js (+1, -1)

@@ -54,7 +54,7 @@ const ${vectorStoreVarName} = new MongoDBAtlasVectorSearch(embeddings, {
   {
     value: "PGVector",
     label: "PGVector",
-    text: `import PGVectorStore from "@langchain/community/vectorstores/pgvector";
+    text: `import { PGVectorStore } from "@langchain/community/vectorstores/pgvector";
 
 const ${vectorStoreVarName} = await PGVectorStore.initialize(embeddings, {})`,
     dependencies: "@langchain/community",

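For context on the corrected import above: a minimal sketch of how the named `PGVectorStore` export is typically initialized. The connection settings and table name are placeholders, and `OpenAIEmbeddings` is just one possible embeddings model.

```typescript
import { OpenAIEmbeddings } from "@langchain/openai";
import { PGVectorStore } from "@langchain/community/vectorstores/pgvector";

const embeddings = new OpenAIEmbeddings({ model: "text-embedding-3-small" });

// Placeholder Postgres credentials and table name.
const vectorStore = await PGVectorStore.initialize(embeddings, {
  postgresConnectionOptions: {
    host: "localhost",
    port: 5432,
    user: "postgres",
    password: "password",
    database: "langchain",
  },
  tableName: "documents",
});

await vectorStore.addDocuments([
  { pageContent: "pgvector stores embeddings in Postgres", metadata: {} },
]);
```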
examples/package.json (+1, -1)

@@ -93,7 +93,7 @@
   "ioredis": "^5.3.2",
   "js-yaml": "^4.1.0",
   "langchain": "workspace:*",
-  "langsmith": "^0.2.8",
+  "langsmith": ">=0.2.8 <0.4.0",
   "mongodb": "^6.3.0",
   "pg": "^8.11.0",
   "pickleparser": "^0.2.1",
New file (+52 lines)

@@ -0,0 +1,52 @@
+import { ChatAnthropic } from "@langchain/anthropic";
+
+import * as fs from "fs";
+
+export const run = async () => {
+  const llm = new ChatAnthropic({
+    model: "claude-3-5-sonnet-20240620", // Only claude-3-5-sonnet-20240620 , claude-3-5-sonnet-20241022 as of Jan 2025 support PDF documents as in base64
+  });
+
+  // PDF needs to be in Base64.
+  const getLocalFile = async (path: string) => {
+    const localFile = await fs.readFileSync(path);
+    const base64File = localFile.toString("base64");
+    return base64File;
+  };
+
+  // Or remotely
+  const getRemoteFile = async (url: string) => {
+    const response = await fetch(url);
+    const arrayBuffer = await response.arrayBuffer();
+    const base64File = Buffer.from(arrayBuffer).toString("base64");
+    return base64File;
+  };
+
+  const base64 = await getRemoteFile(
+    "https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf"
+  );
+
+  const prompt = "Summarise the contents of this PDF";
+
+  const response = await llm.invoke([
+    {
+      role: "user",
+      content: [
+        {
+          type: "text",
+          text: prompt,
+        },
+        {
+          type: "document",
+          source: {
+            media_type: "application/pdf",
+            type: "base64",
+            data: base64,
+          },
+        },
+      ],
+    },
+  ]);
+  console.log(response.content);
+  return response.content;
+};

examples/src/tools/google_calendar.ts (+13, -8)

@@ -1,15 +1,16 @@
-import { initializeAgentExecutorWithOptions } from "langchain/agents";
-import { OpenAI } from "@langchain/openai";
+import { createReactAgent } from "@langchain/langgraph/prebuilt";
+import { ChatOpenAI } from "@langchain/openai";
 import { Calculator } from "@langchain/community/tools/calculator";
 import {
   GoogleCalendarCreateTool,
   GoogleCalendarViewTool,
 } from "@langchain/community/tools/google_calendar";
 
 export async function run() {
-  const model = new OpenAI({
+  const model = new ChatOpenAI({
     temperature: 0,
     apiKey: process.env.OPENAI_API_KEY,
+    model: "gpt-4o-mini",
   });
 
   const googleCalendarParams = {
@@ -31,22 +32,26 @@ export async function run() {
     new GoogleCalendarViewTool(googleCalendarParams),
   ];
 
-  const calendarAgent = await initializeAgentExecutorWithOptions(tools, model, {
-    agentType: "zero-shot-react-description",
-    verbose: true,
+  const calendarAgent = createReactAgent({
+    llm: model,
+    tools,
   });
 
   const createInput = `Create a meeting with John Doe next Friday at 4pm - adding to the agenda of it the result of 99 + 99`;
 
-  const createResult = await calendarAgent.invoke({ input: createInput });
+  const createResult = await calendarAgent.invoke({
+    messages: [{ role: "user", content: createInput }],
+  });
   // Create Result {
   //   output: 'A meeting with John Doe on 29th September at 4pm has been created and the result of 99 + 99 has been added to the agenda.'
   // }
   console.log("Create Result", createResult);
 
   const viewInput = `What meetings do I have this week?`;
 
-  const viewResult = await calendarAgent.invoke({ input: viewInput });
+  const viewResult = await calendarAgent.invoke({
+    messages: [{ role: "user", content: viewInput }],
+  });
   // View Result {
   //   output: "You have no meetings this week between 8am and 8pm."
   // }

langchain-core/package.json (+2, -2)

@@ -1,6 +1,6 @@
 {
   "name": "@langchain/core",
-  "version": "0.3.30",
+  "version": "0.3.32",
   "description": "Core LangChain.js abstractions and schemas",
   "type": "module",
   "engines": {
@@ -38,7 +38,7 @@
   "camelcase": "6",
   "decamelize": "1.2.0",
   "js-tiktoken": "^1.0.12",
-  "langsmith": "^0.2.8",
+  "langsmith": ">=0.2.8 <0.4.0",
   "mustache": "^4.2.0",
   "p-queue": "^6.6.2",
   "p-retry": "4",

langchain-core/src/messages/tests/message_utils.test.ts (+82, -1)

@@ -4,10 +4,11 @@ import {
   mergeMessageRuns,
   trimMessages,
 } from "../transformers.js";
-import { AIMessage } from "../ai.js";
+import { AIMessage, AIMessageChunk } from "../ai.js";
 import { ChatMessage } from "../chat.js";
 import { HumanMessage } from "../human.js";
 import { SystemMessage } from "../system.js";
+import { ToolMessage } from "../tool.js";
 import { BaseMessage } from "../base.js";
 import {
   getBufferString,
@@ -187,6 +188,7 @@
         defaultMsgSuffixLen;
     }
   }
+  console.log(count);
   return count;
 };
 
@@ -196,6 +198,84 @@
   };
 };
 
+  it("should not mutate messages if no trimming occurs with strategy last", async () => {
+    const trimmer = trimMessages({
+      maxTokens: 128000,
+      strategy: "last",
+      startOn: [HumanMessage],
+      endOn: [AIMessage, ToolMessage],
+      tokenCounter: () => 1,
+    });
+    const messages = [
+      new HumanMessage({
+        content: "Fetch the last 5 emails from Flora Testington's inbox.",
+        additional_kwargs: {},
+        response_metadata: {},
+      }),
+      new AIMessageChunk({
+        id: "chatcmpl-abcdefg",
+        content: "",
+        additional_kwargs: {
+          tool_calls: [
+            {
+              function: {
+                name: "getEmails",
+                arguments: JSON.stringify({
+                  inboxName: "[email protected]",
+                  amount: 5,
+                  folder: "Inbox",
+                  searchString: null,
+                  from: null,
+                  subject: null,
+                }),
+              },
+              id: "foobarbaz",
+              index: 0,
+              type: "function",
+            },
+          ],
+        },
+        response_metadata: {
+          usage: {},
+        },
+        tool_calls: [
+          {
+            name: "getEmails",
+            args: {
+              inboxName: "[email protected]",
+              amount: 5,
+              folder: "Inbox",
+              searchString: null,
+              from: null,
+              subject: null,
+            },
+            id: "foobarbaz",
+            type: "tool_call",
+          },
+        ],
+        tool_call_chunks: [
+          {
+            name: "getEmails",
+            args: '{"inboxName":"[email protected]","amount":5,"folder":"Inbox","searchString":null,"from":null,"subject":null,"cc":[],"bcc":[]}',
+            id: "foobarbaz",
+            index: 0,
+            type: "tool_call_chunk",
+          },
+        ],
+        invalid_tool_calls: [],
+      }),
+      new ToolMessage({
+        content: "a whole bunch of emails!",
+        name: "getEmails",
+        additional_kwargs: {},
+        response_metadata: {},
+        tool_call_id: "foobarbaz",
+      }),
+    ];
+    const trimmedMessages = await trimmer.invoke(messages);
+    expect(trimmedMessages).toEqual(messages);
+  });
+
   it("First 30 tokens, not allowing partial messages", async () => {
     const { messages, dummyTokenCounter } = messagesAndTokenCounterFactory();
     const trimmedMessages = await trimMessages(messages, {
@@ -319,6 +399,7 @@
 
   it("Last 30 tokens, including system message, allowing partial messages, end on HumanMessage", async () => {
     const { messages, dummyTokenCounter } = messagesAndTokenCounterFactory();
+    console.log(messages);
     const trimmedMessages = await trimMessages(messages, {
       maxTokens: 30,
       tokenCounter: dummyTokenCounter,

langchain-core/src/messages/transformers.ts (+11, -7)

@@ -274,7 +274,7 @@ function _mergeMessageRuns(messages: BaseMessage[]): BaseMessage[] {
   }
   const merged: BaseMessage[] = [];
   for (const msg of messages) {
-    const curr = msg; // Create a shallow copy of the message
+    const curr = msg;
     const last = merged.pop();
     if (!last) {
       merged.push(curr);
@@ -861,20 +861,24 @@ async function _lastMaxTokens(
     ...rest
   } = options;
 
+  // Create a copy of messages to avoid mutation
+  let messagesCopy = [...messages];
+
   if (endOn) {
     const endOnArr = Array.isArray(endOn) ? endOn : [endOn];
     while (
-      messages &&
-      !_isMessageType(messages[messages.length - 1], endOnArr)
+      messagesCopy.length > 0 &&
+      !_isMessageType(messagesCopy[messagesCopy.length - 1], endOnArr)
     ) {
-      messages.pop();
+      messagesCopy = messagesCopy.slice(0, -1);
     }
   }
 
-  const swappedSystem = includeSystem && messages[0]._getType() === "system";
+  const swappedSystem =
+    includeSystem && messagesCopy[0]?._getType() === "system";
   let reversed_ = swappedSystem
-    ? messages.slice(0, 1).concat(messages.slice(1).reverse())
-    : messages.reverse();
+    ? messagesCopy.slice(0, 1).concat(messagesCopy.slice(1).reverse())
+    : messagesCopy.reverse();
 
   reversed_ = await _firstMaxTokens(reversed_, {
     ...rest,

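The change above copies the input array before popping messages, so callers no longer see their chat history shrink as a side effect of trimming. A minimal sketch of the behavior the new test pins down, using a toy token counter; the message contents are made up for illustration.

```typescript
import {
  trimMessages,
  HumanMessage,
  AIMessage,
  ToolMessage,
} from "@langchain/core/messages";

// Hypothetical chat history.
const history = [
  new HumanMessage("What is 2 + 2?"),
  new AIMessage("4"),
  new HumanMessage("Thanks!"),
];

const trimmer = trimMessages({
  maxTokens: 1000,
  strategy: "last",
  endOn: [AIMessage, ToolMessage], // trailing HumanMessage is dropped from the result
  tokenCounter: () => 1, // toy counter: one "token" per message
});

const trimmed = await trimmer.invoke(history);

// With the fix, `history` still holds all 3 messages; only `trimmed` is shorter.
console.log(history.length, trimmed.length); // 3 2
```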
langchain/package.json (+2, -2)

@@ -1,6 +1,6 @@
 {
   "name": "langchain",
-  "version": "0.3.11",
+  "version": "0.3.12",
   "description": "Typescript bindings for langchain",
   "type": "module",
   "engines": {
@@ -530,7 +530,7 @@
   "js-tiktoken": "^1.0.12",
   "js-yaml": "^4.1.0",
   "jsonpointer": "^5.0.1",
-  "langsmith": "^0.2.8",
+  "langsmith": ">=0.2.8 <0.4.0",
   "openapi-types": "^12.1.3",
   "p-retry": "4",
   "uuid": "^10.0.0",

langchain/src/chains/openai_moderation.ts (+1, -1)

@@ -27,7 +27,7 @@ export interface OpenAIModerationChainInput
  * OpenAIModerationChainInput interface.
  * @example
  * ```typescript
- * const moderation = new ChatOpenAIModerationChain({ throwError: true });
+ * const moderation = new OpenAIModerationChain({ throwError: true });
  *
  * const badString = "Bad naughty words from user";
  *

libs/langchain-anthropic/package.json (+1, -1)

@@ -1,6 +1,6 @@
 {
   "name": "@langchain/anthropic",
-  "version": "0.3.11",
+  "version": "0.3.12",
   "description": "Anthropic integrations for LangChain.js",
   "type": "module",
   "engines": {

libs/langchain-anthropic/src/utils/message_inputs.ts (+7)

@@ -131,6 +131,13 @@ function _formatContent(content: MessageContent) {
       source,
       ...(cacheControl ? { cache_control: cacheControl } : {}),
     };
+  } else if (contentPart.type === "document") {
+    // PDF
+    return {
+      type: "document",
+      source: contentPart.source,
+      ...(cacheControl ? { cache_control: cacheControl } : {}),
+    };
   } else if (
     textTypes.find((t) => t === contentPart.type) &&
     "text" in contentPart

libs/langchain-azure-cosmosdb/package.json (+1, -1)

@@ -1,6 +1,6 @@
 {
   "name": "@langchain/azure-cosmosdb",
-  "version": "0.2.6",
+  "version": "0.2.7",
   "description": "Azure CosmosDB integration for LangChain.js",
   "type": "module",
   "engines": {

0 commit comments
