
Commit 0ecaad1

fix(openai): Revert Azure default withStructuredOutput changes (#7596)
1 parent 0eae6d9 commit 0ecaad1

3 files changed: +96 −1 lines

libs/langchain-openai/src/azure/chat_models.ts (+68 −1)

@@ -4,7 +4,14 @@ import {
   type BaseChatModelParams,
 } from "@langchain/core/language_models/chat_models";
 import { getEnvironmentVariable } from "@langchain/core/utils/env";
-import { ChatOpenAI } from "../chat_models.js";
+import { BaseLanguageModelInput } from "@langchain/core/language_models/base";
+import { BaseMessage } from "@langchain/core/messages";
+import { Runnable } from "@langchain/core/runnables";
+import { z } from "zod";
+import {
+  ChatOpenAI,
+  ChatOpenAIStructuredOutputMethodOptions,
+} from "../chat_models.js";
 import { OpenAIEndpointConfig, getEndpoint } from "../utils/azure.js";
 import {
   AzureOpenAIInput,
@@ -635,4 +642,64 @@ export class AzureChatOpenAI extends ChatOpenAI {
 
     return json;
   }
+
+  withStructuredOutput<
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    RunOutput extends Record<string, any> = Record<string, any>
+  >(
+    outputSchema:
+      | z.ZodType<RunOutput>
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      | Record<string, any>,
+    config?: ChatOpenAIStructuredOutputMethodOptions<false>
+  ): Runnable<BaseLanguageModelInput, RunOutput>;
+
+  withStructuredOutput<
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    RunOutput extends Record<string, any> = Record<string, any>
+  >(
+    outputSchema:
+      | z.ZodType<RunOutput>
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      | Record<string, any>,
+    config?: ChatOpenAIStructuredOutputMethodOptions<true>
+  ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;
+
+  withStructuredOutput<
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    RunOutput extends Record<string, any> = Record<string, any>
+  >(
+    outputSchema:
+      | z.ZodType<RunOutput>
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      | Record<string, any>,
+    config?: ChatOpenAIStructuredOutputMethodOptions<boolean>
+  ):
+    | Runnable<BaseLanguageModelInput, RunOutput>
+    | Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;
+
+  withStructuredOutput<
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    RunOutput extends Record<string, any> = Record<string, any>
+  >(
+    outputSchema:
+      | z.ZodType<RunOutput>
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      | Record<string, any>,
+    config?: ChatOpenAIStructuredOutputMethodOptions<boolean>
+  ):
+    | Runnable<BaseLanguageModelInput, RunOutput>
+    | Runnable<
+        BaseLanguageModelInput,
+        { raw: BaseMessage; parsed: RunOutput }
+      > {
+    const ensuredConfig = { ...config };
+    // Not all Azure gpt-4o deployments support jsonSchema yet
+    if (this.model.startsWith("gpt-4o")) {
+      if (ensuredConfig?.method === undefined) {
+        ensuredConfig.method = "functionCalling";
+      }
+    }
+    return super.withStructuredOutput<RunOutput>(outputSchema, ensuredConfig);
+  }
 }
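
For reference, a minimal sketch of how this override behaves from a caller's point of view. The model name, schema, prompt, and environment setup below are illustrative assumptions, not part of this commit:

import { z } from "zod";
import { AzureChatOpenAI } from "@langchain/openai";

// Assumes the usual Azure OpenAI environment variables are set
// (API key, instance name, deployment name, API version).
const chat = new AzureChatOpenAI({ model: "gpt-4o-mini" });

// The model name starts with "gpt-4o" and no method is given, so the
// override above defaults the structured-output method to "functionCalling".
const extractor = chat.withStructuredOutput(
  z.object({ sentiment: z.string() })
);

// A deployment known to support json_schema can still opt in explicitly;
// the override only fills in a default when method is undefined.
const strict = chat.withStructuredOutput(
  z.object({ sentiment: z.string() }),
  { method: "jsonSchema" }
);

const res = await extractor.invoke("The new release works great!");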

libs/langchain-openai/src/chat_models.ts (+13 −0)

@@ -1884,6 +1884,19 @@ export class ChatOpenAI<
     config?: ChatOpenAIStructuredOutputMethodOptions<true>
   ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;
 
+  withStructuredOutput<
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    RunOutput extends Record<string, any> = Record<string, any>
+  >(
+    outputSchema:
+      | z.ZodType<RunOutput>
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      | Record<string, any>,
+    config?: ChatOpenAIStructuredOutputMethodOptions<boolean>
+  ):
+    | Runnable<BaseLanguageModelInput, RunOutput>
+    | Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;
+
   withStructuredOutput<
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
     RunOutput extends Record<string, any> = Record<string, any>
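
The new base-class overload accepts ChatOpenAIStructuredOutputMethodOptions<boolean>, which is what lets the Azure subclass forward a config whose includeRaw flag is not known at compile time. Below is a hedged sketch of the two return shapes; the schema, prompt, and model name are illustrative, and OPENAI_API_KEY is assumed to be set:

import { z } from "zod";
import { ChatOpenAI } from "@langchain/openai";

const chat = new ChatOpenAI({ model: "gpt-4o-mini" });
const schema = z.object({ answer: z.string() });

// Default: the runnable resolves to the parsed output only.
const parsedOnly = chat.withStructuredOutput(schema);

// With includeRaw: true, the overload returning
// { raw: BaseMessage; parsed: RunOutput } is selected instead.
const withRaw = chat.withStructuredOutput(schema, { includeRaw: true });

const a = await parsedOnly.invoke("What is 2 + 2?");
const { raw, parsed } = await withRaw.invoke("What is 2 + 2?");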

libs/langchain-openai/src/tests/azure/chat_models.int.test.ts (+15 −0)

@@ -1,6 +1,7 @@
 /* eslint-disable no-process-env */
 
 import { test, jest, expect } from "@jest/globals";
+import { z } from "zod";
 import {
   BaseMessage,
   ChatMessage,
@@ -950,3 +951,17 @@ testFn("Test Azure ChatOpenAI with bearer token provider", async () => {
   const res = await chat.invoke([["system", "Say hi"], message]);
   // console.log(res);
 });
+
+test("Test Azure ChatOpenAI withStructuredOutput", async () => {
+  const chat = new AzureChatOpenAI({
+    modelName: "gpt-4o-mini",
+  });
+  const message = new HumanMessage("Good!");
+  const model = chat.withStructuredOutput(
+    z.object({
+      sentiment: z.string(),
+    })
+  );
+  const res = await model.invoke([message]);
+  expect(res.sentiment).toBeDefined();
+});
