Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(langchain): Add hub entrypoint with automatic dynamic import of models #7583

Merged
merged 2 commits into from
Jan 25, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions langchain/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -294,6 +294,10 @@ hub.cjs
hub.js
hub.d.ts
hub.d.cts
hub/node.cjs
hub/node.js
hub/node.d.ts
hub/node.d.cts
util/document.cjs
util/document.js
util/document.d.ts
Expand Down
4 changes: 3 additions & 1 deletion langchain/langchain.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,8 @@ export const config = {
"storage/in_memory": "storage/in_memory",
"storage/file_system": "storage/file_system",
// hub
hub: "hub",
hub: "hub/index",
"hub/node": "hub/node",
// utilities
"util/document": "util/document",
"util/math": "util/math",
Expand Down Expand Up @@ -217,6 +218,7 @@ export const config = {
"storage/file_system",
// Prevent export due to circular dependency with "load" entrypoint
"hub",
"hub/node",
"experimental/prompts/handlebars",
],
extraImportMapEntries: [
Expand Down
13 changes: 13 additions & 0 deletions langchain/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -306,6 +306,10 @@
"hub.js",
"hub.d.ts",
"hub.d.cts",
"hub/node.cjs",
"hub/node.js",
"hub/node.d.ts",
"hub/node.d.cts",
"util/document.cjs",
"util/document.js",
"util/document.d.ts",
Expand Down Expand Up @@ -1222,6 +1226,15 @@
"import": "./hub.js",
"require": "./hub.cjs"
},
"./hub/node": {
"types": {
"import": "./hub/node.d.ts",
"require": "./hub/node.d.cts",
"default": "./hub/node.d.ts"
},
"import": "./hub/node.js",
"require": "./hub/node.cjs"
},
"./util/document": {
"types": {
"import": "./util/document.d.ts",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { test, expect } from "@jest/globals";
import { ChatOpenAI } from "@langchain/openai";
import type { ChatPromptTemplate } from "@langchain/core/prompts";
import { TavilySearchResults } from "../../util/testing/tools/tavily_search.js";
import { pull } from "../../hub.js";
import { pull } from "../../hub/index.js";
import { AgentExecutor, createOpenAIFunctionsAgent } from "../index.js";

const tools = [new TavilySearchResults({ maxResults: 1 })];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import { tool } from "@langchain/core/tools";
import { z } from "zod";
import { AsyncLocalStorage } from "async_hooks";
import { TavilySearchResults } from "../../util/testing/tools/tavily_search.js";
import { pull } from "../../hub.js";
import { pull } from "../../hub/index.js";
import { AgentExecutor, createOpenAIToolsAgent } from "../index.js";

const tools = [new TavilySearchResults({ maxResults: 1 })];
Expand Down
2 changes: 1 addition & 1 deletion langchain/src/agents/tests/create_react_agent.int.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { test, expect } from "@jest/globals";
import { OpenAI } from "@langchain/openai";
import type { PromptTemplate } from "@langchain/core/prompts";
import { TavilySearchResults } from "../../util/testing/tools/tavily_search.js";
import { pull } from "../../hub.js";
import { pull } from "../../hub/index.js";
import { AgentExecutor, createReactAgent } from "../index.js";

const tools = [new TavilySearchResults({ maxResults: 1 })];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { test, expect } from "@jest/globals";
import { ChatOpenAI } from "@langchain/openai";
import type { ChatPromptTemplate } from "@langchain/core/prompts";
import { TavilySearchResults } from "../../util/testing/tools/tavily_search.js";
import { pull } from "../../hub.js";
import { pull } from "../../hub/index.js";
import { AgentExecutor, createStructuredChatAgent } from "../index.js";

const tools = [new TavilySearchResults({ maxResults: 1 })];
Expand Down
2 changes: 1 addition & 1 deletion langchain/src/agents/tests/create_xml_agent.int.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { test, expect } from "@jest/globals";
import type { PromptTemplate } from "@langchain/core/prompts";
import { ChatOpenAI } from "@langchain/openai";
import { TavilySearchResults } from "../../util/testing/tools/tavily_search.js";
import { pull } from "../../hub.js";
import { pull } from "../../hub/index.js";
import { AgentExecutor, createXmlAgent } from "../index.js";

const tools = [new TavilySearchResults({ maxResults: 1 })];
Expand Down
3 changes: 2 additions & 1 deletion langchain/src/chat_models/tests/universal.int.test.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-explicit-any */
import { tool } from "@langchain/core/tools";
import { z } from "zod";
import { it } from "@jest/globals";
Expand All @@ -8,7 +9,7 @@ import { AIMessageChunk } from "@langchain/core/messages";
import { concat } from "@langchain/core/utils/stream";
import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";
import { AgentExecutor, createReactAgent } from "../../agents/index.js";
import { pull } from "../../hub.js";
import { pull } from "../../hub/index.js";
import { initChatModel } from "../universal.js";

// Make copies of API keys and remove them from the environment to avoid conflicts.
Expand Down
1 change: 1 addition & 0 deletions langchain/src/chat_models/universal.ts
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ async function _initChatModelHelper(
`Unable to infer model provider for { model: ${model} }, please specify modelProvider directly.`
);
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { modelProvider: _unused, ...passedParams } = params;

try {
Expand Down
118 changes: 42 additions & 76 deletions langchain/src/hub.ts → langchain/src/hub/base.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import { Client } from "langsmith";
import { Runnable } from "@langchain/core/runnables";
import type { BaseLanguageModel } from "@langchain/core/language_models/base";
import { load } from "./load/index.js";
import type { Runnable } from "@langchain/core/runnables";
import { Client } from "langsmith";

/**
* Push a prompt to the hub.
Expand All @@ -11,7 +10,7 @@ import { load } from "./load/index.js";
* @param options
* @returns The URL of the newly pushed prompt in the hub.
*/
export async function push(
export async function basePush(
repoFullName: string,
runnable: Runnable,
options?: {
Expand Down Expand Up @@ -40,21 +39,9 @@ export async function push(
return client.pushPrompt(repoFullName, payloadOptions);
}

/**
* Pull a prompt from the hub.
* @param ownerRepoCommit The name of the repo containing the prompt, as well as an optional commit hash separated by a slash.
* @param options
* @returns
*/
export async function pull<T extends Runnable>(
export async function basePull(
ownerRepoCommit: string,
options?: {
apiKey?: string;
apiUrl?: string;
includeModel?: boolean;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
modelClass?: new (...args: any[]) => BaseLanguageModel;
}
options?: { apiKey?: string; apiUrl?: string; includeModel?: boolean }
) {
const client = new Client(options);

Expand All @@ -76,13 +63,46 @@ export async function pull<T extends Runnable>(
lc_hub_commit_hash: promptObject.commit_hash,
};

// Some nested mustache prompts have improperly parsed variables that include a dot.
if (promptObject.manifest.kwargs.template_format === "mustache") {
const stripDotNotation = (varName: string) => varName.split(".")[0];

const { input_variables } = promptObject.manifest.kwargs;
if (Array.isArray(input_variables)) {
promptObject.manifest.kwargs.input_variables =
input_variables.map(stripDotNotation);
}

const { messages } = promptObject.manifest.kwargs;
if (Array.isArray(messages)) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
promptObject.manifest.kwargs.messages = messages.map((message: any) => {
const nestedVars = message?.kwargs?.prompt?.kwargs?.input_variables;
if (Array.isArray(nestedVars)) {
// eslint-disable-next-line no-param-reassign
message.kwargs.prompt.kwargs.input_variables =
nestedVars.map(stripDotNotation);
}
return message;
});
}
}
return promptObject;
}

export function generateModelImportMap(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
modelClass?: new (...args: any[]) => BaseLanguageModel
) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const modelImportMap: Record<string, any> = {};
// TODO: Fix in 0.4.0. We can't get lc_id without instantiating the class, so we
// must put them inline here. In the future, make this less hacky
// This should probably use dynamic imports and have a web-only entrypoint
// in a future breaking release
if (options?.modelClass !== undefined) {
const modelLcName = (options.modelClass as any)?.lc_name();
if (modelClass !== undefined) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const modelLcName = (modelClass as any)?.lc_name();
let importMapKey;
if (modelLcName === "ChatAnthropic") {
importMapKey = "chat_models__anthropic";
Expand All @@ -105,62 +125,8 @@ export async function pull<T extends Runnable>(
}
modelImportMap[importMapKey] = {
...modelImportMap[importMapKey],
[modelLcName]: options.modelClass,
[modelLcName]: modelClass,
};
}

// Some nested mustache prompts have improperly parsed variables that include a dot.
if (promptObject.manifest.kwargs.template_format === "mustache") {
const stripDotNotation = (varName: string) => varName.split(".")[0];

const { input_variables } = promptObject.manifest.kwargs;
if (Array.isArray(input_variables)) {
promptObject.manifest.kwargs.input_variables =
input_variables.map(stripDotNotation);
}

const { messages } = promptObject.manifest.kwargs;
if (Array.isArray(messages)) {
promptObject.manifest.kwargs.messages = messages.map((message: any) => {
const nestedVars = message?.kwargs?.prompt?.kwargs?.input_variables;
if (Array.isArray(nestedVars)) {
// eslint-disable-next-line no-param-reassign
message.kwargs.prompt.kwargs.input_variables =
nestedVars.map(stripDotNotation);
}
return message;
});
}
}

try {
const loadedPrompt = await load<T>(
JSON.stringify(promptObject.manifest),
undefined,
undefined,
modelImportMap
);
return loadedPrompt;
} catch (e: any) {
if (options?.includeModel && options?.modelClass === undefined) {
throw new Error(
[
e.message,
"",
`To load prompts with an associated non-OpenAI model, you must pass a "modelClass" parameter like this:`,
"",
"```",
`import { ChatAnthropic } from "@langchain/anthropic";`,
"",
`const prompt = await pull("my-prompt", {`,
` includeModel: true,`,
` modelClass: ChatAnthropic,`,
`});`,
"```",
].join("\n")
);
} else {
throw e;
}
}
return modelImportMap;
}
67 changes: 67 additions & 0 deletions langchain/src/hub/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
import { Runnable } from "@langchain/core/runnables";
import type { BaseLanguageModel } from "@langchain/core/language_models/base";
import { load } from "../load/index.js";
import { basePush, basePull, generateModelImportMap } from "./base.js";

export { basePush as push };

/**
* Pull a prompt from the hub.
*
* @param ownerRepoCommit The name of the repo containing the prompt, as well as an optional commit hash separated by a slash.
* @param options.apiKey LangSmith API key to use when pulling the prompt
* @param options.apiUrl LangSmith API URL to use when pulling the prompt
* @param options.includeModel Whether to also instantiate and attach a model instance to the prompt,
* if the prompt has associated model metadata. If set to true, invoking the resulting pulled prompt will
* also invoke the instantiated model. For non-OpenAI models, you must also set "modelClass" to the
* correct class of the model.
* @param options.modelClass If includeModel is true, the class of the model to instantiate. Required
* for non-OpenAI models. If you are running in Node or another environment that supports dynamic imports,
* you may instead import this function from "langchain/hub/node" and pass "includeModel: true" instead
* of specifying this parameter.
* @returns
*/
export async function pull<T extends Runnable>(
  ownerRepoCommit: string,
  options?: {
    apiKey?: string;
    apiUrl?: string;
    includeModel?: boolean;
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    modelClass?: new (...args: any[]) => BaseLanguageModel;
  }
) {
  // Fetch the serialized prompt manifest from LangSmith; deserialization
  // (and optional model instantiation) happens below.
  const promptObject = await basePull(ownerRepoCommit, options);
  try {
    // Revive the manifest into a Runnable. The import map tells the loader
    // how to resolve the attached model class, if one was provided.
    return await load<T>(
      JSON.stringify(promptObject.manifest),
      undefined,
      undefined,
      generateModelImportMap(options?.modelClass)
    );
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
  } catch (e: any) {
    // Without includeModel there is no model to resolve, so the failure is
    // unrelated to model loading — propagate it untouched.
    if (!options?.includeModel) {
      throw e;
    }
    // Most likely cause: a non-OpenAI model was attached to the prompt but no
    // class was registered to deserialize it. Point the caller at the fix.
    throw new Error(
      [
        e.message,
        "",
        `To load prompts with an associated non-OpenAI model, you must use the "langchain/hub/node" entrypoint, or pass a "modelClass" parameter like this:`,
        "",
        "```",
        `import { pull } from "langchain/hub";`,
        `import { ChatAnthropic } from "@langchain/anthropic";`,
        "",
        `const prompt = await pull("my-prompt", {`,
        `  includeModel: true,`,
        `  modelClass: ChatAnthropic,`,
        `});`,
        "```",
      ].join("\n")
    );
  }
}
Loading
Loading