diff --git a/docs/docs/getting-started/guide-chat.mdx b/docs/docs/getting-started/guide-chat.mdx index 1acd97a81389..cdefe0697add 100644 --- a/docs/docs/getting-started/guide-chat.mdx +++ b/docs/docs/getting-started/guide-chat.mdx @@ -278,7 +278,13 @@ And finally, we can use the AgentExecutor to run an agent: ```typescript // Define the list of tools the agent can use -const tools = [new SerpAPI()]; +const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), +]; // Create the agent from the chat model and the tools const agent = ChatAgent.fromLLMAndTools(new ChatOpenAI(), tools); // Create an executor, which calls to the agent until an answer is found diff --git a/docs/docs/getting-started/guide-llm.mdx b/docs/docs/getting-started/guide-llm.mdx index 436e17ac4ed0..04476c4ac825 100644 --- a/docs/docs/getting-started/guide-llm.mdx +++ b/docs/docs/getting-started/guide-llm.mdx @@ -171,7 +171,14 @@ import { SerpAPI } from "langchain/tools"; import { Calculator } from "langchain/tools/calculator"; const model = new OpenAI({ temperature: 0 }); -const tools = [new SerpAPI(), new Calculator()]; +const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), +]; const executor = await initializeAgentExecutor( tools, diff --git a/docs/docs/modules/agents/executor/getting-started.md b/docs/docs/modules/agents/executor/getting-started.md index a23eacf66298..f84af780928c 100644 --- a/docs/docs/modules/agents/executor/getting-started.md +++ b/docs/docs/modules/agents/executor/getting-started.md @@ -30,7 +30,14 @@ import { SerpAPI } from "langchain/tools"; import { Calculator } from "langchain/tools/calculator"; const model = new OpenAI({ temperature: 0 }); -const tools = [new SerpAPI(), new Calculator()]; +const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), +]; const executor = await initializeAgentExecutor( tools, diff --git a/docs/docs/modules/agents/tools/agents_with_vectorstores.md b/docs/docs/modules/agents/tools/agents_with_vectorstores.md index 473b59d365fc..946d27c353d4 100644 --- a/docs/docs/modules/agents/tools/agents_with_vectorstores.md +++ b/docs/docs/modules/agents/tools/agents_with_vectorstores.md @@ -47,7 +47,15 @@ const qaTool = new ChainTool({ Now you can construct and using the tool just as you would any other! 
```typescript -const tools = [new SerpAPI(), new Calculator(), qaTool]; +const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + qaTool, +]; const executor = await initializeAgentExecutor( tools, diff --git a/docs/docs/production/tracing.md b/docs/docs/production/tracing.md index b1216c19f72e..14b126fb3d14 100644 --- a/docs/docs/production/tracing.md +++ b/docs/docs/production/tracing.md @@ -18,7 +18,14 @@ import process from "process"; export const run = async () => { process.env.LANGCHAIN_HANDLER = "langchain"; const model = new OpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const executor = await initializeAgentExecutor( tools, @@ -55,7 +62,14 @@ import { export const run = async () => { process.env.LANGCHAIN_HANDLER = "langchain"; const model = new OpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const executor = await initializeAgentExecutor( tools, @@ -90,7 +104,14 @@ export const run = async () => { callbackManager.addHandler(new LangChainTracer()); const model = new OpenAI({ temperature: 0, callbackManager }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; for (const tool of tools) { tool.callbackManager = callbackManager; } diff --git a/examples/src/agents/chat_convo_with_tracing.ts b/examples/src/agents/chat_convo_with_tracing.ts index 3feee3e566a0..c449e9711a51 100644 --- a/examples/src/agents/chat_convo_with_tracing.ts +++ b/examples/src/agents/chat_convo_with_tracing.ts @@ -7,7 +7,14 @@ import { BufferMemory } from "langchain/memory"; export const run = async () => { process.env.LANGCHAIN_HANDLER = "langchain"; const model = new ChatOpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const executor = await initializeAgentExecutor( tools, diff --git a/examples/src/agents/chat_mrkl.ts b/examples/src/agents/chat_mrkl.ts index 6ac91adcb130..7c74a0ff0d14 100644 --- a/examples/src/agents/chat_mrkl.ts +++ b/examples/src/agents/chat_mrkl.ts @@ -5,7 +5,14 @@ import { Calculator } from "langchain/tools/calculator"; export const run = async () => { const model = new ChatOpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const executor = await initializeAgentExecutor( tools, diff --git a/examples/src/agents/chat_mrkl_with_tracing.ts b/examples/src/agents/chat_mrkl_with_tracing.ts index 52639b2716f8..925302b0053c 100644 --- a/examples/src/agents/chat_mrkl_with_tracing.ts +++ b/examples/src/agents/chat_mrkl_with_tracing.ts @@ -6,7 +6,14 @@ import { Calculator } from "langchain/tools/calculator"; export const run = async () => { process.env.LANGCHAIN_HANDLER = "langchain"; const model = new 
ChatOpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const executor = await initializeAgentExecutor( tools, diff --git a/examples/src/agents/concurrent_mrkl.ts b/examples/src/agents/concurrent_mrkl.ts index 99e2f278654b..506ed3433937 100644 --- a/examples/src/agents/concurrent_mrkl.ts +++ b/examples/src/agents/concurrent_mrkl.ts @@ -12,7 +12,14 @@ import { export const run = async () => { process.env.LANGCHAIN_HANDLER = "langchain"; const model = new OpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const executor = await initializeAgentExecutor( tools, @@ -47,7 +54,14 @@ export const run = async () => { callbackManager.addHandler(new LangChainTracer()); const model = new OpenAI({ temperature: 0, callbackManager }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; for (const tool of tools) { tool.callbackManager = callbackManager; } diff --git a/examples/src/agents/custom_agent.ts b/examples/src/agents/custom_agent.ts index f24441fc21c9..034717de06ed 100644 --- a/examples/src/agents/custom_agent.ts +++ b/examples/src/agents/custom_agent.ts @@ -6,7 +6,14 @@ import { LLMChain } from "langchain/chains"; export const run = async () => { const model = new OpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const prefix = `Answer the following questions as best you can, but speaking as a pirate might speak. You have access to the following tools:`; const suffix = `Begin! Remember to speak as a pirate when giving your final answer. 
Use lots of "Args" diff --git a/examples/src/agents/custom_llm_agent.ts b/examples/src/agents/custom_llm_agent.ts index fff3ac53019b..6694d42af302 100644 --- a/examples/src/agents/custom_llm_agent.ts +++ b/examples/src/agents/custom_llm_agent.ts @@ -107,7 +107,14 @@ class CustomOutputParser extends AgentActionOutputParser { export const run = async () => { const model = new OpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const llmChain = new LLMChain({ prompt: new CustomPromptTemplate({ diff --git a/examples/src/agents/custom_llm_agent_chat.ts b/examples/src/agents/custom_llm_agent_chat.ts index 62818a9c9edd..af602846b546 100644 --- a/examples/src/agents/custom_llm_agent_chat.ts +++ b/examples/src/agents/custom_llm_agent_chat.ts @@ -110,7 +110,14 @@ class CustomOutputParser extends AgentActionOutputParser { export const run = async () => { const model = new ChatOpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const llmChain = new LLMChain({ prompt: new CustomPromptTemplate({ diff --git a/examples/src/agents/load_from_hub.ts b/examples/src/agents/load_from_hub.ts index 57cb275a0db6..31d90d586969 100644 --- a/examples/src/agents/load_from_hub.ts +++ b/examples/src/agents/load_from_hub.ts @@ -6,7 +6,14 @@ import { Calculator } from "langchain/tools/calculator"; export const run = async () => { const model = new OpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const agent = await loadAgent( "lc://agents/zero-shot-react-description/agent.json", diff --git a/examples/src/agents/mrkl.ts b/examples/src/agents/mrkl.ts index 03c1a433db77..22f3f2a920fd 100644 --- a/examples/src/agents/mrkl.ts +++ b/examples/src/agents/mrkl.ts @@ -5,7 +5,14 @@ import { Calculator } from "langchain/tools/calculator"; export const run = async () => { const model = new OpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const executor = await initializeAgentExecutor( tools, diff --git a/examples/src/agents/mrkl_with_tracing.ts b/examples/src/agents/mrkl_with_tracing.ts index 9d538b6f0cda..cfae736d43bd 100644 --- a/examples/src/agents/mrkl_with_tracing.ts +++ b/examples/src/agents/mrkl_with_tracing.ts @@ -7,7 +7,14 @@ import process from "process"; export const run = async () => { process.env.LANGCHAIN_HANDLER = "langchain"; const model = new OpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const executor = await initializeAgentExecutor( tools, diff --git a/examples/src/chat/agent.ts b/examples/src/chat/agent.ts index 97322bf6db0f..49ff100f2cef 100644 --- a/examples/src/chat/agent.ts +++ b/examples/src/chat/agent.ts @@ -9,7 +9,13 @@ import { } from "langchain/prompts"; export const run = 
async () => { - const tools = [new SerpAPI()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + ]; const prompt = ZeroShotAgent.createPrompt(tools, { prefix: `Answer the following questions as best you can, but speaking as a pirate might speak. You have access to the following tools:`, diff --git a/examples/src/chat/overview.ts b/examples/src/chat/overview.ts index 36f382ab5702..f403371ca23f 100644 --- a/examples/src/chat/overview.ts +++ b/examples/src/chat/overview.ts @@ -125,7 +125,13 @@ export const run = async () => { // other abilities, such as search, or a calculator // Define the list of tools the agent can use - const tools = [new SerpAPI()]; + const tools = [ + new SerpAPI(process.env.SERPAPI_API_KEY, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + ]; // Create the agent from the chat model and the tools const agent = ChatAgent.fromLLMAndTools(new ChatOpenAI(), tools); // Create an executor, which calls to the agent until an answer is found diff --git a/langchain/src/agents/tests/agent.int.test.ts b/langchain/src/agents/tests/agent.int.test.ts index 27f2a9e1cc3b..a415edcf3f6a 100644 --- a/langchain/src/agents/tests/agent.int.test.ts +++ b/langchain/src/agents/tests/agent.int.test.ts @@ -8,7 +8,14 @@ import { initializeAgentExecutor } from "../initialize.js"; test("Run agent from hub", async () => { const model = new OpenAI({ temperature: 0, modelName: "text-babbage-001" }); - const tools: Tool[] = [new SerpAPI(), new Calculator()]; + const tools: Tool[] = [ + new SerpAPI(undefined, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const agent = await loadAgent( "lc://agents/zero-shot-react-description/agent.json", { llm: model, tools } @@ -27,7 +34,14 @@ test("Run agent from hub", async () => { test("Run agent locally", async () => { const model = new OpenAI({ temperature: 0, modelName: "text-babbage-001" }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(undefined, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const executor = await initializeAgentExecutor( tools, diff --git a/langchain/src/callbacks/tests/langchain_tracer.int.test.ts b/langchain/src/callbacks/tests/langchain_tracer.int.test.ts index 5175e21c0655..c88fbdb3ef4d 100644 --- a/langchain/src/callbacks/tests/langchain_tracer.int.test.ts +++ b/langchain/src/callbacks/tests/langchain_tracer.int.test.ts @@ -27,7 +27,14 @@ test("Test LangChain tracer", async () => { test.skip("Test Traced Agent with concurrency (skipped until we fix concurrency)", async () => { process.env.LANGCHAIN_HANDLER = "langchain"; const model = new OpenAI({ temperature: 0 }); - const tools = [new SerpAPI(), new Calculator()]; + const tools = [ + new SerpAPI(undefined, { + location: "Austin,Texas,United States", + hl: "en", + gl: "us", + }), + new Calculator(), + ]; const executor = await initializeAgentExecutor( tools, diff --git a/langchain/src/tools/serpapi.ts b/langchain/src/tools/serpapi.ts index 56a33a80b841..555d82318f2e 100644 --- a/langchain/src/tools/serpapi.ts +++ b/langchain/src/tools/serpapi.ts @@ -5,6 +5,9 @@ import { Tool } from "./base.js"; * when used in `jest` tests. Part of the issue seems to be that the `serpapi` * package imports a wasm module to use instead of native `fetch`, which we * don't want anyway. 
+ * + * NOTE: you must provide location, gl and hl, or your region and language may + * not match your location and results will not be deterministic. */ // Copied over from `serpapi` package @@ -45,7 +48,7 @@ interface GoogleParameters extends BaseParameters { * Location * Parameter defines from where you want the search to originate. If several * locations match the location requested, we'll pick the most popular one. Head to - * the [/locations.json API](https://serpapi.com/locations-api) if you need more + * [/locations.json API](https://serpapi.com/locations-api) if you need more * precise control. location and uule parameters can't be used together. Avoid * utilizing location when setting the location outside the U.S. when using Google * Shopping and/or Google Product API.
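For reference, the tool setup that every updated snippet in this patch converges on looks roughly like the sketch below (assuming `SERPAPI_API_KEY` is set in the environment; the Austin location and the `hl`/`gl` values are the same illustrative defaults used throughout the diff):

```typescript
import { SerpAPI } from "langchain/tools";
import { Calculator } from "langchain/tools/calculator";

// SerpAPI now receives an explicit API key (read from the environment here)
// plus optional Google search parameters. Pinning location, hl and gl keeps
// the search origin and language fixed, so results are deterministic.
const tools = [
  new SerpAPI(process.env.SERPAPI_API_KEY, {
    location: "Austin,Texas,United States",
    hl: "en",
    gl: "us",
  }),
  new Calculator(),
];
```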