feat: initial work on tools #185

Closed
wants to merge 7 commits
166 changes: 166 additions & 0 deletions examples/tools/index.tsx
@@ -0,0 +1,166 @@
import { gsx, type Streamable, type ChatResponse } from "gensx";
import { ChatCompletion, OpenAIProvider } from "@gensx/openai";
import { z } from "zod";

const EchoSchema = z.object({
message: z.string().describe("The message to echo back"),
});

type EchoParams = z.infer<typeof EchoSchema>;

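// gsx.Tool takes the Zod schema type and the tool's return type as generics;
// the handler receives params already parsed and typed from the schema.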
const EchoTool = gsx.Tool<typeof EchoSchema, string>({
name: "echo",
description: "Echoes back the message you send",
schema: EchoSchema,
function: async (params: EchoParams) => {
console.log("Echo tool called with:", params);
return `Echo: ${params.message}`;
},
});

const GetWeatherSchema = z.object({
location: z.string().describe("The city and state or country"),
});

type GetWeatherProps = z.infer<typeof GetWeatherSchema>;

interface GetWeatherResponse {
temperature: number;
conditions: string;
}

// The params will be inferred from GetWeatherSchema
const getWeatherTool = gsx.Tool<typeof GetWeatherSchema, GetWeatherResponse>({
name: "getWeather",
description: "Get the current weather for a location",
schema: GetWeatherSchema,
function: async (params: GetWeatherProps) => {
// params.location is now properly typed as string
console.log("Getting weather for:", params);
return {
temperature: 72,
conditions: "sunny",
};
},
});

interface ToolExampleProps {
message: string;
stream: boolean;
}

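// gsxExecuteTools={false} leaves tool calls unexecuted; main() below inspects
// and logs the returned tool calls itself.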
const BasicToolResponseExample = gsx.Component<
ToolExampleProps,
GetWeatherResponse | string
>("BasicToolResponseExample", (props) => {
return (
<OpenAIProvider apiKey={process.env.OPENAI_API_KEY}>
<ChatCompletion
model="gpt-4o-mini"
gsxTools={[getWeatherTool]}
messages={[
{
role: "system",
content:
"You are a helpful weather assistant. Use the getWeather tool when asked about weather.",
},
{ role: "user", content: props.message },
]}
stream={props.stream}
gsxExecuteTools={false}
/>
</OpenAIProvider>
);
});

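// Exercises the tools end to end: direct invocation, JSX invocation, and
// streaming vs. non-streaming chat completions that return tool calls.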
async function main() {
console.log("\n🔊 Testing echo tool...");

// Test direct function call
const result1 = await EchoTool({ message: "Hello direct!" });
console.log("Direct call result:", result1);

// Test JSX usage
const result2 = await gsx.execute(<EchoTool message="Hello JSX!" />);
console.log("JSX call result:", result2);

// Log a non-streaming result
const result3 = await gsx.execute<ChatResponse>(
<BasicToolResponseExample
message="What is the weather in San Francisco?"
stream={false}
/>,
);
console.log("Non-streaming tool result:", JSON.stringify(result3, null, 2));
console.log("Non-streaming tool result type:", typeof result3);
console.log("Has content:", "content" in result3);
console.log("Has tool_calls:", "tool_calls" in result3);

// Test a streaming chat completion that produces a tool call
const stream1 = await gsx.execute<Streamable>(
<BasicToolResponseExample
message="What is the weather in San Francisco?"
stream={true}
/>,
);

console.log("\nStreaming tool response:");
let currentToolCall: any = null;
let accumulatedArguments = "";

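// Tool-call arguments stream in as partial JSON fragments, so accumulate them
// per tool call and only print once the JSON object closes.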
for await (const chunk of stream1) {
if (typeof chunk === "string") {
process.stdout.write(chunk);
} else {
// If this is a new tool call
if (!currentToolCall && chunk.tool_call.id) {
currentToolCall = chunk.tool_call;
accumulatedArguments = chunk.tool_call.function?.arguments || "";
}
// If we're accumulating arguments
else if (currentToolCall && chunk.tool_call.function?.arguments) {
accumulatedArguments += chunk.tool_call.function.arguments;
currentToolCall.function.arguments = accumulatedArguments;
}

// If we see a closing brace, print the complete tool call
if (accumulatedArguments.trim().endsWith("}")) {
process.stdout.write(
"\n" + JSON.stringify({ tool_call: currentToolCall }, null, 2) + "\n",
);
currentToolCall = null;
accumulatedArguments = "";
}
}
}

// Log another non-streaming result
const result4 = await gsx.execute<ChatResponse>(
<BasicToolResponseExample
message="What is the weather in San Francisco?"
stream={false}
/>,
);
console.log("\nNon-streaming result:", JSON.stringify(result4, null, 2));
console.log("Non-streaming result content:", result4.content);
console.log("Non-streaming result tool_calls:", result4.tool_calls);

// Test a streaming chat completion that produces a regular text response
const stream2 = await gsx.execute<Streamable>(
<BasicToolResponseExample message="Hello" stream={true} />,
);

console.log("\nStreaming response:");
for await (const chunk of stream2) {
if (typeof chunk === "string") {
process.stdout.write(chunk);
} else {
process.stdout.write("\n" + JSON.stringify(chunk, null, 2) + "\n");
}
}

// Log the OpenAPI schema
//console.log("\nTool schema:", EchoTool.getOpenApiSchema());
}

main().catch(console.error);
28 changes: 28 additions & 0 deletions examples/tools/package.json
@@ -0,0 +1,28 @@
{
"name": "@gensx-examples/tools",
"private": true,
"version": "0.0.0",
"type": "module",
"engines": {
"node": ">=18.0.0"
},
"scripts": {
"dev": "nodemon",
"start": "NODE_OPTIONS='--enable-source-maps' tsx ./index.tsx",
"build": "tsc",
"lint": "eslint .",
"lint:fix": "eslint . --fix"
},
"dependencies": {
"@gensx/openai": "workspace:*",
"gensx": "workspace:*",
"openai": "^4.77.0",
"zod": "^3.22.4"
},
"devDependencies": {
"@types/node": "^20.17.11",
"nodemon": "^3.1.9",
"tsx": "^4.19.2",
"typescript": "^5.0.0"
}
}
19 changes: 19 additions & 0 deletions examples/tools/tsconfig copy.json
@@ -0,0 +1,19 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "NodeNext",
"lib": ["ESNext", "DOM"],
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"moduleResolution": "NodeNext",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "react-jsx",
"jsxImportSource": "gensx",
"outDir": "./dist"
},
"include": ["./*.ts", "./*.tsx", "./**/*.ts", "./**/*.tsx"],
"exclude": ["node_modules"]
}
20 changes: 20 additions & 0 deletions examples/tools/tsconfig.json
@@ -0,0 +1,20 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"lib": ["ESNext", "DOM"],
"jsx": "react-jsx",
"jsxImportSource": "gensx",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"isolatedModules": true,
"outDir": "dist",
"rootDir": ".",
"types": ["node"]
},
"include": ["./**/*.ts", "./**/*.tsx"]
}
3 changes: 3 additions & 0 deletions packages/gensx-openai/package.json
@@ -45,5 +45,8 @@
},
"publishConfig": {
"access": "public"
},
"dependencies": {
"zod": "^3.24.1"
}
}
83 changes: 72 additions & 11 deletions packages/gensx-openai/src/index.tsx
@@ -1,12 +1,24 @@
import type { Streamable } from "gensx";

import { gsx } from "gensx";
import { gsx, GsxTool } from "gensx";
import OpenAI, { ClientOptions } from "openai";
import { zodFunction } from "openai/helpers/zod";
import {
ChatCompletionChunk,
ChatCompletionCreateParams,
ChatCompletionTool,
} from "openai/resources/index.mjs";
import { Stream } from "openai/streaming";
import { z } from "zod";

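// Partial tool call shape; every field is optional because streaming deltas
// arrive incrementally.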
interface ToolCall {
id?: string;
type?: "function";
function?: {
name?: string;
arguments?: string;
};
}

// Create a context for OpenAI
export const OpenAIContext = gsx.createContext<{
@@ -21,8 +33,13 @@ export const OpenAIProvider = gsx.Component<ClientOptions, never>(
},
);

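// gsx-specific props layered on top of OpenAI's create params; they are
// stripped off before the request is sent to the OpenAI client.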
type ChatCompletionProps = ChatCompletionCreateParams & {
gsxTools?: GsxTool<z.ZodType, unknown>[];
gsxExecuteTools?: boolean;
};

// Create a component for chat completions
export const ChatCompletion = gsx.StreamComponent<ChatCompletionCreateParams>(
export const ChatCompletion = gsx.StreamComponent<ChatCompletionProps>(
"ChatCompletion",
async (props) => {
const context = gsx.useContext(OpenAIContext);
@@ -33,33 +50,77 @@ export const ChatCompletion = gsx.StreamComponent<ChatCompletionCreateParams>(
);
}

// Convert GenSX tools to OpenAI tools
const openAITools: ChatCompletionTool[] | undefined = props.gsxTools?.map(
(tool): ChatCompletionTool => {
const { name, description, parameters } = tool.toJSON();
return zodFunction({
name,
parameters,
description,
});
},
);

if (props.stream) {
const stream = await context.client.chat.completions.create(props);
// Remove gsx related props and send the rest to OpenAI
const { gsxTools, gsxExecuteTools, ...otherProps } = props;
const stream = await context.client.chat.completions.create({
...otherProps,
tools: openAITools,
});

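// Yield plain strings for content deltas and { tool_call } objects for
// tool-call deltas so downstream consumers can tell them apart.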
async function* generateTokens(): AsyncGenerator<
string | { tool_call: ToolCall },
void,
undefined
> {
for await (const chunk of stream as Stream<ChatCompletionChunk>) {
const content = chunk.choices[0]?.delta?.content;
const toolCall = chunk.choices[0]?.delta?.tool_calls?.[0];

if (content) {
yield content;
}

if (toolCall) {
yield {
tool_call: {
id: toolCall.id,
type: "function",
function: {
name: toolCall.function?.name,
arguments: toolCall.function?.arguments,
},
},
};
}
}
}

const streamable: Streamable = generateTokens();
return streamable;
} else {
const response = await context.client.chat.completions.create(props);
const content = response.choices[0]?.message?.content ?? "";

function* generateTokens() {
yield content;
}
// Remove gsx related props and send the rest to OpenAI
const { gsxTools, gsxExecuteTools, ...otherProps } = props;
const response = await context.client.chat.completions.create({
...otherProps,
tools: openAITools,
});

return generateTokens();
// Return a structured response with both content and tool calls
return {
content: response.choices[0]?.message?.content ?? "",
tool_calls:
response.choices[0]?.message?.tool_calls?.map((toolCall) => ({
id: toolCall.id,
type: "function" as const,
function: {
name: toolCall.function.name,
arguments: toolCall.function.arguments,
},
})) ?? [],
};
}
},
);
10 changes: 7 additions & 3 deletions packages/gensx-openai/tests/index.test.tsx
@@ -114,12 +114,16 @@ suite("ChatCompletion", () => {
</OpenAIProvider>,
);

let resultString = "";
let accumulated = "";
for await (const chunk of result) {
resultString += chunk;
if (typeof chunk === "string") {
accumulated += chunk;
} else {
accumulated += JSON.stringify(chunk);
}
}

expect(resultString).toBe("Hello World ");
expect(accumulated).toBe("Hello World ");
});

test("handles non-streaming response", async () => {