Skip to content

Commit

Permalink
Add example of the provider pattern for injecting component implementations
Browse files Browse the repository at this point in the history
  • Loading branch information
EvanBoyle committed Dec 30, 2024
1 parent fb7e27b commit 1fa147a
Show file tree
Hide file tree
Showing 4 changed files with 236 additions and 33 deletions.
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,7 @@
}
},
"dependencies": {
"@turbopuffer/turbopuffer": "^0.5.13",
"openai": "^4.77.0"
}
}
61 changes: 28 additions & 33 deletions playground/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,11 @@ import {
} from "./hackerNewsAnalyzer";
import { BlogWritingWorkflow } from "./blogWriter";
import { ChatCompletion } from "./chatCompletion";
import {
PerplexityResearch,
TurboPufferResearch,
RAGWorkflow,
} from "./ragExample";
import fs from "fs/promises";
import type { Streamable } from "@/types";

Expand Down Expand Up @@ -33,38 +38,6 @@ async function runHNAnalysisExample() {
}

// Example 3: Streaming vs non-streaming chat completion
async function runStreamingWithChildrenExample() {
const prompt =
"Write a 250 word story about an AI that discovers the meaning of friendship through a series of small interactions with humans. Be concise but meaningful.";

console.log("\n🚀 Starting streaming example with prompt:", prompt);

console.log("\n📝 Non-streaming version (waiting for full response):");
await gsx.execute<string>(
<ChatCompletion prompt={prompt}>
{async (response: string) => {
console.log(response);
}}
</ChatCompletion>,
);

console.log("\n📝 Streaming version (processing tokens as they arrive):");
await gsx.execute(
<ChatCompletion stream={true} prompt={prompt}>
{async (response: Streamable<string>) => {
// Print tokens as they arrive
for await (const token of {
[Symbol.asyncIterator]: () => response.stream(),
}) {
process.stdout.write(token);
}
process.stdout.write("\n");
console.log("✅ Streaming complete");
}}
</ChatCompletion>,
);
}

async function runStreamingExample() {
const prompt =
"Write a 250 word story about an AI that discovers the meaning of friendship through a series of small interactions with humans. Be concise but meaningful.";
Expand All @@ -91,12 +64,34 @@ async function runStreamingExample() {
console.log("✅ Streaming complete");
}

// Example 4: RAG workflow with different research providers
async function runRAGExample() {
  console.log("\n🔍 Running RAG example with different providers...");

  const query = "What are the key principles of modern software architecture?";

  // Run the same query through a given research provider and report the answer.
  const answerWith = async (
    provider: typeof PerplexityResearch,
    header: string,
    resultLabel: string,
  ) => {
    console.log(header);
    const answer = await gsx.execute<string>(
      <RAGWorkflow query={query} researchProvider={provider} />,
    );
    console.log(resultLabel, answer);
  };

  // Using Perplexity for research
  await answerWith(
    PerplexityResearch,
    "\nUsing Perplexity:",
    "✅ Perplexity-based answer:",
  );

  // Using TurboPuffer for research
  await answerWith(
    TurboPufferResearch,
    "\nUsing TurboPuffer:",
    "✅ TurboPuffer-based answer:",
  );
}

// Main function to run examples
async function main() {
  // Examples run strictly one after another; each awaits completion
  // before the next begins.
  const examples = [
    runBlogWritingExample,
    runHNAnalysisExample,
    // TODO add back streaming with children example...
    runStreamingExample,
    runRAGExample,
  ];
  for (const example of examples) {
    await example();
  }
}

main().catch(console.error);
185 changes: 185 additions & 0 deletions playground/ragExample.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,185 @@
import { gsx } from "@/index";
import { ChatCompletion } from "./chatCompletion";
import type { WorkflowComponent, Element } from "@/types";
import OpenAI from "openai";
import { Turbopuffer } from "@turbopuffer/turbopuffer";

// Initialize clients
// NOTE(review): none of these API keys are validated at load time; a missing
// env var surfaces later as an auth error on the first request — confirm
// that's acceptable for a playground script.

// Perplexity exposes an OpenAI-compatible API, so the OpenAI SDK is reused
// with a custom base URL.
const pplx = new OpenAI({
  apiKey: process.env.PERPLEXITY_API_KEY,
  baseURL: "https://api.perplexity.ai",
});

// Standard OpenAI client, used here for query embeddings (TurboPufferResearch).
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

// TurboPuffer vector-store client, pinned to a specific regional endpoint.
const tpuf = new Turbopuffer({
  apiKey: process.env.TURBOPUFFER_API_KEY as string,
  baseUrl: "https://gcp-us-west1.turbopuffer.com",
});

// Research providers

// Normalized shape every research provider returns: one snippet of source
// material plus where it came from.
interface ResearchResult {
  // The text of the research snippet or answer.
  content: string;
  // Human-readable origin (a URL or service name).
  source: string;
  // Optional ranking score; RAGWorkflow sorts descending by this value.
  // Absent when the provider has no scoring (e.g. a single LLM answer).
  relevance?: number;
  // Provider-specific extras (model name, remaining document attributes, ...).
  metadata?: Record<string, unknown>;
}

// Raw row shape returned by the TurboPuffer namespace query (cast target).
interface TurboPufferResult {
  id: string;
  vector?: number[];
  // Stored document attributes; content/url are assumed required by this
  // example's indexing scheme — TODO confirm against the ingestion code.
  attributes: Record<string, unknown> & {
    content: string;
    url: string;
  };
  // Distance from the query vector (metric-dependent).
  dist?: number;
  // Similarity score, when the API provides one.
  score?: number;
}

// Perplexity-based research provider
//
// Sends the query to Perplexity's OpenAI-compatible chat API (online
// llama-sonar model) and wraps the single answer as a one-element
// ResearchResult list. Returns [] on any failure so the workflow can
// degrade gracefully instead of crashing.
export const PerplexityResearch = gsx.Component<
  { query: string },
  ResearchResult[]
>(async ({ query }) => {
  try {
    const response = await pplx.chat.completions.create({
      model: "llama-3.1-sonar-large-128k-online",
      messages: [
        {
          role: "system",
          content:
            "You are an AI research assistant. Your job is to find relevant online information and provide detailed answers.",
        },
        {
          role: "user",
          content: query,
        },
      ],
    });

    // Optional chaining guards an empty choices array as well as a null
    // content field; both are routed through the throw below and surface
    // as an empty result set rather than a raw TypeError.
    const content = response.choices[0]?.message.content;
    if (!content) throw new Error("No content in response");

    return [
      {
        content,
        source: "perplexity.ai",
        metadata: {
          model: "llama-3.1-sonar-large-128k-online",
        },
      },
    ];
  } catch (error) {
    console.error("Perplexity research failed:", error);
    return [];
  }
});

// TurboPuffer-based research provider
//
// Embeds the query with OpenAI, runs a cosine-distance vector search against
// the "docs" TurboPuffer namespace, and maps the top matches into
// ResearchResult entries. Returns [] on any failure so the workflow can
// degrade gracefully.
export const TurboPufferResearch = gsx.Component<
  { query: string },
  ResearchResult[]
>(async ({ query }) => {
  try {
    // Create embedding for the query
    const embeddingResponse = await openai.embeddings.create({
      model: "text-embedding-3-large",
      input: query,
      encoding_format: "float",
    });

    const embedding = embeddingResponse.data[0].embedding;

    // Query the namespace
    const ns = tpuf.namespace("docs"); // You might want to make this configurable
    const queryResults = (await ns.query({
      vector: embedding,
      top_k: 3,
      include_attributes: true,
      distance_metric: "cosine_distance",
    })) as TurboPufferResult[];

    // Transform results to ResearchResult format
    return queryResults.map(result => {
      // Strip internal bookkeeping fields AND the fields surfaced as
      // top-level ResearchResult properties (content/url), so metadata
      // doesn't duplicate them. The `_`-prefixed bindings exist only to
      // exclude those keys from the rest spread.
      const {
        content,
        url,
        chunk: _chunk,
        documentId: _documentId,
        ...metadata
      } = result.attributes;

      return {
        content,
        source: url,
        relevance: result.score ?? 1.0,
        metadata,
      };
    });
  } catch (error) {
    console.error("TurboPuffer research failed:", error);
    return [];
  }
});

// RAG workflow using research providers

// Props for RAGWorkflow. The research step is injected as a component
// (provider pattern), letting callers swap implementations — Perplexity,
// TurboPuffer, ... — without changing the workflow itself.
interface RAGWorkflowProps {
  query: string;
  researchProvider: WorkflowComponent<{ query: string }, ResearchResult[]>;
}

// Fetches research results from the injected provider, builds a context
// string from the top 3 results (ranked by relevance when available), and
// asks ChatCompletion to answer the query against that context.
export const RAGWorkflow = gsx.Component<RAGWorkflowProps, string>(
  async ({ query, researchProvider: ResearchProvider }) => {
    const element: Element = (
      <ResearchProvider query={query}>
        {(results: ResearchResult[]) => {
          // Copy before sorting so the provider's array is not mutated.
          const context = [...results]
            .sort((a: ResearchResult, b: ResearchResult) => {
              // Check presence explicitly: `!a.relevance` would misfile a
              // legitimate relevance score of 0 as "missing".
              const aHasScore = a.relevance != null;
              const bHasScore = b.relevance != null;
              // If neither has relevance, maintain original order
              if (!aHasScore && !bHasScore) return 0;
              // If only one has relevance, prioritize the one with relevance
              if (!aHasScore) return 1;
              if (!bHasScore) return -1;
              // If both have relevance, sort by score (descending)
              return (b.relevance ?? 0) - (a.relevance ?? 0);
            })
            .slice(0, 3)
            .map((r: ResearchResult) => `[${r.source}]: ${r.content}`)
            .join("\n\n");

          return (
            <ChatCompletion
              prompt={`Answer based on this context:\n\n${context}\n\nQuery: ${query}`}
            />
          );
        }}
      </ResearchProvider>
    );
    return element;
  },
);

// Example usage
// Runs the same question through the RAG workflow with each of the two
// research providers and prints both answers.
async function runRAGExample() {
  console.log("\n🔍 Running RAG example with different providers...");

  // Using Perplexity
  console.log("\nUsing Perplexity:");
  const perplexityAnswer = await gsx.execute<string>(
    <RAGWorkflow
      query="What is the capital of France?"
      researchProvider={PerplexityResearch}
    />,
  );
  console.log("Answer:", perplexityAnswer);

  // Using TurboPuffer
  console.log("\nUsing TurboPuffer:");
  const turboPufferAnswer = await gsx.execute<string>(
    <RAGWorkflow
      query="What is the capital of France?"
      researchProvider={TurboPufferResearch}
    />,
  );
  console.log("Answer:", turboPufferAnswer);
}

// Run the example
// NOTE(review): this module-level call executes on import. index.tsx imports
// components from this file, so this example runs at import time — before and
// in addition to the playground's own runRAGExample. Consider removing it or
// guarding it so it only runs when this file is executed directly.
runRAGExample().catch(console.error);
22 changes: 22 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 1fa147a

Please sign in to comment.