diff --git a/examples/ai-core/Makefile b/examples/ai-core/Makefile index 6686901..737fefe 100644 --- a/examples/ai-core/Makefile +++ b/examples/ai-core/Makefile @@ -40,6 +40,22 @@ embed-many_ollama-cosine-similarity: $(call RUN_EXAMPLE_TARGET,$@) +# middleware +.PHONY: middleware middleware-run middleware-all middleware_generate-text-cache-middleware middleware_generate-text-log-middleware middleware_stream-text-log-middleware middleware_stream-text-rag-middleware +middleware: middleware-run middleware-all +middleware-run: + echo - examples/middleware: +middleware-all: middleware_generate-text-cache-middleware middleware_generate-text-log-middleware middleware_stream-text-log-middleware middleware_stream-text-rag-middleware +middleware_generate-text-cache-middleware: + $(call RUN_EXAMPLE_TARGET,$@) +middleware_generate-text-log-middleware: + $(call RUN_EXAMPLE_TARGET,$@) +middleware_stream-text-log-middleware: + $(call RUN_EXAMPLE_TARGET,$@) +middleware_stream-text-rag-middleware: + $(call RUN_EXAMPLE_TARGET,$@) + + # registry .PHONY: registry registry-run registry-all registry_embed registry_stream-text registry_stream-multimodal registry: registry-run registry-all @@ -56,16 +72,18 @@ registry_stream-multimodal: # generate-object -.PHONY: generate-object generate-object-run generate-object-all generate-object_ollama generate-object_ollama-date-parsing generate-object_ollama-full-json generate-object_ollama-json generate-object_ollama-multimodal generate-object_ollama-no-schema generate-object_ollama-raw-json-schema generate-object_ollama-tool +.PHONY: generate-object generate-object-run generate-object-all generate-object_ollama generate-object_ollama-array generate-object_ollama-date-parsing generate-object_ollama-full-result generate-object_ollama-json generate-object_ollama-multimodal generate-object_ollama-no-schema generate-object_ollama-raw-json-schema generate-object_ollama-tool generate-object: generate-object-run generate-object-all generate-object-run: 
echo - examples/generate-object: -generate-object-all: generate-object_ollama generate-object_ollama-date-parsing generate-object_ollama-full-json generate-object_ollama-json generate-object_ollama-multimodal generate-object_ollama-no-schema generate-object_ollama-raw-json-schema generate-object_ollama-tool -generate-object_ollama-date-parsing: - $(call RUN_EXAMPLE_TARGET,$@) +generate-object-all: generate-object_ollama generate-object_ollama-array generate-object_ollama-date-parsing generate-object_ollama-full-result generate-object_ollama-json generate-object_ollama-multimodal generate-object_ollama-no-schema generate-object_ollama-raw-json-schema generate-object_ollama-tool generate-object_ollama: $(call RUN_EXAMPLE_TARGET,$@) -generate-object_ollama-full-json: +generate-object_ollama-array: + $(call RUN_EXAMPLE_TARGET,$@) +generate-object_ollama-date-parsing: + $(call RUN_EXAMPLE_TARGET,$@) +generate-object_ollama-full-result: $(call RUN_EXAMPLE_TARGET,$@) generate-object_ollama-json: $(call RUN_EXAMPLE_TARGET,$@) @@ -80,11 +98,11 @@ generate-object_ollama-tool: # generate-text -.PHONY: generate-text generate-text-run generate-text-all generate-text_ollama generate-text_ollama-completion generate-text_ollama-completion-chat generate-text_ollama-custom-fetch generate-text_ollama-custom-headers generate-text_ollama-multimodal generate-text_ollama-multimodal-base64 generate-text_ollama-multimodal-url generate-text_ollama-system-message-a generate-text_ollama-system-message-b generate-text_ollama-tool-call generate-text_ollama-tool-call-raw-json-schema +.PHONY: generate-text generate-text-run generate-text-all generate-text_ollama generate-text_ollama-completion generate-text_ollama-completion-chat generate-text_ollama-custom-fetch generate-text_ollama-custom-headers generate-text_ollama-full-result generate-text_ollama-multimodal generate-text_ollama-multimodal-base64 generate-text_ollama-multimodal-url generate-text_ollama-system-message-a 
generate-text_ollama-system-message-b generate-text_ollama-tool-call generate-text_ollama-tool-call-raw-json-schema generate-text: generate-text-run generate-text-all generate-text-run: echo - examples/generate-text: -generate-text-all: generate-text_ollama generate-text_ollama-completion generate-text_ollama-completion-chat generate-text_ollama-custom-fetch generate-text_ollama-custom-headers generate-text_ollama-multimodal generate-text_ollama-multimodal-base64 generate-text_ollama-multimodal-url generate-text_ollama-system-message-a generate-text_ollama-system-message-b generate-text_ollama-tool-call generate-text_ollama-tool-call-raw-json-schema +generate-text-all: generate-text_ollama generate-text_ollama-completion generate-text_ollama-completion-chat generate-text_ollama-custom-fetch generate-text_ollama-custom-headers generate-text_ollama-full-result generate-text_ollama-multimodal generate-text_ollama-multimodal-base64 generate-text_ollama-multimodal-url generate-text_ollama-system-message-a generate-text_ollama-system-message-b generate-text_ollama-tool-call generate-text_ollama-tool-call-raw-json-schema generate-text_ollama: $(call RUN_EXAMPLE_TARGET,$@) generate-text_ollama-completion: @@ -95,6 +113,8 @@ generate-text_ollama-custom-fetch: $(call RUN_EXAMPLE_TARGET,$@) generate-text_ollama-custom-headers: $(call RUN_EXAMPLE_TARGET,$@) +generate-text_ollama-full-result: + $(call RUN_EXAMPLE_TARGET,$@) generate-text_ollama-multimodal: $(call RUN_EXAMPLE_TARGET,$@) generate-text_ollama-multimodal-base64: @@ -137,11 +157,11 @@ stream-object_ollama-tool: $(call RUN_EXAMPLE_TARGET,$@) # stream-text -.PHONY: stream-text stream-text-run stream-text-all stream-text_ollama stream-text_ollama-abort stream-text_ollama-chatbot stream-text_ollama-chatbot-with-tools stream-text_ollama-completion stream-text_ollama-completion-chat stream-text_ollama-custom-fetch-inject-error stream-text_ollama-fullstream stream-text_ollama-ollama-on-chunk 
stream-text_ollama-ollama-on-chunk-tool-call-streaming stream-text_ollama-on-finish stream-text_ollama-reader stream-text_ollama-response-headers stream-text_ollama-tool-call-raw-json-schema +.PHONY: stream-text stream-text-run stream-text-all stream-text_ollama stream-text_ollama-abort stream-text_ollama-chatbot stream-text_ollama-chatbot-with-tools stream-text_ollama-completion stream-text_ollama-completion-chat stream-text_ollama-custom-fetch-inject-error stream-text_ollama-fullstream stream-text_ollama-fullstream-raw stream-text_ollama-ollama-on-chunk stream-text_ollama-ollama-on-chunk-tool-call-streaming stream-text_ollama-on-finish stream-text_ollama-reader stream-text_ollama-response stream-text_ollama-tool-call-raw-json-schema stream-text: stream-text-run stream-text-all stream-text-run: echo - examples/stream-text: -stream-text-all: stream-text_ollama stream-text_ollama-abort stream-text_ollama-chatbot stream-text_ollama-chatbot-with-tools stream-text_ollama-completion stream-text_ollama-completion-chat stream-text_ollama-custom-fetch-inject-error stream-text_ollama-fullstream stream-text_ollama-ollama-on-chunk stream-text_ollama-ollama-on-chunk-tool-call-streaming stream-text_ollama-on-finish stream-text_ollama-reader stream-text_ollama-response-headers stream-text_ollama-tool-call-raw-json-schema +stream-text-all: stream-text_ollama stream-text_ollama-abort stream-text_ollama-chatbot stream-text_ollama-chatbot-with-tools stream-text_ollama-completion stream-text_ollama-completion-chat stream-text_ollama-custom-fetch-inject-error stream-text_ollama-fullstream stream-text_ollama-fullstream-raw stream-text_ollama-ollama-on-chunk stream-text_ollama-ollama-on-chunk-tool-call-streaming stream-text_ollama-on-finish stream-text_ollama-reader stream-text_ollama-response stream-text_ollama-tool-call-raw-json-schema stream-text_ollama: $(call RUN_EXAMPLE_TARGET,$@) stream-text_ollama-abort: @@ -158,6 +178,8 @@ stream-text_ollama-custom-fetch-inject-error: $(call 
RUN_EXAMPLE_TARGET,$@) stream-text_ollama-fullstream: $(call RUN_EXAMPLE_TARGET,$@) +stream-text_ollama-fullstream-raw: + $(call RUN_EXAMPLE_TARGET,$@) stream-text_ollama-ollama-on-chunk: $(call RUN_EXAMPLE_TARGET,$@) stream-text_ollama-ollama-on-chunk-tool-call-streaming: @@ -166,20 +188,24 @@ stream-text_ollama-on-finish: $(call RUN_EXAMPLE_TARGET,$@) stream-text_ollama-reader: $(call RUN_EXAMPLE_TARGET,$@) -stream-text_ollama-response-headers: +stream-text_ollama-response: $(call RUN_EXAMPLE_TARGET,$@) stream-text_ollama-tool-call-raw-json-schema: $(call RUN_EXAMPLE_TARGET,$@) # telemetry -.PHONY: telemetry telemetry-run telemetry-all telemetry_generate-text telemetry_generate-text-tool-call telemetry_stream-text +.PHONY: telemetry telemetry-run telemetry-all telemetry_generate-object telemetry_generate-text telemetry_generate-text-tool-call telemetry_stream-object telemetry_stream-text telemetry: telemetry-run telemetry-all telemetry-run: echo - examples/telemetry: -telemetry-all: telemetry_generate-text telemetry_generate-text-tool-call telemetry_stream-text +telemetry-all: telemetry_generate-object telemetry_generate-text telemetry_generate-text-tool-call telemetry_stream-object telemetry_stream-text +telemetry_generate-object: + $(call RUN_EXAMPLE_TARGET,$@) telemetry_generate-text: $(call RUN_EXAMPLE_TARGET,$@) telemetry_generate-text-tool-call: $(call RUN_EXAMPLE_TARGET,$@) +telemetry_stream-object: + $(call RUN_EXAMPLE_TARGET,$@) telemetry_stream-text: $(call RUN_EXAMPLE_TARGET,$@) diff --git a/examples/ai-core/src/complex/math-agent/agent.ts b/examples/ai-core/src/complex/math-agent/agent.ts index 8fcbc6d..f4e0e2f 100755 --- a/examples/ai-core/src/complex/math-agent/agent.ts +++ b/examples/ai-core/src/complex/math-agent/agent.ts @@ -16,7 +16,7 @@ const problem = async function main(model: OllamaChatModelId) { console.log(`PROBLEM: ${problem}\n`) - await generateText({ + const response = await generateText({ maxToolRoundtrips: 10, model: ollama(model), 
prompt: problem, @@ -25,7 +25,7 @@ async function main(model: OllamaChatModelId) { 'Reason step by step. ' + 'Use the tool `calculate` when necessary. ' + 'The calculator can only do simple additions, subtractions, multiplications, and divisions. ' + - 'When you give the final answer, provide an explanation for how you got it.', + 'When you give the final answer, provide an explanation for how you got it using the `answer` tool.', toolChoice: 'required', tools: { answer: tool({ @@ -45,6 +45,8 @@ async function main(model: OllamaChatModelId) { }), }, }) + + console.error(JSON.stringify(response, null, 2)) } buildProgram('firefunction-v2', main).catch(console.error) diff --git a/examples/ai-core/src/generate-object/ollama-full-json.ts b/examples/ai-core/src/generate-object/ollama-array.ts similarity index 97% rename from examples/ai-core/src/generate-object/ollama-full-json.ts rename to examples/ai-core/src/generate-object/ollama-array.ts index b753e1f..2c624e6 100755 --- a/examples/ai-core/src/generate-object/ollama-full-json.ts +++ b/examples/ai-core/src/generate-object/ollama-array.ts @@ -8,8 +8,8 @@ import { buildProgram } from '../tools/command' async function main(model: Parameters[0]) { const result = await generateObject({ - mode: 'json', model: ollama(model), + output: 'array', prompt: 'Generate 3 character descriptions for a fantasy role playing game.', schema: z.object({ diff --git a/examples/ai-core/src/generate-object/ollama-full-result.ts b/examples/ai-core/src/generate-object/ollama-full-result.ts new file mode 100755 index 0000000..89c345d --- /dev/null +++ b/examples/ai-core/src/generate-object/ollama-full-result.ts @@ -0,0 +1,27 @@ +#! 
/usr/bin/env -S pnpm tsx + +import { generateObject } from 'ai' +import { ollama } from 'ollama-ai-provider' +import { z } from 'zod' + +import { buildProgram } from '../tools/command' + +async function main(model: Parameters[0]) { + const result = await generateObject({ + model: ollama(model), + prompt: 'Generate a lasagna recipe.', + schema: z.object({ + recipe: z.object({ + ingredients: z.array( + z.object({ amount: z.string(), name: z.string() }), + ), + name: z.string(), + steps: z.array(z.string()), + }), + }), + }) + + console.log(JSON.stringify(result, null, 2)) +} + +buildProgram('llama3.1', main).catch(console.error) diff --git a/examples/ai-core/src/generate-text/ollama-full-result.ts b/examples/ai-core/src/generate-text/ollama-full-result.ts new file mode 100755 index 0000000..417ab75 --- /dev/null +++ b/examples/ai-core/src/generate-text/ollama-full-result.ts @@ -0,0 +1,17 @@ +#! /usr/bin/env -S pnpm tsx + +import { generateText } from 'ai' +import { ollama } from 'ollama-ai-provider' + +import { buildProgram } from '../tools/command' + +async function main(model: Parameters[0]) { + const result = await generateText({ + model: ollama(model), + prompt: 'Invent a new holiday and describe its traditions.', + }) + + console.log(JSON.stringify(result, null, 2)) +} + +buildProgram('llama3.1', main).catch(console.error) diff --git a/examples/ai-core/src/middleware/add-to-last-user-message.ts b/examples/ai-core/src/middleware/add-to-last-user-message.ts new file mode 100644 index 0000000..e36f7d5 --- /dev/null +++ b/examples/ai-core/src/middleware/add-to-last-user-message.ts @@ -0,0 +1,28 @@ +import { LanguageModelV1CallOptions } from 'ai' + +export function addToLastUserMessage({ + params, + text, +}: { + params: LanguageModelV1CallOptions + text: string +}): LanguageModelV1CallOptions { + const { prompt, ...rest } = params + + const lastMessage = prompt.at(-1) + + if (lastMessage?.role !== 'user') { + return params + } + + return { + ...rest, + prompt: [ + 
...prompt.slice(0, -1), + { + ...lastMessage, + content: [{ text, type: 'text' }, ...lastMessage.content], + }, + ], + } +} diff --git a/examples/ai-core/src/middleware/generate-text-cache-middleware.ts b/examples/ai-core/src/middleware/generate-text-cache-middleware.ts new file mode 100755 index 0000000..39b629d --- /dev/null +++ b/examples/ai-core/src/middleware/generate-text-cache-middleware.ts @@ -0,0 +1,39 @@ +#! /usr/bin/env -S pnpm tsx + +import { + experimental_wrapLanguageModel as wrapLanguageModel, + generateText, +} from 'ai' +import { ollama } from 'ollama-ai-provider' + +import { buildProgram } from '../tools/command' +import { yourCacheMiddleware } from './your-cache-middleware' + +async function main(model: Parameters[0]) { + const modelWithCaching = wrapLanguageModel({ + middleware: yourCacheMiddleware, + model: ollama(model), + }) + + const start1 = Date.now() + const result1 = await generateText({ + model: modelWithCaching, + prompt: 'What cities are in the United States?', + }) + + const end1 = Date.now() + + const start2 = Date.now() + const result2 = await generateText({ + model: modelWithCaching, + prompt: 'What cities are in the United States?', + }) + const end2 = Date.now() + + console.log(`Time taken for result1: ${end1 - start1}ms`) + console.log(`Time taken for result2: ${end2 - start2}ms`) + + console.log(result1.text === result2.text) +} + +buildProgram('llama3.1', main).catch(console.error) diff --git a/examples/ai-core/src/middleware/generate-text-log-middleware.ts b/examples/ai-core/src/middleware/generate-text-log-middleware.ts new file mode 100755 index 0000000..9482a0e --- /dev/null +++ b/examples/ai-core/src/middleware/generate-text-log-middleware.ts @@ -0,0 +1,22 @@ +#! 
/usr/bin/env -S pnpm tsx + +import { + experimental_wrapLanguageModel as wrapLanguageModel, + generateText, +} from 'ai' +import { ollama } from 'ollama-ai-provider' + +import { buildProgram } from '../tools/command' +import { yourLogMiddleware } from './your-log-middleware' + +async function main(model: Parameters[0]) { + const result = await generateText({ + model: wrapLanguageModel({ + middleware: yourLogMiddleware, + model: ollama(model), + }), + prompt: 'What cities are in the United States?', + }) +} + +buildProgram('llama3.1', main).catch(console.error) diff --git a/examples/ai-core/src/middleware/get-last-user-message-text.ts b/examples/ai-core/src/middleware/get-last-user-message-text.ts new file mode 100644 index 0000000..e36640a --- /dev/null +++ b/examples/ai-core/src/middleware/get-last-user-message-text.ts @@ -0,0 +1,17 @@ +import { LanguageModelV1Prompt } from 'ai' + +export function getLastUserMessageText({ + prompt, +}: { + prompt: LanguageModelV1Prompt +}): string | undefined { + const lastMessage = prompt.at(-1) + + if (lastMessage?.role !== 'user') { + return undefined + } + + return lastMessage.content.length === 0 + ? undefined + : lastMessage.content.filter((c) => c.type === 'text').map((c) => c.text).join('\n') +} diff --git a/examples/ai-core/src/middleware/stream-text-log-middleware.ts b/examples/ai-core/src/middleware/stream-text-log-middleware.ts new file mode 100755 index 0000000..55108b9 --- /dev/null +++ b/examples/ai-core/src/middleware/stream-text-log-middleware.ts @@ -0,0 +1,26 @@ +#!
/usr/bin/env -S pnpm tsx + +import { + experimental_wrapLanguageModel as wrapLanguageModel, + streamText, +} from 'ai' +import { ollama } from 'ollama-ai-provider' + +import { buildProgram } from '../tools/command' +import { yourLogMiddleware } from './your-log-middleware' + +async function main(model: Parameters[0]) { + const result = await streamText({ + model: wrapLanguageModel({ + middleware: yourLogMiddleware, + model: ollama(model), + }), + prompt: 'What cities are in the United States?', + }) + + for await (const textPart of result.textStream) { + // consume the stream + } +} + +buildProgram('llama3.1', main).catch(console.error) diff --git a/examples/ai-core/src/middleware/stream-text-rag-middleware.ts b/examples/ai-core/src/middleware/stream-text-rag-middleware.ts new file mode 100755 index 0000000..bf14b9a --- /dev/null +++ b/examples/ai-core/src/middleware/stream-text-rag-middleware.ts @@ -0,0 +1,26 @@ +#! /usr/bin/env -S pnpm tsx + +import { + experimental_wrapLanguageModel as wrapLanguageModel, + streamText, +} from 'ai' +import { ollama } from 'ollama-ai-provider' + +import { buildProgram } from '../tools/command' +import { yourRagMiddleware } from './your-rag-middleware' + +async function main(model: Parameters[0]) { + const result = await streamText({ + model: wrapLanguageModel({ + middleware: yourRagMiddleware, + model: ollama(model), + }), + prompt: 'What cities are in the United States?', + }) + + for await (const textPart of result.textStream) { + process.stdout.write(textPart) + } +} + +buildProgram('llama3.1', main).catch(console.error) diff --git a/examples/ai-core/src/middleware/your-cache-middleware.ts b/examples/ai-core/src/middleware/your-cache-middleware.ts new file mode 100644 index 0000000..afb30f1 --- /dev/null +++ b/examples/ai-core/src/middleware/your-cache-middleware.ts @@ -0,0 +1,22 @@ +import type { Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware } from 'ai' + +// eslint-disable-next-line 
@typescript-eslint/no-explicit-any +const cache = new Map() + +export const yourCacheMiddleware: LanguageModelV1Middleware = { + wrapGenerate: async ({ doGenerate, params }) => { + const cacheKey = JSON.stringify(params) + + if (cache.has(cacheKey)) { + return cache.get(cacheKey) + } + + const result = await doGenerate() + + cache.set(cacheKey, result) + + return result + }, + + // here you would implement the caching logic for streaming +} diff --git a/examples/ai-core/src/middleware/your-guardrail-middleware.ts b/examples/ai-core/src/middleware/your-guardrail-middleware.ts new file mode 100644 index 0000000..27019bb --- /dev/null +++ b/examples/ai-core/src/middleware/your-guardrail-middleware.ts @@ -0,0 +1,16 @@ +import type { Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware } from 'ai' + +export const yourGuardrailMiddleware: LanguageModelV1Middleware = { + wrapGenerate: async ({ doGenerate }) => { + const { text, ...rest } = await doGenerate() + + // filtering approach, e.g. for PII or other sensitive information: + const cleanedText = text?.replace(/badword/g, '') + + return { text: cleanedText, ...rest } + }, + + // here you would implement the guardrail logic for streaming + // Note: streaming guardrails are difficult to implement, because + // you do not know the full content of the stream until it's finished. 
+} diff --git a/examples/ai-core/src/middleware/your-log-middleware.ts b/examples/ai-core/src/middleware/your-log-middleware.ts new file mode 100644 index 0000000..8a52606 --- /dev/null +++ b/examples/ai-core/src/middleware/your-log-middleware.ts @@ -0,0 +1,50 @@ +import type { + Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware, + LanguageModelV1StreamPart, +} from 'ai' + +export const yourLogMiddleware: LanguageModelV1Middleware = { + wrapGenerate: async ({ doGenerate, params }) => { + console.log('doGenerate called') + console.log(`params: ${JSON.stringify(params, null, 2)}`) + + const result = await doGenerate() + + console.log('doGenerate finished') + console.log(`generated text: ${result.text}`) + + return result + }, + + wrapStream: async ({ doStream, params }) => { + console.log('doStream called') + console.log(`params: ${JSON.stringify(params, null, 2)}`) + + const { stream, ...rest } = await doStream() + + let generatedText = '' + + const transformStream = new TransformStream< + LanguageModelV1StreamPart, + LanguageModelV1StreamPart + >({ + flush() { + console.log('doStream finished') + console.log(`generated text: ${generatedText}`) + }, + + transform(chunk, controller) { + if (chunk.type === 'text-delta') { + generatedText += chunk.textDelta + } + + controller.enqueue(chunk) + }, + }) + + return { + stream: stream.pipeThrough(transformStream), + ...rest, + } + }, +} diff --git a/examples/ai-core/src/middleware/your-rag-middleware.ts b/examples/ai-core/src/middleware/your-rag-middleware.ts new file mode 100644 index 0000000..6b8f034 --- /dev/null +++ b/examples/ai-core/src/middleware/your-rag-middleware.ts @@ -0,0 +1,44 @@ +import type { Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware } from 'ai' + +import { addToLastUserMessage } from './add-to-last-user-message' +import { getLastUserMessageText } from './get-last-user-message-text' + +export const yourRagMiddleware: LanguageModelV1Middleware = { + transformParams: 
async ({ params }) => { + const lastUserMessageText = getLastUserMessageText({ + prompt: params.prompt, + }) + + if (lastUserMessageText === undefined) { + return params // do not use RAG (send unmodified parameters) + } + + const instruction = + 'Use the following information to answer the question:\n' + + findSources({ text: lastUserMessageText }) + .map((chunk) => JSON.stringify(chunk)) + .join('\n') + + return addToLastUserMessage({ params, text: instruction }) + }, +} + +// example, could implement anything here: +function findSources({ text }: { text: string }): Array<{ + previewText: string | undefined + title: string + url: string | undefined +}> { + return [ + { + previewText: 'New York is a city in the United States.', + title: 'New York', + url: 'https://en.wikipedia.org/wiki/New_York', + }, + { + previewText: 'San Francisco is a city in the United States.', + title: 'San Francisco', + url: 'https://en.wikipedia.org/wiki/San_Francisco', + }, + ] +} diff --git a/examples/ai-core/src/stream-text/ollama-fullstream-raw.ts b/examples/ai-core/src/stream-text/ollama-fullstream-raw.ts new file mode 100755 index 0000000..eae03f0 --- /dev/null +++ b/examples/ai-core/src/stream-text/ollama-fullstream-raw.ts @@ -0,0 +1,19 @@ +#! 
/usr/bin/env -S pnpm tsx + +import { streamText } from 'ai' +import { ollama } from 'ollama-ai-provider' + +import { buildProgram } from '../tools/command' + +async function main(model: Parameters[0]) { + const result = await streamText({ + model: ollama(model), + prompt: 'Invent a new holiday and describe its traditions.', + }) + + for await (const part of result.fullStream) { + console.log(JSON.stringify(part)) + } +} + +buildProgram('llama3.1', main).catch(console.error) diff --git a/examples/ai-core/src/stream-text/ollama-response-headers.ts b/examples/ai-core/src/stream-text/ollama-response.ts similarity index 100% rename from examples/ai-core/src/stream-text/ollama-response-headers.ts rename to examples/ai-core/src/stream-text/ollama-response.ts diff --git a/examples/ai-core/src/telemetry/generate-object.ts b/examples/ai-core/src/telemetry/generate-object.ts new file mode 100755 index 0000000..1f10a67 --- /dev/null +++ b/examples/ai-core/src/telemetry/generate-object.ts @@ -0,0 +1,50 @@ +#! 
/usr/bin/env -S pnpm tsx + +import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node' +import { NodeSDK } from '@opentelemetry/sdk-node' +import { ConsoleSpanExporter } from '@opentelemetry/sdk-trace-node' +import { generateObject } from 'ai' +import { ollama } from 'ollama-ai-provider' +import { z } from 'zod' + +import { buildProgram } from '../tools/command' + +const sdk = new NodeSDK({ + instrumentations: [getNodeAutoInstrumentations()], + traceExporter: new ConsoleSpanExporter(), +}) + +sdk.start() + +async function main(model: Parameters[0]) { + const result = await generateObject({ + experimental_telemetry: { + functionId: 'my-awesome-function', + isEnabled: true, + metadata: { + someOtherThing: 'other-value', + something: 'custom', + }, + }, + model: ollama(model), + prompt: 'Generate a lasagna recipe.', + schema: z.object({ + recipe: z.object({ + ingredients: z.array( + z.object({ + amount: z.string(), + name: z.string(), + }), + ), + name: z.string(), + steps: z.array(z.string()), + }), + }), + }) + + console.log(JSON.stringify(result.object.recipe, null, 2)) + + await sdk.shutdown() +} + +buildProgram('llama3.1', main).catch(console.error) diff --git a/examples/ai-core/src/telemetry/stream-object.ts b/examples/ai-core/src/telemetry/stream-object.ts new file mode 100755 index 0000000..c17c168 --- /dev/null +++ b/examples/ai-core/src/telemetry/stream-object.ts @@ -0,0 +1,53 @@ +#! 
/usr/bin/env -S pnpm tsx + +import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node' +import { NodeSDK } from '@opentelemetry/sdk-node' +import { ConsoleSpanExporter } from '@opentelemetry/sdk-trace-node' +import { streamObject } from 'ai' +import { ollama } from 'ollama-ai-provider' +import { z } from 'zod' + +import { buildProgram } from '../tools/command' + +const sdk = new NodeSDK({ + instrumentations: [getNodeAutoInstrumentations()], + traceExporter: new ConsoleSpanExporter(), +}) + +sdk.start() + +async function main(model: Parameters[0]) { + const result = await streamObject({ + experimental_telemetry: { + functionId: 'my-awesome-function', + isEnabled: true, + metadata: { + someOtherThing: 'other-value', + something: 'custom', + }, + }, + model: ollama(model), + prompt: 'Generate a lasagna recipe.', + schema: z.object({ + recipe: z.object({ + ingredients: z.array( + z.object({ + amount: z.string(), + name: z.string(), + }), + ), + name: z.string(), + steps: z.array(z.string()), + }), + }), + }) + + for await (const partialObject of result.partialObjectStream) { + console.clear() + console.log(partialObject) + } + + await sdk.shutdown() +} + +buildProgram('llama3.1', main).catch(console.error) diff --git a/packages/ollama/package.json b/packages/ollama/package.json index f072522..9a8474c 100644 --- a/packages/ollama/package.json +++ b/packages/ollama/package.json @@ -26,8 +26,8 @@ "author": "Sergio Gómez Bachiller ", "license": "Apache-2.0", "dependencies": { - "@ai-sdk/provider": "0.0.22", - "@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/provider": "0.0.23", + "@ai-sdk/provider-utils": "1.0.18", "partial-json": "0.1.7" }, "devDependencies": { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 6dcc8ab..41d1119 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -82,7 +82,7 @@ importers: version: 1.25.0(@opentelemetry/api@1.9.0) ai: specifier: latest - version: 
3.3.19(openai@4.52.6)(react@18.3.1)(solid-js@1.8.17)(sswr@2.1.0(svelte@4.2.17))(svelte@4.2.17)(vue@3.4.27(typescript@5.5.4))(zod@3.23.8) + version: 3.3.28(openai@4.52.6)(react@18.3.1)(solid-js@1.8.17)(sswr@2.1.0(svelte@4.2.17))(svelte@4.2.17)(vue@3.4.27(typescript@5.5.4))(zod@3.23.8) commander: specifier: ^12.1.0 version: 12.1.0 @@ -113,13 +113,13 @@ importers: dependencies: '@ai-sdk/react': specifier: latest - version: 0.0.52(react@18.3.1)(zod@3.23.8) + version: 0.0.55(react@18.3.1)(zod@3.23.8) '@ai-sdk/ui-utils': specifier: latest - version: 0.0.39(zod@3.23.8) + version: 0.0.41(zod@3.23.8) ai: specifier: latest - version: 3.3.19(openai@4.52.6)(react@18.3.1)(solid-js@1.8.17)(sswr@2.1.0(svelte@4.2.17))(svelte@4.2.17)(vue@3.4.27(typescript@5.5.4))(zod@3.23.8) + version: 3.3.28(openai@4.52.6)(react@18.3.1)(solid-js@1.8.17)(sswr@2.1.0(svelte@4.2.17))(svelte@4.2.17)(vue@3.4.27(typescript@5.5.4))(zod@3.23.8) framer-motion: specifier: ^11.3.23 version: 11.3.23(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -164,11 +164,11 @@ importers: packages/ollama: dependencies: '@ai-sdk/provider': - specifier: 0.0.22 - version: 0.0.22 + specifier: 0.0.23 + version: 0.0.23 '@ai-sdk/provider-utils': - specifier: 1.0.17 - version: 1.0.17(zod@3.23.8) + specifier: 1.0.18 + version: 1.0.18(zod@3.23.8) partial-json: specifier: 0.1.7 version: 0.1.7 @@ -181,7 +181,7 @@ importers: version: 18.19.46 tsup: specifier: ^8.2.4 - version: 8.2.4(jiti@1.21.6)(postcss@8.4.41)(typescript@5.5.4)(yaml@2.4.5) + version: 8.2.4(jiti@1.21.6)(postcss@8.4.45)(typescript@5.5.4)(yaml@2.4.5) typescript: specifier: 5.5.4 version: 5.5.4 @@ -191,8 +191,8 @@ importers: packages: - '@ai-sdk/provider-utils@1.0.17': - resolution: {integrity: sha512-2VyeTH5DQ6AxqvwdyytKIeiZyYTyJffpufWjE67zM2sXMIHgYl7fivo8m5wVl6Cbf1dFPSGKq//C9s+lz+NHrQ==} + '@ai-sdk/provider-utils@1.0.18': + resolution: {integrity: sha512-9u/XE/dB1gsIGcxiC5JfGOLzUz+EKRXt66T8KYWwDg4x8d02P+fI/EPOgkf+T4oLBrcQgvs4GPXPKoXGPJxBbg==} engines: {node: '>=18'} 
peerDependencies: zod: ^3.0.0 @@ -200,12 +200,12 @@ packages: zod: optional: true - '@ai-sdk/provider@0.0.22': - resolution: {integrity: sha512-smZ1/2jL/JSKnbhC6ama/PxI2D/psj+YAe0c0qpd5ComQCNFltg72VFf0rpUSFMmFuj1pCCNoBOCrvyl8HTZHQ==} + '@ai-sdk/provider@0.0.23': + resolution: {integrity: sha512-oAc49O5+xypVrKM7EUU5P/Y4DUL4JZUWVxhejoAVOTOl3WZUEWsMbP3QZR+TrimQIsS0WR/n9UuF6U0jPdp0tQ==} engines: {node: '>=18'} - '@ai-sdk/react@0.0.52': - resolution: {integrity: sha512-4Gm+AoINDXQ4lzIZFKOWOcKgjgiAFdyhmBxnyuaqzTJCoRWNUSea62xhjqRE0u8wagfPgxWUAyS8BAsY0EqOyg==} + '@ai-sdk/react@0.0.55': + resolution: {integrity: sha512-9fUUEEEoH01M6ZhvyZ/2v0DI6tiYnSldBg6RaKoy+qx2tSeKvOpFNZhT/iOvQ7oqAyyp0Ocg5Rj7L/jcLXSMxw==} engines: {node: '>=18'} peerDependencies: react: ^18 || ^19 @@ -216,8 +216,8 @@ packages: zod: optional: true - '@ai-sdk/solid@0.0.42': - resolution: {integrity: sha512-tr1rXRg0bLls7ZEQCWfd0Tv7irFlKQRjBSKSCstwrGtTeDA7zwUP4tIiUaCyzM3lwyE6Qgl17SrAoxSD+xP+zQ==} + '@ai-sdk/solid@0.0.44': + resolution: {integrity: sha512-3kMhxalepc78jWr2Qg1BAHbY04JKYxp8wRu3TACrRUdokxzwD5sbZYtTb7vu9tw2wx78rfu0DH44CESFWpSfZg==} engines: {node: '>=18'} peerDependencies: solid-js: ^1.7.7 @@ -225,8 +225,8 @@ packages: solid-js: optional: true - '@ai-sdk/svelte@0.0.44': - resolution: {integrity: sha512-soSiEX1BUiwRSdoc+7mAoCeuM3Vs/ebdb1gNL7ta9Zma7GTHq802Wi7KfWfypoAqpgi0QUapzCRMvgrl4oW4AQ==} + '@ai-sdk/svelte@0.0.46': + resolution: {integrity: sha512-cokqS91vQkpqiRgf8xKwOONFb/RwkIbRg9jYVRb+z5NR9OsWXKMEfoCAf8+VgURfVbp8nqA+ddRXvtgYCwqQjQ==} engines: {node: '>=18'} peerDependencies: svelte: ^3.0.0 || ^4.0.0 @@ -234,8 +234,8 @@ packages: svelte: optional: true - '@ai-sdk/ui-utils@0.0.39': - resolution: {integrity: sha512-yxlJBFEiWR7rf/oS7MFX9O5Hr7VYV0ipMBrvds66N3+m52/nCbBB5C/eBefzeR+hoGc/r5xGo7Yd1cncGYHHTw==} + '@ai-sdk/ui-utils@0.0.41': + resolution: {integrity: sha512-I0trJKWxVG8hXeG0MvKqLG54fZjdeGjXvcVZocaSnWMBhl9lpTQxrqAR6ZsQMFDXs5DbvXoKtQs488qu2Bzaiw==} engines: {node: '>=18'} peerDependencies: 
zod: ^3.0.0 @@ -243,8 +243,8 @@ packages: zod: optional: true - '@ai-sdk/vue@0.0.44': - resolution: {integrity: sha512-IsDCoy7u4V081dKT1i6b/Cxh2G0aftetbif+qNQGh5QeU9TtGs9KDW+onPkXeqlDQcpMN0Q5zaNGaZ7YBK50Gw==} + '@ai-sdk/vue@0.0.46': + resolution: {integrity: sha512-H366ydskPbZP8uRs4sm3SAi97P3JVTRI5Q8xYTI6uTaY4UFBA6aOWdDxniYZNa67ebemfe11m7ksX4wHW6Wl8g==} engines: {node: '>=18'} peerDependencies: vue: ^3.3.4 @@ -281,8 +281,8 @@ packages: engines: {node: '>=6.0.0'} hasBin: true - '@babel/parser@7.25.4': - resolution: {integrity: sha512-nq+eWrOgdtu3jG5Os4TQP3x3cLA8hR8TvJNjD8vnPa20WGycimcparWnLK4jJhElTK6SDyuJo1weMKO/5LpmLA==} + '@babel/parser@7.25.6': + resolution: {integrity: sha512-trGdfBdbD0l1ZPmcJ83eNxB9rbEax4ALFTF7fN386TMYbeCQbyme5cOEXQhbGXKebwGaB/J52w1mrklMcbgy6Q==} engines: {node: '>=6.0.0'} hasBin: true @@ -290,16 +290,16 @@ packages: resolution: {integrity: sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw==} engines: {node: '>=6.9.0'} - '@babel/runtime@7.25.4': - resolution: {integrity: sha512-DSgLeL/FNcpXuzav5wfYvHCGvynXkJbn3Zvc3823AEe9nPwW9IK4UoCSS5yGymmQzN0pCPvivtgS6/8U2kkm1w==} + '@babel/runtime@7.25.6': + resolution: {integrity: sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==} engines: {node: '>=6.9.0'} '@babel/types@7.25.2': resolution: {integrity: sha512-YTnYtra7W9e6/oAZEHj0bJehPRUlLH9/fbpT5LfB0NhQXyALCRkRs3zH9v07IYhkgpqX6Z78FnuccZr/l4Fs4Q==} engines: {node: '>=6.9.0'} - '@babel/types@7.25.4': - resolution: {integrity: sha512-zQ1ijeeCXVEh+aNL0RlmkPkG8HUiDcU2pzQQFjtbntgAczRASFzj4H+6+bV+dy1ntKR14I/DypeuRG1uma98iQ==} + '@babel/types@7.25.6': + resolution: {integrity: sha512-/l42B1qxpG6RdfYf343Uw1vmDjeNhneUXtzhojE7pDgfpEypmRhI6j1kr17XCVv4Cgl9HdAiQY2x0GwKm7rWCw==} engines: {node: '>=6.9.0'} '@bcoe/v8-coverage@0.2.3': @@ -875,8 +875,8 @@ packages: resolution: {integrity: sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==} 
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - '@grpc/grpc-js@1.11.1': - resolution: {integrity: sha512-gyt/WayZrVPH2w/UTLansS7F9Nwld472JxxaETamrM8HNlsa+jSLNyKAZmhxI2Me4c3mQHFiS1wWHDY1g1Kthw==} + '@grpc/grpc-js@1.11.2': + resolution: {integrity: sha512-DWp92gDD7/Qkj7r8kus6/HCINeo3yPZWZ3paKgDgsbKbSpoxKg1yvN8xe2Q8uE3zOsPe3bX8FQX2+XValq2yTw==} engines: {node: '>=12.10.0'} '@grpc/proto-loader@0.7.13': @@ -1040,6 +1040,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/core@1.26.0': + resolution: {integrity: sha512-1iKxXXE8415Cdv0yjG3G6hQnB5eVEsJce3QaawX8SjDn0mAS0ZM8fAbZZJD4ajvhC15cePvosSCut404KrIIvQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/exporter-trace-otlp-grpc@0.52.0': resolution: {integrity: sha512-Ln3HU54/ytTeEMrDGNDj01357YV8Kk9PkGDHvBRo1n7bWhwZoTEnX/cTuXLYOiygBIJJjCCM+VMfWCnvtFl4Kw==} engines: {node: '>=14'} @@ -1322,14 +1328,14 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.10.0' - '@opentelemetry/propagation-utils@0.30.10': - resolution: {integrity: sha512-hhTW8pFp9PSyosYzzuUL9rdm7HF97w3OCyElufFHyUnYnKkCBbu8ne2LyF/KSdI/xZ81ubxWZs78hX4S7pLq5g==} + '@opentelemetry/propagation-utils@0.30.11': + resolution: {integrity: sha512-rY4L/2LWNk5p/22zdunpqVmgz6uN419DsRTw5KFMa6u21tWhXS8devlMy4h8m8nnS20wM7r6yYweCNNKjgLYJw==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 - '@opentelemetry/propagator-aws-xray@1.25.1': - resolution: {integrity: sha512-soZQdO9EAROMwa9bL2C0VLadbrfRjSA9t7g6X8sL0X1B8V59pzOayYMyTW9qTECn9uuJV98A7qOnJm6KH6yk8w==} + '@opentelemetry/propagator-aws-xray@1.26.0': + resolution: {integrity: sha512-Sex+JyEZ/xX328TArBqQjh1NZSfNyw5NdASUIi9hnPsnMBMSBaDe7B9JRnXv0swz7niNyAnXa6MY7yOCV76EvA==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' @@ -1356,14 +1362,14 @@ packages: peerDependencies: '@opentelemetry/api': ^1.0.0 - '@opentelemetry/resource-detector-aws@1.5.2': - 
resolution: {integrity: sha512-LNwKy5vJM5fvCDcbXVKwg6Y1pKT4WgZUsddGMnWMEhxJcQVZm2Z9vUkyHdQU7xvJtGwCO2/TkMWHPjU1KQNDJQ==} + '@opentelemetry/resource-detector-aws@1.6.1': + resolution: {integrity: sha512-A/3lqx9xoew7sFi+AVUUVr6VgB7UJ5qqddkKR3gQk9hWLm1R7HUXVJG09cLcZ7DMNpX13DohPRGmHE/vp1vafw==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 - '@opentelemetry/resource-detector-azure@0.2.9': - resolution: {integrity: sha512-16Z6kyrmszoa7J1uj1kbSAgZuk11K07yEDj6fa3I9XBf8Debi8y4K8ex94kpxbCfEraWagXji3bCWvaq3k4dRg==} + '@opentelemetry/resource-detector-azure@0.2.11': + resolution: {integrity: sha512-XepvQfTXWyHAoAziCfXGwYbSZL0LHtFk5iuKKN2VE2vzcoiw5Tepi0Qafuwb7CCtpQRReao4H7E29MFbCmh47g==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 @@ -1374,8 +1380,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.0.0 - '@opentelemetry/resource-detector-gcp@0.29.10': - resolution: {integrity: sha512-rm2HKJ9lsdoVvrbmkr9dkOzg3Uk0FksXNxvNBgrCprM1XhMoJwThI5i0h/5sJypISUAJlEeJS6gn6nROj/NpkQ==} + '@opentelemetry/resource-detector-gcp@0.29.11': + resolution: {integrity: sha512-07wJx4nyxD/c2z3n70OQOg8fmoO/baTsq8uU+f7tZaehRNQx76MPkRbV2L902N40Z21SPIG8biUZ30OXE9tOIg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 @@ -1386,8 +1392,8 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' - '@opentelemetry/resources@1.25.1': - resolution: {integrity: sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==} + '@opentelemetry/resources@1.26.0': + resolution: {integrity: sha512-CPNYchBE7MBecCSVy0HKpUISEeJOniWqcHaAHpmasZ3j9o6V3AyBzhRc90jdmemq0HOxDr6ylhUbDhBqqPpeNw==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' @@ -1404,8 +1410,8 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.10.0' - '@opentelemetry/sdk-metrics@1.25.1': - resolution: {integrity: 
sha512-9Mb7q5ioFL4E4dDrc4wC/A3NTHDat44v4I3p2pLPSxRvqUbDIQyMVr9uK+EU69+HWhlET1VaSrRzwdckWqY15Q==} + '@opentelemetry/sdk-metrics@1.26.0': + resolution: {integrity: sha512-0SvDXmou/JjzSDOjUmetAAvcKQW6ZrvosU0rkbDGpXvvZN+pQF6JbK/Kd4hNdK4q/22yeruqvukXEJyySTzyTQ==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.10.0' @@ -1436,8 +1442,8 @@ packages: resolution: {integrity: sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==} engines: {node: '>=14'} - '@opentelemetry/semantic-conventions@1.26.0': - resolution: {integrity: sha512-U9PJlOswJPSgQVPI+XEuNLElyFWkb0hAiMg+DExD9V0St03X2lPHGMdxMY/LrVmoukuIpXJ12oyrOtEZ4uXFkw==} + '@opentelemetry/semantic-conventions@1.27.0': + resolution: {integrity: sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==} engines: {node: '>=14'} '@opentelemetry/sql-common@0.40.1': @@ -1654,6 +1660,9 @@ packages: '@types/node@18.19.46': resolution: {integrity: sha512-vnRgMS7W6cKa1/0G3/DTtQYpVrZ8c0Xm6UkLaVFrb9jtcVC3okokW09Ki1Qdrj9ISokszD69nY4WDLRlvHlhAA==} + '@types/node@18.19.50': + resolution: {integrity: sha512-xonK+NRrMBRtkL1hVCc3G+uXtjh1Al4opBLjqVmipe5ZAaBYWW6cNAiBVZ1BvmkBhep698rP3UM3aRAdSALuhg==} + '@types/node@20.11.20': resolution: {integrity: sha512-7/rR21OS+fq8IyHTgtLkDK949uzsa6n8BkziAKtPVpugIkO6D+/ooXMvzXxDnZrmtXVfjb1bKQafYpb8s89LOg==} @@ -1889,8 +1898,8 @@ packages: resolution: {integrity: sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==} engines: {node: '>= 8.0.0'} - ai@3.3.19: - resolution: {integrity: sha512-Q3K3cubOYqLVK/5P6oWu5kjjPgAo4Lr+zzLZCCG0HX9b8QemgQCPmZGRNgLNf3a2y6m4NwpjJjAPYHPlb3ytwQ==} + ai@3.3.28: + resolution: {integrity: sha512-ogrsMscar8oXa4nTEcnjvb37cs0UJ7AxVga/642BQGkGBevnKhS0hbnXEOUKmlWcny/xRuWQ3GaXA3u9CxhfhQ==} engines: {node: '>=18'} peerDependencies: openai: ^4.42.0 @@ -2109,8 +2118,8 @@ packages: resolution: {integrity: 
sha512-TdHqgGf9odd8SXNuxtUBVx8Nv+qZOejE6qyqiy5NtbYYQOeFa6zmHkxlPzmaLxWWHsU6nJmB7AETdVPi+2NBUg==} engines: {node: '>=8'} - cjs-module-lexer@1.3.1: - resolution: {integrity: sha512-a3KdPAANPbNE4ZUv9h6LckSl9zLsYOP4MBmhIPkRaeyybt+r4UghLvq+xw/YwUcC1gqylCkL4rdVs3Lwupjm4Q==} + cjs-module-lexer@1.4.0: + resolution: {integrity: sha512-N1NGmowPlGBLsOZLPvm48StN04V4YvQRL0i6b7ctrVY3epjP/ct7hFLOItz6pDIvRjwpfPxi52a2UWV2ziir8g==} clean-regexp@1.0.0: resolution: {integrity: sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw==} @@ -2269,6 +2278,15 @@ packages: supports-color: optional: true + debug@4.3.7: + resolution: {integrity: sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + decimal.js@10.4.3: resolution: {integrity: sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==} @@ -2728,8 +2746,8 @@ packages: resolution: {integrity: sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==} engines: {node: '>= 0.4'} - get-tsconfig@4.7.6: - resolution: {integrity: sha512-ZAqrLlu18NbDdRaHq+AKXzAmqIUPswPWKUchfytdAjiRFnCe5ojG2bstg6mRiZabkKfCoL/e98pbBELIV/YCeA==} + get-tsconfig@4.8.0: + resolution: {integrity: sha512-Pgba6TExTZ0FJAn1qkJAjIeKoDJ3CsI2ChuLohJnZl/tTU8MVrq3b+2t5UOPfRa4RMsorClBjJALkJUMjG1PAw==} git-raw-commits@4.0.0: resolution: {integrity: sha512-ICsMM1Wk8xSGMowkOmPrzo2Fgmfo4bMHLNX6ytHjajRJUqvHOw/TFapQ+QG75c3X/tTDDhOSRPGC52dDbNM8FQ==} @@ -3544,6 +3562,9 @@ packages: picocolors@1.0.1: resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} + picocolors@1.1.0: + resolution: {integrity: sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==} + picomatch@2.3.1: resolution: 
{integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} @@ -3640,6 +3661,10 @@ packages: resolution: {integrity: sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==} engines: {node: ^10 || ^12 || >=14} + postcss@8.4.45: + resolution: {integrity: sha512-7KTLTdzdZZYscUc65XmjFiB73vBhBfbPztCYdUNvlaso9PrzjzcmjqBPR0lNGkcVlcO4BjiO5rK/qNz+XAen1Q==} + engines: {node: ^10 || ^12 || >=14} + postgres-array@2.0.0: resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} engines: {node: '>=4'} @@ -3678,8 +3703,8 @@ packages: engines: {node: '>=14'} hasBin: true - protobufjs@7.3.3: - resolution: {integrity: sha512-HaYi2CVjiPoBR1d2zTVKVHXr9IUnpJizCjUu19vxdD3B8o4z+vfOHpIEB1358w8nv8dfUNEfDHFvMsH7QlLt/Q==} + protobufjs@7.4.0: + resolution: {integrity: sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==} engines: {node: '>=12.0.0'} pseudomap@1.0.2: @@ -4444,61 +4469,61 @@ packages: snapshots: - '@ai-sdk/provider-utils@1.0.17(zod@3.23.8)': + '@ai-sdk/provider-utils@1.0.18(zod@3.23.8)': dependencies: - '@ai-sdk/provider': 0.0.22 + '@ai-sdk/provider': 0.0.23 eventsource-parser: 1.1.2 nanoid: 3.3.6 secure-json-parse: 2.7.0 optionalDependencies: zod: 3.23.8 - '@ai-sdk/provider@0.0.22': + '@ai-sdk/provider@0.0.23': dependencies: json-schema: 0.4.0 - '@ai-sdk/react@0.0.52(react@18.3.1)(zod@3.23.8)': + '@ai-sdk/react@0.0.55(react@18.3.1)(zod@3.23.8)': dependencies: - '@ai-sdk/provider-utils': 1.0.17(zod@3.23.8) - '@ai-sdk/ui-utils': 0.0.39(zod@3.23.8) + '@ai-sdk/provider-utils': 1.0.18(zod@3.23.8) + '@ai-sdk/ui-utils': 0.0.41(zod@3.23.8) swr: 2.2.5(react@18.3.1) optionalDependencies: react: 18.3.1 zod: 3.23.8 - '@ai-sdk/solid@0.0.42(solid-js@1.8.17)(zod@3.23.8)': + '@ai-sdk/solid@0.0.44(solid-js@1.8.17)(zod@3.23.8)': dependencies: - '@ai-sdk/provider-utils': 
1.0.17(zod@3.23.8) - '@ai-sdk/ui-utils': 0.0.39(zod@3.23.8) + '@ai-sdk/provider-utils': 1.0.18(zod@3.23.8) + '@ai-sdk/ui-utils': 0.0.41(zod@3.23.8) optionalDependencies: solid-js: 1.8.17 transitivePeerDependencies: - zod - '@ai-sdk/svelte@0.0.44(svelte@4.2.17)(zod@3.23.8)': + '@ai-sdk/svelte@0.0.46(svelte@4.2.17)(zod@3.23.8)': dependencies: - '@ai-sdk/provider-utils': 1.0.17(zod@3.23.8) - '@ai-sdk/ui-utils': 0.0.39(zod@3.23.8) + '@ai-sdk/provider-utils': 1.0.18(zod@3.23.8) + '@ai-sdk/ui-utils': 0.0.41(zod@3.23.8) sswr: 2.1.0(svelte@4.2.17) optionalDependencies: svelte: 4.2.17 transitivePeerDependencies: - zod - '@ai-sdk/ui-utils@0.0.39(zod@3.23.8)': + '@ai-sdk/ui-utils@0.0.41(zod@3.23.8)': dependencies: - '@ai-sdk/provider': 0.0.22 - '@ai-sdk/provider-utils': 1.0.17(zod@3.23.8) + '@ai-sdk/provider': 0.0.23 + '@ai-sdk/provider-utils': 1.0.18(zod@3.23.8) json-schema: 0.4.0 secure-json-parse: 2.7.0 zod-to-json-schema: 3.23.2(zod@3.23.8) optionalDependencies: zod: 3.23.8 - '@ai-sdk/vue@0.0.44(vue@3.4.27(typescript@5.5.4))(zod@3.23.8)': + '@ai-sdk/vue@0.0.46(vue@3.4.27(typescript@5.5.4))(zod@3.23.8)': dependencies: - '@ai-sdk/provider-utils': 1.0.17(zod@3.23.8) - '@ai-sdk/ui-utils': 0.0.39(zod@3.23.8) + '@ai-sdk/provider-utils': 1.0.18(zod@3.23.8) + '@ai-sdk/ui-utils': 0.0.41(zod@3.23.8) swrv: 1.0.4(vue@3.4.27(typescript@5.5.4)) optionalDependencies: vue: 3.4.27(typescript@5.5.4) @@ -4532,15 +4557,15 @@ snapshots: dependencies: '@babel/types': 7.25.2 - '@babel/parser@7.25.4': + '@babel/parser@7.25.6': dependencies: - '@babel/types': 7.25.4 + '@babel/types': 7.25.6 '@babel/runtime@7.24.7': dependencies: regenerator-runtime: 0.14.1 - '@babel/runtime@7.25.4': + '@babel/runtime@7.25.6': dependencies: regenerator-runtime: 0.14.1 @@ -4550,7 +4575,7 @@ snapshots: '@babel/helper-validator-identifier': 7.24.7 to-fast-properties: 2.0.0 - '@babel/types@7.25.4': + '@babel/types@7.25.6': dependencies: '@babel/helper-string-parser': 7.24.8 '@babel/helper-validator-identifier': 7.24.7 
@@ -5064,7 +5089,7 @@ snapshots: '@eslint/js@8.57.0': {} - '@grpc/grpc-js@1.11.1': + '@grpc/grpc-js@1.11.2': dependencies: '@grpc/proto-loader': 0.7.13 '@js-sdsl/ordered-map': 4.4.2 @@ -5073,7 +5098,7 @@ snapshots: dependencies: lodash.camelcase: 4.3.0 long: 5.2.3 - protobufjs: 7.3.3 + protobufjs: 7.4.0 yargs: 17.7.2 '@humanwhocodes/config-array@0.11.14': @@ -5230,11 +5255,11 @@ snapshots: '@opentelemetry/instrumentation-undici': 0.3.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-winston': 0.38.0(@opentelemetry/api@1.9.0) '@opentelemetry/resource-detector-alibaba-cloud': 0.28.10(@opentelemetry/api@1.9.0) - '@opentelemetry/resource-detector-aws': 1.5.2(@opentelemetry/api@1.9.0) - '@opentelemetry/resource-detector-azure': 0.2.9(@opentelemetry/api@1.9.0) + '@opentelemetry/resource-detector-aws': 1.6.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resource-detector-azure': 0.2.11(@opentelemetry/api@1.9.0) '@opentelemetry/resource-detector-container': 0.3.11(@opentelemetry/api@1.9.0) - '@opentelemetry/resource-detector-gcp': 0.29.10(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resource-detector-gcp': 0.29.11(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-node': 0.52.0(@opentelemetry/api@1.9.0) transitivePeerDependencies: - encoding @@ -5254,9 +5279,14 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/semantic-conventions': 1.25.1 + '@opentelemetry/core@1.26.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/exporter-trace-otlp-grpc@0.52.0(@opentelemetry/api@1.9.0)': dependencies: - '@grpc/grpc-js': 1.11.1 + '@grpc/grpc-js': 1.11.2 '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) '@opentelemetry/otlp-grpc-exporter-base': 0.52.0(@opentelemetry/api@1.9.0) @@ -5293,9 +5323,9 @@ snapshots: 
'@opentelemetry/instrumentation-amqplib@0.38.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5303,9 +5333,9 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/propagator-aws-xray': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/propagator-aws-xray': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.27.0 '@types/aws-lambda': 8.10.122 transitivePeerDependencies: - supports-color @@ -5313,10 +5343,10 @@ snapshots: '@opentelemetry/instrumentation-aws-sdk@0.42.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/propagation-utils': 0.30.10(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/propagation-utils': 0.30.11(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5333,16 +5363,16 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color 
'@opentelemetry/instrumentation-connect@0.37.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 '@types/connect': 3.4.36 transitivePeerDependencies: - supports-color @@ -5351,7 +5381,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5373,25 +5403,25 @@ snapshots: '@opentelemetry/instrumentation-express@0.40.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-fastify@0.37.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-fs@0.13.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - 
supports-color @@ -5421,9 +5451,9 @@ snapshots: '@opentelemetry/instrumentation-hapi@0.39.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5442,7 +5472,7 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5450,16 +5480,16 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-koa@0.41.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 '@types/koa': 2.14.0 '@types/koa__router': 12.0.3 transitivePeerDependencies: @@ -5476,7 +5506,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 '@types/memcached': 2.2.10 transitivePeerDependencies: - supports-color @@ -5485,17 +5515,17 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 
0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-metrics': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/sdk-metrics': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-mongoose@0.39.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5503,7 +5533,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -5512,7 +5542,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 '@types/mysql': 2.15.22 transitivePeerDependencies: - supports-color @@ -5521,7 +5551,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5529,7 +5559,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5537,7 +5567,7 @@ snapshots: 
dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.9.0) '@types/pg': 8.6.1 '@types/pg-pool': 2.0.4 @@ -5556,7 +5586,7 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5565,16 +5595,16 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-restify@0.39.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5582,7 +5612,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5590,7 +5620,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -5598,7 +5628,7 @@ snapshots: dependencies: 
'@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/semantic-conventions': 1.27.0 '@types/tedious': 4.0.14 transitivePeerDependencies: - supports-color @@ -5606,7 +5636,7 @@ snapshots: '@opentelemetry/instrumentation-undici@0.3.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -5651,7 +5681,7 @@ snapshots: '@opentelemetry/otlp-grpc-exporter-base@0.52.0(@opentelemetry/api@1.9.0)': dependencies: - '@grpc/grpc-js': 1.11.1 + '@grpc/grpc-js': 1.11.2 '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) '@opentelemetry/otlp-exporter-base': 0.52.0(@opentelemetry/api@1.9.0) @@ -5666,16 +5696,16 @@ snapshots: '@opentelemetry/sdk-logs': 0.52.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-metrics': 1.25.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.25.0(@opentelemetry/api@1.9.0) - protobufjs: 7.3.3 + protobufjs: 7.4.0 - '@opentelemetry/propagation-utils@0.30.10(@opentelemetry/api@1.9.0)': + '@opentelemetry/propagation-utils@0.30.11(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/propagator-aws-xray@1.25.1(@opentelemetry/api@1.9.0)': + '@opentelemetry/propagator-aws-xray@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/propagator-b3@1.25.0(@opentelemetry/api@1.9.0)': dependencies: @@ -5692,34 +5722,35 @@ snapshots: '@opentelemetry/resource-detector-alibaba-cloud@0.28.10(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/resources': 
1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.27.0 - '@opentelemetry/resource-detector-aws@1.5.2(@opentelemetry/api@1.9.0)': + '@opentelemetry/resource-detector-aws@1.6.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.27.0 - '@opentelemetry/resource-detector-azure@0.2.9(@opentelemetry/api@1.9.0)': + '@opentelemetry/resource-detector-azure@0.2.11(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.27.0 '@opentelemetry/resource-detector-container@0.3.11(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.27.0 - '@opentelemetry/resource-detector-gcp@0.29.10(@opentelemetry/api@1.9.0)': + '@opentelemetry/resource-detector-gcp@0.29.11(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.26.0 + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 
1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.27.0 gcp-metadata: 6.1.0 transitivePeerDependencies: - encoding @@ -5731,11 +5762,11 @@ snapshots: '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.25.0 - '@opentelemetry/resources@1.25.1(@opentelemetry/api@1.9.0)': + '@opentelemetry/resources@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.25.1 + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.27.0 '@opentelemetry/sdk-logs@0.52.0(@opentelemetry/api@1.9.0)': dependencies: @@ -5751,12 +5782,11 @@ snapshots: '@opentelemetry/resources': 1.25.0(@opentelemetry/api@1.9.0) lodash.merge: 4.6.2 - '@opentelemetry/sdk-metrics@1.25.1(@opentelemetry/api@1.9.0)': + '@opentelemetry/sdk-metrics@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) - lodash.merge: 4.6.2 + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-node@0.52.0(@opentelemetry/api@1.9.0)': dependencies: @@ -5798,12 +5828,12 @@ snapshots: '@opentelemetry/semantic-conventions@1.25.1': {} - '@opentelemetry/semantic-conventions@1.26.0': {} + '@opentelemetry/semantic-conventions@1.27.0': {} '@opentelemetry/sql-common@0.40.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@pkgjs/parseargs@0.11.0': optional: true @@ -5985,7 +6015,7 @@ snapshots: '@types/node-fetch@2.6.11': dependencies: - '@types/node': 20.14.14 + '@types/node': 20.11.20 form-data: 4.0.0 optional: true @@ -5995,6 +6025,11 @@ snapshots: 
dependencies: undici-types: 5.26.5 + '@types/node@18.19.50': + dependencies: + undici-types: 5.26.5 + optional: true + '@types/node@20.11.20': dependencies: undici-types: 5.26.5 @@ -6122,7 +6157,7 @@ snapshots: dependencies: '@typescript-eslint/types': 5.62.0 '@typescript-eslint/visitor-keys': 5.62.0 - debug: 4.3.6 + debug: 4.3.7 globby: 11.1.0 is-glob: 4.0.3 semver: 7.6.2 @@ -6249,7 +6284,7 @@ snapshots: '@vue/compiler-core@3.4.27': dependencies: - '@babel/parser': 7.25.4 + '@babel/parser': 7.25.6 '@vue/shared': 3.4.27 entities: 4.5.0 estree-walker: 2.0.2 @@ -6262,14 +6297,14 @@ snapshots: '@vue/compiler-sfc@3.4.27': dependencies: - '@babel/parser': 7.25.4 + '@babel/parser': 7.25.6 '@vue/compiler-core': 3.4.27 '@vue/compiler-dom': 3.4.27 '@vue/compiler-ssr': 3.4.27 '@vue/shared': 3.4.27 estree-walker: 2.0.2 magic-string: 0.30.11 - postcss: 8.4.41 + postcss: 8.4.45 source-map-js: 1.2.0 '@vue/compiler-ssr@3.4.27': @@ -6324,7 +6359,7 @@ snapshots: agent-base@7.1.1: dependencies: - debug: 4.3.6 + debug: 4.3.7 transitivePeerDependencies: - supports-color @@ -6333,15 +6368,15 @@ snapshots: humanize-ms: 1.2.1 optional: true - ai@3.3.19(openai@4.52.6)(react@18.3.1)(solid-js@1.8.17)(sswr@2.1.0(svelte@4.2.17))(svelte@4.2.17)(vue@3.4.27(typescript@5.5.4))(zod@3.23.8): + ai@3.3.28(openai@4.52.6)(react@18.3.1)(solid-js@1.8.17)(sswr@2.1.0(svelte@4.2.17))(svelte@4.2.17)(vue@3.4.27(typescript@5.5.4))(zod@3.23.8): dependencies: - '@ai-sdk/provider': 0.0.22 - '@ai-sdk/provider-utils': 1.0.17(zod@3.23.8) - '@ai-sdk/react': 0.0.52(react@18.3.1)(zod@3.23.8) - '@ai-sdk/solid': 0.0.42(solid-js@1.8.17)(zod@3.23.8) - '@ai-sdk/svelte': 0.0.44(svelte@4.2.17)(zod@3.23.8) - '@ai-sdk/ui-utils': 0.0.39(zod@3.23.8) - '@ai-sdk/vue': 0.0.44(vue@3.4.27(typescript@5.5.4))(zod@3.23.8) + '@ai-sdk/provider': 0.0.23 + '@ai-sdk/provider-utils': 1.0.18(zod@3.23.8) + '@ai-sdk/react': 0.0.55(react@18.3.1)(zod@3.23.8) + '@ai-sdk/solid': 0.0.44(solid-js@1.8.17)(zod@3.23.8) + '@ai-sdk/svelte': 
0.0.46(svelte@4.2.17)(zod@3.23.8) + '@ai-sdk/ui-utils': 0.0.41(zod@3.23.8) + '@ai-sdk/vue': 0.0.46(vue@3.4.27(typescript@5.5.4))(zod@3.23.8) '@opentelemetry/api': 1.9.0 eventsource-parser: 1.1.2 json-schema: 0.4.0 @@ -6573,7 +6608,7 @@ snapshots: ci-info@4.0.0: {} - cjs-module-lexer@1.3.1: {} + cjs-module-lexer@1.4.0: {} clean-regexp@1.0.0: dependencies: @@ -6726,6 +6761,10 @@ snapshots: dependencies: ms: 2.1.2 + debug@4.3.7: + dependencies: + ms: 2.1.3 + decimal.js@10.4.3: {} deep-eql@5.0.2: {} @@ -7354,7 +7393,7 @@ snapshots: es-errors: 1.3.0 get-intrinsic: 1.2.4 - get-tsconfig@4.7.6: + get-tsconfig@4.8.0: dependencies: resolve-pkg-maps: 1.0.0 @@ -7460,7 +7499,7 @@ snapshots: https-proxy-agent@7.0.5: dependencies: agent-base: 7.1.1 - debug: 4.3.6 + debug: 4.3.7 transitivePeerDependencies: - supports-color @@ -7497,14 +7536,14 @@ snapshots: dependencies: acorn: 8.12.1 acorn-import-attributes: 1.9.5(acorn@8.12.1) - cjs-module-lexer: 1.3.1 + cjs-module-lexer: 1.4.0 module-details-from-path: 1.0.3 import-in-the-middle@1.8.0: dependencies: acorn: 8.12.1 acorn-import-attributes: 1.9.5(acorn@8.12.1) - cjs-module-lexer: 1.3.1 + cjs-module-lexer: 1.4.0 module-details-from-path: 1.0.3 import-meta-resolve@4.1.0: {} @@ -7858,7 +7897,7 @@ snapshots: mathjs@12.4.2: dependencies: - '@babel/runtime': 7.25.4 + '@babel/runtime': 7.25.6 complex.js: 2.1.1 decimal.js: 10.4.3 escape-latex: 1.2.0 @@ -8030,7 +8069,7 @@ snapshots: openai@4.52.6: dependencies: - '@types/node': 18.19.46 + '@types/node': 18.19.50 '@types/node-fetch': 2.6.11 abort-controller: 3.0.0 agentkeepalive: 4.5.0 @@ -8147,6 +8186,8 @@ snapshots: picocolors@1.0.1: {} + picocolors@1.1.0: {} + picomatch@2.3.1: {} pidtree@0.6.0: {} @@ -8184,12 +8225,12 @@ snapshots: optionalDependencies: postcss: 8.4.41 - postcss-load-config@6.0.1(jiti@1.21.6)(postcss@8.4.41)(yaml@2.4.5): + postcss-load-config@6.0.1(jiti@1.21.6)(postcss@8.4.45)(yaml@2.4.5): dependencies: lilconfig: 3.1.2 optionalDependencies: jiti: 1.21.6 - postcss: 
8.4.41 + postcss: 8.4.45 yaml: 2.4.5 postcss-nested@6.2.0(postcss@8.4.41): @@ -8216,6 +8257,12 @@ snapshots: picocolors: 1.0.1 source-map-js: 1.2.0 + postcss@8.4.45: + dependencies: + nanoid: 3.3.7 + picocolors: 1.1.0 + source-map-js: 1.2.0 + postgres-array@2.0.0: {} postgres-bytea@1.0.0: {} @@ -8243,7 +8290,7 @@ snapshots: prettier@3.3.2: {} - protobufjs@7.3.3: + protobufjs@7.4.0: dependencies: '@protobufjs/aspromise': 1.1.2 '@protobufjs/base64': 1.1.2 @@ -8323,7 +8370,7 @@ snapshots: require-in-the-middle@7.4.0: dependencies: - debug: 4.3.6 + debug: 4.3.7 module-details-from-path: 1.0.3 resolve: 1.22.8 transitivePeerDependencies: @@ -8752,7 +8799,7 @@ snapshots: tslib@2.6.3: {} - tsup@8.2.4(jiti@1.21.6)(postcss@8.4.41)(typescript@5.5.4)(yaml@2.4.5): + tsup@8.2.4(jiti@1.21.6)(postcss@8.4.45)(typescript@5.5.4)(yaml@2.4.5): dependencies: bundle-require: 5.0.0(esbuild@0.23.1) cac: 6.7.14 @@ -8764,14 +8811,14 @@ snapshots: globby: 11.1.0 joycon: 3.1.1 picocolors: 1.0.1 - postcss-load-config: 6.0.1(jiti@1.21.6)(postcss@8.4.41)(yaml@2.4.5) + postcss-load-config: 6.0.1(jiti@1.21.6)(postcss@8.4.45)(yaml@2.4.5) resolve-from: 5.0.0 rollup: 4.21.1 source-map: 0.8.0-beta.0 sucrase: 3.35.0 tree-kill: 1.2.2 optionalDependencies: - postcss: 8.4.41 + postcss: 8.4.45 typescript: 5.5.4 transitivePeerDependencies: - jiti @@ -8787,7 +8834,7 @@ snapshots: tsx@4.7.1: dependencies: esbuild: 0.19.12 - get-tsconfig: 4.7.6 + get-tsconfig: 4.8.0 optionalDependencies: fsevents: 2.3.3