From 6e08b7a5bb642e80acffcf0fd0c3e84cba83cdb9 Mon Sep 17 00:00:00 2001 From: Lars Grammel Date: Thu, 6 Feb 2025 10:13:46 +0100 Subject: [PATCH 1/3] feat (ai/core): add onError callback to streamText --- .changeset/flat-tigers-heal.md | 5 ++++ .../ai/core/generate-text/stream-text.test.ts | 23 +++++++++++++++++++ packages/ai/core/generate-text/stream-text.ts | 14 +++++++++++ 3 files changed, 42 insertions(+) create mode 100644 .changeset/flat-tigers-heal.md diff --git a/.changeset/flat-tigers-heal.md b/.changeset/flat-tigers-heal.md new file mode 100644 index 000000000000..828348501ac4 --- /dev/null +++ b/.changeset/flat-tigers-heal.md @@ -0,0 +1,5 @@ +--- +'ai': patch +--- + +feat (ai/core): add onError callback to streamText diff --git a/packages/ai/core/generate-text/stream-text.test.ts b/packages/ai/core/generate-text/stream-text.test.ts index 1948924a5f4e..60e0fbb9c41b 100644 --- a/packages/ai/core/generate-text/stream-text.test.ts +++ b/packages/ai/core/generate-text/stream-text.test.ts @@ -1887,6 +1887,29 @@ describe('streamText', () => { }); }); + describe('options.onError', () => { + it('should invoke onError', async () => { + const result: Array<{ error: unknown }> = []; + + const { fullStream } = streamText({ + model: new MockLanguageModelV1({ + doStream: async () => { + throw new Error('test error'); + }, + }), + prompt: 'test-input', + onError(event) { + result.push(event); + }, + }); + + // consume stream + await convertAsyncIterableToArray(fullStream); + + expect(result).toStrictEqual([{ error: new Error('test error') }]); + }); + }); + describe('options.onFinish', () => { it('should send correct information', async () => { let result!: Parameters< diff --git a/packages/ai/core/generate-text/stream-text.ts b/packages/ai/core/generate-text/stream-text.ts index b8490e34574f..292e5619d7d9 100644 --- a/packages/ai/core/generate-text/stream-text.ts +++ b/packages/ai/core/generate-text/stream-text.ts @@ -117,6 +117,7 @@ If set and
supported by the model, calls will generate deterministic results. @param experimental_generateMessageId - Generate a unique ID for each message. @param onChunk - Callback that is called for each chunk of the stream. The stream processing will pause until the callback promise is resolved. +@param onError - Callback that is called when an error occurs during streaming. You can use it to log errors. @param onStepFinish - Callback that is called when each step (LLM call) is finished, including intermediate steps. @param onFinish - Callback that is called when the LLM response and all request tool executions (for tools that have an `execute` function) are finished. @@ -151,6 +152,7 @@ export function streamText< experimental_repairToolCall: repairToolCall, experimental_transform: transform, onChunk, + onError, onFinish, onStepFinish, _internal: { @@ -267,6 +269,11 @@ Callback that is called for each chunk of the stream. The stream processing will >; }) => Promise | void; + /** +Callback that is invoked when an error occurs during streaming. You can use it to log errors. + */ + onError?: (event: { error: unknown }) => Promise | void; + /** Callback that is called when the LLM response and all request tool executions (for tools that have an `execute` function) are finished. @@ -317,6 +324,7 @@ Internal. For test use only. May change without notice. 
continueSteps, providerOptions, onChunk, + onError, onFinish, onStepFinish, now, @@ -478,6 +486,7 @@ class DefaultStreamTextResult continueSteps, providerOptions, onChunk, + onError, onFinish, onStepFinish, now, @@ -520,6 +529,7 @@ class DefaultStreamTextResult } >; }) => Promise | void); + onError: undefined | ((event: { error: unknown }) => Promise | void); onFinish: | undefined | (( @@ -588,6 +598,10 @@ class DefaultStreamTextResult await onChunk?.({ chunk: part }); } + if (part.type === 'error') { + await onError?.({ error: part.error }); + } + if (part.type === 'text-delta') { recordedStepText += part.textDelta; recordedContinuationText += part.textDelta; From 050de5c99fb337226d8fcc34e1b3b2e8ced19954 Mon Sep 17 00:00:00 2001 From: Lars Grammel Date: Thu, 6 Feb 2025 10:18:44 +0100 Subject: [PATCH 2/3] guide --- .../03-ai-sdk-core/05-generating-text.mdx | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/content/docs/03-ai-sdk-core/05-generating-text.mdx b/content/docs/03-ai-sdk-core/05-generating-text.mdx index faf95289fae4..7cb791f73882 100644 --- a/content/docs/03-ai-sdk-core/05-generating-text.mdx +++ b/content/docs/03-ai-sdk-core/05-generating-text.mdx @@ -94,6 +94,25 @@ It also provides several promises that resolve when the stream is finished: - `result.finishReason`: The reason the model finished generating text. - `result.usage`: The usage of the model during text generation. +### `onError` callback + +`streamText` immediately starts streaming to enable sending data without waiting for the model. +Errors become part of the stream and are not thrown to prevent e.g. servers from crashing. + +To log errors, you can provide an `onError` callback that is triggered when an error occurs. 
+ +```tsx highlight="6-8" +import { streamText } from 'ai'; + +const result = streamText({ + model: yourModel, + prompt: 'Invent a new holiday and describe its traditions.', + onError({ error }) { + console.error(error); // your error logging logic here + }, +}); +``` + ### `onChunk` callback When using `streamText`, you can provide an `onChunk` callback that is triggered for each chunk of the stream. From 476d3b324c9140f0c200060196abdce350882288 Mon Sep 17 00:00:00 2001 From: Lars Grammel Date: Thu, 6 Feb 2025 10:20:21 +0100 Subject: [PATCH 3/3] ref --- .../01-ai-sdk-core/02-stream-text.mdx | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/content/docs/07-reference/01-ai-sdk-core/02-stream-text.mdx b/content/docs/07-reference/01-ai-sdk-core/02-stream-text.mdx index 9a9c91008d2f..b197ccfbfd9b 100644 --- a/content/docs/07-reference/01-ai-sdk-core/02-stream-text.mdx +++ b/content/docs/07-reference/01-ai-sdk-core/02-stream-text.mdx @@ -737,6 +737,25 @@ To see `streamText` in action, check out [these examples](#examples). }, ], }, + { + name: 'onError', + type: '(event: OnErrorResult) => Promise<void> | void', + isOptional: true, + description: + 'Callback that is called when an error occurs during streaming. You can use it to log errors.', + properties: [ + { + type: 'OnErrorResult', + parameters: [ + { + name: 'error', + type: 'unknown', + description: 'The error that occurred.', + }, + ], + }, + ], + }, { name: 'experimental_output', type: 'Output',