From 6df52136c162c64fbd44ee7f3e5ab13c6e2a9f99 Mon Sep 17 00:00:00 2001 From: Lars Grammel Date: Thu, 28 Mar 2024 18:46:08 +0100 Subject: [PATCH] ai/core docs improvements (#1249) --- docs/pages/docs/ai-core/_meta.json | 1 + docs/pages/docs/ai-core/custom-provider.mdx | 94 +++++++++++++++++++++ docs/pages/docs/ai-core/index.mdx | 32 +++++++ docs/pages/docs/ai-core/tools.mdx | 2 +- 4 files changed, 128 insertions(+), 1 deletion(-) create mode 100644 docs/pages/docs/ai-core/custom-provider.mdx diff --git a/docs/pages/docs/ai-core/_meta.json b/docs/pages/docs/ai-core/_meta.json index fcf8aac36588..4cb227b1a09c 100644 --- a/docs/pages/docs/ai-core/_meta.json +++ b/docs/pages/docs/ai-core/_meta.json @@ -10,6 +10,7 @@ "tools": "Tools and Tool Calling", "openai": "OpenAI Provider", "mistral": "Mistral Provider", + "custom-provider": "Custom Providers", "generate-text": "generateText API", "stream-text": "streamText API", "generate-object": "generateObject API", diff --git a/docs/pages/docs/ai-core/custom-provider.mdx b/docs/pages/docs/ai-core/custom-provider.mdx new file mode 100644 index 000000000000..4d773cb4b843 --- /dev/null +++ b/docs/pages/docs/ai-core/custom-provider.mdx @@ -0,0 +1,94 @@ +--- +title: Custom Providers +--- + +import { Callout } from 'nextra-theme-docs'; + +# Custom Providers + + + The AI SDK Language Model Specification is experimental. It may change in the + future without a major version bump. + + +The AI SDK provides a language model specification. +You can write your own providers that adhere to the AI SDK language model specification and they will be compatible with the AI Core functions. + +You can find the Language Model Specification in the [AI SDK repository](https://github.com/vercel/ai/tree/main/packages/core/spec/language-model/v1). +It can be imported from `ai/spec`. 
We provide an [OpenAI reference implementation](https://github.com/vercel/ai/tree/main/packages/core/openai)
and a [Mistral reference implementation](https://github.com/vercel/ai/tree/main/packages/core/mistral).

## Provider Facade

A custom provider should follow the pattern of using a provider facade with factory methods for the specific providers.
An instance of the custom provider class with default settings can be exported for convenience.

```ts filename="custom-provider-facade.ts"
import { generateId, loadApiKey } from 'ai/spec';
import { CustomChatLanguageModel } from './custom-chat-language-model';
import { CustomChatModelId, CustomChatSettings } from './custom-chat-settings';

/**
 * Custom provider facade.
 */
export class CustomProvider {
  readonly baseUrl?: string;
  readonly apiKey?: string;

  constructor(
    options: {
      baseUrl?: string;
      apiKey?: string;
    } = {},
  ) {
    this.baseUrl = options.baseUrl;
    this.apiKey = options.apiKey;
  }

  private get baseConfig() {
    return {
      baseUrl: this.baseUrl ?? 'https://custom.ai/v1',
      headers: () => ({
        Authorization: `Bearer ${loadApiKey({
          apiKey: this.apiKey,
          environmentVariableName: 'CUSTOM_API_KEY',
          description: 'Custom Provider',
        })}`,
      }),
    };
  }

  chat(modelId: CustomChatModelId, settings: CustomChatSettings = {}) {
    return new CustomChatLanguageModel(modelId, settings, {
      provider: 'custom.chat',
      ...this.baseConfig,
    });
  }
}

/**
 * Default custom provider instance.
 */
export const customProvider = new CustomProvider();
```

## Language Model Implementation

Please refer to the Language Model Specification in the [AI SDK repository](https://github.com/vercel/ai/tree/main/packages/core/spec/language-model/v1).
We provide an [OpenAI reference implementation](https://github.com/vercel/ai/tree/main/packages/core/openai)
and a [Mistral reference implementation](https://github.com/vercel/ai/tree/main/packages/core/mistral).

### Errors

The AI SDK provides [standardized errors](https://github.com/vercel/ai/tree/main/packages/core/spec/errors) that should be used by providers where possible.
This will make it easy for users to debug them.

### Retries, timeouts, and abort signals

The AI SDK will handle retries, timeouts, and aborting requests in a unified way.
The model classes should not implement retries or timeouts themselves.
Instead, they should use the `abortSignal` parameter to determine when the call should be aborted, and they should throw `ApiCallError`s (or similar)
with a correct `isRetryable` flag when errors such as network errors occur.
diff --git a/docs/pages/docs/ai-core/index.mdx b/docs/pages/docs/ai-core/index.mdx
index b096989e1922..0ffa613f73a8 100644
--- a/docs/pages/docs/ai-core/index.mdx
+++ b/docs/pages/docs/ai-core/index.mdx
@@ -37,6 +37,36 @@ const { text } = await experimental_generateText({
});
```

+## Schema Specification and Validation with Zod

Tool usage and structured object generation require the specification of schemas.
The AI SDK uses [Zod](https://zod.dev/), the most popular JavaScript schema validation library, for schema specification and validation.

You can install Zod with

```sh
npm install zod
```

You can then easily specify schemas, for example:

```ts
const recipeSchema = z.object({
  recipe: z.object({
    name: z.string(),
    ingredients: z.array(
      z.object({
        name: z.string(),
        amount: z.string(),
      }),
    ),
    steps: z.array(z.string()),
  }),
});
```

Such schemas can be used to define parameters for tool calls and to generate structured objects with `generateObject` and `streamObject`.
+ ## Language Model Interface Providers need to provide an implementation of the language model interface to be compatible with the AI SDK. @@ -45,4 +75,6 @@ The AI SDK contains the following providers: - [OpenAI Provider](/docs/ai-core/openai) (`ai/openai`) - [Mistral Provider](/docs/ai-core/mistral) (`ai/mistral`) +The AI SDK also provides a [language model specification](https://github.com/vercel/ai/tree/main/packages/core/spec/language-model/v1) that you can use to implement [custom providers](/docs/ai-core/custom-provider). + ![AI SDK Diagram](/images/ai-sdk-diagram.png) diff --git a/docs/pages/docs/ai-core/tools.mdx b/docs/pages/docs/ai-core/tools.mdx index ff630a8b3fda..942d97e4e4a0 100644 --- a/docs/pages/docs/ai-core/tools.mdx +++ b/docs/pages/docs/ai-core/tools.mdx @@ -19,7 +19,7 @@ You define the tools and pass them into `generateText` or `streamText` using the AI Core tools are objects that implement the `ExperimentalTool` interface with the following properties: - **description**: An optional description of the tool. This information is provided to the language model and can influence when the tool is picked. -- **parameters**: A Zod schema that defines the parameters. It is converted to a JSON schema that is consumed by the LLM, and also used to validate the LLM tool calls. +- **parameters**: A [Zod](https://zod.dev/) schema that defines the parameters. It is converted to a JSON schema that is consumed by the LLM, and also used to validate the LLM tool calls. - **execute**: An optional async function that is called with the arguments from the tool call and produces a value of type `RESULT`. The `tools` parameter is an object that has the tool names as keys and the tools as values: