Skip to content

Commit

Permalink
feat: implement Claude 3, Claude3.5, Nova Pro, Nova Lite and Mistral …
Browse files Browse the repository at this point in the history
…model integration with AWS Bedrock (stackblitz-labs#974)

* feat: Integrate AWS Bedrock with Claude 3.5 Sonnet, Claude 3 Sonnet, and Claude 3.5 Haiku

* update Dockerfile for AWS Bedrock configuration

* feat: add new Bedrock model 'Mistral' and update Haiku to version 3

* feat: add new bedrock model Nova Lite and Nova Pro

* Update README documentation to reflect the latest changes

* Add the icon for aws bedrock

* add support for serialized AWS Bedrock configuration in api key
  • Loading branch information
kunjabijukchhe authored and apple committed Jan 9, 2025
1 parent 9cb31bc commit b321be6
Show file tree
Hide file tree
Showing 11 changed files with 1,304 additions and 5 deletions.
106 changes: 106 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
# Rename this file to .env once you have filled in the below environment variables!

# Get your GROQ API Key here -
# https://console.groq.com/keys
# You only need this environment variable set if you want to use Groq models
GROQ_API_KEY=

# Get your HuggingFace API Key here -
# https://huggingface.co/settings/tokens
# You only need this environment variable set if you want to use HuggingFace models
HuggingFace_API_KEY=


# Get your Open AI API Key by following these instructions -
# https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
# You only need this environment variable set if you want to use GPT models
OPENAI_API_KEY=

# Get your Anthropic API Key in your account settings -
# https://console.anthropic.com/settings/keys
# You only need this environment variable set if you want to use Claude models
ANTHROPIC_API_KEY=

# Get your OpenRouter API Key in your account settings -
# https://openrouter.ai/settings/keys
# You only need this environment variable set if you want to use OpenRouter models
OPEN_ROUTER_API_KEY=

# Get your Google Generative AI API Key by following these instructions -
# https://console.cloud.google.com/apis/credentials
# You only need this environment variable set if you want to use Google Generative AI models
GOOGLE_GENERATIVE_AI_API_KEY=

# You only need this environment variable set if you want to use Ollama models
# DONT USE http://localhost:11434 due to IPV6 issues
# USE EXAMPLE http://127.0.0.1:11434
OLLAMA_API_BASE_URL=

# You only need this environment variable set if you want to use OpenAI Like models
OPENAI_LIKE_API_BASE_URL=

# You only need this environment variable set if you want to use Together AI models
TOGETHER_API_BASE_URL=

# You only need this environment variable set if you want to use DeepSeek models through their API
DEEPSEEK_API_KEY=

# Get your OpenAI Like API Key
OPENAI_LIKE_API_KEY=

# Get your Together API Key
TOGETHER_API_KEY=

# You only need this environment variable set if you want to use Hyperbolic models
# Get your Hyperbolic API key at https://app.hyperbolic.xyz/settings
# baseURL="https://api.hyperbolic.xyz/v1/chat/completions"
HYPERBOLIC_API_KEY=
HYPERBOLIC_API_BASE_URL=

# Get your Mistral API Key by following these instructions -
# https://console.mistral.ai/api-keys/
# You only need this environment variable set if you want to use Mistral models
MISTRAL_API_KEY=

# Get your Cohere API key by following these instructions -
# https://dashboard.cohere.com/api-keys
# You only need this environment variable set if you want to use Cohere models
COHERE_API_KEY=

# Get LMStudio Base URL from LM Studio Developer Console
# Make sure to enable CORS
# DONT USE http://localhost:1234 due to IPV6 issues
# Example: http://127.0.0.1:1234
LMSTUDIO_API_BASE_URL=

# Get your xAI API key
# https://x.ai/api
# You only need this environment variable set if you want to use xAI models
XAI_API_KEY=

# Get your Perplexity API Key here -
# https://www.perplexity.ai/settings/api
# You only need this environment variable set if you want to use Perplexity models
PERPLEXITY_API_KEY=

# Get your AWS configuration
# https://console.aws.amazon.com/iam/home
# The JSON should include the following keys:
# - region: The AWS region where Bedrock is available.
# - accessKeyId: Your AWS access key ID.
# - secretAccessKey: Your AWS secret access key.
# - sessionToken (optional): Temporary session token if using an IAM role or temporary credentials.
# Example JSON:
# {"region": "us-east-1", "accessKeyId": "yourAccessKeyId", "secretAccessKey": "yourSecretAccessKey", "sessionToken": "yourSessionToken"}
AWS_BEDROCK_CONFIG=

# Include this environment variable if you want more logging for debugging locally
VITE_LOG_LEVEL=debug

# Example Context Values for qwen2.5-coder:32b
#
# DEFAULT_NUM_CTX=32768 # Consumes 36GB of VRAM
# DEFAULT_NUM_CTX=24576 # Consumes 32GB of VRAM
# DEFAULT_NUM_CTX=12288 # Consumes 26GB of VRAM
# DEFAULT_NUM_CTX=6144 # Consumes 24GB of VRAM
DEFAULT_NUM_CTX=
3 changes: 3 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ ARG OLLAMA_API_BASE_URL
ARG XAI_API_KEY
ARG TOGETHER_API_KEY
ARG TOGETHER_API_BASE_URL
ARG AWS_BEDROCK_CONFIG
ARG VITE_LOG_LEVEL=debug
ARG DEFAULT_NUM_CTX

Expand All @@ -42,6 +43,7 @@ ENV WRANGLER_SEND_METRICS=false \
XAI_API_KEY=${XAI_API_KEY} \
TOGETHER_API_KEY=${TOGETHER_API_KEY} \
TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG} \
VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}

Expand Down Expand Up @@ -80,6 +82,7 @@ ENV GROQ_API_KEY=${GROQ_API_KEY} \
XAI_API_KEY=${XAI_API_KEY} \
TOGETHER_API_KEY=${TOGETHER_API_KEY} \
TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG} \
VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}

Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
- ⬜ Voice prompting
- ⬜ Azure Open AI API Integration
- ✅ Perplexity Integration (@meetpateltech)
- ✅ AWS Bedrock Integration (@kunjabijukchhe)
- ⬜ Vertex AI Integration

## Features
Expand Down
1 change: 1 addition & 0 deletions app/components/settings/data/DataTab.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ const API_KEY_PROVIDERS = [
'Perplexity',
'Cohere',
'AzureOpenAI',
'AmazonBedrock',
] as const;

interface ApiKeys {
Expand Down
113 changes: 113 additions & 0 deletions app/lib/modules/llm/providers/amazon-bedrock.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
import { BaseProvider } from '~/lib/modules/llm/base-provider';
import type { ModelInfo } from '~/lib/modules/llm/types';
import type { LanguageModelV1 } from 'ai';
import type { IProviderSetting } from '~/types/model';
import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';

/**
 * Shape of the serialized AWS Bedrock credential bundle stored in the
 * AWS_BEDROCK_CONFIG "API key" slot (a JSON string — see .env.example).
 */
interface AWSBedRockConfig {
  region: string;
  accessKeyId: string;
  secretAccessKey: string;
  /** Only required when using temporary credentials (e.g. an assumed IAM role). */
  sessionToken?: string;
}

/**
 * LLM provider backed by AWS Bedrock via `@ai-sdk/amazon-bedrock`.
 *
 * Unlike most providers, the "API key" here is not a single token but a
 * JSON-serialized {@link AWSBedRockConfig}, so it is parsed and validated
 * before constructing the Bedrock client.
 */
export default class AmazonBedrockProvider extends BaseProvider {
  name = 'AmazonBedrock';
  getApiKeyLink = 'https://console.aws.amazon.com/iam/home';

  config = {
    apiTokenKey: 'AWS_BEDROCK_CONFIG',
  };

  // Bedrock model IDs with conservative per-request output-token caps.
  staticModels: ModelInfo[] = [
    {
      name: 'anthropic.claude-3-5-sonnet-20240620-v1:0',
      label: 'Claude 3.5 Sonnet (Bedrock)',
      provider: 'AmazonBedrock',
      maxTokenAllowed: 4096,
    },
    {
      name: 'anthropic.claude-3-sonnet-20240229-v1:0',
      label: 'Claude 3 Sonnet (Bedrock)',
      provider: 'AmazonBedrock',
      maxTokenAllowed: 4096,
    },
    {
      name: 'anthropic.claude-3-haiku-20240307-v1:0',
      label: 'Claude 3 Haiku (Bedrock)',
      provider: 'AmazonBedrock',
      maxTokenAllowed: 4096,
    },
    {
      name: 'amazon.nova-pro-v1:0',
      label: 'Amazon Nova Pro (Bedrock)',
      provider: 'AmazonBedrock',
      maxTokenAllowed: 5120,
    },
    {
      name: 'amazon.nova-lite-v1:0',
      label: 'Amazon Nova Lite (Bedrock)',
      provider: 'AmazonBedrock',
      maxTokenAllowed: 5120,
    },
    {
      name: 'mistral.mistral-large-2402-v1:0',
      label: 'Mistral Large 24.02 (Bedrock)',
      provider: 'AmazonBedrock',
      maxTokenAllowed: 8192,
    },
  ];

  /**
   * Parses the serialized configuration string and validates that all
   * required credential fields are present.
   *
   * @param apiKey JSON string expected to encode an {@link AWSBedRockConfig}.
   * @returns A validated config object (sessionToken included only when set).
   * @throws Error with a user-facing message when the string is not valid
   *         JSON, is not a JSON object, or is missing required fields.
   */
  private _parseAndValidateConfig(apiKey: string): AWSBedRockConfig {
    let parsedConfig: unknown;

    try {
      parsedConfig = JSON.parse(apiKey);
    } catch {
      throw new Error(
        'Invalid AWS Bedrock configuration format. Please provide a valid JSON string containing region, accessKeyId, and secretAccessKey.',
      );
    }

    // JSON.parse also accepts primitives such as `null`, `"text"`, or `42`.
    // Destructuring `null` would throw an unhelpful TypeError, so reject
    // any non-object payload with the same friendly message.
    if (typeof parsedConfig !== 'object' || parsedConfig === null) {
      throw new Error(
        'Invalid AWS Bedrock configuration format. Please provide a valid JSON string containing region, accessKeyId, and secretAccessKey.',
      );
    }

    const { region, accessKeyId, secretAccessKey, sessionToken } = parsedConfig as AWSBedRockConfig;

    if (!region || !accessKeyId || !secretAccessKey) {
      throw new Error(
        'Missing required AWS credentials. Configuration must include region, accessKeyId, and secretAccessKey.',
      );
    }

    // Spread sessionToken conditionally so the returned object omits the
    // key entirely (rather than carrying `sessionToken: undefined`).
    return {
      region,
      accessKeyId,
      secretAccessKey,
      ...(sessionToken && { sessionToken }),
    };
  }

  /**
   * Builds a Bedrock-backed language model instance for the given model ID.
   *
   * @param options.model   Bedrock model ID (one of {@link staticModels}).
   * @param options.apiKeys Per-provider key map; this provider's entry holds
   *                        the serialized AWS config JSON.
   * @throws Error when no configuration is found or it fails validation.
   */
  getModelInstance(options: {
    model: string;
    serverEnv: any;
    apiKeys?: Record<string, string>;
    providerSettings?: Record<string, IProviderSetting>;
  }): LanguageModelV1 {
    const { model, serverEnv, apiKeys, providerSettings } = options;

    const { apiKey } = this.getProviderBaseUrlAndKey({
      apiKeys,
      providerSettings: providerSettings?.[this.name],
      serverEnv: serverEnv as any,
      defaultBaseUrlKey: '',
      // Reference the single source of truth instead of repeating the literal.
      defaultApiTokenKey: this.config.apiTokenKey,
    });

    if (!apiKey) {
      throw new Error(`Missing API key for ${this.name} provider`);
    }

    const config = this._parseAndValidateConfig(apiKey);
    const bedrock = createAmazonBedrock(config);

    return bedrock(model);
  }
}
2 changes: 2 additions & 0 deletions app/lib/modules/llm/registry.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import PerplexityProvider from './providers/perplexity';
import TogetherProvider from './providers/together';
import XAIProvider from './providers/xai';
import HyperbolicProvider from './providers/hyperbolic';
import AmazonBedrockProvider from './providers/amazon-bedrock';

export {
AnthropicProvider,
Expand All @@ -32,4 +33,5 @@ export {
XAIProvider,
TogetherProvider,
LMStudioProvider,
AmazonBedrockProvider,
};
2 changes: 2 additions & 0 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ services:
- XAI_API_KEY=${XAI_API_KEY}
- TOGETHER_API_KEY=${TOGETHER_API_KEY}
- TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL}
- AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG}
- VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
- DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
- RUNNING_IN_DOCKER=true
Expand Down Expand Up @@ -54,6 +55,7 @@ services:
- OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
- TOGETHER_API_KEY=${TOGETHER_API_KEY}
- TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL}
- AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG}
- VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
- DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
- RUNNING_IN_DOCKER=true
Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
"@ai-sdk/google": "^0.0.52",
"@ai-sdk/mistral": "^0.0.43",
"@ai-sdk/openai": "^0.0.66",
"@ai-sdk/amazon-bedrock": "1.0.6",
"@codemirror/autocomplete": "^6.18.3",
"@codemirror/commands": "^6.7.1",
"@codemirror/lang-cpp": "^6.0.2",
Expand Down
Loading

0 comments on commit b321be6

Please sign in to comment.