diff --git a/docs/cli/commands.md b/docs/cli/commands.md
index 16cb08f..445d72b 100644
--- a/docs/cli/commands.md
+++ b/docs/cli/commands.md
@@ -153,6 +153,31 @@ Slash commands provide meta-level control over the CLI itself.
   - **User-level:** `~/.blackboxcli/agents/` (personal agents, available across projects)
   - **Note:** For detailed information on creating and managing subagents, see the [Subagents documentation](../subagents.md).
 
+- **`/datasource`**
+  - **Description:** Manage data source configurations for MongoDB and Supabase databases. Configure named data sources to use with the `/data` command.
+  - **Sub-commands:**
+    - **`create`**:
+      - **Description:** Launch an interactive wizard to create a new data source configuration. The wizard guides you through selecting the database type (MongoDB or Supabase), naming the data source, and entering connection credentials.
+      - **Usage:** `/datasource create`
+    - **`manage`**:
+      - **Description:** Open an interactive management dialog to view and delete existing data source configurations.
+      - **Usage:** `/datasource manage`
+  - **Storage Location:**
+    - **Project-level:** `.blackboxcli/data-configs/` (shared with team)
+    - **Note:** Data source configurations are stored as JSON files and contain connection credentials in plain text, so be mindful of security when sharing project directories.
+
+- **`/data`**
+  - **Description:** Query a configured data source using natural language. The AI automatically uses the appropriate database tool (`mongo_find` or `supabase_select`) with the configured credentials.
+  - **Usage:** `/data <datasource-name> "query text"`
+  - **Examples:**
+    - `/data movies "find me all movies released in 2025"`
+    - `/data user-profiles "show me users with status active"`
+  - **Details:**
+    - The command requires a data source name followed by a query string in quotes.
+    - The data source must be configured with `/datasource create` first.
+    - The AI converts your natural-language query into the appropriate database query parameters.
+    - Tab completion is supported for data source names.
+
 - [**`/tools`**](../tools/index.md)
   - **Description:** Display a list of tools that are currently available within Blackbox Code.
- **Usage:** `/tools [desc]` diff --git a/packages/cli/src/services/BuiltinCommandLoader.ts b/packages/cli/src/services/BuiltinCommandLoader.ts index 67dfac2..accb937 100644 --- a/packages/cli/src/services/BuiltinCommandLoader.ts +++ b/packages/cli/src/services/BuiltinCommandLoader.ts @@ -38,6 +38,8 @@ import { themeCommand } from '../ui/commands/themeCommand.js'; import { toolsCommand } from '../ui/commands/toolsCommand.js'; import { vimCommand } from '../ui/commands/vimCommand.js'; import { setupGithubCommand } from '../ui/commands/setupGithubCommand.js'; +import { datasourceCommand } from '../ui/commands/datasourceCommand.js'; +import { dataCommand } from '../ui/commands/dataCommand.js'; /** * Loads the core, hard-coded slash commands that are an integral part @@ -87,6 +89,8 @@ export class BuiltinCommandLoader implements ICommandLoader { vimCommand, setupGithubCommand, terminalSetupCommand, + datasourceCommand, + dataCommand, ]; return allDefinitions.filter((cmd): cmd is SlashCommand => cmd !== null); diff --git a/packages/cli/src/ui/App.tsx b/packages/cli/src/ui/App.tsx index 053a5c4..fa6c0ca 100644 --- a/packages/cli/src/ui/App.tsx +++ b/packages/cli/src/ui/App.tsx @@ -35,6 +35,8 @@ import { useDialogClose } from './hooks/useDialogClose.js'; import { useSlashCommandProcessor } from './hooks/slashCommandProcessor.js'; import { useSubagentCreateDialog } from './hooks/useSubagentCreateDialog.js'; import { useAgentsManagerDialog } from './hooks/useAgentsManagerDialog.js'; +import { useDatasourceCreateDialog } from './hooks/useDatasourceCreateDialog.js'; +import { useDatasourceManagerDialog } from './hooks/useDatasourceManagerDialog.js'; import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js'; import { useMessageQueue } from './hooks/useMessageQueue.js'; import { useConsoleMessages } from './hooks/useConsoleMessages.js'; @@ -69,6 +71,10 @@ import { AgentCreationWizard, AgentsManagerDialog, } from './components/subagents/index.js'; +import { + DatasourceCreationWizard, + DatasourceManagerDialog, +} from './components/datasources/index.js'; import { Colors } from './colors.js'; import { loadHierarchicalGeminiMemory } from '../config/config.js'; import type { LoadedSettings } from '../config/settings.js'; @@ -338,6 +344,18 @@ const App = ({ config, settings, startupWarnings = [], version }: AppProps) => { closeAgentsManagerDialog, } = useAgentsManagerDialog(); + const { + isDatasourceCreateDialogOpen, + openDatasourceCreateDialog, + closeDatasourceCreateDialog, + } = useDatasourceCreateDialog(); + + const { + isDatasourceManagerDialogOpen, + openDatasourceManagerDialog, + closeDatasourceManagerDialog, + } = useDatasourceManagerDialog(); + const { isFolderTrustDialogOpen, handleFolderTrustSelect, isRestarting } = useFolderTrust(settings, setIsTrustedFolder); @@ -785,6 +803,8 @@ const App = ({ config, settings, startupWarnings = [], version }: AppProps) => { handleModelSelectionOpen, openSubagentCreateDialog, openAgentsManagerDialog, + openDatasourceCreateDialog, + openDatasourceManagerDialog, openHistoryBrowser, toggleVimEnabled, setIsProcessing, @@ -1425,6 +1445,20 @@ const App = ({ config, settings, startupWarnings = [], version }: AppProps) => { config={config} /> + ) : isDatasourceCreateDialogOpen ? ( + + + + ) : isDatasourceManagerDialogOpen ? ( + + + ) : isHistoryBrowserOpen && logger ? 
( "query text" + */ +function parseDataCommand(args: string): { + dataSourceName: string; + query: string; +} | null { + const trimmed = args.trim(); + if (!trimmed) { + return null; + } + + // Try to find a quoted string (the query) + // Match: name "query" or name 'query' + const quotedMatch = trimmed.match(/^([^\s"']+)\s+(["'])(.*)\2$/s); + if (quotedMatch) { + return { + dataSourceName: quotedMatch[1], + query: quotedMatch[3], + }; + } + + // If no quotes, try splitting on first space and treat rest as query + const spaceIndex = trimmed.indexOf(' '); + if (spaceIndex > 0) { + return { + dataSourceName: trimmed.substring(0, spaceIndex), + query: trimmed.substring(spaceIndex + 1), + }; + } + + // Just the data source name, no query + return { + dataSourceName: trimmed, + query: '', + }; +} + +/** + * Parses a MongoDB URI to extract the database name. + * Supports formats like: + * - mongodb://localhost:27017/dbname + * - mongodb://user:pass@host:port/dbname + * - mongodb+srv://user:pass@cluster.mongodb.net/dbname?options + */ +function parseMongoDBDatabaseName(uri: string): string | null { + try { + // For mongodb+srv, we need to handle it differently since URL doesn't parse it well + if (uri.startsWith('mongodb+srv://')) { + // Extract path after the domain, e.g., mongodb+srv://cluster.net/dbname?options + const match = uri.match(/mongodb\+srv:\/\/[^/]+\/([^/?]+)/); + if (match && match[1]) { + return match[1]; + } + return null; + } + + // For regular mongodb:// URIs, try parsing as URL + // Replace mongodb:// with http:// for URL parsing + const httpUri = uri.replace(/^mongodb:/, 'http:'); + const url = new URL(httpUri); + // Get the pathname (e.g., "/dbname" or "/dbname?options") + const pathname = url.pathname; + // Remove leading slash and any query parameters or additional path segments + const dbName = pathname.replace(/^\/+/, '').split('?')[0].split('/')[0].trim(); + return dbName || null; + } catch { + // Fallback: try to extract database name manually using regex + // Match pattern: /dbname or /dbname?options (but not after // which is the protocol) + // Look for /dbname after the host part + const match = uri.match(/(?:\/\/[^/]+\/)([^/?]+)(?:\?|$)/); + if (match && match[1]) { + return match[1]; + } + return null; + } +} + +/** + * Creates a context-aware prompt that instructs the AI to use the specified data source. + */ +function createDataQueryPrompt( + dataConfig: DataConfig, + userQuery: string, +): string { + let dbNameHint = ''; + let dbName = null; + + if (dataConfig.type === 'mongodb') { + const uri = (dataConfig.credentials as { uri: string }).uri; + dbName = parseMongoDBDatabaseName(uri); + if (dbName) { + dbNameHint = `\n\nCRITICAL: The database name extracted from the MongoDB URI is "${dbName}". You MUST use "${dbName}" as the \`db\` parameter value. Do NOT use "${dataConfig.name}" as the database name - "${dataConfig.name}" is only the data source configuration name for identifying which credentials to use.`; + } + } + + const dataSourceInfo = + dataConfig.type === 'mongodb' + ? `MongoDB database. Use the mongo_find tool with the dataSource parameter set to "${dataConfig.name}".` + : `Supabase database. Use the supabase_select tool with the dataSource parameter set to "${dataConfig.name}".`; + + const toolName = + dataConfig.type === 'mongodb' ? 
'mongo_find' : 'supabase_select'; + + if (!userQuery) { + return `You have access to a configured data source named "${dataConfig.name}" which is a ${dataSourceInfo}${dbNameHint} + +Please help the user query this data source. Ask them what they would like to know.`; + } + + const dbParamHint = dbName + ? `\n\nWhen calling ${toolName}, use these parameters: +- \`dataSource\`: "${dataConfig.name}" (the configuration name) +- \`db\`: "${dbName}" (the actual database name from the URI, NOT "${dataConfig.name}")` + : `\n\nWhen calling ${toolName}, use: +- \`dataSource\`: "${dataConfig.name}"`; + + return `The user wants to query their configured data source "${dataConfig.name}" which is a ${dataSourceInfo}${dbNameHint} + +User query: "${userQuery}" + +Please use the ${toolName} tool to execute this query. Convert the natural language query into appropriate tool parameters.${dbParamHint} + +Important: Always include the dataSource parameter with value "${dataConfig.name}" when calling the tool.`; +} + +export const dataCommand: SlashCommand = { + name: 'data', + description: + 'Query a configured data source. Usage: /data "query text"', + kind: CommandKind.BUILT_IN, + action: async ( + context: CommandContext, + args: string, + ): Promise => { + const { services } = context; + const { config } = services; + + if (!config) { + return { + type: 'message', + messageType: 'error', + content: 'Configuration not available.', + } as MessageActionReturn; + } + + // Parse command arguments + const parsed = parseDataCommand(args); + if (!parsed) { + return { + type: 'message', + messageType: 'error', + content: + 'Invalid command format. Usage: /data "query text"', + } as MessageActionReturn; + } + + const { dataSourceName, query } = parsed; + + if (!dataSourceName) { + return { + type: 'message', + messageType: 'error', + content: 'Data source name is required.', + } as MessageActionReturn; + } + + // Load the data configuration + try { + const dataConfigManager = config.getDataConfigManager(); + const dataConfig = await dataConfigManager.loadDataConfig(dataSourceName); + + if (!dataConfig) { + return { + type: 'message', + messageType: 'error', + content: `Data source "${dataSourceName}" not found. Please configure it using /datasource create.`, + } as MessageActionReturn; + } + + // Create the context-aware prompt + const prompt = createDataQueryPrompt(dataConfig, query); + + // Submit the prompt to the AI + return { + type: 'submit_prompt', + content: prompt, + } as SubmitPromptActionReturn; + } catch (error) { + return { + type: 'message', + messageType: 'error', + content: `Error loading data source configuration: ${error instanceof Error ? 
error.message : 'Unknown error'}`, + } as MessageActionReturn; + } + }, + completion: async ( + context: CommandContext, + partialArg: string, + ): Promise => { + const { services } = context; + const { config } = services; + + if (!config) { + return []; + } + + try { + const dataConfigManager = config.getDataConfigManager(); + const dataConfigs = await dataConfigManager.listDataConfigs(); + + // Filter by partial match and return just the names + const matches = dataConfigs + .filter((dc) => dc.name.startsWith(partialArg)) + .map((dc) => dc.name); + + return matches; + } catch { + return []; + } + }, +}; + diff --git a/packages/cli/src/ui/commands/datasourceCommand.ts b/packages/cli/src/ui/commands/datasourceCommand.ts new file mode 100644 index 0000000..8aea48c --- /dev/null +++ b/packages/cli/src/ui/commands/datasourceCommand.ts @@ -0,0 +1,38 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import { + CommandKind, + type SlashCommand, + type OpenDialogActionReturn, +} from './types.js'; + +export const datasourceCommand: SlashCommand = { + name: 'datasource', + description: 'Manage data source configurations for MongoDB and Supabase.', + kind: CommandKind.BUILT_IN, + subCommands: [ + { + name: 'create', + description: 'Create a new data source configuration.', + kind: CommandKind.BUILT_IN, + action: (): OpenDialogActionReturn => ({ + type: 'dialog', + dialog: 'datasource_create', + }), + }, + { + name: 'manage', + description: 'Manage existing data source configurations (view, edit, delete).', + kind: CommandKind.BUILT_IN, + action: (): OpenDialogActionReturn => ({ + type: 'dialog', + dialog: 'datasource_list', + }), + }, + ], +}; + diff --git a/packages/cli/src/ui/commands/types.ts b/packages/cli/src/ui/commands/types.ts index acef56d..f1e9acc 100644 --- a/packages/cli/src/ui/commands/types.ts +++ b/packages/cli/src/ui/commands/types.ts @@ -119,7 +119,9 @@ export interface OpenDialogActionReturn { | 'model' | 'subagent_create' | 'subagent_list' - | 'history_browser'; + | 'history_browser' + | 'datasource_create' + | 'datasource_list'; } /** diff --git a/packages/cli/src/ui/components/datasources/create/DatasourceCreationWizard.tsx b/packages/cli/src/ui/components/datasources/create/DatasourceCreationWizard.tsx new file mode 100644 index 0000000..8e1e591 --- /dev/null +++ b/packages/cli/src/ui/components/datasources/create/DatasourceCreationWizard.tsx @@ -0,0 +1,254 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import { useState, useCallback } from 'react'; +import { Box, Text } from 'ink'; +import { RadioButtonSelect } from '../../shared/RadioButtonSelect.js'; +import { TextInput } from '../../shared/TextInput.js'; +import { Colors } from '../../../colors.js'; +import type { DataSourceType, DataConfig } from '@blackbox_ai/blackbox-cli-core'; +import type { Config } from '@blackbox_ai/blackbox-cli-core'; +import { useKeypress } from '../../../hooks/useKeypress.js'; + +interface DatasourceCreationWizardProps { + onClose: () => void; + config: Config | null; +} + +type WizardStep = 'type' | 'name' | 'credentials' | 'summary'; + +export function DatasourceCreationWizard({ + onClose, + config, +}: DatasourceCreationWizardProps) { + const [step, setStep] = useState('type'); + const [dataSourceType, setDataSourceType] = useState( + null, + ); + const [name, setName] = useState(''); + const [uri, setUri] = useState(''); + const [url, setUrl] = useState(''); + const [key, setKey] = 
useState(''); + const [error, setError] = useState(null); + const [isSaving, setIsSaving] = useState(false); + + const handleTypeSelect = useCallback( + (selectedValue: string) => { + setDataSourceType(selectedValue as DataSourceType); + setStep('name'); + }, + [], + ); + + const handleNameSubmit = useCallback(() => { + if (!name.trim()) { + setError('Name is required'); + return; + } + if (!/^[a-zA-Z0-9_-]+$/.test(name.trim())) { + setError( + 'Name must contain only alphanumeric characters, dashes, and underscores', + ); + return; + } + setError(null); + setStep('credentials'); + }, [name]); + + const handleCredentialsSubmit = useCallback(() => { + if (dataSourceType === 'mongodb') { + if (!uri.trim()) { + setError('MongoDB URI is required'); + return; + } + } else if (dataSourceType === 'supabase') { + if (!url.trim() || !key.trim()) { + setError('Supabase URL and Key are required'); + return; + } + } + setError(null); + setStep('summary'); + }, [dataSourceType, uri, url, key]); + + const handleSave = useCallback(async () => { + if (!config || !dataSourceType) return; + + setIsSaving(true); + setError(null); + + try { + const dataConfigManager = config.getDataConfigManager(); + + const credentials = + dataSourceType === 'mongodb' + ? { uri: uri.trim() } + : { url: url.trim(), key: key.trim() }; + + const dataConfig: DataConfig = { + name: name.trim(), + type: dataSourceType, + credentials, + filePath: '', // Will be set by manager + }; + + await dataConfigManager.createDataConfig(dataConfig); + onClose(); + } catch (err) { + setError( + err instanceof Error ? err.message : 'Failed to save data source', + ); + } finally { + setIsSaving(false); + } + }, [config, dataSourceType, name, uri, url, key, onClose]); + + useKeypress( + (key) => { + if (key.name === 'escape') { + if (step === 'type') { + onClose(); + } else if (step === 'name') { + setStep('type'); + } else if (step === 'credentials') { + setStep('name'); + } else if (step === 'summary') { + setStep('credentials'); + } + } + }, + { isActive: true }, + ); + + const renderStep = () => { + switch (step) { + case 'type': + return ( + + + Select the type of data source you want to configure: + + + + ); + + case 'name': + return ( + + + Enter a name for this data source (alphanumeric, dashes, underscores only): + + {error && {error}} + + + ); + + case 'credentials': + return ( + + {dataSourceType === 'mongodb' ? ( + <> + + Enter your MongoDB connection URI: + + {error && {error}} + + + ) : ( + <> + + Enter your Supabase project URL: + + {}} + placeholder="https://your-project.supabase.co" + height={1} + isActive={true} + /> + Enter your Supabase API key: + {error && {error}} + + + )} + + ); + + case 'summary': + return ( + + Review your data source configuration: + Name: {name} + Type: {dataSourceType} + {dataSourceType === 'mongodb' ? ( + URI: {uri.substring(0, 50)}... + ) : ( + <> + URL: {url} + Key: {key.substring(0, 20)}... 
+ + )} + {error && {error}} + + Press Enter to save, or ESC to go back + + + ); + } + }; + + useKeypress( + (key) => { + if (key.name === 'return' && step === 'summary' && !isSaving) { + void handleSave(); + } + }, + { isActive: step === 'summary' }, + ); + + return ( + + + Create Data Source Configuration + + {renderStep()} + + ); +} + diff --git a/packages/cli/src/ui/components/datasources/index.ts b/packages/cli/src/ui/components/datasources/index.ts new file mode 100644 index 0000000..0acb6f8 --- /dev/null +++ b/packages/cli/src/ui/components/datasources/index.ts @@ -0,0 +1,12 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +// Creation Wizard +export { DatasourceCreationWizard } from './create/DatasourceCreationWizard.js'; + +// Management Dialog +export { DatasourceManagerDialog } from './manage/DatasourceManagerDialog.js'; + diff --git a/packages/cli/src/ui/components/datasources/manage/DatasourceManagerDialog.tsx b/packages/cli/src/ui/components/datasources/manage/DatasourceManagerDialog.tsx new file mode 100644 index 0000000..1be4642 --- /dev/null +++ b/packages/cli/src/ui/components/datasources/manage/DatasourceManagerDialog.tsx @@ -0,0 +1,166 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import { useState, useCallback, useEffect } from 'react'; +import { Box, Text } from 'ink'; +import { RadioButtonSelect } from '../../shared/RadioButtonSelect.js'; +import { Colors } from '../../../colors.js'; +import type { DataConfig, Config } from '@blackbox_ai/blackbox-cli-core'; +import { useKeypress } from '../../../hooks/useKeypress.js'; + +interface DatasourceManagerDialogProps { + onClose: () => void; + config: Config | null; +} + +type ManagementStep = 'list' | 'view'; + +export function DatasourceManagerDialog({ + onClose, + config, +}: DatasourceManagerDialogProps) { + const [dataSources, setDataSources] = useState([]); + const [selectedIndex, setSelectedIndex] = useState(-1); + const [step, setStep] = useState('list'); + const [error, setError] = useState(null); + const [isDeleting, setIsDeleting] = useState(false); + + const loadDataSources = useCallback(async () => { + if (!config) return; + + try { + const dataConfigManager = config.getDataConfigManager(); + const configs = await dataConfigManager.listDataConfigs(true); + setDataSources(configs); + setError(null); + } catch (err) { + setError( + err instanceof Error ? err.message : 'Failed to load data sources', + ); + } + }, [config]); + + useEffect(() => { + void loadDataSources(); + }, [loadDataSources]); + + const handleSelect = useCallback( + (index: number) => { + setSelectedIndex(index); + setStep('view'); + }, + [], + ); + + const handleDelete = useCallback(async () => { + if (!config || selectedIndex < 0) return; + + const selected = dataSources[selectedIndex]; + if (!selected) return; + + setIsDeleting(true); + setError(null); + + try { + const dataConfigManager = config.getDataConfigManager(); + await dataConfigManager.deleteDataConfig(selected.name); + await loadDataSources(); + setSelectedIndex(-1); + setStep('list'); + } catch (err) { + setError( + err instanceof Error ? 
err.message : 'Failed to delete data source', + ); + } finally { + setIsDeleting(false); + } + }, [config, selectedIndex, dataSources, loadDataSources]); + + useKeypress( + (key) => { + if (key.name === 'escape') { + if (step === 'list') { + onClose(); + } else if (step === 'view') { + setStep('list'); + setSelectedIndex(-1); + } + } + if (key.name === 'd' && step === 'view' && !isDeleting) { + void handleDelete(); + } + }, + { isActive: true }, + ); + + if (step === 'list') { + if (dataSources.length === 0) { + return ( + + + Data Source Manager + + + No data sources configured. Use /datasource create to add one. + + + ); + } + + return ( + + + Data Source Manager + + Select a data source to view or delete: + {error && {error}} + ({ + label: `${ds.name} (${ds.type})`, + value: ds.name, + }))} + initialIndex={0} + onSelect={(value, index) => handleSelect(index)} + isFocused={true} + /> + + ); + } + + const selected = dataSources[selectedIndex]; + if (!selected) { + return null; + } + + return ( + + + Data Source: {selected.name} + + Type: {selected.type} + {selected.type === 'mongodb' ? ( + + URI:{' '} + {(selected.credentials as { uri: string }).uri.substring(0, 50)}... + + ) : ( + <> + URL: {(selected.credentials as { url: string }).url} + + Key: {(selected.credentials as { key: string }).key.substring(0, 20)} + ... + + + )} + {error && {error}} + {isDeleting && Deleting...} + + Press 'd' to delete, or ESC to go back + + + ); +} + diff --git a/packages/cli/src/ui/hooks/slashCommandProcessor.ts b/packages/cli/src/ui/hooks/slashCommandProcessor.ts index 459b9a9..0d613ef 100644 --- a/packages/cli/src/ui/hooks/slashCommandProcessor.ts +++ b/packages/cli/src/ui/hooks/slashCommandProcessor.ts @@ -56,6 +56,8 @@ export const useSlashCommandProcessor = ( openModelSelectionDialog: () => void, openSubagentCreateDialog: () => void, openAgentsManagerDialog: () => void, + openDatasourceCreateDialog: () => void, + openDatasourceManagerDialog: () => void, openHistoryBrowser: () => void, toggleVimEnabled: () => Promise, setIsProcessing: (isProcessing: boolean) => void, @@ -415,6 +417,12 @@ export const useSlashCommandProcessor = ( case 'subagent_list': openAgentsManagerDialog(); return { type: 'handled' }; + case 'datasource_create': + openDatasourceCreateDialog(); + return { type: 'handled' }; + case 'datasource_list': + openDatasourceManagerDialog(); + return { type: 'handled' }; case 'history_browser': openHistoryBrowser(); return { type: 'handled' }; @@ -667,6 +675,8 @@ export const useSlashCommandProcessor = ( openSettingsDialog, openSubagentCreateDialog, openAgentsManagerDialog, + openDatasourceCreateDialog, + openDatasourceManagerDialog, openHistoryBrowser, setShellConfirmationRequest, setSessionShellAllowlist, diff --git a/packages/cli/src/ui/hooks/useDatasourceCreateDialog.ts b/packages/cli/src/ui/hooks/useDatasourceCreateDialog.ts new file mode 100644 index 0000000..0b0db2e --- /dev/null +++ b/packages/cli/src/ui/hooks/useDatasourceCreateDialog.ts @@ -0,0 +1,27 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import { useState, useCallback } from 'react'; + +export function useDatasourceCreateDialog() { + const [isDatasourceCreateDialogOpen, setIsDatasourceCreateDialogOpen] = + useState(false); + + const openDatasourceCreateDialog = useCallback(() => { + setIsDatasourceCreateDialogOpen(true); + }, []); + + const closeDatasourceCreateDialog = useCallback(() => { + setIsDatasourceCreateDialogOpen(false); + }, []); + + return { + 
isDatasourceCreateDialogOpen, + openDatasourceCreateDialog, + closeDatasourceCreateDialog, + }; +} + diff --git a/packages/cli/src/ui/hooks/useDatasourceManagerDialog.ts b/packages/cli/src/ui/hooks/useDatasourceManagerDialog.ts new file mode 100644 index 0000000..3ce5206 --- /dev/null +++ b/packages/cli/src/ui/hooks/useDatasourceManagerDialog.ts @@ -0,0 +1,34 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import { useState, useCallback } from 'react'; + +export interface UseDatasourceManagerDialogReturn { + isDatasourceManagerDialogOpen: boolean; + openDatasourceManagerDialog: () => void; + closeDatasourceManagerDialog: () => void; +} + +export const useDatasourceManagerDialog = + (): UseDatasourceManagerDialogReturn => { + const [isDatasourceManagerDialogOpen, setIsDatasourceManagerDialogOpen] = + useState(false); + + const openDatasourceManagerDialog = useCallback(() => { + setIsDatasourceManagerDialogOpen(true); + }, []); + + const closeDatasourceManagerDialog = useCallback(() => { + setIsDatasourceManagerDialogOpen(false); + }, []); + + return { + isDatasourceManagerDialogOpen, + openDatasourceManagerDialog, + closeDatasourceManagerDialog, + }; + }; + diff --git a/packages/core/package.json b/packages/core/package.json index e5ac496..a63d65e 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -56,7 +56,9 @@ "jsonrepair": "^3.13.0", "marked": "^15.0.12", "mnemonist": "^0.40.3", + "mongodb": "^6.3.0", "open": "^10.1.2", + "@supabase/supabase-js": "^2.39.0", "openai": "5.11.0", "picomatch": "^4.0.1", "playwright": "^1.56.0", diff --git a/packages/core/src/config/config.ts b/packages/core/src/config/config.ts index bfb4f61..6dc1dda 100644 --- a/packages/core/src/config/config.ts +++ b/packages/core/src/config/config.ts @@ -23,6 +23,7 @@ import { } from '../services/fileSystemService.js'; import { GitService } from '../services/gitService.js'; import { SubagentManager } from '../subagents/subagent-manager.js'; +import { DataConfigManager } from '../datasources/data-config-manager.js'; import type { TelemetryTarget } from '../telemetry/index.js'; import { DEFAULT_OTLP_ENDPOINT, @@ -58,6 +59,8 @@ import { BrowserCloseTool, } from '../tools/browser_use.js'; import { WriteFileTool } from '../tools/write-file.js'; +import { MongoFindTool } from '../tools/mongo-find.js'; +import { SupabaseSelectTool } from '../tools/supabase-select.js'; import { shouldAttemptBrowserLaunch } from '../utils/browser.js'; import { FileExclusions } from '../utils/ignorePatterns.js'; import { WorkspaceContext } from '../utils/workspaceContext.js'; @@ -265,6 +268,7 @@ export class Config { private toolRegistry!: ToolRegistry; private promptRegistry!: PromptRegistry; private subagentManager!: SubagentManager; + private dataConfigManager!: DataConfigManager; private sessionId: string; private fileSystemService: FileSystemService; private contentGeneratorConfig!: ContentGeneratorConfig; @@ -492,6 +496,7 @@ export class Config { } this.promptRegistry = new PromptRegistry(); this.subagentManager = new SubagentManager(this); + this.dataConfigManager = new DataConfigManager(this); this.toolRegistry = await this.createToolRegistry(); logCliConfiguration(this, new StartSessionEvent(this, this.toolRegistry)); } @@ -1061,6 +1066,10 @@ export class Config { return this.subagentManager; } + getDataConfigManager(): DataConfigManager { + return this.dataConfigManager; + } + async createToolRegistry(): Promise { const registry = new ToolRegistry(this); @@ 
-1125,6 +1134,8 @@ export class Config { registerCoreTool(TodoWriteTool, this); registerCoreTool(ExitPlanModeTool, this); registerCoreTool(WebFetchTool, this); + registerCoreTool(MongoFindTool, this); + registerCoreTool(SupabaseSelectTool, this); // Register web search tool if: // 1. Tavily API key is set, OR diff --git a/packages/core/src/config/storage.ts b/packages/core/src/config/storage.ts index c67f359..7795855 100644 --- a/packages/core/src/config/storage.ts +++ b/packages/core/src/config/storage.ts @@ -111,4 +111,8 @@ export class Storage { getHistoryFilePath(): string { return path.join(this.getProjectTempDir(), 'shell_history'); } + + getDataConfigsDir(): string { + return path.join(this.getGeminiDir(), 'data-configs'); + } } diff --git a/packages/core/src/core/prompts.ts b/packages/core/src/core/prompts.ts index 416eeff..4734661 100644 --- a/packages/core/src/core/prompts.ts +++ b/packages/core/src/core/prompts.ts @@ -374,6 +374,46 @@ When users need to search for information on the web, you have access to the '${ **DO NOT guess or provide potentially outdated information.** Instead, immediately use '${ToolNames.WEB_SEARCH}' to get accurate, current information with proper source citations. This ensures users receive reliable, up-to-date information rather than potentially incorrect data from your training cutoff. Make sure to give proper citations for the user. +## Database Tools (${ToolNames.MONGO_FIND} and ${ToolNames.SUPABASE_SELECT}) +You have access to read-only database tools that allow you to query MongoDB and Supabase databases directly. These tools execute locally within the CLI and never expose credentials to the LLM. + +**Data Source Configuration**: Users can configure named data sources using the \`/datasource\` command. When a user mentions a configured data source or when the \`dataSource\` parameter is provided, you should use it instead of environment variables. The \`dataSource\` parameter tells the tool to use credentials from the user's configuration, making it easier to work with multiple databases. + +### MongoDB Tool (${ToolNames.MONGO_FIND}) +- **When to Use**: Use '${ToolNames.MONGO_FIND}' when you need to query data from a MongoDB database. +- **Parameters**: + - \`db\` (required): Database name + - \`collection\` (required): Collection name + - \`filter\` (optional): MongoDB filter query object (e.g., \`{ status: "active", age: { $gte: 18 } }\`) + - \`dataSource\` (optional): Name of a configured data source. If provided, uses credentials from the configuration instead of MONGODB_URI environment variable. + - \`limit\` (optional): Maximum rows to return (default: 100, max: 100) + - \`projection\` (optional): Fields to include/exclude (e.g., \`{ name: 1, email: 1, _id: 0 }\`) +- **Environment Variable**: Requires \`MONGODB_URI\` to be set in .env file (e.g., \`MONGODB_URI=mongodb://localhost:27017/mydb\`) +- **Safety**: Read-only operations only, maximum 100 rows per query +- **Examples**: + - Find active users: \`${ToolNames.MONGO_FIND}(db="prod", collection="users", filter={status: "active"}, limit=20)\` + - Query with projection: \`${ToolNames.MONGO_FIND}(db="mydb", collection="products", filter={price: {$lt: 100}}, projection={name: 1, price: 1})\` + +### Supabase Tool (${ToolNames.SUPABASE_SELECT}) +- **dataSource Parameter**: If the user has configured a data source using \`/datasource create\`, you can use the \`dataSource\` parameter to automatically use the configured credentials. 
This is especially useful when users reference a data source by name in their queries. +- **When to Use**: Use '${ToolNames.SUPABASE_SELECT}' when you need to query data from a Supabase database. +- **Parameters**: + - \`table\` (required): Table name + - \`filter\` (optional): Filter conditions as key-value pairs (e.g., \`{ status: "active", age: 18 }\`) + - \`limit\` (optional): Maximum rows to return (default: 100, max: 100) + - \`columns\` (optional): Specific columns to select (e.g., \`["id", "name", "email"]\`) +- **Environment Variables**: Requires \`SUPABASE_URL\` and \`SUPABASE_KEY\` to be set in .env file +- **Safety**: Read-only operations only, maximum 100 rows per query +- **Examples**: + - Find active users: \`${ToolNames.SUPABASE_SELECT}(table="users", filter={status: "active"}, limit=20)\` + - Select specific columns: \`${ToolNames.SUPABASE_SELECT}(table="products", columns=["id", "name", "price"], filter={in_stock: true})\` + +### Important Notes +- **Credentials**: Database credentials are read from environment variables (.env file) and are never exposed to the LLM +- **Read-Only**: These tools only support read operations - no insert, update, or delete operations +- **Row Limits**: Maximum 100 rows per query to prevent context overflow +- **Error Handling**: Connection errors are handled gracefully without exposing sensitive connection details + # Task Management - You have access to the ${ToolNames.TODO_WRITE} tool to help you track tasks in the plan. Use these tools VERY frequently to ensure that you are tracking your tasks and giving the user visibility into your progress. These tools are also EXTREMELY helpful for planning tasks, and for breaking down larger complex tasks into smaller steps. If you do not use this tool when planning, you may forget to do important tasks - and that is unacceptable. diff --git a/packages/core/src/datasources/data-config-manager.ts b/packages/core/src/datasources/data-config-manager.ts new file mode 100644 index 0000000..2747947 --- /dev/null +++ b/packages/core/src/datasources/data-config-manager.ts @@ -0,0 +1,396 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import * as fs from 'fs/promises'; +import * as path from 'path'; +import type { DataConfig, CreateDataConfigOptions, DataSourceType } from './types.js'; +import { DataConfigError, DataConfigErrorCode } from './types.js'; +import type { Config } from '../config/config.js'; + +const BLACKBOX_CONFIG_DIR = '.blackboxcli'; +const DATA_CONFIG_DIR = 'data-configs'; + +/** + * Manages data source configurations stored as JSON files. + * Provides CRUD operations for data source configurations. + */ +export class DataConfigManager { + private dataConfigsCache: DataConfig[] | null = null; + private readonly changeListeners: Set<() => void> = new Set(); + + constructor(private readonly config: Config) {} + + addChangeListener(listener: () => void): () => void { + this.changeListeners.add(listener); + return () => { + this.changeListeners.delete(listener); + }; + } + + private notifyChangeListeners(): void { + for (const listener of this.changeListeners) { + try { + listener(); + } catch (error) { + console.warn('Data config change listener threw an error:', error); + } + } + } + + /** + * Gets the directory path where data configurations are stored. 
+ */ + getDataConfigsDir(): string { + return path.join( + this.config.getProjectRoot(), + BLACKBOX_CONFIG_DIR, + DATA_CONFIG_DIR, + ); + } + + /** + * Gets the file path for a data configuration by name. + */ + getDataConfigPath(name: string): string { + // Sanitize name to be filesystem-safe + const sanitizedName = name.replace(/[^a-zA-Z0-9_-]/g, '_'); + return path.join(this.getDataConfigsDir(), `${sanitizedName}.json`); + } + + /** + * Validates a data configuration. + */ + private validateConfig(config: DataConfig): void { + if (!config.name || typeof config.name !== 'string' || config.name.trim() === '') { + throw new DataConfigError( + 'Data source name must be a non-empty string', + DataConfigErrorCode.INVALID_NAME, + config.name, + ); + } + + // Validate name format (alphanumeric, dashes, underscores) + if (!/^[a-zA-Z0-9_-]+$/.test(config.name)) { + throw new DataConfigError( + 'Data source name must contain only alphanumeric characters, dashes, and underscores', + DataConfigErrorCode.INVALID_NAME, + config.name, + ); + } + + if (!config.type || (config.type !== 'mongodb' && config.type !== 'supabase')) { + throw new DataConfigError( + 'Data source type must be either "mongodb" or "supabase"', + DataConfigErrorCode.INVALID_CONFIG, + config.name, + ); + } + + if (!config.credentials) { + throw new DataConfigError( + 'Data source credentials are required', + DataConfigErrorCode.INVALID_CONFIG, + config.name, + ); + } + + // Validate credentials based on type + if (config.type === 'mongodb') { + const creds = config.credentials as { uri?: string }; + if (!creds.uri || typeof creds.uri !== 'string' || creds.uri.trim() === '') { + throw new DataConfigError( + 'MongoDB credentials must include a non-empty "uri" field', + DataConfigErrorCode.INVALID_CONFIG, + config.name, + ); + } + } else if (config.type === 'supabase') { + const creds = config.credentials as { url?: string; key?: string }; + if (!creds.url || typeof creds.url !== 'string' || creds.url.trim() === '') { + throw new DataConfigError( + 'Supabase credentials must include a non-empty "url" field', + DataConfigErrorCode.INVALID_CONFIG, + config.name, + ); + } + if (!creds.key || typeof creds.key !== 'string' || creds.key.trim() === '') { + throw new DataConfigError( + 'Supabase credentials must include a non-empty "key" field', + DataConfigErrorCode.INVALID_CONFIG, + config.name, + ); + } + } + } + + /** + * Creates a new data configuration. 
+ * + * @param config - Data configuration to create + * @param options - Creation options + * @throws DataConfigError if creation fails + */ + async createDataConfig( + config: DataConfig, + options: CreateDataConfigOptions = {}, + ): Promise { + this.validateConfig(config); + + const filePath = this.getDataConfigPath(config.name); + + // Check if file already exists + if (!options.overwrite) { + try { + await fs.access(filePath); + throw new DataConfigError( + `Data source "${config.name}" already exists at ${filePath}`, + DataConfigErrorCode.ALREADY_EXISTS, + config.name, + ); + } catch (error) { + if (error instanceof DataConfigError) throw error; + // File doesn't exist, which is what we want + } + } + + // Ensure directory exists + const dir = path.dirname(filePath); + await fs.mkdir(dir, { recursive: true }); + + // Update config with actual file path + const finalConfig: DataConfig = { + ...config, + filePath, + }; + + // Write the file + try { + await fs.writeFile( + filePath, + JSON.stringify( + { + name: finalConfig.name, + type: finalConfig.type, + credentials: finalConfig.credentials, + }, + null, + 2, + ), + 'utf8', + ); + // Refresh cache after successful creation + await this.refreshCache(); + this.notifyChangeListeners(); + } catch (error) { + throw new DataConfigError( + `Failed to write data config file: ${error instanceof Error ? error.message : 'Unknown error'}`, + DataConfigErrorCode.FILE_ERROR, + config.name, + ); + } + } + + /** + * Loads a data configuration by name. + * + * @param name - Name of the data source to load + * @returns DataConfig or null if not found + */ + async loadDataConfig(name: string): Promise { + const filePath = this.getDataConfigPath(name); + try { + const content = await fs.readFile(filePath, 'utf8'); + const jsonConfig = JSON.parse(content) as { + name: string; + type: DataSourceType; + credentials: unknown; + }; + + const config: DataConfig = { + name: jsonConfig.name, + type: jsonConfig.type, + credentials: jsonConfig.credentials as DataConfig['credentials'], + filePath, + }; + + // Validate loaded config + this.validateConfig(config); + return config; + } catch (error) { + if ((error as { code?: string })?.code === 'ENOENT') { + return null; + } + throw new DataConfigError( + `Failed to load data config "${name}": ${error instanceof Error ? error.message : 'Unknown error'}`, + DataConfigErrorCode.FILE_ERROR, + name, + ); + } + } + + /** + * Lists all data configurations. + * + * @param force - Force refresh from disk, bypassing cache + * @returns Array of data configurations + */ + async listDataConfigs(force: boolean = false): Promise { + if (this.dataConfigsCache && !force) { + return this.dataConfigsCache; + } + + return this.refreshCache(); + } + + /** + * Refreshes the cache by reading all configuration files from disk. 
+ */ + private async refreshCache(): Promise { + const configs: DataConfig[] = []; + const dir = this.getDataConfigsDir(); + + try { + await fs.access(dir); + } catch { + // Directory doesn't exist, return empty array + this.dataConfigsCache = configs; + return configs; + } + + try { + const files = await fs.readdir(dir); + for (const file of files) { + if (!file.endsWith('.json')) continue; + + const filePath = path.join(dir, file); + try { + const content = await fs.readFile(filePath, 'utf8'); + const jsonConfig = JSON.parse(content) as { + name: string; + type: DataSourceType; + credentials: unknown; + }; + + const config: DataConfig = { + name: jsonConfig.name, + type: jsonConfig.type, + credentials: jsonConfig.credentials as DataConfig['credentials'], + filePath, + }; + + // Validate before adding + try { + this.validateConfig(config); + configs.push(config); + } catch (error) { + console.warn( + `Skipping invalid data config file ${file}: ${error instanceof Error ? error.message : 'Unknown error'}`, + ); + } + } catch (error) { + console.warn( + `Failed to read data config file ${file}: ${error instanceof Error ? error.message : 'Unknown error'}`, + ); + } + } + } catch (error) { + throw new DataConfigError( + `Failed to list data configs: ${error instanceof Error ? error.message : 'Unknown error'}`, + DataConfigErrorCode.FILE_ERROR, + ); + } + + this.dataConfigsCache = configs; + return configs; + } + + /** + * Updates an existing data configuration. + * + * @param name - Name of the data source to update + * @param updates - Partial configuration updates + * @throws DataConfigError if data source not found or update fails + */ + async updateDataConfig( + name: string, + updates: Partial, + ): Promise { + const existing = await this.loadDataConfig(name); + if (!existing) { + throw new DataConfigError( + `Data source "${name}" not found`, + DataConfigErrorCode.NOT_FOUND, + name, + ); + } + + // Merge updates with existing configuration + const updatedConfig: DataConfig = { + ...existing, + ...updates, + name, // Don't allow renaming through update + filePath: existing.filePath, // Keep original file path + }; + + // Validate the updated configuration + this.validateConfig(updatedConfig); + + // Write the updated configuration + try { + await fs.writeFile( + existing.filePath, + JSON.stringify( + { + name: updatedConfig.name, + type: updatedConfig.type, + credentials: updatedConfig.credentials, + }, + null, + 2, + ), + 'utf8', + ); + await this.refreshCache(); + this.notifyChangeListeners(); + } catch (error) { + throw new DataConfigError( + `Failed to update data config: ${error instanceof Error ? error.message : 'Unknown error'}`, + DataConfigErrorCode.FILE_ERROR, + name, + ); + } + } + + /** + * Deletes a data configuration. + * + * @param name - Name of the data source to delete + * @throws DataConfigError if data source not found or deletion fails + */ + async deleteDataConfig(name: string): Promise { + const existing = await this.loadDataConfig(name); + if (!existing) { + throw new DataConfigError( + `Data source "${name}" not found`, + DataConfigErrorCode.NOT_FOUND, + name, + ); + } + + try { + await fs.unlink(existing.filePath); + await this.refreshCache(); + this.notifyChangeListeners(); + } catch (error) { + throw new DataConfigError( + `Failed to delete data config: ${error instanceof Error ? 
error.message : 'Unknown error'}`, + DataConfigErrorCode.FILE_ERROR, + name, + ); + } + } +} + diff --git a/packages/core/src/datasources/index.ts b/packages/core/src/datasources/index.ts new file mode 100644 index 0000000..481136b --- /dev/null +++ b/packages/core/src/datasources/index.ts @@ -0,0 +1,17 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +export { DataConfigManager } from './data-config-manager.js'; +export type { + DataConfig, + DataSourceType, + DataSourceCredentials, + MongoDBCredentials, + SupabaseCredentials, + CreateDataConfigOptions, +} from './types.js'; +export { DataConfigError, DataConfigErrorCode } from './types.js'; + diff --git a/packages/core/src/datasources/types.ts b/packages/core/src/datasources/types.ts new file mode 100644 index 0000000..c7591ec --- /dev/null +++ b/packages/core/src/datasources/types.ts @@ -0,0 +1,85 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +/** + * Represents the type of data source. + */ +export type DataSourceType = 'mongodb' | 'supabase'; + +/** + * Credentials for MongoDB data source. + */ +export interface MongoDBCredentials { + uri: string; +} + +/** + * Credentials for Supabase data source. + */ +export interface SupabaseCredentials { + url: string; + key: string; +} + +/** + * Union type for all credential types. + */ +export type DataSourceCredentials = MongoDBCredentials | SupabaseCredentials; + +/** + * Core configuration for a data source as stored in JSON files. + */ +export interface DataConfig { + /** Unique name identifier for the data source */ + name: string; + + /** Type of database (mongodb or supabase) */ + type: DataSourceType; + + /** Credentials specific to the database type */ + credentials: DataSourceCredentials; + + /** Absolute path to the configuration file */ + filePath: string; +} + +/** + * Options for creating a new data configuration. + */ +export interface CreateDataConfigOptions { + /** Whether to overwrite existing configuration with same name */ + overwrite?: boolean; +} + +/** + * Error thrown when a data config operation fails. + */ +export class DataConfigError extends Error { + constructor( + message: string, + readonly code: string, + readonly dataSourceName?: string, + ) { + super(message); + this.name = 'DataConfigError'; + } +} + +/** + * Error codes for data config operations. 
+ */ +export const DataConfigErrorCode = { + NOT_FOUND: 'NOT_FOUND', + ALREADY_EXISTS: 'ALREADY_EXISTS', + INVALID_CONFIG: 'INVALID_CONFIG', + INVALID_NAME: 'INVALID_NAME', + FILE_ERROR: 'FILE_ERROR', + VALIDATION_ERROR: 'VALIDATION_ERROR', +} as const; + +export type DataConfigErrorCode = + (typeof DataConfigErrorCode)[keyof typeof DataConfigErrorCode]; + diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index e1357d3..f5729d5 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -92,6 +92,8 @@ export * from './tools/memoryTool.js'; export * from './tools/shell.js'; export * from './tools/web-search.js'; export * from './tools/read-many-files.js'; +export * from './tools/mongo-find.js'; +export * from './tools/supabase-select.js'; export * from './tools/mcp-client.js'; export * from './tools/mcp-tool.js'; diff --git a/packages/core/src/tools/mongo-find.test.ts b/packages/core/src/tools/mongo-find.test.ts new file mode 100644 index 0000000..d3b4a66 --- /dev/null +++ b/packages/core/src/tools/mongo-find.test.ts @@ -0,0 +1,303 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { MongoFindTool } from './mongo-find.js'; +import type { Config } from '../config/config.js'; +import { ToolErrorType } from './tool-error.js'; + +// Mock MongoDB client +vi.mock('mongodb', () => { + const mockCollection = { + find: vi.fn(), + }; + + const mockDb = { + collection: vi.fn(() => mockCollection), + }; + + const mockClient = { + connect: vi.fn(), + close: vi.fn(), + db: vi.fn(() => mockDb), + }; + + return { + MongoClient: vi.fn(() => mockClient), + }; +}); + +describe('MongoFindTool', () => { + let mockConfig: Config; + let mockMongoClient: { + connect: ReturnType; + close: ReturnType; + db: ReturnType; + }; + let mockCollection: { find: ReturnType }; + + beforeEach(() => { + vi.resetAllMocks(); + process.env['MONGODB_URI'] = 'mongodb://localhost:27017/testdb'; + + mockConfig = {} as unknown as Config; + + // Get the mocked client instance + // eslint-disable-next-line @typescript-eslint/no-require-imports + const { MongoClient } = require('mongodb'); + mockMongoClient = new MongoClient('') as { + connect: ReturnType; + close: ReturnType; + db: ReturnType; + }; + const mockDb = mockMongoClient.db('test') as { + collection: ReturnType; + }; + mockCollection = mockDb.collection('test') as { + find: ReturnType; + }; + }); + + afterEach(() => { + delete process.env['MONGODB_URI']; + vi.restoreAllMocks(); + }); + + describe('parameter validation', () => { + it('should throw error when db parameter is missing', () => { + const tool = new MongoFindTool(mockConfig); + const params = { collection: 'users' }; + /* @ts-expect-error - we are testing validation */ + expect(() => tool.build(params)).toThrow(); + }); + + it('should throw error when collection parameter is missing', () => { + const tool = new MongoFindTool(mockConfig); + const params = { db: 'mydb' }; + /* @ts-expect-error - we are testing validation */ + expect(() => tool.build(params)).toThrow(); + }); + + it('should throw error when db is empty string', () => { + const tool = new MongoFindTool(mockConfig); + const params = { db: '', collection: 'users' }; + expect(() => tool.build(params)).toThrow('Parameter "db" must be a non-empty string'); + }); + + it('should throw error when collection is empty string', () => { + const tool = new MongoFindTool(mockConfig); + const params = { db: 
'mydb', collection: '' }; + expect(() => tool.build(params)).toThrow('Parameter "collection" must be a non-empty string'); + }); + + it('should throw error when limit exceeds maximum', () => { + const tool = new MongoFindTool(mockConfig); + const params = { db: 'mydb', collection: 'users', limit: 200 }; + expect(() => tool.build(params)).toThrow('Parameter "limit" must be a number between 1 and 100'); + }); + + it('should throw error when limit is less than 1', () => { + const tool = new MongoFindTool(mockConfig); + const params = { db: 'mydb', collection: 'users', limit: 0 }; + expect(() => tool.build(params)).toThrow('Parameter "limit" must be a number between 1 and 100'); + }); + + it('should accept valid parameters', () => { + const tool = new MongoFindTool(mockConfig); + const params = { + db: 'mydb', + collection: 'users', + filter: { status: 'active' }, + limit: 50, + projection: { name: 1, email: 1 }, + }; + expect(() => tool.build(params)).not.toThrow(); + }); + }); + + describe('execute', () => { + it('should return error when MONGODB_URI is not set', async () => { + delete process.env['MONGODB_URI']; + const tool = new MongoFindTool(mockConfig); + const params = { db: 'mydb', collection: 'users' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(result.error?.type).toBe(ToolErrorType.EXECUTION_FAILED); + expect(result.llmContent).toContain('MONGODB_URI environment variable is not set'); + }); + + it('should connect to MongoDB and execute find query', async () => { + const mockCursor = { + project: vi.fn().mockReturnThis(), + limit: vi.fn().mockReturnThis(), + toArray: vi.fn().mockResolvedValue([ + { _id: '1', name: 'John', email: 'john@example.com' }, + { _id: '2', name: 'Jane', email: 'jane@example.com' }, + ]), + }; + + mockCollection.find.mockReturnValue(mockCursor); + mockMongoClient.connect.mockResolvedValue(undefined); + mockMongoClient.close.mockResolvedValue(undefined); + + const tool = new MongoFindTool(mockConfig); + const params = { + db: 'mydb', + collection: 'users', + filter: { status: 'active' }, + limit: 10, + }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(mockMongoClient.connect).toHaveBeenCalled(); + expect(mockCollection.find).toHaveBeenCalledWith({ status: 'active' }); + expect(mockCursor.limit).toHaveBeenCalledWith(10); + expect(result.error).toBeUndefined(); + const content = JSON.parse(result.llmContent as string); + expect(content.success).toBe(true); + expect(content.count).toBe(2); + expect(content.data).toHaveLength(2); + }); + + it('should enforce maximum limit of 100', async () => { + const mockCursor = { + project: vi.fn().mockReturnThis(), + limit: vi.fn().mockReturnThis(), + toArray: vi.fn().mockResolvedValue([]), + }; + + mockCollection.find.mockReturnValue(mockCursor); + mockMongoClient.connect.mockResolvedValue(undefined); + mockMongoClient.close.mockResolvedValue(undefined); + + const tool = new MongoFindTool(mockConfig); + const params = { + db: 'mydb', + collection: 'users', + limit: 150, // Should be capped at 100 + }; + const invocation = tool.build(params); + await invocation.execute(new AbortController().signal); + + expect(mockCursor.limit).toHaveBeenCalledWith(100); + }); + + it('should apply projection when provided', async () => { + const mockCursor = { + project: vi.fn().mockReturnThis(), + limit: vi.fn().mockReturnThis(), + toArray: vi.fn().mockResolvedValue([]), + }; + + 
mockCollection.find.mockReturnValue(mockCursor); + mockMongoClient.connect.mockResolvedValue(undefined); + mockMongoClient.close.mockResolvedValue(undefined); + + const tool = new MongoFindTool(mockConfig); + const params = { + db: 'mydb', + collection: 'users', + projection: { name: 1, email: 1 }, + }; + const invocation = tool.build(params); + await invocation.execute(new AbortController().signal); + + expect(mockCursor.project).toHaveBeenCalledWith({ name: 1, email: 1 }); + }); + + it('should handle connection errors gracefully', async () => { + mockMongoClient.connect.mockRejectedValue( + new Error('Connection failed'), + ); + + const tool = new MongoFindTool(mockConfig); + const params = { db: 'mydb', collection: 'users' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(result.error?.type).toBe(ToolErrorType.EXECUTION_FAILED); + expect(result.llmContent).toContain('Failed to connect to MongoDB'); + }); + + it('should handle authentication errors gracefully', async () => { + mockMongoClient.connect.mockRejectedValue( + new Error('Authentication failed'), + ); + + const tool = new MongoFindTool(mockConfig); + const params = { db: 'mydb', collection: 'users' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(result.error?.type).toBe(ToolErrorType.EXECUTION_FAILED); + expect(result.llmContent).toContain('Authentication failed'); + expect(result.llmContent).not.toContain('MONGODB_URI'); + }); + + it('should always close the connection', async () => { + const mockCursor = { + project: vi.fn().mockReturnThis(), + limit: vi.fn().mockReturnThis(), + toArray: vi.fn().mockResolvedValue([]), + }; + + mockCollection.find.mockReturnValue(mockCursor); + mockMongoClient.connect.mockResolvedValue(undefined); + mockMongoClient.close.mockResolvedValue(undefined); + + const tool = new MongoFindTool(mockConfig); + const params = { db: 'mydb', collection: 'users' }; + const invocation = tool.build(params); + await invocation.execute(new AbortController().signal); + + expect(mockMongoClient.close).toHaveBeenCalled(); + }); + + it('should handle errors during connection close', async () => { + const mockCursor = { + project: vi.fn().mockReturnThis(), + limit: vi.fn().mockReturnThis(), + toArray: vi.fn().mockResolvedValue([]), + }; + + mockCollection.find.mockReturnValue(mockCursor); + mockMongoClient.connect.mockResolvedValue(undefined); + mockMongoClient.close.mockRejectedValue( + new Error('Close failed'), + ); + + const tool = new MongoFindTool(mockConfig); + const params = { db: 'mydb', collection: 'users' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + // Should still return success even if close fails + expect(result.error).toBeUndefined(); + }); + }); + + describe('getDescription', () => { + it('should return descriptive string', () => { + const tool = new MongoFindTool(mockConfig); + const params = { + db: 'mydb', + collection: 'users', + filter: { status: 'active' }, + limit: 20, + }; + const invocation = tool.build(params); + const description = invocation.getDescription(); + + expect(description).toContain('mydb'); + expect(description).toContain('users'); + expect(description).toContain('active'); + expect(description).toContain('20'); + }); + }); +}); + diff --git a/packages/core/src/tools/mongo-find.ts b/packages/core/src/tools/mongo-find.ts new file mode 100644 index 
0000000..fa5b091 --- /dev/null +++ b/packages/core/src/tools/mongo-find.ts @@ -0,0 +1,341 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import type { FunctionDeclaration } from '@google/genai'; +import { MongoClient, type Db, type Collection } from 'mongodb'; +import type { Config } from '../config/config.js'; +import { ToolErrorType } from './tool-error.js'; +import type { ToolResult } from './tools.js'; +import { + BaseDeclarativeTool, + BaseToolInvocation, + Kind, +} from './tools.js'; + +const MAX_ROWS = 100; + +const mongoFindToolSchemaData: FunctionDeclaration = { + name: 'mongo_find', + description: + 'Executes a read-only find() query on a MongoDB collection. Returns matching documents as a JSON array. Maximum 100 rows per query.', + parametersJsonSchema: { + type: 'object', + properties: { + db: { + type: 'string', + description: 'The name of the database to query', + }, + collection: { + type: 'string', + description: 'The name of the collection to query', + }, + filter: { + type: 'object', + description: + 'MongoDB filter query object (e.g., { status: "active", age: { $gte: 18 } })', + additionalProperties: true, + }, + limit: { + type: 'number', + description: 'Maximum number of rows to return (default: 100, max: 100)', + minimum: 1, + maximum: MAX_ROWS, + default: MAX_ROWS, + }, + projection: { + type: 'object', + description: + 'Fields to include or exclude (e.g., { name: 1, email: 1, _id: 0 })', + additionalProperties: true, + }, + dataSource: { + type: 'string', + description: + 'Optional name of a configured data source. If provided, uses credentials from the data source configuration instead of MONGODB_URI environment variable.', + }, + }, + required: ['db', 'collection'], + additionalProperties: false, + }, +}; + +const mongoFindToolDescription = ` +Executes a read-only find() query on a MongoDB collection. + +## Usage + +Use this tool to query MongoDB databases when you need to retrieve data. This tool is read-only and safe to use. + +## Parameters + +- \`db\` (string, required): The name of the database to query +- \`collection\` (string, required): The name of the collection to query +- \`filter\` (object, optional): MongoDB filter query object. Examples: + - \`{ status: "active" }\` - Find documents where status equals "active" + - \`{ age: { $gte: 18 } }\` - Find documents where age is greater than or equal to 18 + - \`{ name: { $regex: "^John" } }\` - Find documents where name starts with "John" +- \`limit\` (number, optional): Maximum number of rows to return (default: 100, max: 100) +- \`projection\` (object, optional): Fields to include or exclude. Examples: + - \`{ name: 1, email: 1 }\` - Include only name and email fields + - \`{ _id: 0, password: 0 }\` - Exclude _id and password fields + +## Data Source Configuration + +You can use a configured data source by providing the \`dataSource\` parameter. If provided, the tool will use credentials from the data source configuration instead of environment variables. 
+ +## Environment Variables + +If \`dataSource\` is not provided, this tool requires the \`MONGODB_URI\` environment variable to be set in your .env file: +- \`MONGODB_URI=mongodb://localhost:27017/mydb\` (local MongoDB) +- \`MONGODB_URI=mongodb+srv://user:pass@cluster.mongodb.net/dbname\` (MongoDB Atlas) + +## Safety + +- Read-only operations only (find() queries) +- Maximum 100 rows per query +- Credentials are read from data source configuration or environment variables, never exposed to the LLM +`; + +export interface MongoFindParams { + db: string; + collection: string; + filter?: Record<string, unknown>; + limit?: number; + projection?: Record<string, unknown>; + dataSource?: string; +} + +class MongoFindToolInvocation extends BaseToolInvocation< + MongoFindParams, + ToolResult +> { + constructor( + private readonly config: Config, + params: MongoFindParams, + ) { + super(params); + } + + getDescription(): string { + const filterStr = this.params.filter + ? JSON.stringify(this.params.filter) + : '{}'; + const limit = this.params.limit ?? MAX_ROWS; + return `Querying MongoDB: ${this.params.db}.${this.params.collection} with filter ${filterStr}, limit ${limit}`; + } + + async execute(_signal: AbortSignal): Promise<ToolResult> { + let mongoUri: string | undefined; + + // Try to load from data source configuration if provided + if (this.params.dataSource) { + try { + const dataConfigManager = this.config.getDataConfigManager(); + const dataConfig = await dataConfigManager.loadDataConfig( + this.params.dataSource, + ); + if (!dataConfig) { + const errorMessage = `Data source "${this.params.dataSource}" not found. Please configure it using /datasource create.`; + return { + llmContent: JSON.stringify({ + success: false, + error: errorMessage, + }), + returnDisplay: `Error: ${errorMessage}`, + error: { + message: errorMessage, + type: ToolErrorType.EXECUTION_FAILED, + }, + }; + } + if (dataConfig.type !== 'mongodb') { + const errorMessage = `Data source "${this.params.dataSource}" is of type "${dataConfig.type}", not "mongodb".`; + return { + llmContent: JSON.stringify({ + success: false, + error: errorMessage, + }), + returnDisplay: `Error: ${errorMessage}`, + error: { + message: errorMessage, + type: ToolErrorType.EXECUTION_FAILED, + }, + }; + } + mongoUri = (dataConfig.credentials as { uri: string }).uri; + } catch (error) { + const errorMessage = `Failed to load data source configuration: ${error instanceof Error ? error.message : 'Unknown error'}`; + return { + llmContent: JSON.stringify({ + success: false, + error: errorMessage, + }), + returnDisplay: `Error: ${errorMessage}`, + error: { + message: errorMessage, + type: ToolErrorType.EXECUTION_FAILED, + }, + }; + } + } else { + // Fall back to environment variable + mongoUri = process.env['MONGODB_URI']; + } + + if (!mongoUri) { + const errorMessage = + 'MONGODB_URI environment variable is not set and no dataSource was provided. 
Please set it in your .env file or configure a data source using /datasource create.'; + return { + llmContent: JSON.stringify({ + success: false, + error: errorMessage, + }), + returnDisplay: `Error: ${errorMessage}`, + error: { + message: errorMessage, + type: ToolErrorType.EXECUTION_FAILED, + }, + }; + } + + let client: MongoClient | null = null; + + try { + // Validate parameters + if (!this.params.db || typeof this.params.db !== 'string') { + throw new Error('Parameter "db" must be a non-empty string'); + } + if (!this.params.collection || typeof this.params.collection !== 'string') { + throw new Error('Parameter "collection" must be a non-empty string'); + } + + // Enforce limit + const limit = Math.min(this.params.limit ?? MAX_ROWS, MAX_ROWS); + + // Connect to MongoDB + client = new MongoClient(mongoUri); + await client.connect(); + + const database: Db = client.db(this.params.db); + const collection: Collection = database.collection( + this.params.collection, + ); + + // Build query + const filter = this.params.filter ?? {}; + const projection = this.params.projection; + + // Execute read-only find() query + let query = collection.find(filter); + if (projection) { + query = query.project(projection); + } + query = query.limit(limit); + + const results = await query.toArray(); + + // Convert MongoDB documents to plain JSON objects + const jsonResults = results.map((doc: unknown) => { + // Convert ObjectId and other BSON types to JSON-serializable format + const plain = JSON.parse(JSON.stringify(doc)); + return plain; + }); + + const resultCount = jsonResults.length; + const resultMessage = `Found ${resultCount} document${resultCount !== 1 ? 's' : ''} in ${this.params.db}.${this.params.collection}`; + + return { + llmContent: JSON.stringify({ + success: true, + count: resultCount, + data: jsonResults, + }), + returnDisplay: resultMessage, + }; + } catch (error) { + const errorMessage = + error instanceof Error ? error.message : String(error); + console.error( + `[MongoFindTool] Error executing query on ${this.params.db}.${this.params.collection}: ${errorMessage}`, + ); + + // Don't expose connection details in error messages + let safeErrorMessage = errorMessage; + if (errorMessage.includes('authentication') || errorMessage.includes('auth')) { + safeErrorMessage = 'Authentication failed. Please check your MONGODB_URI credentials.'; + } else if (errorMessage.includes('connection') || errorMessage.includes('connect')) { + safeErrorMessage = 'Failed to connect to MongoDB. 
Please check your MONGODB_URI and ensure the server is accessible.'; + } + + return { + llmContent: JSON.stringify({ + success: false, + error: safeErrorMessage, + }), + returnDisplay: `Error: ${safeErrorMessage}`, + error: { + message: safeErrorMessage, + type: ToolErrorType.EXECUTION_FAILED, + }, + }; + } finally { + // Always close the connection + if (client) { + try { + await client.close(); + } catch (closeError) { + console.error('[MongoFindTool] Error closing MongoDB connection:', closeError); + } + } + } + } +} + +export class MongoFindTool extends BaseDeclarativeTool< + MongoFindParams, + ToolResult +> { + static readonly Name: string = mongoFindToolSchemaData.name!; + + constructor(private readonly config: Config) { + super( + MongoFindTool.Name, + 'MongoFind', + mongoFindToolDescription, + Kind.Read, + mongoFindToolSchemaData.parametersJsonSchema as Record<string, unknown>, + ); + } + + protected override validateToolParamValues( + params: MongoFindParams, + ): string | null { + if (!params.db || typeof params.db !== 'string' || params.db.trim() === '') { + return 'Parameter "db" must be a non-empty string'; + } + + if ( + !params.collection || + typeof params.collection !== 'string' || + params.collection.trim() === '' + ) { + return 'Parameter "collection" must be a non-empty string'; + } + + if (params.limit !== undefined) { + if (typeof params.limit !== 'number' || params.limit < 1 || params.limit > MAX_ROWS) { + return `Parameter "limit" must be a number between 1 and ${MAX_ROWS}`; + } + } + + return null; + } + + protected createInvocation(params: MongoFindParams) { + return new MongoFindToolInvocation(this.config, params); + } +} + diff --git a/packages/core/src/tools/supabase-select.test.ts b/packages/core/src/tools/supabase-select.test.ts new file mode 100644 index 0000000..56e3465 --- /dev/null +++ b/packages/core/src/tools/supabase-select.test.ts @@ -0,0 +1,348 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { SupabaseSelectTool } from './supabase-select.js'; +import type { Config } from '../config/config.js'; +import { ToolErrorType } from './tool-error.js'; +import { createClient } from '@supabase/supabase-js'; + +// Mock Supabase client +vi.mock('@supabase/supabase-js', () => { + const mockQuery = { + eq: vi.fn().mockReturnThis(), + limit: vi.fn().mockReturnThis(), + select: vi.fn().mockReturnThis(), + }; + + const mockFrom = vi.fn(() => mockQuery); + + const mockClient = { + from: mockFrom, + }; + + return { + createClient: vi.fn(() => mockClient), + }; +}); + +describe('SupabaseSelectTool', () => { + let mockConfig: Config; + let mockSupabaseClient: ReturnType<typeof createClient>; + let mockQuery: { + eq: ReturnType<typeof vi.fn>; + limit: ReturnType<typeof vi.fn>; + select: ReturnType<typeof vi.fn>; + }; + + beforeEach(() => { + vi.resetAllMocks(); + process.env['SUPABASE_URL'] = 'https://test.supabase.co'; + process.env['SUPABASE_KEY'] = 'test-key'; + + mockConfig = {} as unknown as Config; + + // Get the mocked client instance + mockSupabaseClient = createClient('', '') as unknown as ReturnType< + typeof createClient + >; + mockQuery = (mockSupabaseClient as unknown as { from: ReturnType<typeof vi.fn> }).from('test') as { + eq: ReturnType<typeof vi.fn>; + limit: ReturnType<typeof vi.fn>; + select: ReturnType<typeof vi.fn>; + }; + }); + + afterEach(() => { + delete process.env['SUPABASE_URL']; + delete process.env['SUPABASE_KEY']; + vi.restoreAllMocks(); + }); + + describe('parameter validation', () => { + it('should throw error when table parameter is missing', 
() => { + const tool = new SupabaseSelectTool(mockConfig); + const params = {}; + /* @ts-expect-error - we are testing validation */ + expect(() => tool.build(params)).toThrow(); + }); + + it('should throw error when table is empty string', () => { + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: '' }; + expect(() => tool.build(params)).toThrow('Parameter "table" must be a non-empty string'); + }); + + it('should throw error when limit exceeds maximum', () => { + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users', limit: 200 }; + expect(() => tool.build(params)).toThrow('Parameter "limit" must be a number between 1 and 100'); + }); + + it('should throw error when limit is less than 1', () => { + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users', limit: 0 }; + expect(() => tool.build(params)).toThrow('Parameter "limit" must be a number between 1 and 100'); + }); + + it('should throw error when columns is not an array', () => { + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users', columns: 'not-an-array' }; + /* @ts-expect-error - we are testing validation */ + expect(() => tool.build(params)).toThrow('Parameter "columns" must be an array of strings'); + }); + + it('should throw error when columns array is empty', () => { + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users', columns: [] }; + expect(() => tool.build(params)).toThrow('Parameter "columns" must contain at least one column name'); + }); + + it('should throw error when columns contains non-string', () => { + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users', columns: ['id', 123] }; + /* @ts-expect-error - we are testing validation */ + expect(() => tool.build(params)).toThrow('Parameter "columns" must contain only non-empty strings'); + }); + + it('should accept valid parameters', () => { + const tool = new SupabaseSelectTool(mockConfig); + const params = { + table: 'users', + filter: { status: 'active' }, + limit: 50, + columns: ['id', 'name', 'email'], + }; + expect(() => tool.build(params)).not.toThrow(); + }); + }); + + describe('execute', () => { + it('should return error when SUPABASE_URL is not set', async () => { + delete process.env['SUPABASE_URL']; + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(result.error?.type).toBe(ToolErrorType.EXECUTION_FAILED); + expect(result.llmContent).toContain('SUPABASE_URL'); + }); + + it('should return error when SUPABASE_KEY is not set', async () => { + delete process.env['SUPABASE_KEY']; + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(result.error?.type).toBe(ToolErrorType.EXECUTION_FAILED); + expect(result.llmContent).toContain('SUPABASE_KEY'); + }); + + it('should return error when both SUPABASE_URL and SUPABASE_KEY are not set', async () => { + delete process.env['SUPABASE_URL']; + delete process.env['SUPABASE_KEY']; + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + 
expect(result.error?.type).toBe(ToolErrorType.EXECUTION_FAILED); + expect(result.llmContent).toContain('SUPABASE_URL and SUPABASE_KEY'); + }); + + it('should create client and execute select query', async () => { + mockQuery.select.mockResolvedValue({ + data: [ + { id: 1, name: 'John', email: 'john@example.com' }, + { id: 2, name: 'Jane', email: 'jane@example.com' }, + ], + error: null, + }); + + const tool = new SupabaseSelectTool(mockConfig); + const params = { + table: 'users', + filter: { status: 'active' }, + limit: 10, + }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(createClient).toHaveBeenCalledWith( + 'https://test.supabase.co', + 'test-key', + ); + expect(mockQuery.eq).toHaveBeenCalledWith('status', 'active'); + expect(mockQuery.limit).toHaveBeenCalledWith(10); + expect(result.error).toBeUndefined(); + const content = JSON.parse(result.llmContent as string); + expect(content.success).toBe(true); + expect(content.count).toBe(2); + expect(content.data).toHaveLength(2); + }); + + it('should enforce maximum limit of 100', async () => { + mockQuery.select.mockResolvedValue({ + data: [], + error: null, + }); + + const tool = new SupabaseSelectTool(mockConfig); + const params = { + table: 'users', + limit: 150, // Should be capped at 100 + }; + const invocation = tool.build(params); + await invocation.execute(new AbortController().signal); + + expect(mockQuery.limit).toHaveBeenCalledWith(100); + }); + + it('should select specific columns when provided', async () => { + mockQuery.select.mockResolvedValue({ + data: [], + error: null, + }); + + const tool = new SupabaseSelectTool(mockConfig); + const params = { + table: 'users', + columns: ['id', 'name', 'email'], + }; + const invocation = tool.build(params); + await invocation.execute(new AbortController().signal); + + expect(mockQuery.select).toHaveBeenCalledWith('id,name,email'); + }); + + it('should select all columns when columns not provided', async () => { + mockQuery.select.mockResolvedValue({ + data: [], + error: null, + }); + + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users' }; + const invocation = tool.build(params); + await invocation.execute(new AbortController().signal); + + expect(mockQuery.select).toHaveBeenCalledWith('*'); + }); + + it('should apply multiple filters', async () => { + mockQuery.select.mockResolvedValue({ + data: [], + error: null, + }); + + const tool = new SupabaseSelectTool(mockConfig); + const params = { + table: 'users', + filter: { status: 'active', age: 18 }, + }; + const invocation = tool.build(params); + await invocation.execute(new AbortController().signal); + + expect(mockQuery.eq).toHaveBeenCalledWith('status', 'active'); + expect(mockQuery.eq).toHaveBeenCalledWith('age', 18); + }); + + it('should handle Supabase errors gracefully', async () => { + mockQuery.select.mockResolvedValue({ + data: null, + error: { message: 'Table does not exist' }, + }); + + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'nonexistent' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(result.error?.type).toBe(ToolErrorType.EXECUTION_FAILED); + expect(result.llmContent).toContain('Table "nonexistent" does not exist'); + }); + + it('should handle authentication errors gracefully', async () => { + mockQuery.select.mockResolvedValue({ + data: null, + error: { message: 'JWT expired' }, + }); + + const tool 
= new SupabaseSelectTool(mockConfig); + const params = { table: 'users' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(result.error?.type).toBe(ToolErrorType.EXECUTION_FAILED); + expect(result.llmContent).toContain('Authentication failed'); + expect(result.llmContent).not.toContain('SUPABASE_KEY'); + }); + + it('should handle connection errors gracefully', async () => { + mockQuery.select.mockResolvedValue({ + data: null, + error: { message: 'Network error' }, + }); + + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(result.error?.type).toBe(ToolErrorType.EXECUTION_FAILED); + expect(result.llmContent).toContain('Failed to connect to Supabase'); + }); + + it('should handle empty results', async () => { + mockQuery.select.mockResolvedValue({ + data: [], + error: null, + }); + + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users' }; + const invocation = tool.build(params); + const result = await invocation.execute(new AbortController().signal); + + expect(result.error).toBeUndefined(); + const content = JSON.parse(result.llmContent as string); + expect(content.success).toBe(true); + expect(content.count).toBe(0); + expect(content.data).toEqual([]); + }); + }); + + describe('getDescription', () => { + it('should return descriptive string', () => { + const tool = new SupabaseSelectTool(mockConfig); + const params = { + table: 'users', + filter: { status: 'active' }, + limit: 20, + columns: ['id', 'name'], + }; + const invocation = tool.build(params); + const description = invocation.getDescription(); + + expect(description).toContain('users'); + expect(description).toContain('active'); + expect(description).toContain('id, name'); + expect(description).toContain('20'); + }); + + it('should show * when columns not specified', () => { + const tool = new SupabaseSelectTool(mockConfig); + const params = { table: 'users' }; + const invocation = tool.build(params); + const description = invocation.getDescription(); + + expect(description).toContain('*'); + }); + }); +}); + diff --git a/packages/core/src/tools/supabase-select.ts b/packages/core/src/tools/supabase-select.ts new file mode 100644 index 0000000..e0c18e1 --- /dev/null +++ b/packages/core/src/tools/supabase-select.ts @@ -0,0 +1,355 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import type { FunctionDeclaration } from '@google/genai'; +import { createClient, type SupabaseClient } from '@supabase/supabase-js'; +import type { Config } from '../config/config.js'; +import { ToolErrorType } from './tool-error.js'; +import type { ToolResult } from './tools.js'; +import { + BaseDeclarativeTool, + BaseToolInvocation, + Kind, +} from './tools.js'; + +const MAX_ROWS = 100; + +const supabaseSelectToolSchemaData: FunctionDeclaration = { + name: 'supabase_select', + description: + 'Executes a read-only select() query on a Supabase table. Returns matching rows as a JSON array. Maximum 100 rows per query.', + parametersJsonSchema: { + type: 'object', + properties: { + table: { + type: 'string', + description: 'The name of the table to query', + }, + filter: { + type: 'object', + description: + 'Filter conditions as key-value pairs (e.g., { status: "active", age: 18 }). 
Each key is a column name and value is the filter value.', + additionalProperties: true, + }, + limit: { + type: 'number', + description: 'Maximum number of rows to return (default: 100, max: 100)', + minimum: 1, + maximum: MAX_ROWS, + default: MAX_ROWS, + }, + columns: { + type: 'array', + items: { + type: 'string', + }, + description: 'Specific columns to select (e.g., ["id", "name", "email"]). If not specified, all columns are returned.', + }, + dataSource: { + type: 'string', + description: + 'Optional name of a configured data source. If provided, uses credentials from the data source configuration instead of SUPABASE_URL and SUPABASE_KEY environment variables.', + }, + }, + required: ['table'], + additionalProperties: false, + }, +}; + +const supabaseSelectToolDescription = ` +Executes a read-only select() query on a Supabase table. + +## Usage + +Use this tool to query Supabase databases when you need to retrieve data. This tool is read-only and safe to use. + +## Parameters + +- \`table\` (string, required): The name of the table to query +- \`filter\` (object, optional): Filter conditions as key-value pairs. Examples: + - \`{ status: "active" }\` - Find rows where status equals "active" + - \`{ age: 18, city: "New York" }\` - Find rows where age is 18 AND city is "New York" + - Note: Only equality matches are supported; range operators (greater than, less than, etc.) cannot be expressed with this tool +- \`limit\` (number, optional): Maximum number of rows to return (default: 100, max: 100) +- \`columns\` (string[], optional): Specific columns to select. If not specified, all columns are returned. + +## Data Source Configuration + +You can use a configured data source by providing the \`dataSource\` parameter. If provided, the tool will use credentials from the data source configuration instead of environment variables. + +## Environment Variables + +If \`dataSource\` is not provided, this tool requires the following environment variables to be set in your .env file: +- \`SUPABASE_URL\`: Your Supabase project URL (e.g., https://your-project.supabase.co) +- \`SUPABASE_KEY\`: Your Supabase anon key or service role key + +## Safety + +- Read-only operations only (select() queries) +- Maximum 100 rows per query +- Credentials are read from data source configuration or environment variables, never exposed to the LLM +`; + +export interface SupabaseSelectParams { + table: string; + filter?: Record<string, unknown>; + limit?: number; + columns?: string[]; + dataSource?: string; +} + +class SupabaseSelectToolInvocation extends BaseToolInvocation< + SupabaseSelectParams, + ToolResult +> { + constructor( + private readonly config: Config, + params: SupabaseSelectParams, + ) { + super(params); + } + + getDescription(): string { + const filterStr = this.params.filter + ? JSON.stringify(this.params.filter) + : '{}'; + const limit = this.params.limit ?? MAX_ROWS; + const columnsStr = this.params.columns + ? 
this.params.columns.join(', ') + : '*'; + return `Querying Supabase table: ${this.params.table} with filter ${filterStr}, columns ${columnsStr}, limit ${limit}`; + } + + async execute(_signal: AbortSignal): Promise<ToolResult> { + let supabaseUrl: string | undefined; + let supabaseKey: string | undefined; + + // Try to load from data source configuration if provided + if (this.params.dataSource) { + try { + const dataConfigManager = this.config.getDataConfigManager(); + const dataConfig = await dataConfigManager.loadDataConfig( + this.params.dataSource, + ); + if (!dataConfig) { + const errorMessage = `Data source "${this.params.dataSource}" not found. Please configure it using /datasource create.`; + return { + llmContent: JSON.stringify({ + success: false, + error: errorMessage, + }), + returnDisplay: `Error: ${errorMessage}`, + error: { + message: errorMessage, + type: ToolErrorType.EXECUTION_FAILED, + }, + }; + } + if (dataConfig.type !== 'supabase') { + const errorMessage = `Data source "${this.params.dataSource}" is of type "${dataConfig.type}", not "supabase".`; + return { + llmContent: JSON.stringify({ + success: false, + error: errorMessage, + }), + returnDisplay: `Error: ${errorMessage}`, + error: { + message: errorMessage, + type: ToolErrorType.EXECUTION_FAILED, + }, + }; + } + const creds = dataConfig.credentials as { url: string; key: string }; + supabaseUrl = creds.url; + supabaseKey = creds.key; + } catch (error) { + const errorMessage = `Failed to load data source configuration: ${error instanceof Error ? error.message : 'Unknown error'}`; + return { + llmContent: JSON.stringify({ + success: false, + error: errorMessage, + }), + returnDisplay: `Error: ${errorMessage}`, + error: { + message: errorMessage, + type: ToolErrorType.EXECUTION_FAILED, + }, + }; + } + } else { + // Fall back to environment variables + supabaseUrl = process.env['SUPABASE_URL']; + supabaseKey = process.env['SUPABASE_KEY']; + } + + if (!supabaseUrl || !supabaseKey) { + const missingVars: string[] = []; + if (!supabaseUrl) missingVars.push('SUPABASE_URL'); + if (!supabaseKey) missingVars.push('SUPABASE_KEY'); + const errorMessage = `${missingVars.join(' and ')} environment variable${missingVars.length > 1 ? 's are' : ' is'} not set and no dataSource was provided. Please set them in your .env file or configure a data source using /datasource create.`; + return { + llmContent: JSON.stringify({ + success: false, + error: errorMessage, + }), + returnDisplay: `Error: ${errorMessage}`, + error: { + message: errorMessage, + type: ToolErrorType.EXECUTION_FAILED, + }, + }; + } + + try { + // Validate parameters + if (!this.params.table || typeof this.params.table !== 'string') { + throw new Error('Parameter "table" must be a non-empty string'); + } + + // Enforce limit + const limit = Math.min(this.params.limit ?? MAX_ROWS, MAX_ROWS); + + // Create Supabase client + const supabase: SupabaseClient = createClient(supabaseUrl, supabaseKey); + + // Build query + let query = supabase.from(this.params.table).select( + this.params.columns ? 
this.params.columns.join(',') : '*', + ); + + // Apply filters + if (this.params.filter) { + for (const [column, value] of Object.entries(this.params.filter)) { + query = query.eq(column, value); + } + } + + // Apply limit + query = query.limit(limit); + + // Execute read-only select() query + const { data, error } = await query; + + if (error) { + throw new Error(error.message); + } + + const results = data || []; + const resultCount = results.length; + const resultMessage = `Found ${resultCount} row${resultCount !== 1 ? 's' : ''} in table ${this.params.table}`; + + return { + llmContent: JSON.stringify({ + success: true, + count: resultCount, + data: results, + }), + returnDisplay: resultMessage, + }; + } catch (error) { + const errorMessage = + error instanceof Error ? error.message : String(error); + console.error( + `[SupabaseSelectTool] Error executing query on table ${this.params.table}: ${errorMessage}`, + ); + + // Don't expose connection details in error messages + let safeErrorMessage = errorMessage; + if ( + errorMessage.includes('authentication') || + errorMessage.includes('auth') || + errorMessage.includes('JWT') + ) { + safeErrorMessage = + 'Authentication failed. Please check your SUPABASE_KEY.'; + } else if ( + errorMessage.includes('connection') || + errorMessage.includes('connect') || + errorMessage.includes('network') + ) { + safeErrorMessage = + 'Failed to connect to Supabase. Please check your SUPABASE_URL and ensure the service is accessible.'; + } else if (errorMessage.includes('relation') || errorMessage.includes('does not exist')) { + safeErrorMessage = `Table "${this.params.table}" does not exist or is not accessible.`; + } + + return { + llmContent: JSON.stringify({ + success: false, + error: safeErrorMessage, + }), + returnDisplay: `Error: ${safeErrorMessage}`, + error: { + message: safeErrorMessage, + type: ToolErrorType.EXECUTION_FAILED, + }, + }; + } + } +} + +export class SupabaseSelectTool extends BaseDeclarativeTool< + SupabaseSelectParams, + ToolResult +> { + static readonly Name: string = supabaseSelectToolSchemaData.name!; + + constructor(private readonly config: Config) { + super( + SupabaseSelectTool.Name, + 'SupabaseSelect', + supabaseSelectToolDescription, + Kind.Read, + supabaseSelectToolSchemaData.parametersJsonSchema as Record< + string, + unknown + >, + ); + } + + protected override validateToolParamValues( + params: SupabaseSelectParams, + ): string | null { + if ( + !params.table || + typeof params.table !== 'string' || + params.table.trim() === '' + ) { + return 'Parameter "table" must be a non-empty string'; + } + + if (params.limit !== undefined) { + if ( + typeof params.limit !== 'number' || + params.limit < 1 || + params.limit > MAX_ROWS + ) { + return `Parameter "limit" must be a number between 1 and ${MAX_ROWS}`; + } + } + + if (params.columns !== undefined) { + if (!Array.isArray(params.columns)) { + return 'Parameter "columns" must be an array of strings'; + } + if (params.columns.length === 0) { + return 'Parameter "columns" must contain at least one column name'; + } + for (const col of params.columns) { + if (typeof col !== 'string' || col.trim() === '') { + return 'Parameter "columns" must contain only non-empty strings'; + } + } + } + + return null; + } + + protected createInvocation(params: SupabaseSelectParams) { + return new SupabaseSelectToolInvocation(this.config, params); + } +} + diff --git a/packages/core/src/tools/test-mongo-direct.ts b/packages/core/src/tools/test-mongo-direct.ts new file mode 100644 index 
0000000..94d9028 --- /dev/null +++ b/packages/core/src/tools/test-mongo-direct.ts @@ -0,0 +1,127 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + * + * Direct test script for MongoDB tool + * Run with: npx tsx packages/core/src/tools/test-mongo-direct.ts + */ + +import { MongoFindTool } from './mongo-find.js'; +import type { Config } from '../config/config.js'; + +// Mock config (minimal implementation) +const mockConfig = {} as unknown as Config; + +async function testMongoFind() { + console.log('Testing MongoDB Find Tool...\n'); + + // Check if MONGODB_URI is set + if (!process.env['MONGODB_URI']) { + console.error('❌ MONGODB_URI environment variable is not set!'); + console.log('\nPlease set it in your .env file or export it:'); + console.log(' export MONGODB_URI="mongodb://localhost:27017/testdb"'); + console.log(' # Or for MongoDB Atlas:'); + console.log( + ' export MONGODB_URI="mongodb+srv://user:pass@cluster.mongodb.net/dbname"', + ); + process.exit(1); + } + + console.log(`✓ MONGODB_URI is set: ${process.env['MONGODB_URI'].replace(/:[^:@]+@/, ':****@')}\n`); + + const tool = new MongoFindTool(mockConfig); + + // Test 1: Simple query + console.log('Test 1: Simple query (all documents)'); + try { + const params1 = { + db: 'testdb', + collection: 'users', + limit: 5, + }; + const invocation1 = tool.build(params1); + console.log(` Description: ${invocation1.getDescription()}`); + const result1 = await invocation1.execute(new AbortController().signal); + const content1 = JSON.parse(result1.llmContent as string); + console.log(` ✓ Success: ${content1.success}`); + console.log(` ✓ Count: ${content1.count}`); + if (content1.data && content1.data.length > 0) { + console.log(` ✓ Sample document:`, JSON.stringify(content1.data[0], null, 2)); + } + console.log(` Display: ${result1.returnDisplay}\n`); + } catch (error) { + console.error(` ❌ Error: ${error instanceof Error ? error.message : String(error)}\n`); + } + + // Test 2: Query with filter + console.log('Test 2: Query with filter'); + try { + const params2 = { + db: 'testdb', + collection: 'users', + filter: { status: 'active' }, + limit: 10, + }; + const invocation2 = tool.build(params2); + console.log(` Description: ${invocation2.getDescription()}`); + const result2 = await invocation2.execute(new AbortController().signal); + const content2 = JSON.parse(result2.llmContent as string); + console.log(` ✓ Success: ${content2.success}`); + console.log(` ✓ Count: ${content2.count}`); + console.log(` Display: ${result2.returnDisplay}\n`); + } catch (error) { + console.error(` ❌ Error: ${error instanceof Error ? 
error.message : String(error)}\n`); + } + + // Test 3: Query with projection + console.log('Test 3: Query with projection (specific fields)'); + try { + const params3 = { + db: 'testdb', + collection: 'users', + filter: { status: 'active' }, + projection: { name: 1, email: 1, _id: 0 }, + limit: 5, + }; + const invocation3 = tool.build(params3); + console.log(` Description: ${invocation3.getDescription()}`); + const result3 = await invocation3.execute(new AbortController().signal); + const content3 = JSON.parse(result3.llmContent as string); + console.log(` ✓ Success: ${content3.success}`); + console.log(` ✓ Count: ${content3.count}`); + if (content3.data && content3.data.length > 0) { + console.log(` ✓ Sample (with projection):`, JSON.stringify(content3.data[0], null, 2)); + } + console.log(` Display: ${result3.returnDisplay}\n`); + } catch (error) { + console.error(` ❌ Error: ${error instanceof Error ? error.message : String(error)}\n`); + } + + // Test 4: Error handling - invalid database + console.log('Test 4: Error handling (non-existent database)'); + try { + const params4 = { + db: 'nonexistent_db', + collection: 'users', + }; + const invocation4 = tool.build(params4); + const result4 = await invocation4.execute(new AbortController().signal); + const content4 = JSON.parse(result4.llmContent as string); + if (!content4.success) { + console.log(` ✓ Correctly handled error: ${content4.error}`); + } + console.log(` Display: ${result4.returnDisplay}\n`); + } catch (error) { + console.log(` ✓ Error caught: ${error instanceof Error ? error.message : String(error)}\n`); + } + + console.log('✅ All tests completed!'); +} + +// Run the test +testMongoFind().catch((error) => { + console.error('Fatal error:', error); + process.exit(1); +}); + diff --git a/packages/core/src/tools/tool-names.ts b/packages/core/src/tools/tool-names.ts index d449442..91462d5 100644 --- a/packages/core/src/tools/tool-names.ts +++ b/packages/core/src/tools/tool-names.ts @@ -31,4 +31,6 @@ export const ToolNames = { BROWSER_CLOSE: 'browser_close', WEATHER: 'weather', WEB_SEARCH: 'web_search', + MONGO_FIND: 'mongo_find', + SUPABASE_SELECT: 'supabase_select', } as const;
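The patch ships a direct smoke-test script only for the MongoDB tool (test-mongo-direct.ts). If an equivalent manual check for the Supabase tool is useful, a minimal sketch is below; it is not part of the patch, the file name, table, and filter values are illustrative, and it relies only on the `build`/`getDescription`/`execute` surface exercised by test-mongo-direct.ts and the unit tests above.

```typescript
/**
 * Hypothetical companion to test-mongo-direct.ts; not included in this patch.
 * Run with: npx tsx packages/core/src/tools/test-supabase-direct.ts
 */
import { SupabaseSelectTool } from './supabase-select.js';
import type { Config } from '../config/config.js';

// Minimal config stub, mirroring test-mongo-direct.ts.
const mockConfig = {} as unknown as Config;

async function testSupabaseSelect() {
  // Without a dataSource parameter, the tool falls back to these env vars.
  if (!process.env['SUPABASE_URL'] || !process.env['SUPABASE_KEY']) {
    console.error('SUPABASE_URL and SUPABASE_KEY must be set in your .env file.');
    process.exit(1);
  }

  const tool = new SupabaseSelectTool(mockConfig);

  // Equality filter, explicit column list, small limit (the tool caps limit at 100).
  const invocation = tool.build({
    table: 'users',
    filter: { status: 'active' },
    columns: ['id', 'name', 'email'],
    limit: 5,
  });
  console.log(`Description: ${invocation.getDescription()}`);

  const result = await invocation.execute(new AbortController().signal);
  const content = JSON.parse(result.llmContent as string);
  console.log(`success: ${content.success}, count: ${content.count}`);
  console.log(`Display: ${result.returnDisplay}`);
}

testSupabaseSelect().catch((error) => {
  console.error('Fatal error:', error);
  process.exit(1);
});
```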
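For quick reference, these are the credential shapes the two tools assume when a `dataSource` name is passed, as implied by the casts in mongo-find.ts and supabase-select.ts. The type names below are illustrative only; the configurations written by `/datasource create` are managed elsewhere and may carry additional fields.

```typescript
// Illustrative only: shapes implied by the casts in each tool's execute().
// mongo-find.ts:      (dataConfig.credentials as { uri: string }).uri
// supabase-select.ts: dataConfig.credentials as { url: string; key: string }
type MongoDataSourceCredentials = {
  uri: string; // e.g. mongodb+srv://user:pass@cluster.mongodb.net/dbname
};

type SupabaseDataSourceCredentials = {
  url: string; // e.g. https://your-project.supabase.co
  key: string; // anon or service-role key
};

// Both tools check dataConfig.type ('mongodb' or 'supabase') before reading
// credentials, and fall back to MONGODB_URI or SUPABASE_URL/SUPABASE_KEY
// environment variables when no dataSource parameter is provided.
```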