From 3b0dee015120a2d269b49b4b92c3a2354d69ca84 Mon Sep 17 00:00:00 2001 From: Art Moskvin Date: Thu, 20 Mar 2025 11:51:49 +0100 Subject: [PATCH 1/6] add openai provider --- src/components/ChatArea.tsx | 2 - src/components/SettingsDialog.tsx | 78 -------- src/hooks/useMessageConversion.ts | 4 +- src/index.ts | 171 +++++++++--------- src/lib/converters/anthropic.ts | 120 ++++--------- src/lib/converters/openai.ts | 153 ++++++++++++++++ src/lib/mcp/adapters.ts | 17 ++ src/lib/messageConverters.ts | 54 +++--- src/lib/utils.ts | 30 ++-- src/main/services/anthropic.ts | 286 ++++++++++++++++++------------ src/main/services/chat.ts | 173 +++++++++++------- src/main/services/llm.ts | 52 ++++++ src/main/services/llmprovider.ts | 79 +++++++++ src/main/services/openai.ts | 232 ++++++++++++++++++++++++ src/types/message.ts | 18 +- src/types/settings.ts | 79 ++++----- 16 files changed, 1031 insertions(+), 517 deletions(-) create mode 100644 src/lib/converters/openai.ts create mode 100644 src/main/services/llm.ts create mode 100644 src/main/services/llmprovider.ts create mode 100644 src/main/services/openai.ts diff --git a/src/components/ChatArea.tsx b/src/components/ChatArea.tsx index 887809c..38f0a1f 100644 --- a/src/components/ChatArea.tsx +++ b/src/components/ChatArea.tsx @@ -8,7 +8,6 @@ import { ScrollArea } from "@/components/ui/scroll-area"; import { H2 } from "@/components/ui/typography"; import { Avatar, AvatarImage, AvatarFallback } from "@/components/ui/avatar"; import { useMessageConversion } from "@/hooks/useMessageConversion"; -import { SidebarTrigger } from "./ui/sidebar"; import { Breadcrumb, BreadcrumbItem, @@ -16,7 +15,6 @@ import { BreadcrumbPage, BreadcrumbSeparator, } from "./ui/breadcrumb"; -import { Separator } from "@radix-ui/react-separator"; interface ChatAreaProps { conversation: Conversation | null; diff --git a/src/components/SettingsDialog.tsx b/src/components/SettingsDialog.tsx index be7487e..401eb38 100644 --- a/src/components/SettingsDialog.tsx +++ b/src/components/SettingsDialog.tsx @@ -51,84 +51,6 @@ function AnthropicSettings({ settings, onChange }: ProviderSettingsProps) { className="col-span-3" /> -
-        [removed model-selection UI: chat/title model inputs and a thinking toggle —
-         onChange({ ...settings, models: { ...settings.models, thinking: value } }),
-         disabled unless settings.models.chat or settings.models.title includes 'claude-3-7',
-         with the hint "Only available with Claude 3.7 models"]
); } diff --git a/src/hooks/useMessageConversion.ts b/src/hooks/useMessageConversion.ts index f43df8c..4664346 100644 --- a/src/hooks/useMessageConversion.ts +++ b/src/hooks/useMessageConversion.ts @@ -1,11 +1,11 @@ import { useMemo } from 'react'; import { UIMessage } from '@/types'; -import { convertClaudeMessages } from '@/lib/messageConverters'; +import { convertMessages } from '@/lib/messageConverters'; import { Message } from '@/types/message'; export function useMessageConversion(messages: Message[] | undefined): UIMessage[] { return useMemo(() => { if (!messages) return []; - return convertClaudeMessages(messages); + return convertMessages(messages); }, [messages]); } diff --git a/src/index.ts b/src/index.ts index e7a052a..467c2b2 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,28 +1,32 @@ -import { app, BrowserWindow, ipcMain, session, dialog } from 'electron'; -import * as path from 'path'; -import * as fs from 'fs'; - -import { initializeDatabase, setupDbHandlers } from './main/db'; -import { initializeMCP, listTools } from './main/mcp'; -import { AnalyticsService } from '@/main/services/analytics'; -import { ChatService, setupChatHandlers } from './main/services/chat'; -import { AnthropicService } from './main/services/anthropic'; -import { PostHog } from 'posthog-node'; -import { getOrCreateUserId } from './lib/account'; - +import { app, BrowserWindow, ipcMain, session, dialog } from "electron"; +import * as path from "path"; +import * as fs from "fs"; + +import { initializeDatabase, setupDbHandlers, getUserSettings } from "./main/db"; +import { initializeMCP, listTools } from "./main/mcp"; +import { AnalyticsService } from "@/main/services/analytics"; +import { ChatService, setupChatHandlers } from "./main/services/chat"; +import { AnthropicService } from "./main/services/anthropic"; +import { OpenAIService } from "./main/services/openai"; +import { LLMServiceProvider } from "./main/services/llmprovider"; +import { PostHog } from "posthog-node"; +import { getOrCreateUserId } from "./lib/account"; // Store chat service reference for cleanup let chatService: ChatService | null = null; // Set up logging -const setupLogging = (debug: boolean = false) => { - const logPath = path.join(app.getPath('userData'), 'logs'); +const setupLogging = (debug = false) => { + const logPath = path.join(app.getPath("userData"), "logs"); if (!fs.existsSync(logPath)) { fs.mkdirSync(logPath, { recursive: true }); } - const logFile = path.join(logPath, `${new Date().toISOString().split('T')[0]}.log`); - const logStream = fs.createWriteStream(logFile, { flags: 'a' }); + const logFile = path.join( + logPath, + `${new Date().toISOString().split("T")[0]}.log`, + ); + const logStream = fs.createWriteStream(logFile, { flags: "a" }); // Redirect console.log and console.error to file const originalLog = console.log; @@ -30,33 +34,39 @@ const setupLogging = (debug: boolean = false) => { const originalDebug = console.debug; console.log = (...args) => { - const message = args.map(arg => - typeof arg === 'object' ? JSON.stringify(arg, null, 2) : arg - ).join(' '); + const message = args + .map((arg) => + typeof arg === "object" ? JSON.stringify(arg, null, 2) : arg, + ) + .join(" "); logStream.write(`[LOG ${new Date().toISOString()}] ${message}\n`); originalLog.apply(console, args); }; console.error = (...args) => { - const message = args.map(arg => - typeof arg === 'object' ? JSON.stringify(arg, null, 2) : arg - ).join(' '); + const message = args + .map((arg) => + typeof arg === "object" ? 
JSON.stringify(arg, null, 2) : arg, + ) + .join(" "); logStream.write(`[ERROR ${new Date().toISOString()}] ${message}\n`); originalError.apply(console, args); }; console.debug = (...args) => { if (debug) { - const message = args.map(arg => - typeof arg === 'object' ? JSON.stringify(arg, null, 2) : arg - ).join(' '); + const message = args + .map((arg) => + typeof arg === "object" ? JSON.stringify(arg, null, 2) : arg, + ) + .join(" "); logStream.write(`[DEBUG ${new Date().toISOString()}] ${message}\n`); originalDebug.apply(console, args); - }; - } + } + }; // Log startup info - console.debug('App starting...', { + console.debug("App starting...", { version: app.getVersion(), electron: process.versions.electron, chrome: process.versions.chrome, @@ -66,7 +76,7 @@ const setupLogging = (debug: boolean = false) => { isPackaged: app.isPackaged, resourcesPath: process.resourcesPath, appPath: app.getAppPath(), - userData: app.getPath('userData') + userData: app.getPath("userData"), }); }; // This allows TypeScript to pick up the magic constants that's auto-generated by Forge's Webpack @@ -76,29 +86,29 @@ declare const MAIN_WINDOW_WEBPACK_ENTRY: string; declare const MAIN_WINDOW_PRELOAD_WEBPACK_ENTRY: string; // Set up logging as early as possible -const DEBUG = process.env.HIDE_APP_DEBUG === 'true'; +const DEBUG = process.env.HIDE_APP_DEBUG === "true"; setupLogging(DEBUG); // Catch any uncaught errors -process.on('uncaughtException', (error) => { - console.error('Uncaught exception:', { +process.on("uncaughtException", (error) => { + console.error("Uncaught exception:", { error: error.toString(), stack: error.stack, - details: error + details: error, }); }); -process.on('unhandledRejection', (reason, _promise) => { +process.on("unhandledRejection", (reason, _promise) => { const error = reason instanceof Error ? reason : new Error(String(reason)); - console.error('Unhandled rejection:', { + console.error("Unhandled rejection:", { error: error.toString(), stack: error.stack, - details: reason + details: reason, }); }); // Handle creating/removing shortcuts on Windows when installing/uninstalling. -if (require('electron-squirrel-startup')) { +if (require("electron-squirrel-startup")) { app.quit(); } @@ -107,14 +117,14 @@ const createWindow = (): BrowserWindow => { const mainWindow = new BrowserWindow({ height: 900, width: 1440, - titleBarStyle: 'hidden', - backgroundColor: 'rgb(2 6 23)', + titleBarStyle: "hidden", + backgroundColor: "rgb(2 6 23)", trafficLightPosition: { x: 10, y: 10 }, webPreferences: { preload: MAIN_WINDOW_PRELOAD_WEBPACK_ENTRY, webSecurity: true, nodeIntegration: false, - contextIsolation: true + contextIsolation: true, }, }); @@ -123,14 +133,14 @@ const createWindow = (): BrowserWindow => { callback({ responseHeaders: { ...details.responseHeaders, - 'Content-Security-Policy': [ + "Content-Security-Policy": [ "default-src 'self';", "script-src 'self' 'unsafe-eval' 'unsafe-inline' https://eu-assets.i.posthog.com;", "style-src 'self' 'unsafe-inline' data:;", "connect-src 'self' https://eu.i.posthog.com https://eu-assets.i.posthog.com https://api.anthropic.com;", "img-src 'self' data: https:;", - ].join(' ') - } + ].join(" "), + }, }); }); @@ -146,25 +156,25 @@ const createWindow = (): BrowserWindow => { // Quit when all windows are closed, except on macOS. There, it's common // for applications and their menu bar to stay active until the user quits // explicitly with Cmd + Q. 
-app.on('window-all-closed', () => { - if (process.platform !== 'darwin') { +app.on("window-all-closed", () => { + if (process.platform !== "darwin") { app.quit(); } }); -app.on('will-quit', async () => { +app.on("will-quit", async () => { if (chatService) { - console.debug('Stopping all active chats before quit...'); + console.debug("Stopping all active chats before quit..."); try { await chatService.stopAllChats(); - console.debug('All chats stopped successfully'); + console.debug("All chats stopped successfully"); } catch (error) { - console.error('Error stopping chats:', error); + console.error("Error stopping chats:", error); } } }); -app.on('activate', () => { +app.on("activate", () => { // On OS X it's common to re-create a window in the app when the // dock icon is clicked and there are no other windows open. if (BrowserWindow.getAllWindows().length === 0) { @@ -174,11 +184,11 @@ app.on('activate', () => { const getMCPConfig = async () => { const mcpPath = !app.isPackaged - ? process.env.LOCAL_MCP_PATH // Development TODO: Make this configurable - : path.join(process.resourcesPath, 'hide-mcp'); // Production (binary) + ? process.env.LOCAL_MCP_PATH // Development TODO: Make this configurable + : path.join(process.resourcesPath, "hide-mcp"); // Production (binary) // Log some diagnostic information - console.debug('MCP configuration:', { + console.debug("MCP configuration:", { production: app.isPackaged, resourcesPath: process.resourcesPath, mcpPath, @@ -193,25 +203,23 @@ const getMCPConfig = async () => { // In development, use system's uv if (!app.isPackaged) { return { - cmd: 'uv', - args: ['--directory', mcpPath, 'run', 'hide-mcp', 'server'] + cmd: "uv", + args: ["--directory", mcpPath, "run", "hide-mcp", "server"], }; } // In production, use binary return { cmd: mcpPath, - args: [ - 'server' - ] + args: ["server"], }; }; // Handle directory picker dialog -ipcMain.handle('dialog:showDirectoryPicker', async () => { +ipcMain.handle("dialog:showDirectoryPicker", async () => { const result = await dialog.showOpenDialog({ - properties: ['openDirectory', 'createDirectory'], - title: 'Select Project Directory', + properties: ["openDirectory", "createDirectory"], + title: "Select Project Directory", }); return result; }); @@ -222,17 +230,17 @@ app.whenReady().then(async () => { setupDbHandlers(); const { cmd, args } = await getMCPConfig(); - console.debug('Initializing MCP...', { cmd, args }); + console.debug("Initializing MCP...", { cmd, args }); try { // Initialize analytics - const posthog = new PostHog( - process.env.POSTHOG_API_KEY, - { host: 'https://eu.i.posthog.com' } - ); + const posthog = new PostHog(process.env.POSTHOG_API_KEY, { + host: "https://eu.i.posthog.com", + }); - const isProd = process.env.NODE_ENV === 'production'; - const enableAnalytics = process.env.ENABLE_ANALYTICS?.toLowerCase() === 'true' || false; + const isProd = process.env.NODE_ENV === "production"; + const enableAnalytics = + process.env.ENABLE_ANALYTICS?.toLowerCase() === "true" || false; const analytics = new AnalyticsService(posthog, isProd || enableAnalytics); // Initialize MCP first @@ -243,29 +251,30 @@ app.whenReady().then(async () => { // Wait for MCP initialization await initPromise; - console.debug('MCP initialized successfully'); + console.debug("MCP initialized successfully"); - // Now that MCP is ready, initialize Anthropic service + // Now that MCP is ready, initialize LLM providers const tools = await listTools(); const anthropicService = new AnthropicService(tools, analytics); + const 
openaiService = new OpenAIService(tools, analytics); + + // Create and configure the LLM service provider + const llmServiceProvider = new LLMServiceProvider(); + llmServiceProvider.registerProvider(anthropicService); + llmServiceProvider.registerProvider(openaiService); // Create chat service - chatService = new ChatService(anthropicService, analytics); + chatService = new ChatService(llmServiceProvider, analytics); setupChatHandlers(chatService); - const settingsStatus = anthropicService.loadSettings(); - if (!settingsStatus.success) { - console.debug('Credentials missing, notifying renderer process:', settingsStatus.error); - mainWindow.webContents.send('credentials:required', settingsStatus.error); - } - console.debug('chat service initialized successfully'); - analytics.capture(getOrCreateUserId(), 'app_launched'); + console.debug("chat service initialized successfully"); + analytics.capture(getOrCreateUserId(), "app_launched"); } catch (err) { - console.error('Failed to initialize application:', err); + console.error("Failed to initialize application:", err); // Show an error dialog to the user dialog.showErrorBox( - 'Error Starting Application', - `Failed to initialize required services. Error: ${err.message}. The application may not work correctly.` + "Error Starting Application", + `Failed to initialize required services. Error: ${err.message}. The application may not work correctly.`, ); } }); diff --git a/src/lib/converters/anthropic.ts b/src/lib/converters/anthropic.ts index 83421b5..70fe2f6 100644 --- a/src/lib/converters/anthropic.ts +++ b/src/lib/converters/anthropic.ts @@ -1,4 +1,4 @@ -import { AssistantContentBlock, AssistantMessage, Message, newAssistantMessage, newUserMessage, UserContentBlock, UserMessage } from "@/types/message"; +import { AssistantContentBlock, AssistantMessage, Message, newAssistantMessage, ToolResultMessage, UserMessage } from "@/types/message"; import { ImageBlockParam, MessageParam } from "@anthropic-ai/sdk/resources"; export function convertToAnthropic(message: Message): MessageParam { @@ -9,16 +9,8 @@ export function convertToAnthropic(message: Message): MessageParam { case 'assistant': { return convertAssistantMessageToAnthropic(message); } - } -} - -export function convertFromAnthropic(message: MessageParam): Message { - switch (message.role) { - case 'user': { - return convertUserMessageFromAnthropic(message); - } - case 'assistant': { - return convertAssistantMessageFromAnthropic(message); + case 'tool': { + return convertToolMessageToAnthropic(message); } } } @@ -49,30 +41,6 @@ function convertUserMessageToAnthropic(message: UserMessage): MessageParam { }, } as ImageBlockParam; // because mimeType is enum } - - case 'tool_result': { - return { - type: 'tool_result', - tool_use_id: block.toolUseId, - is_error: block.isError, - content: block.content.map(c => { - if (c.type === 'text') { - return c; - } - - if (c.type === 'image') { - return { - type: 'image', - source: { - type: 'base64', - data: c.data, - media_type: c.mimeType, - }, - } as ImageBlockParam; // because mimeType is enum - } - }), - }; - } } }) }; @@ -122,64 +90,36 @@ function convertAssistantMessageToAnthropic(message: AssistantMessage): MessageP }; } -function convertUserMessageFromAnthropic(message: MessageParam): UserMessage { - if (typeof message.content === 'string') { - return newUserMessage(message.content); - } - - const content: UserContentBlock[] = message.content.map(block => { - switch (block.type) { - case 'text': { - return block; - } - - case 'image': { - 
return { - type: 'image', - data: block.source.data, - mimeType: block.source.media_type, - }; - } - - case 'tool_result': { - if (typeof block.content === 'string') { - return { - type: 'tool_result', - toolUseId: block.tool_use_id, - isError: block.is_error, - content: [{ type: 'text', text: block.content }], - }; - } - - return { - type: 'tool_result', - toolUseId: block.tool_use_id, - isError: block.is_error, - content: block.content.map(c => { - if (c.type === 'text') { - return c; - } - - if (c.type === 'image') { - return { - type: 'image', - data: c.source.data, - mimeType: c.source.media_type, - }; - } - }), - }; - } - - default: { - console.warn('Unexpected block type for user message from Anthropic:', block.type); +function convertToolMessageToAnthropic(message: ToolResultMessage): MessageParam { + return { + role: 'user', + content: message.content.map(block => { + return { + type: 'tool_result', + tool_use_id: block.toolUseId, + is_error: block.isError, + content: block.content.map(c => { + if (c.type === 'text') { + return c; + } + + if (c.type === 'image') { + return { + type: 'image', + source: { + type: 'base64', + data: c.data, + media_type: c.mimeType, + }, + } as ImageBlockParam; // because media_type is enum + } + }), } - } - }); - return newUserMessage(content); + }) + }; } -function convertAssistantMessageFromAnthropic(message: MessageParam): AssistantMessage { +export function convertAssistantMessageFromAnthropic(message: MessageParam): AssistantMessage { if (typeof message.content === 'string') { return newAssistantMessage(message.content); } diff --git a/src/lib/converters/openai.ts b/src/lib/converters/openai.ts new file mode 100644 index 0000000..42e606b --- /dev/null +++ b/src/lib/converters/openai.ts @@ -0,0 +1,153 @@ +import { Message, TextBlock, ToolUseBlock, UserMessage, AssistantMessage, ToolResultMessage, newAssistantMessage } from '@/types/message'; +import { ChatCompletionMessageParam, ChatCompletionContentPartText, ChatCompletionMessageToolCall, ChatCompletionAssistantMessageParam } from 'openai/resources/chat'; + +/** + * Converts our internal message format to OpenAI's format + */ +export function convertToOpenAI(message: Message): ChatCompletionMessageParam[] { + switch (message.role) { + case 'user': { + return [convertUserMessageToOpenAI(message)]; + } + case 'assistant': { + return [convertAssistantMessageToOpenAI(message)]; + } + case 'tool': { + return convertToolMessageToOpenAI(message); + } + } +} + +/** + * Converts a user message to OpenAI format + */ +function convertUserMessageToOpenAI(message: UserMessage): ChatCompletionMessageParam { + // Simple case: text-only message + if (typeof message.content === 'string') { + return { + role: 'user', + content: message.content, + }; + } + + return { + role: 'user', + content: message.content.map(block => { + switch (block.type) { + case 'text': + return { + type: 'text', + text: block.text + } + case 'image': + return { + type: 'image_url', + image_url: { + url: block.data, + }, + } + } + }) + } +} + +/** + * Converts an assistant message to OpenAI format + */ +function convertAssistantMessageToOpenAI(message: AssistantMessage): ChatCompletionMessageParam { + // Simple case: text-only message + if (typeof message.content === 'string') { + return { + role: 'assistant', + content: message.content, + }; + } + + const content: ChatCompletionContentPartText[] = message.content + .filter(block => block.type === 'text') + .map((block: TextBlock) => { + return { + type: 'text', + text: block.text + } + 
}); + + const toolCalls: ChatCompletionMessageToolCall[] = message.content + .filter(block => block.type === 'tool_use') + .map((block: ToolUseBlock) => { + return { + type: 'function', + id: block.id, + function: { + name: block.name, + arguments: JSON.stringify(block.args) + } + } + }); + + return { + role: 'assistant', + content: content, + tool_calls: toolCalls, + } +} + +function convertToolMessageToOpenAI(message: ToolResultMessage): ChatCompletionMessageParam[] { + return message.content.map(block => { + return { + role: 'tool', + tool_call_id: block.toolUseId, + content: block.content.map(c => { + if (c.type === 'text') { + return { + type: 'text', + text: c.text + }; + } + + if (c.type === 'image') { + return { + type: 'text', + text: 'Image Attachments are not supported for tools yet', + }; + } + }), + }; + }); +} + +/** + * Converts OpenAI's message format to our internal format + */ +export function convertAssistantMessageFromOpenAI(message: ChatCompletionAssistantMessageParam): Message { + const content: TextBlock[] = []; + + if (message.content) { + if (typeof message.content === 'string') { + content.push({ + type: 'text', + text: message.content, + }); + } + + if (Array.isArray(message.content)) { + const textBlocks: TextBlock[] = message.content + // Filter out refusal blocks + .filter((block): block is ChatCompletionContentPartText => block.type === 'text') + .map(block => ({ type: 'text', text: block.text })) + + content.push(...textBlocks); + } + } + + const toolCalls: ToolUseBlock[] = message.tool_calls?.map(call => { + return { + id: call.id, + type: 'tool_use', + name: call.function.name, + args: JSON.parse(call.function.arguments || '{}'), + }; + }) ?? []; + + return newAssistantMessage([...content, ...toolCalls]); +} diff --git a/src/lib/mcp/adapters.ts b/src/lib/mcp/adapters.ts index ca6b644..338def6 100644 --- a/src/lib/mcp/adapters.ts +++ b/src/lib/mcp/adapters.ts @@ -1,5 +1,6 @@ import type { Tool as AnthropicTool } from '@anthropic-ai/sdk/resources/messages'; import type { Tool as MCPTool } from '@modelcontextprotocol/sdk/types'; +import type { ChatCompletionTool } from 'openai/resources'; export function mcpToAnthropicTool(tool: MCPTool): AnthropicTool { const { name, description, inputSchema, ...rest } = tool; @@ -14,3 +15,19 @@ export function mcpToAnthropicTool(tool: MCPTool): AnthropicTool { ...rest }; } + +export function mcpToOpenAIFunction(tool: MCPTool): ChatCompletionTool { + const { name, description, inputSchema } = tool; + return { + type: 'function', + function: { + name, + description, + parameters: { + type: inputSchema.type, + properties: inputSchema.properties, + required: inputSchema.required || [], + }, + }, + }; +} diff --git a/src/lib/messageConverters.ts b/src/lib/messageConverters.ts index 6a59f89..331eec7 100644 --- a/src/lib/messageConverters.ts +++ b/src/lib/messageConverters.ts @@ -1,16 +1,18 @@ -import { AssistantMessage, Message, ToolUseBlock, UserMessage } from '@/types/message'; +import { AssistantMessage, Message, ToolResultMessage, ToolUseBlock, UserMessage } from '@/types/message'; import { UIMessage } from '@/types'; -export function convertClaudeMessages(messages: Message[]): UIMessage[] { +export function convertMessages(messages: Message[]): UIMessage[] { return messages.flatMap(message => { switch (message.role) { case 'user': return convertUserMessage(message); case 'assistant': return convertAssistantMessage(message); + case 'tool': + return convertToolMessage(message); } - }); + }) } function 
convertUserMessage(message: UserMessage): UIMessage[] { @@ -22,7 +24,7 @@ function convertUserMessage(message: UserMessage): UIMessage[] { }]; } - return message.content.flatMap((part, partIdx): UIMessage[] => { + return message.content.flatMap(part => { switch (part.type) { case 'text': return [{ @@ -36,26 +38,6 @@ function convertUserMessage(message: UserMessage): UIMessage[] { role: message.role, content: 'Image Attachments are not supported yet', }]; - case 'tool_result': - return part.content.map((block, blockIdx) => { - switch (block.type) { - case 'text': - const content = block.text; - return { - id: `${message.id}-${partIdx}-${blockIdx}`, - role: 'tool_result', - content: content, - isError: part.isError, - } - case 'image': - return { - id: `${message.id}-${partIdx}-${blockIdx}`, - role: 'tool_result', - content: 'Image Attachments are not supported yet', - isError: part.isError, - }; - } - }) } }); } @@ -94,6 +76,30 @@ function convertAssistantMessage(message: AssistantMessage): UIMessage[] { } }); } + +function convertToolMessage(message: ToolResultMessage): UIMessage[] { + return message.content.flatMap((block, idx) => { + return block.content.map((c, cIdx) => { + switch (c.type) { + case 'text': + return { + id: `${message.id}-${idx}-${cIdx}`, + role: 'tool_result', + content: c.text, + isError: block.isError, + }; + case 'image': + return { + id: `${message.id}-${idx}-${cIdx}`, + role: 'tool_result', + content: 'Image Attachments are not supported yet', + isError: block.isError, + }; + } + }); + }); +} + /** * This function converts a ToolUseBlock to a UIMessage. It is very tightly and implicitly * coupled with the tools that are currently supported, and will need to be updated if new diff --git a/src/lib/utils.ts b/src/lib/utils.ts index a0de663..3d92378 100644 --- a/src/lib/utils.ts +++ b/src/lib/utils.ts @@ -16,21 +16,21 @@ export const simpleHash = (str: string): number => { }; export const formatTimestamp = (timestamp: number) => { - const date = new Date(timestamp); - const today = new Date(); - const yesterday = new Date(today); - yesterday.setDate(yesterday.getDate() - 1); + const date = new Date(timestamp); + const today = new Date(); + const yesterday = new Date(today); + yesterday.setDate(yesterday.getDate() - 1); - const isToday = date.toDateString() === today.toDateString(); - const isYesterday = date.toDateString() === yesterday.toDateString(); + const isToday = date.toDateString() === today.toDateString(); + const isYesterday = date.toDateString() === yesterday.toDateString(); - const time = date.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' }); + const time = date.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' }); - if (isToday) { - return `Today at ${time}`; - } else if (isYesterday) { - return `Yesterday at ${time}`; - } else { - return `${date.toLocaleDateString()} ${time}`; - } - }; + if (isToday) { + return `Today at ${time}`; + } else if (isYesterday) { + return `Yesterday at ${time}`; + } else { + return `${date.toLocaleDateString()} ${time}`; + } +}; diff --git a/src/main/services/anthropic.ts b/src/main/services/anthropic.ts index eb884fa..906ae92 100644 --- a/src/main/services/anthropic.ts +++ b/src/main/services/anthropic.ts @@ -1,166 +1,222 @@ -import Anthropic from '@anthropic-ai/sdk'; -import { ImageBlockParam, TextBlockParam, ToolUseBlockParam } from '@anthropic-ai/sdk/resources'; -import { MessageParam, RedactedThinkingBlockParam, ThinkingBlockParam, Tool as AnthropicTool, ToolResultBlockParam } from 
'@anthropic-ai/sdk/resources/messages'; -import type { CallToolResult as ToolResult, Tool } from '@modelcontextprotocol/sdk/types'; -import { callTool } from '@/main/mcp'; -import { mcpToAnthropicTool } from '@/lib/mcp/adapters'; -import { Message } from '@/types/message'; -import { ProviderSettings } from '@/types/settings'; -import { getUserSettings } from '@/main/db'; -import { convertToAnthropic, convertFromAnthropic } from '@/lib/converters/anthropic'; -import { AnalyticsService } from './analytics'; -import { getOrCreateUserId } from '@/lib/account'; -import { isAbortError } from '../errors'; - -export class AnthropicService { +import { Anthropic } from "@anthropic-ai/sdk"; +import { TextBlockParam, ToolUseBlockParam } from "@anthropic-ai/sdk/resources"; +import { + RedactedThinkingBlockParam, + ThinkingBlockParam, + Tool as AnthropicTool, +} from "@anthropic-ai/sdk/resources/messages"; +import type { + CallToolResult as ToolResult, + Tool, +} from "@modelcontextprotocol/sdk/types"; +import { callTool } from "@/main/mcp"; +import { mcpToAnthropicTool } from "@/lib/mcp/adapters"; +import { + Message, + newToolResultMessage, + ToolResultBlock, +} from "@/types/message"; +import { ProviderSettings } from "@/types/settings"; +import { getUserSettings } from "@/main/db"; +import { + convertToAnthropic, + convertAssistantMessageFromAnthropic, +} from "@/lib/converters/anthropic"; +import { AnalyticsService } from "./analytics"; +import { getOrCreateUserId } from "@/lib/account"; +import { isAbortError } from "../errors"; +import { LLMService } from "./llm"; + +export class AnthropicService implements LLMService { private client: Anthropic; - private chatModel: string; - private titleModel: string; - private thinkingEnabled: boolean; private tools: AnthropicTool[]; private analytics: AnalyticsService; private maxTokens: number; private budgetTokens: number; + private supportedModels: string[] = [ + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307", + "claude-2.1", + "claude-2.0" + ]; - constructor(tools: Tool[], analytics: AnalyticsService, maxTokens: number = 4096, budgetTokens: number = 1024) { + constructor( + tools: Tool[], + analytics: AnalyticsService, + maxTokens = 4096, + budgetTokens = 1024, + ) { this.tools = tools.map(mcpToAnthropicTool); this.analytics = analytics; this.maxTokens = maxTokens; this.budgetTokens = budgetTokens; } + + getSupportedModels(): string[] { + return this.supportedModels; + } + + getProviderName(): string { + return "anthropic"; + } - // TODO: add abort signal - async *sendMessage(messages: Message[], systemPrompt?: string, abortSignal?: AbortSignal): AsyncGenerator { - this.analytics.capture(getOrCreateUserId(), 'anthropic.send_message.start', { - message_count: messages.length, - model: this.chatModel, - }); + async *sendMessage( + messages: Message[], + options: { + model: string; + thinking?: boolean; + systemPrompt?: string; + }, + abortSignal?: AbortSignal, + ): AsyncGenerator { + this.analytics.capture( + getOrCreateUserId(), + "anthropic.send_message.start", + { + message_count: messages.length, + model: options.model, + }, + ); try { - const loopMessages = messages.map(m => convertToAnthropic(m)); + const loopMessages = messages.map((m) => convertToAnthropic(m)); while (true) { - const response = await this.client.messages.create({ - model: this.chatModel, - max_tokens: this.maxTokens, - system: systemPrompt, - messages: loopMessages, - tools: this.tools, - thinking: this.thinkingEnabled ? 
{ type: 'enabled', budget_tokens: this.budgetTokens } : undefined, - }, { signal: abortSignal }); + const response = await this.client.messages.create( + { + model: options.model, + max_tokens: this.maxTokens, + system: options.systemPrompt, + messages: loopMessages, + tools: this.tools, + thinking: options.thinking + ? { type: "enabled", budget_tokens: this.budgetTokens } + : undefined, + }, + { signal: abortSignal }, + ); const responseMessage = { role: response.role, // always 'assistant' - content: response.content.map(block => { - if (block.type === 'text') { - return block as TextBlockParam + content: response.content.map((block) => { + if (block.type === "text") { + return block as TextBlockParam; } - if (block.type === 'tool_use') { + if (block.type === "tool_use") { return block as ToolUseBlockParam; } - if (block.type === 'thinking') { + if (block.type === "thinking") { return block as ThinkingBlockParam; } - if (block.type === 'redacted_thinking') { + if (block.type === "redacted_thinking") { return block as RedactedThinkingBlockParam; } }), }; - yield convertFromAnthropic(responseMessage); + yield convertAssistantMessageFromAnthropic(responseMessage); loopMessages.push(responseMessage); - const toolUseBlocks = responseMessage.content.filter(block => block.type === 'tool_use') as ToolUseBlockParam[]; + const toolUseBlocks = responseMessage.content.filter( + (block) => block.type === "tool_use", + ) as ToolUseBlockParam[]; if (toolUseBlocks.length === 0) { // no tool_use blocks, exit loop break; } + const toolResultBlocks: ToolResultBlock[] = []; + for (const block of toolUseBlocks) { - console.debug(`Calling tool ${block.name} with input ${block.input}`); + // TODO: make this async const result = await callTool(block.name, block.input); - console.debug('Got tool result:', result); - const toolResultMessage: MessageParam = { - role: 'user', - content: [this.makeToolResultBlock(result, block.id)], - }; + // const toolResultMessage: MessageParam = { + // role: 'user', + // content: [this.makeToolResultBlock(result, block.id)], + // }; + // + toolResultBlocks.push(this.makeToolResultBlock(result, block.id)); - yield convertFromAnthropic(toolResultMessage); - - loopMessages.push(toolResultMessage); + // yield convertFromAnthropic(toolResultMessage); + // + // loopMessages.push(toolResultMessage); } + + const toolResultMessage = newToolResultMessage(toolResultBlocks); + yield toolResultMessage; + loopMessages.push(convertToAnthropic(toolResultMessage)); } - this.analytics.capture(getOrCreateUserId(), 'anthropic.send_message.success', { - message_count: messages.length, - model: this.chatModel, - }); + + this.analytics.capture( + getOrCreateUserId(), + "anthropic.send_message.success", + { + message_count: messages.length, + model: options.model, + }, + ); } catch (error) { if (!isAbortError(error)) { - console.error('Error sending message to Claude:', error); - this.analytics.capture(getOrCreateUserId(), 'anthropic.send_message.error', { - message_count: messages.length, - model: this.chatModel, - }); + console.error("Error sending message to Claude:", error); + this.analytics.capture( + getOrCreateUserId(), + "anthropic.send_message.error", + { + message_count: messages.length, + model: options.model, + }, + ); } throw error; } } - async generateTitle(message: string) { + async generateTitle(message: string, model: string) { try { const response = await this.client.messages.create({ - model: this.titleModel, + model, max_tokens: 50, messages: [ { - role: 'user', + role: "user", 
content: `Generate a very brief and concise title(maximum 40 characters) for a conversation that starts with this message: "${message}".Respond with just the title, no quotes or extra text.`, }, ], }); - if (response.content[0].type === 'text') { + if (response.content[0].type === "text") { return response.content[0].text.trim(); } - throw new Error('Unexpected response type from Claude'); + throw new Error("Unexpected response type from Claude"); } catch (error) { - console.error('Error generating title:', error); - return 'New Chat'; // Fallback title + console.error("Error generating title:", error); + return "New Chat"; // Fallback title } } loadSettings(): { success: boolean; error?: string } { try { const settings = this.getProviderSettings(); - if (!settings.models.chat) { - throw new Error('Chat model is not set. Please select a model in Settings.'); - } - if (!settings.models.title) { - throw new Error('Title model is not set. Please select a model in Settings.'); - } - - this.thinkingEnabled = settings.models.thinking; - this.chatModel = settings.models.chat; - this.titleModel = settings.models.title; this.client = new Anthropic({ apiKey: settings.apiKey, maxRetries: 16, }); - this.analytics.capture(getOrCreateUserId(), 'anthropic_settings_updated', { - chat_model: this.chatModel, - title_model: this.titleModel, - }); + this.analytics.capture( + getOrCreateUserId(), + "anthropic_settings_reloaded", + ); return { success: true }; } catch (error) { - this.analytics.capture(getOrCreateUserId(), 'anthropic_settings_error', { + this.analytics.capture(getOrCreateUserId(), "anthropic_settings_error", { error_type: error.name, - error_message: error.message + error_message: error.message, }); return { success: false, error: error.message }; } @@ -168,47 +224,49 @@ export class AnthropicService { private makeToolResultBlock( result: ToolResult, - toolUseId: string - ): ToolResultBlockParam { + toolUseId: string, + ): ToolResultBlock { return { - type: 'tool_result', - tool_use_id: toolUseId, - is_error: result.isError, - content: result.content - .map(c => { - if (c.type === 'text') { - return c as TextBlockParam; - } - - if (c.type === 'image') { - return { - type: 'image', - source: { - type: 'base64', - data: c.data, - media_type: c.mimeType, - }, - } as ImageBlockParam; - } - - if (c.type === 'resource') { - throw new Error('Embedded resources are not supported'); - } - }) - } + type: "tool_result", + toolUseId: toolUseId, + isError: result.isError, + content: result.content.map((c) => { + if (c.type === "text") { + return { + type: "text", + text: c.text, + }; + } + + if (c.type === "image") { + return { + type: "image", + data: c.data, + mimeType: c.mimeType, + }; + } + + if (c.type === "resource") { + throw new Error("Embedded resources are not supported"); + } + }), + }; } private getProviderSettings(): ProviderSettings { const settings = getUserSettings(); if (!settings) { - throw new Error('No settings found'); + throw new Error("No settings found"); } - if (settings.model_provider !== 'anthropic') { - throw new Error(`Provider ${settings.model_provider} is not supported.Use Anthropic instead.`); + if (settings.model_provider !== "anthropic") { + throw new Error( + `Provider ${settings.model_provider} is not supported.Use Anthropic instead.`, + ); } - const providerSettings = settings.provider_settings[settings.model_provider]; + const providerSettings = + settings.provider_settings[settings.model_provider]; if (!providerSettings?.apiKey) { throw new Error(`No API key found for 
${settings.model_provider}`); } diff --git a/src/main/services/chat.ts b/src/main/services/chat.ts index 1f75da7..35341d5 100644 --- a/src/main/services/chat.ts +++ b/src/main/services/chat.ts @@ -1,40 +1,56 @@ -import { BrowserWindow, ipcMain } from 'electron'; -import { AnthropicService } from '@/main/services/anthropic'; -import { getConversationById, updateConversation } from '@/main/db'; -import { Conversation } from '@/types'; -import { isAbortError } from '@/main/errors'; -import { Message } from '@/types/message'; -import { AnalyticsService } from './analytics'; -import { getOrCreateUserId } from '@/lib/account'; +import { BrowserWindow, ipcMain } from "electron"; +import { LLMServiceProvider } from "@/main/services/llmprovider"; +import { getConversationById, updateConversation } from "@/main/db"; +import { Conversation } from "@/types"; +import { isAbortError } from "@/main/errors"; +import { Message } from "@/types/message"; +import { AnalyticsService } from "./analytics"; +import { getOrCreateUserId } from "@/lib/account"; export class ChatService { - private activeChats: Map; - private anthropicService: AnthropicService; + private activeChats: Map< + string, + { + abortController: AbortController; + } + >; + private llmServiceProvider: LLMServiceProvider; private analyticsService: AnalyticsService; - constructor(llmService: AnthropicService, analyticsService: AnalyticsService) { + constructor( + llmServiceProvider: LLMServiceProvider, + analyticsService: AnalyticsService, + ) { this.activeChats = new Map(); - this.anthropicService = llmService; + this.llmServiceProvider = llmServiceProvider; this.analyticsService = analyticsService; } - async startChat(conversationId: string, systemPrompt?: string): Promise { + async startChat( + conversationId: string, + options: { model: string; thinking?: boolean; systemPrompt?: string }, + ): Promise { if (this.activeChats.has(conversationId)) { - throw new Error('Chat is already running'); + throw new Error("Chat is already running"); } const userId = getOrCreateUserId(); - this.analyticsService.capture(userId, 'chat_started', { + this.analyticsService.capture(userId, "chat_started", { conversation_id: conversationId, - has_system_prompt: !!systemPrompt + has_system_prompt: !!options.systemPrompt, }); const conversation = getConversationById(conversationId); - if (!conversation) throw new Error('Conversation not found'); + if (!conversation) throw new Error("Conversation not found"); + const llmService = this.llmServiceProvider.getServiceForModel( + options.model, + ); + if (!llmService) { + throw new Error(`No provider available for model: ${options.model}`); + } + const abortController = new AbortController(); this.activeChats.set(conversationId, { abortController, @@ -43,14 +59,18 @@ export class ChatService { // Update status to active const updatedConversation = { ...conversation, - status: 'active' as const, - updatedAt: Date.now() + status: "active" as const, + updatedAt: Date.now(), }; updateConversation(updatedConversation); this.broadcastUpdate(updatedConversation); try { - for await (const message of this.anthropicService.sendMessage(conversation.messages, systemPrompt, abortController.signal)) { + for await (const message of llmService.sendMessage( + conversation.messages, + options, + abortController.signal, + )) { this.handleNewMessage(conversationId, message); } @@ -59,8 +79,8 @@ export class ChatService { if (finalConversation) { const inactiveConversation = { ...finalConversation, - status: 'inactive' as const, - 
updatedAt: Date.now() + status: "inactive" as const, + updatedAt: Date.now(), }; updateConversation(inactiveConversation); this.broadcastUpdate(inactiveConversation); @@ -71,20 +91,20 @@ export class ChatService { } if (!isAbortError(error)) { - this.analyticsService.capture(userId, 'chat_error', { + this.analyticsService.capture(userId, "chat_error", { conversation_id: conversationId, - error: error.toString() + error: error.toString(), }); console.error(`Chat ${conversationId} error:`, error); - throw error + throw error; } } finally { const conversation = getConversationById(conversationId); if (conversation) { const c = { ...conversation, - status: 'inactive' as const, - updatedAt: Date.now() + status: "inactive" as const, + updatedAt: Date.now(), }; updateConversation(c); this.broadcastUpdate(c); @@ -95,55 +115,76 @@ export class ChatService { async stopChat(conversationId: string): Promise { const chat = this.activeChats.get(conversationId); - if (!chat) throw new Error('Chat not found or not running'); + if (!chat) throw new Error("Chat not found or not running"); chat.abortController.abort(); // the cleanup should be handled by the finally block in sendMessage } - async generateTitle(conversationId: string, message: string): Promise { + async generateTitle( + conversationId: string, + message: string, + model: string, + ): Promise { const conversation = getConversationById(conversationId); - if (!conversation) throw new Error('Conversation not found'); + if (!conversation) throw new Error("Conversation not found"); + const llmService = this.llmServiceProvider.getServiceForModel(model); + if (!llmService) { + console.warn(`No provider available for model: ${model}, cannot generate title`); + return; + } + try { - const title = await this.anthropicService.generateTitle(message); + const title = await llmService.generateTitle(message, model); // fetching the conversation again in case it was updated by another process // TODO: use transactions const conversation = getConversationById(conversationId); + if (!conversation) return; + const updatedConversation = { ...conversation, title, - updatedAt: Date.now() + updatedAt: Date.now(), }; updateConversation(updatedConversation); this.broadcastUpdate(updatedConversation); } catch (error) { - console.error(`Error generating title for conversation ${conversationId}:`, error); + console.error( + `Error generating title for conversation ${conversationId}:`, + error, + ); // Don't throw - title generation is not critical } } reloadSettings(): void { - console.info('Reloading settings'); - this.anthropicService.loadSettings(); + console.info("Reloading settings"); + this.llmServiceProvider.reloadAllSettings(); } - private async handleNewMessage(conversationId: string, message: Message): Promise { + private async handleNewMessage( + conversationId: string, + message: Message, + ): Promise { const conversation = getConversationById(conversationId); if (!conversation) return; updateConversation({ ...conversation, messages: [...conversation.messages, message], - updatedAt: Date.now() + updatedAt: Date.now(), }); this.broadcastMessage(conversationId, message); } private broadcastMessage(conversationId: string, message: Message): void { - BrowserWindow.getAllWindows().forEach(window => { - window.webContents.send('chat:messageUpdate', { conversationId, message }); + BrowserWindow.getAllWindows().forEach((window) => { + window.webContents.send("chat:messageUpdate", { + conversationId, + message, + }); }); } @@ -151,59 +192,59 @@ export class 
ChatService { // but creating a separate broadcast method for each conversation field is not ideal either // what can we do? private broadcastUpdate(conversation: Conversation): void { - BrowserWindow.getAllWindows().forEach(window => { - window.webContents.send('chat:update', conversation); + BrowserWindow.getAllWindows().forEach((window) => { + window.webContents.send("chat:update", conversation); }); } async stopAllChats(): Promise { await Promise.all( - Array.from(this.activeChats.keys()).map(conversationId => - this.stopChat(conversationId).catch(error => { + Array.from(this.activeChats.keys()).map((conversationId) => + this.stopChat(conversationId).catch((error) => { console.error(`Error stopping chat ${conversationId}:`, error); - }) - ) + }), + ), ); } } export const setupChatHandlers = (chatManager: ChatService) => { - ipcMain.handle('chat:start', async (_, { - conversationId, - systemPrompt - }) => { + ipcMain.handle("chat:start", async (_, { conversationId, options }) => { try { - return await chatManager.startChat(conversationId, systemPrompt); + return await chatManager.startChat(conversationId, options); } catch (error) { - console.error('Error starting chat:', error); + console.error("Error starting chat:", error); throw error; } }); - ipcMain.handle('chat:stop', async (_, { conversationId }) => { + ipcMain.handle("chat:stop", async (_, { conversationId }) => { try { return await chatManager.stopChat(conversationId); } catch (error) { - console.error('Error stopping chat:', error); + console.error("Error stopping chat:", error); throw error; } }); - ipcMain.handle('chat:generateTitle', async (_, { conversationId, message }) => { - try { - return await chatManager.generateTitle(conversationId, message); - } catch (error) { - console.error('Error generating title:', error); - throw error; - } - }); + ipcMain.handle( + "chat:generateTitle", + async (_, { conversationId, message, model }) => { + try { + return await chatManager.generateTitle(conversationId, message, model); + } catch (error) { + console.error("Error generating title:", error); + throw error; + } + }, + ); - ipcMain.handle('chat:reloadSettings', async () => { + ipcMain.handle("chat:reloadSettings", async () => { try { return chatManager.reloadSettings(); } catch (error) { - console.error('Error reloading settings:', error); + console.error("Error reloading settings:", error); throw error; } }); -} +}; diff --git a/src/main/services/llm.ts b/src/main/services/llm.ts new file mode 100644 index 0000000..e5fe789 --- /dev/null +++ b/src/main/services/llm.ts @@ -0,0 +1,52 @@ +import { Message } from "@/types/message"; + +/** + * LLMService interface for language model providers. + * This interface abstracts the common operations needed for different LLM providers. + * New providers can implement this interface to ensure compatibility with the app. + */ +export interface LLMService { + /** + * Sends messages to the LLM provider and yields response messages in a streaming fashion. 
+ * @param messages The conversation history + * @param options Object containing model name and optional system prompt + * @param abortSignal Optional abort signal to cancel the request + * @returns An AsyncGenerator yielding the model's responses as they arrive + */ + sendMessage( + messages: Message[], + // TODO: extract common parameters from options + options: { + model: string, + thinking?: boolean, + systemPrompt?: string, + }, + abortSignal?: AbortSignal, + ): AsyncGenerator; + + /** + * Generates a title for a conversation based on the first message. + * @param message The first message of the conversation + * @param model The model to use for title generation + * @returns Promise resolving to the generated title + */ + generateTitle(message: string, model: string): Promise; + + /** + * Loads and applies provider-specific settings. + * @returns Object indicating success status and optional error message + */ + loadSettings(): { success: boolean; error?: string }; + + /** + * Gets the list of models supported by this provider. + * @returns Array of model identifiers supported by this provider + */ + getSupportedModels(): string[]; + + /** + * Gets the provider name. + * @returns String identifier for the provider (e.g., "anthropic", "openai") + */ + getProviderName(): string; +} diff --git a/src/main/services/llmprovider.ts b/src/main/services/llmprovider.ts new file mode 100644 index 0000000..4270822 --- /dev/null +++ b/src/main/services/llmprovider.ts @@ -0,0 +1,79 @@ +import { LLMService } from "@/main/services/llm"; + +/** + * Manages LLM providers and services, handling provider selection based on model + */ +export class LLMServiceProvider { + private providers: Map = new Map(); + private modelToProviderMap: Map = new Map(); + + constructor() { } + + /** + * Registers an LLM service provider for use + * @param provider The LLM service to register + */ + public registerProvider(provider: LLMService): void { + const providerName = provider.getProviderName(); + this.providers.set(providerName, provider); + + // Map each model to this provider + for (const model of provider.getSupportedModels()) { + this.modelToProviderMap.set(model, providerName); + } + + // Initialize settings for this provider + provider.loadSettings(); + } + + /** + * Gets the appropriate LLM service for the specified model + * @param model The model identifier + * @returns The LLM service that supports the specified model, or null if no provider is found + */ + public getServiceForModel(model: string): LLMService | null { + // Get the provider name for this model + const providerName = this.modelToProviderMap.get(model); + + if (!providerName) { + // No provider found + return null; + } + + return this.providers.get(providerName) || null; + } + + /** + * Gets all registered providers + * @returns Array of registered LLM services + */ + public getAllProviders(): LLMService[] { + return Array.from(this.providers.values()); + } + + /** + * Gets a provider by name + * @param providerName The name of the provider to get + * @returns The requested LLM service or undefined if not found + */ + public getProviderByName(providerName: string): LLMService | undefined { + return this.providers.get(providerName); + } + + /** + * Gets all supported models from all providers + * @returns Array of all supported model identifiers + */ + public getAllSupportedModels(): string[] { + return Array.from(this.modelToProviderMap.keys()); + } + + /** + * Reloads settings for all registered providers + */ + public reloadAllSettings(): 
void { + for (const provider of this.providers.values()) { + provider.loadSettings(); + } + } +} diff --git a/src/main/services/openai.ts b/src/main/services/openai.ts new file mode 100644 index 0000000..d0c5f27 --- /dev/null +++ b/src/main/services/openai.ts @@ -0,0 +1,232 @@ +import { OpenAI } from "openai"; +import { ChatCompletionTool } from "openai/resources"; +import { LLMService } from "./llm"; +import { + Message, + newToolResultMessage, + ToolResultBlock, +} from "@/types/message"; +import { ProviderSettings } from "@/types/settings"; +import { getUserSettings } from "@/main/db"; +import { callTool } from "@/main/mcp"; +import { AnalyticsService } from "./analytics"; +import { getOrCreateUserId } from "@/lib/account"; +import { isAbortError } from "../errors"; +import type { + CallToolResult as ToolResult, + Tool, +} from "@modelcontextprotocol/sdk/types"; +import { mcpToOpenAIFunction } from "@/lib/mcp/adapters"; +import { + convertToOpenAI, + convertAssistantMessageFromOpenAI, +} from "@/lib/converters/openai"; + +export class OpenAIService implements LLMService { + private client: OpenAI; + private tools: ChatCompletionTool[]; + private analytics: AnalyticsService; + private maxTokens: number; + private supportedModels: string[] = [ + "gpt-4o", + "gpt-4-turbo", + "gpt-4", + "gpt-3.5-turbo", + "gpt-4-vision-preview" + ]; + + constructor(tools: Tool[], analytics: AnalyticsService, maxTokens = 4096) { + this.tools = tools.map(mcpToOpenAIFunction); + this.analytics = analytics; + this.maxTokens = maxTokens; + } + + getSupportedModels(): string[] { + return this.supportedModels; + } + + getProviderName(): string { + return "openai"; + } + + async *sendMessage( + messages: Message[], + options: { + model: string; + thinking?: boolean, + systemPrompt?: string; + }, + abortSignal?: AbortSignal, + ): AsyncGenerator { + this.analytics.capture(getOrCreateUserId(), "openai.send_message.start", { + message_count: messages.length, + model: options.model, + }); + + try { + const loopMessages = messages.flatMap(convertToOpenAI); + + // Add system prompt if provided + if (options.systemPrompt) { + loopMessages.unshift({ + role: "system", + content: options.systemPrompt, + }); + } + + while (true) { + const response = await this.client.chat.completions.create( + { + model: options.model, + messages: loopMessages, + max_tokens: this.maxTokens, + tools: this.tools, + stream: false, + }, + { signal: abortSignal }, + ); + + const message = response.choices[0].message; + const responseMessage = convertAssistantMessageFromOpenAI(message); + + yield responseMessage; + + // If there's no tool call, we're done + if (!message.tool_calls) break; + + // Add the response to our messages array for the next iteration + loopMessages.push(message); + + // Handle tool calls + for (const toolCall of message.tool_calls) { + const name = toolCall.function.name; + const args = JSON.parse(toolCall.function.arguments); + + const result = await callTool(name, args); + const toolResultMessage = newToolResultMessage([ + this.makeToolResultBlock(result, toolCall.id), + ]); + yield toolResultMessage; + messages.push(toolResultMessage); + } + } + + this.analytics.capture( + getOrCreateUserId(), + "openai.send_message.success", + { + message_count: messages.length, + model: options.model, + }, + ); + } catch (error) { + if (!isAbortError(error)) { + console.error("Error sending message to OpenAI:", error); + this.analytics.capture( + getOrCreateUserId(), + "openai.send_message.error", + { + message_count: messages.length, + 
model: options.model, + }, + ); + } + throw error; + } + } + + async generateTitle(message: string, model: string): Promise { + try { + const response = await this.client.chat.completions.create({ + model, + messages: [ + { + role: "user", + content: `Generate a very brief and concise title (maximum 40 characters) for a conversation that starts with this message: "${message}". Respond with just the title, no quotes or extra text.`, + }, + ], + max_tokens: 50, + }); + + return response.choices[0].message.content?.trim() || "New Chat"; + } catch (error) { + console.error("Error generating title:", error); + return "New Chat"; // Fallback title + } + } + + loadSettings(): { success: boolean; error?: string } { + try { + const settings = this.getProviderSettings(); + this.client = new OpenAI({ + apiKey: settings.apiKey, + maxRetries: 16, + }); + + this.analytics.capture(getOrCreateUserId(), "openai_settings_reloaded"); + + return { success: true }; + } catch (error) { + this.analytics.capture(getOrCreateUserId(), "openai_settings_error", { + error_type: error.name, + error_message: error.message, + }); + return { success: false, error: error.message }; + } + } + + private makeToolResultBlock( + result: ToolResult, + toolUseId: string, + ): ToolResultBlock { + return { + type: "tool_result", + toolUseId: toolUseId, + isError: result.isError, + content: result.content.map((c) => { + if (c.type === "text") { + return { + type: "text", + text: c.text, + }; + } + + if (c.type === "image") { + return { + type: "image", + data: c.data, + mimeType: c.mimeType, + }; + } + + if (c.type === "resource") { + throw new Error("Embedded resources are not supported"); + } + }), + }; + } + + private getProviderSettings(): ProviderSettings { + const settings = getUserSettings(); + if (!settings) { + throw new Error("No settings found"); + } + + if ( + settings.model_provider !== "openai" && + settings.model_provider !== "anthropic" + ) { + throw new Error( + `Provider ${settings.model_provider} is not supported. 
Use OpenAI or Anthropic instead.`, + ); + } + + const providerSettings = + settings.provider_settings[settings.model_provider]; + if (!providerSettings?.apiKey) { + throw new Error(`No API key found for ${settings.model_provider}`); + } + + return providerSettings; + } +} diff --git a/src/types/message.ts b/src/types/message.ts index 5117175..8cbeced 100644 --- a/src/types/message.ts +++ b/src/types/message.ts @@ -1,7 +1,7 @@ import { v4 as uuidv4 } from 'uuid'; -export type Message = UserMessage | AssistantMessage; +export type Message = UserMessage | AssistantMessage | ToolResultMessage; export type UserMessage = { id: string; @@ -31,7 +31,21 @@ export const newAssistantMessage = (content: string | AssistantContentBlock[]): }; }; -export type UserContentBlock = TextBlock | ImageBlock | ToolResultBlock; +export type ToolResultMessage = { + id: string; + role: 'tool'; + content: ToolResultBlock[]; +} + +export const newToolResultMessage = (content: ToolResultBlock[]): ToolResultMessage => { + return { + id: uuidv4(), + role: 'tool', + content + }; +}; + +export type UserContentBlock = TextBlock | ImageBlock; export type AssistantContentBlock = TextBlock | ToolUseBlock | ThinkingBlock | RedactedThinkingBlock; export type ToolResultContentBlock = TextBlock | ImageBlock; diff --git a/src/types/settings.ts b/src/types/settings.ts index e7402ce..4980bb5 100644 --- a/src/types/settings.ts +++ b/src/types/settings.ts @@ -1,58 +1,51 @@ -export interface ModelSettings { - chat: string; - title: string; - thinking: boolean; - [key: string]: string | boolean; // Allow string indexing -} - export interface ProviderSettings { - apiKey: string; - models: ModelSettings; - [key: string]: string | ModelSettings; // Allow string indexing + apiKey: string; } -export type Provider = 'anthropic'; //later add openai and google +export type Provider = "anthropic" | "openai"; export interface UserSettings { - model_provider: Provider; - provider_settings: { - [key: string]: ProviderSettings; - }; - created_at: number; - updated_at: number; + model_provider: Provider; + provider_settings: { + [key: string]: ProviderSettings; + }; + created_at: number; + updated_at: number; } export const defaultProviderSettings: ProviderSettings = { - apiKey: "", - models: { - chat: '', - title: '', - thinking: false - } + apiKey: "", }; -export const newUserSettings = (provider: Provider, settings: ProviderSettings): UserSettings => { - return { - model_provider: provider, - provider_settings: { - [provider]: settings - }, - created_at: Date.now(), - updated_at: Date.now() - }; +export const newUserSettings = ( + provider: Provider, + settings: ProviderSettings, +): UserSettings => { + return { + model_provider: provider, + provider_settings: { + [provider]: settings, + }, + created_at: Date.now(), + updated_at: Date.now(), + }; }; -export const getCurrentProviderSettings = (settings: UserSettings): ProviderSettings | undefined => { - if (!settings.provider_settings[settings.model_provider]) { - return undefined; - } - return settings.provider_settings[settings.model_provider]; +export const getCurrentProviderSettings = ( + settings: UserSettings, +): ProviderSettings | undefined => { + if (!settings.provider_settings[settings.model_provider]) { + return undefined; + } + return settings.provider_settings[settings.model_provider]; }; -export const getCurrentProviderApiKey = (settings: UserSettings): string | undefined => { - const providerSettings = getCurrentProviderSettings(settings); - if (!providerSettings) { - return 
undefined; - } - return providerSettings.apiKey; +export const getCurrentProviderApiKey = ( + settings: UserSettings, +): string | undefined => { + const providerSettings = getCurrentProviderSettings(settings); + if (!providerSettings) { + return undefined; + } + return providerSettings.apiKey; }; From f98b24befa96383e40e9e60173248fedd2051e21 Mon Sep 17 00:00:00 2001 From: Art Moskvin Date: Thu, 20 Mar 2025 11:52:08 +0100 Subject: [PATCH 2/6] configure eslint --- .eslintrc.json | 12 +- package-lock.json | 436 ++++++++++++++++++++++++++++++++++++++++++---- package.json | 2 + 3 files changed, 418 insertions(+), 32 deletions(-) diff --git a/.eslintrc.json b/.eslintrc.json index 2d7aa60..cee92b6 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -12,5 +12,15 @@ "plugin:import/electron", "plugin:import/typescript" ], - "parser": "@typescript-eslint/parser" + "parser": "@typescript-eslint/parser", + "settings": { + "import/resolver": { + "typescript": { + "alwaysTryTypes": true + }, + "node": { + "extensions": [".js", ".jsx", ".ts", ".tsx"] + } + } + } } diff --git a/package-lock.json b/package-lock.json index 088d57e..17da096 100644 --- a/package-lock.json +++ b/package-lock.json @@ -35,6 +35,7 @@ "framer-motion": "^12.4.7", "highlight.js": "^11.11.0", "lucide-react": "^0.468.0", + "openai": "^4.87.3", "posthog-js": "^1.116.6", "posthog-node": "^4.6.0", "react": "^19.0.0", @@ -71,6 +72,7 @@ "css-loader": "^6.11.0", "electron": "33.2.1", "eslint": "^8.57.1", + "eslint-import-resolver-typescript": "^4.2.2", "eslint-plugin-import": "^2.31.0", "fork-ts-checker-webpack-plugin": "^7.3.0", "node-loader": "^2.1.0", @@ -1002,6 +1004,40 @@ "node": ">=14.14" } }, + "node_modules/@emnapi/core": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.3.1.tgz", + "integrity": "sha512-pVGjBIt1Y6gg3EJN8jTcfpP/+uuRksIo055oE/OBkDNcjZqVbfkWCksG1Jp4yZnj3iKWyWX8fdG/j6UDYPbFog==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.0.1", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.3.1.tgz", + "integrity": "sha512-kEBmG8KyqtxJZv+ygbEim+KCGtIq1fC22Ms3S4ziXmYKm8uyoLX0MHONVKwp+9opg390VaKRNt4a7A9NwmpNhw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.0.1.tgz", + "integrity": "sha512-iIBu7mwkq4UQGeMEM8bLwNK962nXdhodeScX4slfQnRhEMMzvYivHhutCIk8uojvmASXXPC2WNEjwxFWk72Oqw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@emotion/is-prop-valid": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.3.1.tgz", @@ -2072,6 +2108,19 @@ "node": ">= 0.6" } }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.7.tgz", + "integrity": "sha512-5yximcFK5FNompXfJFoWanu5l8v1hNGqNHh9du1xETp9HWk/B/PzvchX55WYOPaIeNglG8++68AAiauBAtbnzw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.3.1", + "@emnapi/runtime": "^1.3.1", + "@tybys/wasm-util": "^0.9.0" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": 
"https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -3181,6 +3230,17 @@ "devOptional": true, "license": "MIT" }, + "node_modules/@tybys/wasm-util": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.9.0.tgz", + "integrity": "sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@types/better-sqlite3": { "version": "7.6.12", "resolved": "https://registry.npmjs.org/@types/better-sqlite3/-/better-sqlite3-7.6.12.tgz", @@ -3799,6 +3859,163 @@ "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", "license": "ISC" }, + "node_modules/@unrs/rspack-resolver-binding-darwin-arm64": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-darwin-arm64/-/rspack-resolver-binding-darwin-arm64-1.2.2.tgz", + "integrity": "sha512-i7z0B+C0P8Q63O/5PXJAzeFtA1ttY3OR2VSJgGv18S+PFNwD98xHgAgPOT1H5HIV6jlQP8Avzbp09qxJUdpPNw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-darwin-x64": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-darwin-x64/-/rspack-resolver-binding-darwin-x64-1.2.2.tgz", + "integrity": "sha512-YEdFzPjIbDUCfmehC6eS+AdJYtFWY35YYgWUnqqTM2oe/N58GhNy5yRllxYhxwJ9GcfHoNc6Ubze1yjkNv+9Qg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-freebsd-x64": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-freebsd-x64/-/rspack-resolver-binding-freebsd-x64-1.2.2.tgz", + "integrity": "sha512-TU4ntNXDgPN2giQyyzSnGWf/dVCem5lvwxg0XYvsvz35h5H19WrhTmHgbrULMuypCB3aHe1enYUC9rPLDw45mA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-linux-arm-gnueabihf": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-linux-arm-gnueabihf/-/rspack-resolver-binding-linux-arm-gnueabihf-1.2.2.tgz", + "integrity": "sha512-ik3w4/rU6RujBvNWiDnKdXi1smBhqxEDhccNi/j2rHaMjm0Fk49KkJ6XKsoUnD2kZ5xaMJf9JjailW/okfUPIw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-linux-arm64-gnu": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-linux-arm64-gnu/-/rspack-resolver-binding-linux-arm64-gnu-1.2.2.tgz", + "integrity": "sha512-fp4Azi8kHz6TX8SFmKfyScZrMLfp++uRm2srpqRjsRZIIBzH74NtSkdEUHImR4G7f7XJ+sVZjCc6KDDK04YEpQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-linux-arm64-musl": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-linux-arm64-musl/-/rspack-resolver-binding-linux-arm64-musl-1.2.2.tgz", + "integrity": "sha512-gMiG3DCFioJxdGBzhlL86KcFgt9HGz0iDhw0YVYPsShItpN5pqIkNrI+L/Q/0gfDiGrfcE0X3VANSYIPmqEAlQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@unrs/rspack-resolver-binding-linux-x64-gnu": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-linux-x64-gnu/-/rspack-resolver-binding-linux-x64-gnu-1.2.2.tgz", + "integrity": "sha512-n/4n2CxaUF9tcaJxEaZm+lqvaw2gflfWQ1R9I7WQgYkKEKbRKbpG/R3hopYdUmLSRI4xaW1Cy0Bz40eS2Yi4Sw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-linux-x64-musl": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-linux-x64-musl/-/rspack-resolver-binding-linux-x64-musl-1.2.2.tgz", + "integrity": "sha512-cHyhAr6rlYYbon1L2Ag449YCj3p6XMfcYTP0AQX+KkQo025d1y/VFtPWvjMhuEsE2lLvtHm7GdJozj6BOMtzVg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-wasm32-wasi": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-wasm32-wasi/-/rspack-resolver-binding-wasm32-wasi-1.2.2.tgz", + "integrity": "sha512-eogDKuICghDLGc32FtP+WniG38IB1RcGOGz0G3z8406dUdjJvxfHGuGs/dSlM9YEp/v0lEqhJ4mBu6X2nL9pog==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^0.2.7" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@unrs/rspack-resolver-binding-win32-arm64-msvc": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-win32-arm64-msvc/-/rspack-resolver-binding-win32-arm64-msvc-1.2.2.tgz", + "integrity": "sha512-7sWRJumhpXSi2lccX8aQpfFXHsSVASdWndLv8AmD8nDRA/5PBi8IplQVZNx2mYRx6+Bp91Z00kuVqpXO9NfCTg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/rspack-resolver-binding-win32-x64-msvc": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@unrs/rspack-resolver-binding-win32-x64-msvc/-/rspack-resolver-binding-win32-x64-msvc-1.2.2.tgz", + "integrity": "sha512-hewo/UMGP1a7O6FG/ThcPzSJdm/WwrYDNkdGgWl6M18H6K6MSitklomWpT9MUtT5KGj++QJb06va/14QBC4pvw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@vercel/webpack-asset-relocator-loader": { "version": "1.7.3", "resolved": "https://registry.npmjs.org/@vercel/webpack-asset-relocator-loader/-/webpack-asset-relocator-loader-1.7.3.tgz", @@ -4506,9 +4723,9 @@ } }, "node_modules/axios": { - "version": "1.7.9", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz", - "integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.8.3.tgz", + "integrity": "sha512-iP4DebzoNlP/YN2dpwCgb8zoCmhtkajzS48JvwmkSkXvPI3DHc7m+XYL5tGnSlJtR6nImXZmdCuN5aP8dh1d8A==", "license": "MIT", "dependencies": { "follow-redirects": "^1.15.6", @@ -7335,6 +7552,43 @@ "ms": "^2.1.1" } }, + "node_modules/eslint-import-resolver-typescript": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-4.2.2.tgz", + "integrity": "sha512-Rg1YEsb9UKLQ8BOv27cS3TZ6LhEAKQVgVOXArcE/sQrlnX8+FjmJRSC29ij1qrn+eurFuMsCFUcs7/+27T0vqQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "debug": "^4.4.0", + "get-tsconfig": "^4.10.0", + "rspack-resolver": "^1.2.2", + 
"stable-hash": "^0.0.5", + "tinyglobby": "^0.2.12" + }, + "engines": { + "node": "^16.17.0 || >=18.6.0" + }, + "funding": { + "url": "https://opencollective.com/unts/projects/eslint-import-resolver-ts" + }, + "peerDependencies": { + "eslint": "*", + "eslint-plugin-import": "*", + "eslint-plugin-import-x": "*", + "is-bun-module": "*" + }, + "peerDependenciesMeta": { + "eslint-plugin-import": { + "optional": true + }, + "eslint-plugin-import-x": { + "optional": true + }, + "is-bun-module": { + "optional": true + } + } + }, "node_modules/eslint-module-utils": { "version": "2.12.0", "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.0.tgz", @@ -7832,6 +8086,28 @@ "express": "^4.0.0 || ^5.0.0-alpha.1" } }, + "node_modules/express-ws/node_modules/ws": { + "version": "7.5.10", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz", + "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/express/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -12162,6 +12438,51 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/openai": { + "version": "4.87.3", + "resolved": "https://registry.npmjs.org/openai/-/openai-4.87.3.tgz", + "integrity": "sha512-d2D54fzMuBYTxMW8wcNmhT1rYKcTfMJ8t+4KjH2KtvYenygITiGBgHoIrzHwnDQWW+C5oCA+ikIR2jgPCFqcKQ==", + "license": "Apache-2.0", + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7" + }, + "bin": { + "openai": "bin/cli" + }, + "peerDependencies": { + "ws": "^8.18.0", + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "ws": { + "optional": true + }, + "zod": { + "optional": true + } + } + }, + "node_modules/openai/node_modules/@types/node": { + "version": "18.19.80", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.80.tgz", + "integrity": "sha512-kEWeMwMeIvxYkeg1gTc01awpwLbfMRZXdIhwRcakd/KlK53jmRC26LqcbIt7fnAQTu5GzlnWmzA3H6+l1u6xxQ==", + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/openai/node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "license": "MIT" + }, "node_modules/optionator": { "version": "0.9.4", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", @@ -13964,6 +14285,29 @@ "postcss": "^8.4.38" } }, + "node_modules/rspack-resolver": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/rspack-resolver/-/rspack-resolver-1.2.2.tgz", + "integrity": "sha512-Fwc19jMBA3g+fxDJH2B4WxwZjE0VaaOL7OX/A4Wn5Zv7bOD/vyPZhzXfaO73Xc2GAlfi96g5fGUa378WbIGfFw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/JounQin" + }, + "optionalDependencies": { + "@unrs/rspack-resolver-binding-darwin-arm64": "1.2.2", + "@unrs/rspack-resolver-binding-darwin-x64": "1.2.2", + 
"@unrs/rspack-resolver-binding-freebsd-x64": "1.2.2", + "@unrs/rspack-resolver-binding-linux-arm-gnueabihf": "1.2.2", + "@unrs/rspack-resolver-binding-linux-arm64-gnu": "1.2.2", + "@unrs/rspack-resolver-binding-linux-arm64-musl": "1.2.2", + "@unrs/rspack-resolver-binding-linux-x64-gnu": "1.2.2", + "@unrs/rspack-resolver-binding-linux-x64-musl": "1.2.2", + "@unrs/rspack-resolver-binding-wasm32-wasi": "1.2.2", + "@unrs/rspack-resolver-binding-win32-arm64-msvc": "1.2.2", + "@unrs/rspack-resolver-binding-win32-x64-msvc": "1.2.2" + } + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -14819,6 +15163,13 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, + "node_modules/stable-hash": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/stable-hash/-/stable-hash-0.0.5.tgz", + "integrity": "sha512-+L3ccpzibovGXFK+Ap/f8LOS0ahMrHTf3xu7mMLSpEGU0EO9ucaysSylKo9eRDFNhWve/y275iPmIZ4z39a9iA==", + "dev": true, + "license": "MIT" + }, "node_modules/statuses": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", @@ -15520,6 +15871,51 @@ "license": "MIT", "optional": true }, + "node_modules/tinyglobby": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.12.tgz", + "integrity": "sha512-qkf4trmKSIiMTs/E63cxH+ojC2unam7rJ0WrauAzpT3ECNTxGRMlaXxVbfxMUC/w0LaYk6jQ4y/nGR9uBO3tww==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.3", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.4.3", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.3.tgz", + "integrity": "sha512-PMXmW2y1hDDfTSRc9gaXIuCCRpuoz3Kaz8cUelp3smouvfT632ozg2vrT6lJsHKKOF59YLbOGfAWGUcKEfRMQw==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/tmp": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", @@ -16510,28 +16906,6 @@ "node": ">= 10" } }, - "node_modules/webpack-dev-server/node_modules/ws": { - "version": "8.18.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.1.tgz", - "integrity": "sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/webpack-merge": { "version": "5.10.0", "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.10.0.tgz", @@ -16822,17 +17196,17 @@ "license": "ISC" }, "node_modules/ws": { - "version": "7.5.10", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz", - "integrity": 
"sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==", - "dev": true, + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.1.tgz", + "integrity": "sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w==", + "devOptional": true, "license": "MIT", "engines": { - "node": ">=8.3.0" + "node": ">=10.0.0" }, "peerDependencies": { "bufferutil": "^4.0.1", - "utf-8-validate": "^5.0.2" + "utf-8-validate": ">=5.0.2" }, "peerDependenciesMeta": { "bufferutil": { diff --git a/package.json b/package.json index 387605e..a77884a 100644 --- a/package.json +++ b/package.json @@ -36,6 +36,7 @@ "css-loader": "^6.11.0", "electron": "33.2.1", "eslint": "^8.57.1", + "eslint-import-resolver-typescript": "^4.2.2", "eslint-plugin-import": "^2.31.0", "fork-ts-checker-webpack-plugin": "^7.3.0", "node-loader": "^2.1.0", @@ -78,6 +79,7 @@ "framer-motion": "^12.4.7", "highlight.js": "^11.11.0", "lucide-react": "^0.468.0", + "openai": "^4.87.3", "posthog-js": "^1.116.6", "posthog-node": "^4.6.0", "react": "^19.0.0", From 8d236a57e606fd656084a18d70e338f3215f71ac Mon Sep 17 00:00:00 2001 From: Art Moskvin Date: Thu, 20 Mar 2025 14:57:49 +0100 Subject: [PATCH 3/6] remove models from settings --- src/components/SettingsDialog.tsx | 132 +++++++++++++++++---------- src/main/db.ts | 10 +- src/main/migrations/index.ts | 3 +- src/main/migrations/migration_002.ts | 33 +++++++ src/main/services/anthropic.ts | 11 +-- src/main/services/openai.ts | 14 +-- src/types/settings.ts | 20 +--- 7 files changed, 127 insertions(+), 96 deletions(-) create mode 100644 src/main/migrations/migration_002.ts diff --git a/src/components/SettingsDialog.tsx b/src/components/SettingsDialog.tsx index 401eb38..c04ffd8 100644 --- a/src/components/SettingsDialog.tsx +++ b/src/components/SettingsDialog.tsx @@ -1,4 +1,5 @@ import * as React from "react" +import { Check, Loader2 } from "lucide-react" import { Button } from "@/components/ui/button" import { Dialog, @@ -12,14 +13,12 @@ import { import { Input } from "@/components/ui/input" import { Label } from "@/components/ui/label" import { - Select, - SelectContent, - SelectItem, - SelectTrigger, - SelectValue, -} from "@/components/ui/select" -import { Switch } from "@/components/ui/switch" -import { defaultProviderSettings, getCurrentProviderApiKey, newUserSettings, Provider, ProviderSettings, UserSettings } from "@/types/settings" + Tabs, + TabsContent, + TabsList, + TabsTrigger +} from "@/components/ui/tabs" +import { defaultProviderSettings, Provider, ProviderSettings, UserSettings, newUserSettings } from "@/types/settings" import { useToast } from "@/components/ui/use-toast" import { cn } from "@/lib/utils" @@ -37,20 +36,36 @@ interface ProviderSettingsProps { function AnthropicSettings({ settings, onChange }: ProviderSettingsProps) { return ( -
-
-
- onChange({ ...settings, apiKey: e.target.value })}
- placeholder="sk-ant-..."
- className="col-span-3"
- />
-
+
+
+ onChange({ ...settings, apiKey: e.target.value })}
+ />
+

+ Enter your Anthropic API key. You can find this in your Anthropic console.
+

+
+ ); +} + +function OpenAISettings({ settings, onChange }: ProviderSettingsProps) { + return ( +
+
+ onChange({ ...settings, apiKey: e.target.value })}
+ />

+ Enter your OpenAI API key. You can find this in your OpenAI dashboard.
+

); } @@ -65,6 +80,7 @@ export function SettingsDialog({ const [settings, setSettings] = React.useState(null); const [isSaving, setIsSaving] = React.useState(false); const [error, setError] = React.useState(null); + const [activeTab, setActiveTab] = React.useState("anthropic"); // Load settings when dialog opens React.useEffect(() => { @@ -75,17 +91,18 @@ export function SettingsDialog({ } }, [open]); - const handleSettingChange = (provider: Provider, settings: ProviderSettings) => { + const handleSettingChange = (provider: Provider, providerSettings: ProviderSettings) => { setSettings(s => { if (!s) { - return newUserSettings(provider, settings); + return newUserSettings(provider, providerSettings); } return { ...s, provider_settings: { ...s.provider_settings, - [provider]: settings - } + [provider]: providerSettings + }, + updated_at: Date.now() }; }); }; @@ -98,18 +115,12 @@ export function SettingsDialog({ setError(null); setIsSaving(true); - // Validate API key for selected provider - const apiKey = getCurrentProviderApiKey(settings); - if (!apiKey?.trim()) { - throw new Error(`Please enter an API key for ${settings.model_provider}`); - } - await window.settings.update(settings); await window.chat.reloadSettings(); toast({ title: "Settings saved", - description: "Your settings have been updated successfully.", + description: "Your API keys have been updated successfully.", duration: 3000, variant: "success" }) @@ -140,24 +151,37 @@ export function SettingsDialog({ - Settings + Provider Settings - Configure your AI model provider and preferences. + Configure your AI provider API keys. These keys are stored securely and used to make requests to the + respective providers.
-
-
-
-
- Anthropic
-
-
- handleSettingChange("anthropic", settings)}
- />
-
+ setActiveTab(value as Provider)} + className="w-full" + > + + OpenAI + Anthropic + + + handleSettingChange("openai", providerSettings)} + /> + + + handleSettingChange("anthropic", providerSettings)} + /> + + + {(error) && (
{error} @@ -168,11 +192,21 @@ export function SettingsDialog({ type="submit" disabled={isSaving} > - {isSaving ? "Saving..." : "Save"} + {isSaving ? ( + <> + + Saving... + + ) : ( + <> + + Save Changes + + )} ); -} +} \ No newline at end of file diff --git a/src/main/db.ts b/src/main/db.ts index e50a48b..9ef104c 100644 --- a/src/main/db.ts +++ b/src/main/db.ts @@ -127,7 +127,6 @@ export const deleteProjectConversations = (projectId: string): void => { interface UserSettingsRow { id: number; - model_provider: string; provider_settings: string; created_at: number; updated_at: number; @@ -140,7 +139,6 @@ export const getUserSettings = (): UserSettings | null => { if (!row) return null; return { - model_provider: row.model_provider as UserSettings['model_provider'], provider_settings: JSON.parse(row.provider_settings), created_at: row.created_at, updated_at: row.updated_at @@ -154,13 +152,11 @@ export const updateUserSettings = async (settings: Omit { return { - model_provider: provider, provider_settings: { [provider]: settings, }, @@ -31,21 +29,9 @@ export const newUserSettings = ( }; }; -export const getCurrentProviderSettings = ( - settings: UserSettings, -): ProviderSettings | undefined => { - if (!settings.provider_settings[settings.model_provider]) { - return undefined; - } - return settings.provider_settings[settings.model_provider]; -}; - -export const getCurrentProviderApiKey = ( +export const getProviderApiKey = ( settings: UserSettings, + provider: Provider ): string | undefined => { - const providerSettings = getCurrentProviderSettings(settings); - if (!providerSettings) { - return undefined; - } - return providerSettings.apiKey; + return settings.provider_settings[provider]?.apiKey; }; From a7a45ba9037459a9433e4544f815260629c7c5b5 Mon Sep 17 00:00:00 2001 From: Art Moskvin Date: Thu, 20 Mar 2025 16:08:11 +0100 Subject: [PATCH 4/6] add model and thinking toggle in chat input --- src/components/ChatArea.tsx | 10 +++--- src/components/ChatInput.tsx | 63 +++++++++++++++++++++++++++++++----- src/index.css | 2 +- 3 files changed, 61 insertions(+), 14 deletions(-) diff --git a/src/components/ChatArea.tsx b/src/components/ChatArea.tsx index 38f0a1f..6c61899 100644 --- a/src/components/ChatArea.tsx +++ b/src/components/ChatArea.tsx @@ -18,8 +18,8 @@ import { interface ChatAreaProps { conversation: Conversation | null; - onNewConversation: (message: string) => Promise; - onNewMessage: (conversationId: string, message: string) => Promise; + onNewConversation: (message: string, model?: string, thinking?: boolean) => Promise; + onNewMessage: (conversationId: string, message: string, model?: string, thinking?: boolean) => Promise; onStop: () => Promise; isLoading: boolean; project: Project | null; @@ -77,15 +77,15 @@ export const ChatArea = ({ await onStop(); }; - const handleMessage = async (input: string) => { + const handleMessage = async (input: string, model?: string, thinking?: boolean) => { if (!input.trim()) return; if (!conversation) { - await onNewConversation(input); + await onNewConversation(input, model, thinking); return; } - await onNewMessage(conversation.id, input); + await onNewMessage(conversation.id, input, model, thinking); }; return ( diff --git a/src/components/ChatInput.tsx b/src/components/ChatInput.tsx index 7e548c9..cf779c0 100644 --- a/src/components/ChatInput.tsx +++ b/src/components/ChatInput.tsx @@ -1,11 +1,14 @@ import React, { useState, useRef, useEffect, KeyboardEvent, ChangeEvent, FormEvent } from 'react'; -import { Send, Square } from 'lucide-react'; +import 
{ Send, Square, Brain } from 'lucide-react'; import { Button } from '@/components/ui/button' import { Textarea } from '@/components/ui/textarea' +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select" +import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip" +import { cn } from "@/lib/utils" interface ChatInputProps { - onSendMessage?: (message: string) => void; + onSendMessage?: (message: string, model?: string, thinking?: boolean) => void; onStop?: () => void; placeholder?: string; maxHeight?: number; @@ -26,6 +29,8 @@ export const ChatInput: React.FC = ({ isStopping = false, }) => { const [message, setMessage] = useState(''); + const [model, setModel] = useState('claude'); + const [thinking, setThinking] = useState(false); const textareaRef = useRef(null); // Auto-resize textarea as content grows @@ -40,7 +45,7 @@ export const ChatInput: React.FC = ({ const handleSubmit = (e: FormEvent) => { e.preventDefault(); if (message.trim() && !disabled) { - onSendMessage(message.trim()); + onSendMessage(message.trim(), model, thinking); setMessage(''); } }; @@ -49,7 +54,7 @@ export const ChatInput: React.FC = ({ if (e.key === 'Enter' && !e.shiftKey) { e.preventDefault(); if (message.trim() && !disabled) { - onSendMessage(message.trim()); + onSendMessage(message.trim(), model, thinking); setMessage(''); } } @@ -64,17 +69,59 @@ export const ChatInput: React.FC = ({ onSubmit={handleSubmit} className={`w-full max-w-2xl mx-auto ${className}`.trim()} > -
+