From cc0339f939733a200f73325b4df1d01c976572f4 Mon Sep 17 00:00:00 2001 From: Chris Scott <99081550+chriswritescode-dev@users.noreply.github.com> Date: Mon, 23 Feb 2026 15:07:28 -0500 Subject: [PATCH 1/2] Add memory plugin with semantic search and session planning --- .gitguardian.yaml | 2 - Dockerfile | 14 + README.md | 1 + backend/src/db/migration-runner.ts | 118 +++ backend/src/db/migrations.ts | 188 ----- backend/src/db/migrations/001-base-schema.ts | 177 +++++ .../db/migrations/002-repos-nullable-url.ts | 84 +++ .../db/migrations/003-repos-add-columns.ts | 38 + .../src/db/migrations/004-repos-indexes.ts | 22 + .../migrations/005-repos-local-path-prefix.ts | 28 + .../006-git-token-to-credentials.ts | 74 ++ backend/src/db/migrations/index.ts | 16 + backend/src/db/queries.ts | 7 - backend/src/db/schema.ts | 135 +--- backend/src/index.ts | 3 + backend/src/routes/health.ts | 16 +- backend/src/routes/memory.ts | 446 +++++++++++ backend/src/routes/repo-git.ts | 92 --- backend/src/routes/settings.ts | 27 +- backend/src/services/git/GitService.ts | 266 +------ .../src/services/opencode-single-server.ts | 14 +- backend/src/services/plugin-memory.ts | 242 ++++++ backend/src/services/project-id-resolver.ts | 68 ++ backend/src/services/repo.ts | 34 +- backend/src/types/git.ts | 13 - backend/src/utils/git-auth.ts | 4 - backend/src/utils/version-utils.ts | 12 + backend/test/routes/repo-git.test.ts | 117 --- backend/test/services/git/GitService.test.ts | 621 --------------- backend/test/utils/git-errors.test.ts | 2 +- docs/features/memory.md | 222 ++++++ docs/features/overview.md | 10 + docs/index.md | 1 + frontend/src/App.tsx | 6 + frontend/src/api/git.ts | 36 +- frontend/src/api/memory.ts | 96 +++ frontend/src/api/repos.ts | 6 +- frontend/src/api/settings.ts | 4 + frontend/src/api/ssh.ts | 13 +- frontend/src/api/types/settings.ts | 8 +- .../components/file-browser/FileDiffView.tsx | 33 +- .../components/memory/MemoryFormDialog.tsx | 147 ++++ 
frontend/src/components/memory/MemoryList.tsx | 146 ++++ .../components/message/RecordingOverlay.tsx | 52 -- .../repo/ResetPermissionsDialog.tsx | 81 ++ .../components/settings/GeneralSettings.tsx | 2 + .../settings/MemoryPluginConfig.tsx | 373 +++++++++ .../settings/OpenCodeConfigManager.tsx | 41 +- .../components/source-control/ChangesTab.tsx | 121 +-- .../source-control/CommitDetailView.tsx | 102 --- .../components/source-control/CommitsTab.tsx | 61 +- .../source-control/GitFlatFileItem.tsx | 59 +- .../source-control/GitFlatFileList.tsx | 76 +- .../source-control/SourceControlPanel.tsx | 125 ++-- frontend/src/components/ui/discard-dialog.tsx | 59 -- .../components/ui/floating-action-button.tsx | 85 --- frontend/src/hooks/useDebounce.ts | 12 + frontend/src/hooks/useGit.ts | 14 +- frontend/src/hooks/useMemories.ts | 94 +++ frontend/src/hooks/usePassphraseHandler.ts | 70 -- frontend/src/hooks/useStandalone.ts | 22 - frontend/src/lib/git-status-styles.ts | 3 +- frontend/src/lib/mcpServerTemplates.ts | 123 --- frontend/src/lib/providerTemplates.ts | 369 --------- frontend/src/pages/Memories.tsx | 115 +++ frontend/src/pages/RepoDetail.tsx | 92 +-- frontend/src/pages/SessionDetail.tsx | 39 +- frontend/src/stores/sessionTodosStore.ts | 6 - frontend/src/types/git.ts | 14 - package.json | 2 +- packages/memory/.gitignore | 3 + packages/memory/.npmignore | 7 + packages/memory/README.md | 110 +++ packages/memory/config.json | 20 + packages/memory/package.json | 58 ++ packages/memory/scripts/download-models.js | 38 + packages/memory/src/agents/code.ts | 112 +++ packages/memory/src/agents/index.ts | 36 + packages/memory/src/agents/memory.ts | 224 ++++++ packages/memory/src/agents/review.ts | 48 ++ packages/memory/src/agents/types.ts | 37 + packages/memory/src/cache/index.ts | 8 + packages/memory/src/cache/memory-cache.ts | 62 ++ packages/memory/src/cache/types.ts | 7 + packages/memory/src/config.ts | 75 ++ packages/memory/src/embedding/api.ts | 118 +++ 
packages/memory/src/embedding/client.ts | 191 +++++ packages/memory/src/embedding/index.ts | 92 +++ packages/memory/src/embedding/local.ts | 126 ++++ packages/memory/src/embedding/server.ts | 265 +++++++ packages/memory/src/embedding/shared.ts | 316 ++++++++ packages/memory/src/embedding/types.ts | 9 + packages/memory/src/hooks/compaction-utils.ts | 142 ++++ packages/memory/src/hooks/index.ts | 10 + packages/memory/src/hooks/keyword.ts | 127 ++++ packages/memory/src/hooks/params.ts | 73 ++ packages/memory/src/hooks/session.ts | 243 ++++++ packages/memory/src/index.ts | 570 ++++++++++++++ .../memory/src/services/embedding-sync.ts | 175 +++++ packages/memory/src/services/memory.ts | 295 ++++++++ packages/memory/src/services/session-state.ts | 111 +++ packages/memory/src/setup.ts | 130 ++++ packages/memory/src/storage/database.ts | 129 ++++ packages/memory/src/storage/index.ts | 20 + packages/memory/src/storage/memory-queries.ts | 457 ++++++++++++ .../memory/src/storage/metadata-queries.ts | 46 ++ .../src/storage/session-state-queries.ts | 91 +++ packages/memory/src/storage/vec-client.ts | 187 +++++ packages/memory/src/storage/vec-direct.ts | 115 +++ packages/memory/src/storage/vec-types.ts | 16 + packages/memory/src/storage/vec-worker.ts | 265 +++++++ packages/memory/src/storage/vec.ts | 53 ++ packages/memory/src/types.ts | 118 +++ packages/memory/src/utils/logger.ts | 61 ++ packages/memory/test/cache.test.ts | 146 ++++ packages/memory/test/compaction-utils.test.ts | 168 +++++ packages/memory/test/embedding.test.ts | 292 ++++++++ packages/memory/test/hooks.test.ts | 453 +++++++++++ packages/memory/test/memory-service.test.ts | 426 +++++++++++ packages/memory/test/plugin.test.ts | 412 ++++++++++ packages/memory/test/session-state.test.ts | 198 +++++ packages/memory/test/setup.test.ts | 112 +++ packages/memory/tsconfig.build.json | 13 + packages/memory/tsconfig.json | 17 + pnpm-lock.yaml | 706 ++++++++++++++++++ pnpm-workspace.yaml | 1 + scripts/docker-entrypoint.sh | 
8 + shared/src/config/env.ts | 12 + shared/src/schemas/index.ts | 1 + shared/src/schemas/memory.ts | 72 ++ shared/src/schemas/settings.ts | 8 +- shared/src/types/index.ts | 14 +- 132 files changed, 11442 insertions(+), 2834 deletions(-) create mode 100644 backend/src/db/migration-runner.ts delete mode 100644 backend/src/db/migrations.ts create mode 100644 backend/src/db/migrations/001-base-schema.ts create mode 100644 backend/src/db/migrations/002-repos-nullable-url.ts create mode 100644 backend/src/db/migrations/003-repos-add-columns.ts create mode 100644 backend/src/db/migrations/004-repos-indexes.ts create mode 100644 backend/src/db/migrations/005-repos-local-path-prefix.ts create mode 100644 backend/src/db/migrations/006-git-token-to-credentials.ts create mode 100644 backend/src/db/migrations/index.ts create mode 100644 backend/src/routes/memory.ts create mode 100644 backend/src/services/plugin-memory.ts create mode 100644 backend/src/services/project-id-resolver.ts create mode 100644 backend/src/utils/version-utils.ts create mode 100644 docs/features/memory.md create mode 100644 frontend/src/api/memory.ts create mode 100644 frontend/src/components/memory/MemoryFormDialog.tsx create mode 100644 frontend/src/components/memory/MemoryList.tsx delete mode 100644 frontend/src/components/message/RecordingOverlay.tsx create mode 100644 frontend/src/components/repo/ResetPermissionsDialog.tsx create mode 100644 frontend/src/components/settings/MemoryPluginConfig.tsx delete mode 100644 frontend/src/components/source-control/CommitDetailView.tsx delete mode 100644 frontend/src/components/ui/discard-dialog.tsx delete mode 100644 frontend/src/components/ui/floating-action-button.tsx create mode 100644 frontend/src/hooks/useDebounce.ts create mode 100644 frontend/src/hooks/useMemories.ts delete mode 100644 frontend/src/hooks/usePassphraseHandler.ts delete mode 100644 frontend/src/hooks/useStandalone.ts delete mode 100644 frontend/src/lib/mcpServerTemplates.ts delete mode 
100644 frontend/src/lib/providerTemplates.ts create mode 100644 frontend/src/pages/Memories.tsx create mode 100644 packages/memory/.gitignore create mode 100644 packages/memory/.npmignore create mode 100644 packages/memory/README.md create mode 100644 packages/memory/config.json create mode 100644 packages/memory/package.json create mode 100644 packages/memory/scripts/download-models.js create mode 100644 packages/memory/src/agents/code.ts create mode 100644 packages/memory/src/agents/index.ts create mode 100644 packages/memory/src/agents/memory.ts create mode 100644 packages/memory/src/agents/review.ts create mode 100644 packages/memory/src/agents/types.ts create mode 100644 packages/memory/src/cache/index.ts create mode 100644 packages/memory/src/cache/memory-cache.ts create mode 100644 packages/memory/src/cache/types.ts create mode 100644 packages/memory/src/config.ts create mode 100644 packages/memory/src/embedding/api.ts create mode 100644 packages/memory/src/embedding/client.ts create mode 100644 packages/memory/src/embedding/index.ts create mode 100644 packages/memory/src/embedding/local.ts create mode 100644 packages/memory/src/embedding/server.ts create mode 100644 packages/memory/src/embedding/shared.ts create mode 100644 packages/memory/src/embedding/types.ts create mode 100644 packages/memory/src/hooks/compaction-utils.ts create mode 100644 packages/memory/src/hooks/index.ts create mode 100644 packages/memory/src/hooks/keyword.ts create mode 100644 packages/memory/src/hooks/params.ts create mode 100644 packages/memory/src/hooks/session.ts create mode 100644 packages/memory/src/index.ts create mode 100644 packages/memory/src/services/embedding-sync.ts create mode 100644 packages/memory/src/services/memory.ts create mode 100644 packages/memory/src/services/session-state.ts create mode 100644 packages/memory/src/setup.ts create mode 100644 packages/memory/src/storage/database.ts create mode 100644 packages/memory/src/storage/index.ts create mode 100644 
packages/memory/src/storage/memory-queries.ts create mode 100644 packages/memory/src/storage/metadata-queries.ts create mode 100644 packages/memory/src/storage/session-state-queries.ts create mode 100644 packages/memory/src/storage/vec-client.ts create mode 100644 packages/memory/src/storage/vec-direct.ts create mode 100644 packages/memory/src/storage/vec-types.ts create mode 100644 packages/memory/src/storage/vec-worker.ts create mode 100644 packages/memory/src/storage/vec.ts create mode 100644 packages/memory/src/types.ts create mode 100644 packages/memory/src/utils/logger.ts create mode 100644 packages/memory/test/cache.test.ts create mode 100644 packages/memory/test/compaction-utils.test.ts create mode 100644 packages/memory/test/embedding.test.ts create mode 100644 packages/memory/test/hooks.test.ts create mode 100644 packages/memory/test/memory-service.test.ts create mode 100644 packages/memory/test/plugin.test.ts create mode 100644 packages/memory/test/session-state.test.ts create mode 100644 packages/memory/test/setup.test.ts create mode 100644 packages/memory/tsconfig.build.json create mode 100644 packages/memory/tsconfig.json create mode 100644 shared/src/schemas/memory.ts diff --git a/.gitguardian.yaml b/.gitguardian.yaml index 83b6d7a6..62f06156 100644 --- a/.gitguardian.yaml +++ b/.gitguardian.yaml @@ -17,8 +17,6 @@ paths-ignore: - '**/*.spec.tsx' - 'temp/**' - '**/*.md' - - '**/test/**' - - '**/__tests__/**' # Specific files to ignore files-ignore: diff --git a/Dockerfile b/Dockerfile index b075a5b3..74ba710c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -41,6 +41,7 @@ COPY --chown=node:node package.json pnpm-workspace.yaml pnpm-lock.yaml ./ COPY --chown=node:node shared/package.json ./shared/ COPY --chown=node:node backend/package.json ./backend/ COPY --chown=node:node frontend/package.json ./frontend/ +COPY --chown=node:node packages/memory ./packages/memory/ RUN pnpm install --frozen-lockfile @@ -52,8 +53,10 @@ COPY backend ./backend COPY 
frontend/src ./frontend/src COPY frontend/public ./frontend/public COPY frontend/index.html frontend/vite.config.ts frontend/tsconfig*.json frontend/components.json frontend/eslint.config.js ./frontend/ +COPY packages/memory ./packages/memory RUN pnpm --filter frontend build +RUN pnpm --filter @opencode-manager/memory build FROM base AS runner @@ -80,6 +83,7 @@ ENV PORT=5003 ENV OPENCODE_SERVER_PORT=5551 ENV DATABASE_PATH=/app/data/opencode.db ENV WORKSPACE_PATH=/workspace +ENV NODE_PATH=/opt/opencode-plugins/node_modules COPY --from=deps --chown=node:node /app/node_modules ./node_modules COPY --from=builder /app/shared ./shared @@ -90,6 +94,16 @@ COPY package.json pnpm-workspace.yaml ./ RUN mkdir -p /app/backend/node_modules/@opencode-manager && \ ln -s /app/shared /app/backend/node_modules/@opencode-manager/shared +COPY --from=builder /app/packages/memory /opt/opencode-plugins/src + +RUN cd /opt/opencode-plugins/src && npm install + +RUN mkdir -p /opt/opencode-plugins/node_modules/@opencode-manager/memory && \ + cp -r /opt/opencode-plugins/src/dist/* /opt/opencode-plugins/node_modules/@opencode-manager/memory/ && \ + cp /opt/opencode-plugins/src/package.json /opt/opencode-plugins/node_modules/@opencode-manager/memory/ && \ + cp /opt/opencode-plugins/src/config.json /opt/opencode-plugins/node_modules/@opencode-manager/memory/config.json 2>/dev/null || true && \ + cp -r /opt/opencode-plugins/src/node_modules/* /opt/opencode-plugins/node_modules/ 2>/dev/null || true + COPY scripts/docker-entrypoint.sh /docker-entrypoint.sh RUN chmod +x /docker-entrypoint.sh diff --git a/README.md b/README.md index 4663b08f..09d91b56 100644 --- a/README.md +++ b/README.md @@ -76,6 +76,7 @@ On first launch, you'll be prompted to create an admin account. That's it! 
- **OAuth Support** - Secure OAuth login for Anthropic and GitHub Copilot - **Custom Agents** - Create agents with custom system prompts and tool permissions - **MCP Servers** - Add local or remote MCP servers with pre-built templates +- **Memory Plugin** - Persistent project knowledge with semantic search, planning state, and compaction awareness ### Mobile & PWA - **Mobile-First Design** - Responsive UI optimized for mobile diff --git a/backend/src/db/migration-runner.ts b/backend/src/db/migration-runner.ts new file mode 100644 index 00000000..e2b928a0 --- /dev/null +++ b/backend/src/db/migration-runner.ts @@ -0,0 +1,118 @@ +import { Database } from 'bun:sqlite' +import { logger } from '../utils/logger' + +export interface Migration { + version: number + name: string + up(db: Database): void + down(db: Database): void +} + +interface MigrationRecord { + version: number + name: string + applied_at: number +} + +function ensureMigrationsTable(db: Database): void { + db.run(` + CREATE TABLE IF NOT EXISTS schema_migrations ( + version INTEGER PRIMARY KEY, + name TEXT NOT NULL, + applied_at INTEGER NOT NULL + ) + `) +} + +function getAppliedVersions(db: Database): Set { + const rows = db.prepare('SELECT version FROM schema_migrations ORDER BY version').all() as MigrationRecord[] + return new Set(rows.map(r => r.version)) +} + +function markApplied(db: Database, migration: Migration): void { + db.prepare('INSERT INTO schema_migrations (version, name, applied_at) VALUES (?, ?, ?)') + .run(migration.version, migration.name, Date.now()) +} + +function markReverted(db: Database, version: number): void { + db.prepare('DELETE FROM schema_migrations WHERE version = ?').run(version) +} + +export function migrate(db: Database, migrations: Migration[]): void { + ensureMigrationsTable(db) + + const applied = getAppliedVersions(db) + const sorted = [...migrations].sort((a, b) => a.version - b.version) + const pending = sorted.filter(m => !applied.has(m.version)) + + if 
(pending.length === 0) { + logger.info('Database schema is up to date') + return + } + + logger.info(`Running ${pending.length} pending migration(s)`) + + for (const migration of pending) { + logger.info(`Applying migration ${migration.version}: ${migration.name}`) + db.run('BEGIN TRANSACTION') + try { + migration.up(db) + markApplied(db, migration) + db.run('COMMIT') + logger.info(`Migration ${migration.version} applied successfully`) + } catch (error) { + db.run('ROLLBACK') + logger.error(`Migration ${migration.version} failed:`, error) + throw error + } + } + + logger.info('All migrations applied successfully') +} + +export function rollback(db: Database, migrations: Migration[], targetVersion?: number): void { + ensureMigrationsTable(db) + + const applied = getAppliedVersions(db) + const sorted = [...migrations] + .filter(m => applied.has(m.version)) + .sort((a, b) => b.version - a.version) + + if (sorted.length === 0) { + logger.info('No migrations to rollback') + return + } + + const latest = sorted[0] + if (!latest) { + logger.info('No migrations to rollback') + return + } + const target = targetVersion ?? 
latest.version - 1 + + const toRevert = sorted.filter(m => m.version > target) + + if (toRevert.length === 0) { + logger.info('No migrations to rollback') + return + } + + logger.info(`Rolling back ${toRevert.length} migration(s) to version ${target}`) + + for (const migration of toRevert) { + logger.info(`Reverting migration ${migration.version}: ${migration.name}`) + db.run('BEGIN TRANSACTION') + try { + migration.down(db) + markReverted(db, migration.version) + db.run('COMMIT') + logger.info(`Migration ${migration.version} reverted successfully`) + } catch (error) { + db.run('ROLLBACK') + logger.error(`Rollback of migration ${migration.version} failed:`, error) + throw error + } + } + + logger.info('Rollback completed successfully') +} diff --git a/backend/src/db/migrations.ts b/backend/src/db/migrations.ts deleted file mode 100644 index 0a587839..00000000 --- a/backend/src/db/migrations.ts +++ /dev/null @@ -1,188 +0,0 @@ -import { Database } from 'bun:sqlite' -import { logger } from '../utils/logger' - -interface ColumnInfo { - cid: number - name: string - type: string - notnull: number - dflt_value: unknown - pk: number -} - -export function runMigrations(db: Database): void { - try { - const tableInfo = db.prepare("PRAGMA table_info(repos)").all() as ColumnInfo[] - - const repoUrlColumn = tableInfo.find((col: ColumnInfo) => col.name === 'repo_url') - if (repoUrlColumn && repoUrlColumn.notnull === 1) { - logger.info('Migrating repos table to allow nullable repo_url for local repos') - db.run('BEGIN TRANSACTION') - try { - db.run(` - CREATE TABLE repos_new ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - repo_url TEXT, - local_path TEXT NOT NULL, - branch TEXT, - default_branch TEXT, - clone_status TEXT NOT NULL, - cloned_at INTEGER NOT NULL, - last_pulled INTEGER, - opencode_config_name TEXT, - is_worktree BOOLEAN DEFAULT FALSE, - is_local BOOLEAN DEFAULT FALSE - ) - `) - - const existingColumns = tableInfo.map((col: ColumnInfo) => col.name) - const columnsToCopy = 
['id', 'repo_url', 'local_path', 'branch', 'default_branch', 'clone_status', 'cloned_at', 'last_pulled', 'opencode_config_name', 'is_worktree', 'is_local'] - .filter(col => existingColumns.includes(col)) - - const columnsStr = columnsToCopy.join(', ') - db.run(`INSERT INTO repos_new (${columnsStr}) SELECT ${columnsStr} FROM repos`) - - db.run('DROP TABLE repos') - db.run('ALTER TABLE repos_new RENAME TO repos') - db.run('COMMIT') - logger.info('Successfully migrated repos table to allow nullable repo_url') - } catch (migrationError) { - db.run('ROLLBACK') - throw migrationError - } - } - - const hasBranchColumn = tableInfo.some(col => col.name === 'branch') - - if (!hasBranchColumn) { - logger.info('Adding missing branch column to repos table') - db.run('ALTER TABLE repos ADD COLUMN branch TEXT') - } - - try { - db.run(` - CREATE UNIQUE INDEX IF NOT EXISTS idx_repo_url_branch - ON repos(repo_url, branch) - WHERE branch IS NOT NULL - `) - } catch (error) { - logger.warn('Index already exists or could not be created', error) - } - - try { - db.run(` - CREATE UNIQUE INDEX IF NOT EXISTS idx_local_path - ON repos(local_path) - `) - } catch (error) { - logger.warn('Local path index already exists or could not be created', error) - } - - const requiredColumns = [ - { name: 'default_branch', sql: 'ALTER TABLE repos ADD COLUMN default_branch TEXT' }, - { name: 'clone_status', sql: 'ALTER TABLE repos ADD COLUMN clone_status TEXT NOT NULL DEFAULT "cloning"' }, - { name: 'cloned_at', sql: 'ALTER TABLE repos ADD COLUMN cloned_at INTEGER NOT NULL DEFAULT 0' }, - { name: 'last_pulled', sql: 'ALTER TABLE repos ADD COLUMN last_pulled INTEGER' }, - { name: 'opencode_config_name', sql: 'ALTER TABLE repos ADD COLUMN opencode_config_name TEXT' }, - { name: 'is_worktree', sql: 'ALTER TABLE repos ADD COLUMN is_worktree BOOLEAN DEFAULT FALSE' }, - { name: 'is_local', sql: 'ALTER TABLE repos ADD COLUMN is_local BOOLEAN DEFAULT FALSE' } - ] - - for (const column of requiredColumns) { - 
const hasColumn = tableInfo.some(col => col.name === column.name) - if (!hasColumn) { - logger.info(`Adding missing column: ${column.name}`) - try { - db.run(column.sql) - } catch (error) { - logger.debug(`Column ${column.name} might already exist:`, error) - } - } - } - - const indexes = [ - 'CREATE INDEX IF NOT EXISTS idx_repo_clone_status ON repos(clone_status)', - 'CREATE INDEX IF NOT EXISTS idx_user_id ON user_preferences(user_id)', - 'CREATE INDEX IF NOT EXISTS idx_opencode_user_id ON opencode_configs(user_id)', - 'CREATE INDEX IF NOT EXISTS idx_opencode_default ON opencode_configs(user_id, is_default)' - ] - - for (const indexSql of indexes) { - try { - db.run(indexSql) - } catch (error) { - logger.warn('Index already exists:', error) - } - } - - try { - const repos = db.prepare("SELECT id, local_path FROM repos WHERE local_path LIKE 'repos/%'").all() as Array<{ - id: number - local_path: string - }> - if (repos.length > 0) { - logger.info(`Migrating ${repos.length} repos to remove 'repos/' prefix from local_path`) - const updateStmt = db.prepare("UPDATE repos SET local_path = ? 
WHERE id = ?") - for (const repo of repos) { - const newPath = repo.local_path.replace(/^repos\//, '') - updateStmt.run(newPath, repo.id) - logger.info(`Updated repo ${repo.id}: ${repo.local_path} -> ${newPath}`) - } - } - } catch (error) { - logger.error('Failed to migrate local_path format:', error) - } - - migrateGitTokenToCredentials(db) - - logger.info('Database migrations completed successfully') - } catch (error) { - logger.error('Failed to run database migrations:', error) - throw error - } -} - -function migrateGitTokenToCredentials(db: Database): void { - try { - const rows = db.prepare('SELECT user_id, preferences FROM user_preferences').all() as Array<{ - user_id: string - preferences: string - }> - - for (const row of rows) { - try { - const parsed = JSON.parse(row.preferences) as Record - const gitToken = parsed.gitToken as string | undefined - const existingCredentials = parsed.gitCredentials as Array | undefined - - if (!gitToken) { - continue - } - - if (existingCredentials && existingCredentials.length > 0) { - continue - } - - const { gitToken: _, ...rest } = parsed - void _ - const migrated = { - ...rest, - gitCredentials: [{ - name: 'GitHub', - host: 'https://github.com/', - token: gitToken, - }], - } - - db.prepare('UPDATE user_preferences SET preferences = ? 
WHERE user_id = ?') - .run(JSON.stringify(migrated), row.user_id) - - logger.info(`Migrated gitToken to gitCredentials for user: ${row.user_id}`) - } catch (parseError) { - logger.error(`Failed to parse preferences for user ${row.user_id}:`, parseError) - } - } - } catch (error) { - logger.error('Failed to migrate gitToken to gitCredentials:', error) - } -} diff --git a/backend/src/db/migrations/001-base-schema.ts b/backend/src/db/migrations/001-base-schema.ts new file mode 100644 index 00000000..4e832ef5 --- /dev/null +++ b/backend/src/db/migrations/001-base-schema.ts @@ -0,0 +1,177 @@ +import type { Migration } from '../migration-runner' + +const migration: Migration = { + version: 1, + name: 'base-schema', + + up(db) { + db.run(` + CREATE TABLE IF NOT EXISTS repos ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + repo_url TEXT, + local_path TEXT NOT NULL, + branch TEXT, + default_branch TEXT, + clone_status TEXT NOT NULL, + cloned_at INTEGER NOT NULL, + last_pulled INTEGER, + opencode_config_name TEXT, + is_worktree BOOLEAN DEFAULT FALSE, + is_local BOOLEAN DEFAULT FALSE + ) + `) + + db.run('CREATE INDEX IF NOT EXISTS idx_repo_clone_status ON repos(clone_status)') + + db.run(` + CREATE TABLE IF NOT EXISTS user_preferences ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + user_id TEXT NOT NULL DEFAULT 'default', + preferences TEXT NOT NULL, + updated_at INTEGER NOT NULL, + UNIQUE(user_id) + ) + `) + + db.run('CREATE INDEX IF NOT EXISTS idx_user_id ON user_preferences(user_id)') + + db.run(` + CREATE TABLE IF NOT EXISTS opencode_configs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + user_id TEXT NOT NULL DEFAULT 'default', + config_name TEXT NOT NULL, + config_content TEXT NOT NULL, + is_default BOOLEAN DEFAULT FALSE, + created_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL, + UNIQUE(user_id, config_name) + ) + `) + + db.run('CREATE INDEX IF NOT EXISTS idx_opencode_user_id ON opencode_configs(user_id)') + db.run('CREATE INDEX IF NOT EXISTS idx_opencode_default ON 
opencode_configs(user_id, is_default)') + + db.run(` + CREATE TABLE IF NOT EXISTS "user" ( + id TEXT PRIMARY KEY NOT NULL, + name TEXT NOT NULL, + email TEXT NOT NULL UNIQUE, + emailVerified INTEGER NOT NULL DEFAULT 0, + image TEXT, + createdAt INTEGER NOT NULL, + updatedAt INTEGER NOT NULL, + role TEXT DEFAULT 'user' + ) + `) + + db.run(` + CREATE TABLE IF NOT EXISTS "session" ( + id TEXT PRIMARY KEY NOT NULL, + expiresAt INTEGER NOT NULL, + token TEXT NOT NULL UNIQUE, + createdAt INTEGER NOT NULL, + updatedAt INTEGER NOT NULL, + ipAddress TEXT, + userAgent TEXT, + userId TEXT NOT NULL REFERENCES "user"(id) ON DELETE CASCADE + ) + `) + + db.run('CREATE INDEX IF NOT EXISTS idx_session_userId ON "session"(userId)') + db.run('CREATE INDEX IF NOT EXISTS idx_session_token ON "session"(token)') + + db.run(` + CREATE TABLE IF NOT EXISTS "account" ( + id TEXT PRIMARY KEY NOT NULL, + accountId TEXT NOT NULL, + providerId TEXT NOT NULL, + userId TEXT NOT NULL REFERENCES "user"(id) ON DELETE CASCADE, + accessToken TEXT, + refreshToken TEXT, + idToken TEXT, + accessTokenExpiresAt INTEGER, + refreshTokenExpiresAt INTEGER, + scope TEXT, + password TEXT, + createdAt INTEGER NOT NULL, + updatedAt INTEGER NOT NULL + ) + `) + + db.run('CREATE INDEX IF NOT EXISTS idx_account_userId ON "account"(userId)') + db.run('CREATE UNIQUE INDEX IF NOT EXISTS idx_account_provider ON "account"(providerId, accountId)') + + db.run(` + CREATE TABLE IF NOT EXISTS "verification" ( + id TEXT PRIMARY KEY NOT NULL, + identifier TEXT NOT NULL, + value TEXT NOT NULL, + expiresAt INTEGER NOT NULL, + createdAt INTEGER, + updatedAt INTEGER + ) + `) + + db.run('CREATE INDEX IF NOT EXISTS idx_verification_identifier ON "verification"(identifier)') + + db.run(` + CREATE TABLE IF NOT EXISTS "passkey" ( + id TEXT PRIMARY KEY NOT NULL, + name TEXT, + publicKey TEXT NOT NULL, + userId TEXT NOT NULL REFERENCES "user"(id) ON DELETE CASCADE, + credentialID TEXT NOT NULL, + counter INTEGER NOT NULL, + deviceType TEXT 
NOT NULL, + backedUp INTEGER NOT NULL DEFAULT 0, + transports TEXT, + createdAt INTEGER, + aaguid TEXT + ) + `) + + db.run('CREATE INDEX IF NOT EXISTS idx_passkey_userId ON "passkey"(userId)') + db.run('CREATE INDEX IF NOT EXISTS idx_passkey_credentialID ON "passkey"(credentialID)') + + db.run(` + CREATE TABLE IF NOT EXISTS trusted_ssh_hosts ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + host TEXT NOT NULL UNIQUE, + key_type TEXT NOT NULL, + public_key TEXT NOT NULL, + created_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL + ) + `) + + db.run('CREATE INDEX IF NOT EXISTS idx_trusted_ssh_hosts_host ON trusted_ssh_hosts(host)') + + db.run(` + CREATE TABLE IF NOT EXISTS repo_settings ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + repo_id INTEGER NOT NULL REFERENCES repos(id) ON DELETE CASCADE, + key TEXT NOT NULL, + value TEXT NOT NULL, + updated_at INTEGER NOT NULL, + UNIQUE(repo_id, key) + ) + `) + + db.run('CREATE INDEX IF NOT EXISTS idx_repo_settings_repo ON repo_settings(repo_id)') + }, + + down(db) { + db.run('DROP TABLE IF EXISTS repo_settings') + db.run('DROP TABLE IF EXISTS trusted_ssh_hosts') + db.run('DROP TABLE IF EXISTS "passkey"') + db.run('DROP TABLE IF EXISTS "verification"') + db.run('DROP TABLE IF EXISTS "account"') + db.run('DROP TABLE IF EXISTS "session"') + db.run('DROP TABLE IF EXISTS "user"') + db.run('DROP TABLE IF EXISTS opencode_configs') + db.run('DROP TABLE IF EXISTS user_preferences') + db.run('DROP TABLE IF EXISTS repos') + }, +} + +export default migration diff --git a/backend/src/db/migrations/002-repos-nullable-url.ts b/backend/src/db/migrations/002-repos-nullable-url.ts new file mode 100644 index 00000000..c4033b76 --- /dev/null +++ b/backend/src/db/migrations/002-repos-nullable-url.ts @@ -0,0 +1,84 @@ +import type { Migration } from '../migration-runner' + +interface ColumnInfo { + name: string + notnull: number +} + +const migration: Migration = { + version: 2, + name: 'repos-nullable-url', + + up(db) { + const tableInfo = 
db.prepare('PRAGMA table_info(repos)').all() as ColumnInfo[] + const repoUrlColumn = tableInfo.find(col => col.name === 'repo_url') + if (!repoUrlColumn || repoUrlColumn.notnull !== 1) return + + const existingColumns = tableInfo.map(col => col.name) + + db.run(` + CREATE TABLE repos_new ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + repo_url TEXT, + local_path TEXT NOT NULL, + branch TEXT, + default_branch TEXT, + clone_status TEXT NOT NULL, + cloned_at INTEGER NOT NULL, + last_pulled INTEGER, + opencode_config_name TEXT, + is_worktree BOOLEAN DEFAULT FALSE, + is_local BOOLEAN DEFAULT FALSE + ) + `) + + const targetColumns = [ + 'id', 'repo_url', 'local_path', 'branch', 'default_branch', + 'clone_status', 'cloned_at', 'last_pulled', 'opencode_config_name', + 'is_worktree', 'is_local' + ] + const columnsToCopy = targetColumns.filter(col => existingColumns.includes(col)) + const columnsStr = columnsToCopy.join(', ') + + db.run(`INSERT INTO repos_new (${columnsStr}) SELECT ${columnsStr} FROM repos`) + db.run('DROP TABLE repos') + db.run('ALTER TABLE repos_new RENAME TO repos') + db.run('CREATE INDEX IF NOT EXISTS idx_repo_clone_status ON repos(clone_status)') + }, + + down(db) { + const tableInfo = db.prepare('PRAGMA table_info(repos)').all() as ColumnInfo[] + const existingColumns = tableInfo.map(col => col.name) + + db.run(` + CREATE TABLE repos_old ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + repo_url TEXT NOT NULL, + local_path TEXT NOT NULL, + branch TEXT, + default_branch TEXT, + clone_status TEXT NOT NULL, + cloned_at INTEGER NOT NULL, + last_pulled INTEGER, + opencode_config_name TEXT, + is_worktree BOOLEAN DEFAULT FALSE, + is_local BOOLEAN DEFAULT FALSE + ) + `) + + const targetColumns = [ + 'id', 'repo_url', 'local_path', 'branch', 'default_branch', + 'clone_status', 'cloned_at', 'last_pulled', 'opencode_config_name', + 'is_worktree', 'is_local' + ] + const columnsToCopy = targetColumns.filter(col => existingColumns.includes(col)) + const columnsStr = 
columnsToCopy.join(', ') + + db.run(`INSERT INTO repos_old (${columnsStr}) SELECT ${columnsStr} FROM repos WHERE repo_url IS NOT NULL`) + db.run('DROP TABLE repos') + db.run('ALTER TABLE repos_old RENAME TO repos') + db.run('CREATE INDEX IF NOT EXISTS idx_repo_clone_status ON repos(clone_status)') + }, +} + +export default migration diff --git a/backend/src/db/migrations/003-repos-add-columns.ts b/backend/src/db/migrations/003-repos-add-columns.ts new file mode 100644 index 00000000..91325014 --- /dev/null +++ b/backend/src/db/migrations/003-repos-add-columns.ts @@ -0,0 +1,38 @@ +import type { Migration } from '../migration-runner' + +interface ColumnInfo { + name: string +} + +const COLUMNS = [ + { name: 'branch', sql: 'ALTER TABLE repos ADD COLUMN branch TEXT' }, + { name: 'default_branch', sql: 'ALTER TABLE repos ADD COLUMN default_branch TEXT' }, + { name: 'clone_status', sql: 'ALTER TABLE repos ADD COLUMN clone_status TEXT NOT NULL DEFAULT "cloning"' }, + { name: 'cloned_at', sql: 'ALTER TABLE repos ADD COLUMN cloned_at INTEGER NOT NULL DEFAULT 0' }, + { name: 'last_pulled', sql: 'ALTER TABLE repos ADD COLUMN last_pulled INTEGER' }, + { name: 'opencode_config_name', sql: 'ALTER TABLE repos ADD COLUMN opencode_config_name TEXT' }, + { name: 'is_worktree', sql: 'ALTER TABLE repos ADD COLUMN is_worktree BOOLEAN DEFAULT FALSE' }, + { name: 'is_local', sql: 'ALTER TABLE repos ADD COLUMN is_local BOOLEAN DEFAULT FALSE' }, +] + +const migration: Migration = { + version: 3, + name: 'repos-add-columns', + + up(db) { + const tableInfo = db.prepare('PRAGMA table_info(repos)').all() as ColumnInfo[] + const existing = new Set(tableInfo.map(col => col.name)) + + for (const col of COLUMNS) { + if (!existing.has(col.name)) { + db.run(col.sql) + } + } + }, + + down(db) { + void db + }, +} + +export default migration diff --git a/backend/src/db/migrations/004-repos-indexes.ts b/backend/src/db/migrations/004-repos-indexes.ts new file mode 100644 index 00000000..370eb34c --- 
/dev/null +++ b/backend/src/db/migrations/004-repos-indexes.ts @@ -0,0 +1,22 @@ +import type { Migration } from '../migration-runner' + +const migration: Migration = { + version: 4, + name: 'repos-indexes', + + up(db) { + db.run(` + CREATE UNIQUE INDEX IF NOT EXISTS idx_repo_url_branch + ON repos(repo_url, branch) + WHERE branch IS NOT NULL + `) + db.run('CREATE UNIQUE INDEX IF NOT EXISTS idx_local_path ON repos(local_path)') + }, + + down(db) { + db.run('DROP INDEX IF EXISTS idx_repo_url_branch') + db.run('DROP INDEX IF EXISTS idx_local_path') + }, +} + +export default migration diff --git a/backend/src/db/migrations/005-repos-local-path-prefix.ts b/backend/src/db/migrations/005-repos-local-path-prefix.ts new file mode 100644 index 00000000..188e9557 --- /dev/null +++ b/backend/src/db/migrations/005-repos-local-path-prefix.ts @@ -0,0 +1,28 @@ +import type { Migration } from '../migration-runner' +import { logger } from '../../utils/logger' + +const migration: Migration = { + version: 5, + name: 'repos-local-path-prefix', + + up(db) { + const repos = db.prepare("SELECT id, local_path FROM repos WHERE local_path LIKE 'repos/%'").all() as Array<{ + id: number + local_path: string + }> + + if (repos.length === 0) return + + logger.info(`Stripping 'repos/' prefix from ${repos.length} repo local_path(s)`) + const stmt = db.prepare('UPDATE repos SET local_path = ? 
WHERE id = ?') + for (const repo of repos) { + stmt.run(repo.local_path.replace(/^repos\//, ''), repo.id) + } + }, + + down(db) { + void db + }, +} + +export default migration diff --git a/backend/src/db/migrations/006-git-token-to-credentials.ts b/backend/src/db/migrations/006-git-token-to-credentials.ts new file mode 100644 index 00000000..c3f717a7 --- /dev/null +++ b/backend/src/db/migrations/006-git-token-to-credentials.ts @@ -0,0 +1,74 @@ +import type { Migration } from '../migration-runner' +import { logger } from '../../utils/logger' + +const migration: Migration = { + version: 6, + name: 'git-token-to-credentials', + + up(db) { + const rows = db.prepare('SELECT user_id, preferences FROM user_preferences').all() as Array<{ + user_id: string + preferences: string + }> + + for (const row of rows) { + try { + const parsed = JSON.parse(row.preferences) as Record<string, unknown> + const gitToken = parsed.gitToken as string | undefined + const existingCredentials = parsed.gitCredentials as Array<unknown> | undefined + + if (!gitToken) continue + if (existingCredentials && existingCredentials.length > 0) continue + + const { gitToken: _, ...rest } = parsed + void _ + const migrated = { + ...rest, + gitCredentials: [{ + name: 'GitHub', + host: 'https://github.com/', + token: gitToken, + }], + } + + db.prepare('UPDATE user_preferences SET preferences = ? 
WHERE user_id = ?') + .run(JSON.stringify(migrated), row.user_id) + + logger.info(`Migrated gitToken to gitCredentials for user: ${row.user_id}`) + } catch (parseError) { + logger.error(`Failed to parse preferences for user ${row.user_id}:`, parseError) + } + } + }, + + down(db) { + const rows = db.prepare('SELECT user_id, preferences FROM user_preferences').all() as Array<{ + user_id: string + preferences: string + }> + + for (const row of rows) { + try { + const parsed = JSON.parse(row.preferences) as Record<string, unknown> + const credentials = parsed.gitCredentials as Array<{ token: string }> | undefined + + const firstCredential = credentials?.[0] + if (!firstCredential) continue + + const { gitCredentials: _, ...rest } = parsed + void _ + const reverted = { + ...rest, + gitToken: firstCredential.token, + } + + db.prepare('UPDATE user_preferences SET preferences = ? WHERE user_id = ?') + .run(JSON.stringify(reverted), row.user_id) + } catch (parseError) { + logger.error(`Failed to revert preferences for user ${row.user_id}:`, parseError) + } + } + }, +} + +export default migration diff --git a/backend/src/db/migrations/index.ts b/backend/src/db/migrations/index.ts new file mode 100644 index 00000000..9a163eb8 --- /dev/null +++ b/backend/src/db/migrations/index.ts @@ -0,0 +1,16 @@ +import type { Migration } from '../migration-runner' +import migration001 from './001-base-schema' +import migration002 from './002-repos-nullable-url' +import migration003 from './003-repos-add-columns' +import migration004 from './004-repos-indexes' +import migration005 from './005-repos-local-path-prefix' +import migration006 from './006-git-token-to-credentials' + +export const allMigrations: Migration[] = [ + migration001, + migration002, + migration003, + migration004, + migration005, + migration006, +] diff --git a/backend/src/db/queries.ts b/backend/src/db/queries.ts index 0153e10f..5ac39b4b 100644 --- a/backend/src/db/queries.ts +++ b/backend/src/db/queries.ts @@ -94,13 +94,6 @@ export 
function getRepoById(db: Database, id: number): Repo | null { return row ? rowToRepo(row) : null } -export function getRepoByUrl(db: Database, repoUrl: string): Repo | null { - const stmt = db.prepare('SELECT * FROM repos WHERE repo_url = ?') - const row = stmt.get(repoUrl) as RepoRow | undefined - - return row ? rowToRepo(row) : null -} - export function getRepoByUrlAndBranch(db: Database, repoUrl: string, branch?: string): Repo | null { const query = branch ? 'SELECT * FROM repos WHERE repo_url = ? AND branch = ?' diff --git a/backend/src/db/schema.ts b/backend/src/db/schema.ts index 098ef07f..230dc0c4 100644 --- a/backend/src/db/schema.ts +++ b/backend/src/db/schema.ts @@ -2,144 +2,19 @@ import { Database } from 'bun:sqlite' import { logger } from '../utils/logger' import { mkdirSync } from 'fs' import { dirname } from 'path' -import { runMigrations } from './migrations' +import { migrate } from './migration-runner' +import { allMigrations } from './migrations' export function initializeDatabase(dbPath: string = './data/opencode.db'): Database { mkdirSync(dirname(dbPath), { recursive: true }) const db = new Database(dbPath) - - db.run(` - CREATE TABLE IF NOT EXISTS repos ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - repo_url TEXT, - local_path TEXT NOT NULL, - branch TEXT, - default_branch TEXT, - clone_status TEXT NOT NULL, - cloned_at INTEGER NOT NULL, - last_pulled INTEGER, - opencode_config_name TEXT, - is_worktree BOOLEAN DEFAULT FALSE, - is_local BOOLEAN DEFAULT FALSE - ); - - CREATE INDEX IF NOT EXISTS idx_repo_clone_status ON repos(clone_status); - - CREATE TABLE IF NOT EXISTS user_preferences ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - user_id TEXT NOT NULL DEFAULT 'default', - preferences TEXT NOT NULL, - updated_at INTEGER NOT NULL, - UNIQUE(user_id) - ); - - CREATE INDEX IF NOT EXISTS idx_user_id ON user_preferences(user_id); - - CREATE TABLE IF NOT EXISTS opencode_configs ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - user_id TEXT NOT NULL DEFAULT 
'default', - config_name TEXT NOT NULL, - config_content TEXT NOT NULL, - is_default BOOLEAN DEFAULT FALSE, - created_at INTEGER NOT NULL, - updated_at INTEGER NOT NULL, - UNIQUE(user_id, config_name) - ); - - CREATE INDEX IF NOT EXISTS idx_opencode_user_id ON opencode_configs(user_id); - CREATE INDEX IF NOT EXISTS idx_opencode_default ON opencode_configs(user_id, is_default); - - -- Better Auth tables - CREATE TABLE IF NOT EXISTS "user" ( - id TEXT PRIMARY KEY NOT NULL, - name TEXT NOT NULL, - email TEXT NOT NULL UNIQUE, - emailVerified INTEGER NOT NULL DEFAULT 0, - image TEXT, - createdAt INTEGER NOT NULL, - updatedAt INTEGER NOT NULL, - role TEXT DEFAULT 'user' - ); - - CREATE TABLE IF NOT EXISTS "session" ( - id TEXT PRIMARY KEY NOT NULL, - expiresAt INTEGER NOT NULL, - token TEXT NOT NULL UNIQUE, - createdAt INTEGER NOT NULL, - updatedAt INTEGER NOT NULL, - ipAddress TEXT, - userAgent TEXT, - userId TEXT NOT NULL REFERENCES "user"(id) ON DELETE CASCADE - ); - - CREATE INDEX IF NOT EXISTS idx_session_userId ON "session"(userId); - CREATE INDEX IF NOT EXISTS idx_session_token ON "session"(token); - - CREATE TABLE IF NOT EXISTS "account" ( - id TEXT PRIMARY KEY NOT NULL, - accountId TEXT NOT NULL, - providerId TEXT NOT NULL, - userId TEXT NOT NULL REFERENCES "user"(id) ON DELETE CASCADE, - accessToken TEXT, - refreshToken TEXT, - idToken TEXT, - accessTokenExpiresAt INTEGER, - refreshTokenExpiresAt INTEGER, - scope TEXT, - password TEXT, - createdAt INTEGER NOT NULL, - updatedAt INTEGER NOT NULL - ); - - CREATE INDEX IF NOT EXISTS idx_account_userId ON "account"(userId); - CREATE UNIQUE INDEX IF NOT EXISTS idx_account_provider ON "account"(providerId, accountId); - - CREATE TABLE IF NOT EXISTS "verification" ( - id TEXT PRIMARY KEY NOT NULL, - identifier TEXT NOT NULL, - value TEXT NOT NULL, - expiresAt INTEGER NOT NULL, - createdAt INTEGER, - updatedAt INTEGER - ); - - CREATE INDEX IF NOT EXISTS idx_verification_identifier ON "verification"(identifier); - - 
CREATE TABLE IF NOT EXISTS "passkey" ( - id TEXT PRIMARY KEY NOT NULL, - name TEXT, - publicKey TEXT NOT NULL, - userId TEXT NOT NULL REFERENCES "user"(id) ON DELETE CASCADE, - credentialID TEXT NOT NULL, - counter INTEGER NOT NULL, - deviceType TEXT NOT NULL, - backedUp INTEGER NOT NULL DEFAULT 0, - transports TEXT, - createdAt INTEGER, - aaguid TEXT - ); - - CREATE INDEX IF NOT EXISTS idx_passkey_userId ON "passkey"(userId); - CREATE INDEX IF NOT EXISTS idx_passkey_credentialID ON "passkey"(credentialID); - CREATE TABLE IF NOT EXISTS trusted_ssh_hosts ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - host TEXT NOT NULL UNIQUE, - key_type TEXT NOT NULL, - public_key TEXT NOT NULL, - created_at INTEGER NOT NULL, - updated_at INTEGER NOT NULL - ); + migrate(db, allMigrations) - CREATE INDEX IF NOT EXISTS idx_trusted_ssh_hosts_host ON trusted_ssh_hosts(host); - `) - - runMigrations(db) - db.prepare('INSERT OR IGNORE INTO user_preferences (user_id, preferences, updated_at) VALUES (?, ?, ?)') .run('default', '{}', Date.now()) - + logger.info('Database initialized successfully') - + return db } diff --git a/backend/src/index.ts b/backend/src/index.ts index e1a79dca..202c68d6 100644 --- a/backend/src/index.ts +++ b/backend/src/index.ts @@ -31,6 +31,7 @@ import { createTitleRoutes } from './routes/title' import { createSSERoutes } from './routes/sse' import { createSSHRoutes } from './routes/ssh' import { createNotificationRoutes } from './routes/notifications' +import { createMemoryRoutes } from './routes/memory' import { createMcpOauthProxyRoutes } from './routes/mcp-oauth-proxy' import { createAuthRoutes, createAuthInfoRoutes, syncAdminFromEnv } from './routes/auth' import { createAuth } from './auth' @@ -41,6 +42,7 @@ import { SettingsService } from './services/settings' import { opencodeServerManager } from './services/opencode-single-server' import { proxyRequest, proxyMcpAuthStart, proxyMcpAuthAuthenticate } from './services/proxy' import { NotificationService } from 
'./services/notification' + import { logger } from './utils/logger' import { getWorkspacePath, @@ -254,6 +256,7 @@ protectedApi.route('/generate-title', createTitleRoutes()) protectedApi.route('/sse', createSSERoutes()) protectedApi.route('/ssh', createSSHRoutes(gitAuthService)) protectedApi.route('/notifications', createNotificationRoutes(notificationService)) +protectedApi.route('/memory', createMemoryRoutes(db)) app.route('/api', protectedApi) diff --git a/backend/src/routes/health.ts b/backend/src/routes/health.ts index 4750314a..4189061c 100644 --- a/backend/src/routes/health.ts +++ b/backend/src/routes/health.ts @@ -2,25 +2,11 @@ import { Hono } from 'hono' import type { Database } from 'bun:sqlite' import { readFile } from 'fs/promises' import { opencodeServerManager } from '../services/opencode-single-server' +import { compareVersions } from '../utils/version-utils' const GITHUB_REPO_OWNER = 'chriswritescode-dev' const GITHUB_REPO_NAME = 'opencode-manager' -function compareVersions(a: string, b: string): number { - const cleanA = a.replace(/^v/, '') - const cleanB = b.replace(/^v/, '') - const partsA = cleanA.split('.').map(Number) - const partsB = cleanB.split('.').map(Number) - - for (let i = 0; i < Math.max(partsA.length, partsB.length); i++) { - const partA = partsA[i] ?? 0 - const partB = partsB[i] ?? 
0 - if (partA > partB) return 1 - if (partA < partB) return -1 - } - return 0 -} - interface CachedRelease { tagName: string htmlUrl: string diff --git a/backend/src/routes/memory.ts b/backend/src/routes/memory.ts new file mode 100644 index 00000000..927d3904 --- /dev/null +++ b/backend/src/routes/memory.ts @@ -0,0 +1,446 @@ +import { Hono } from 'hono' +import { Database } from 'bun:sqlite' +import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs' +import { join } from 'path' +import { logger } from '../utils/logger' +import { PluginMemoryService } from '../services/plugin-memory' +import { resolveProjectId } from '../services/project-id-resolver' +import { getRepoById } from '../db/queries' +import { getWorkspacePath } from '@opencode-manager/shared/config/env' +import { + CreateMemoryRequestSchema, + UpdateMemoryRequestSchema, + MemoryListQuerySchema, + PluginConfigSchema, + type PluginConfig, +} from '@opencode-manager/shared/schemas' + +function resolveMemoryDataDir(): string { + return join(getWorkspacePath(), '.opencode', 'state', 'opencode', 'memory') +} + +function resolvePluginConfigPath(): string { + return join(resolveMemoryDataDir(), 'config.json') +} + +function getDefaultPluginConfig(): PluginConfig { + return { + embedding: { + provider: 'local', + model: 'all-MiniLM-L6-v2', + dimensions: 384, + }, + dedupThreshold: 0.25, + } +} + +function loadPluginConfigFromDisk(): PluginConfig { + const configPath = resolvePluginConfigPath() + + if (!existsSync(configPath)) { + return getDefaultPluginConfig() + } + + try { + const content = readFileSync(configPath, 'utf-8') + const parsed = JSON.parse(content) + const result = PluginConfigSchema.safeParse(parsed) + + if (!result.success) { + logger.error('Invalid plugin config:', result.error) + return getDefaultPluginConfig() + } + + return result.data + } catch (error) { + logger.error('Failed to load plugin config:', error) + return getDefaultPluginConfig() + } +} + +function 
savePluginConfigToDisk(config: PluginConfig): void { + const configPath = resolvePluginConfigPath() + const dataDir = resolveMemoryDataDir() + + if (!existsSync(dataDir)) { + mkdirSync(dataDir, { recursive: true }) + } + + writeFileSync(configPath, JSON.stringify(config, null, 2), 'utf-8') +} + +export function createMemoryRoutes(db: Database): Hono { + const app = new Hono() + const pluginMemory = new PluginMemoryService() + + app.get('/', async (c) => { + const query = c.req.query() + const parsed = MemoryListQuerySchema.safeParse({ + projectId: query.projectId, + scope: query.scope, + content: query.content, + limit: query.limit ? parseInt(query.limit, 10) : undefined, + offset: query.offset ? parseInt(query.offset, 10) : undefined, + }) + + if (!parsed.success) { + return c.json({ error: 'Invalid query parameters', details: parsed.error }, 400) + } + + const filters = parsed.data + + if (!filters.projectId) { + return c.json({ memories: [] }) + } + + const memories = pluginMemory.list(filters.projectId, { + scope: filters.scope, + content: filters.content, + limit: filters.limit, + offset: filters.offset, + }) + + return c.json({ memories }) + }) + + app.post('/', async (c) => { + const body = await c.req.json() + const parsed = CreateMemoryRequestSchema.safeParse(body) + + if (!parsed.success) { + return c.json({ error: 'Invalid request', details: parsed.error }, 400) + } + + try { + const id = pluginMemory.create(parsed.data) + const memory = pluginMemory.getById(id) + + if (!memory) { + return c.json({ error: 'Failed to retrieve created memory' }, 500) + } + + return c.json({ memory }, 201) + } catch (error) { + logger.error('Failed to create memory:', error) + return c.json({ error: 'Failed to create memory' }, 500) + } + }) + + app.get('/project-summary', async (c) => { + const repoIdParam = c.req.query('repoId') + + if (!repoIdParam) { + return c.json({ error: 'Missing repoId parameter' }, 400) + } + + const repoId = parseInt(repoIdParam, 10) + + if 
(isNaN(repoId)) { + return c.json({ error: 'Invalid repoId' }, 400) + } + + try { + const repo = getRepoById(db, repoId) + + if (!repo) { + return c.json({ projectId: null, stats: { total: 0, byScope: {} }, error: 'Repository not found' }, 404) + } + + const projectId = await resolveProjectId(repo.fullPath) + + if (!projectId) { + return c.json({ projectId: null, stats: { total: 0, byScope: {} } }) + } + + const stats = pluginMemory.getStats(projectId) + + return c.json({ projectId, stats }) + } catch (error) { + logger.error('Failed to get project summary:', error) + return c.json({ projectId: null, stats: { total: 0, byScope: {} }, error: 'Failed to get project summary' }, 500) + } + }) + + app.get('/stats', async (c) => { + const projectId = c.req.query('projectId') + + if (!projectId) { + return c.json({ error: 'Missing projectId parameter' }, 400) + } + + try { + const stats = pluginMemory.getStats(projectId) + return c.json(stats) + } catch (error) { + logger.error('Failed to get memory stats:', error) + return c.json({ error: 'Failed to get stats' }, 500) + } + }) + + app.get('/resolve-project', async (c) => { + const repoIdParam = c.req.query('repoId') + + if (!repoIdParam) { + return c.json({ error: 'Missing repoId parameter' }, 400) + } + + const repoId = parseInt(repoIdParam, 10) + + if (isNaN(repoId)) { + return c.json({ error: 'Invalid repoId' }, 400) + } + + try { + const repo = getRepoById(db, repoId) + + if (!repo) { + return c.json({ projectId: null, error: 'Repository not found' }, 404) + } + + const projectId = await resolveProjectId(repo.fullPath) + + return c.json({ projectId }) + } catch (error) { + logger.error('Failed to resolve project ID:', error) + return c.json({ projectId: null, error: 'Failed to resolve project ID' }, 500) + } + }) + + app.get('/plugin-config', async (c) => { + try { + const config = loadPluginConfigFromDisk() + return c.json({ config }) + } catch (error) { + logger.error('Failed to get plugin config:', error) + return 
c.json({ error: 'Failed to get plugin config' }, 500) + } + }) + + app.put('/plugin-config', async (c) => { + try { + const body = await c.req.json() + const parsed = PluginConfigSchema.safeParse(body) + + if (!parsed.success) { + return c.json({ error: 'Invalid config', details: parsed.error.flatten() }, 400) + } + + const config = parsed.data + config.dedupThreshold = Math.max(0.05, Math.min(0.4, config.dedupThreshold ?? 0.25)) + + savePluginConfigToDisk(config) + + return c.json({ success: true, config }) + } catch (error) { + logger.error('Failed to save plugin config:', error) + return c.json({ error: 'Failed to save plugin config' }, 500) + } + }) + + app.post('/test-embedding', async (c) => { + try { + const config = loadPluginConfigFromDisk() + + if (config.embedding.provider === 'local') { + const validModels = ['all-MiniLM-L6-v2'] + if (!validModels.includes(config.embedding.model)) { + return c.json({ + success: false, + error: `Invalid model: ${config.embedding.model}. Valid models: ${validModels.join(', ')}` + }, 400) + } + return c.json({ + success: true, + message: 'Local provider configured. Model will be loaded on server restart.', + dimensions: config.embedding.dimensions ?? 384, + }) + } + + const endpoints: Record<string, string> = { + openai: 'https://api.openai.com/v1/embeddings', + voyage: 'https://api.voyageai.com/v1/embeddings', + } + + const extractHost = (url: string): string => { + const protocolEnd = url.indexOf('://') + if (protocolEnd === -1) return url + const pathStart = url.indexOf('/', protocolEnd + 3) + return pathStart === -1 ? url : url.slice(0, pathStart) + } + + const baseUrl = extractHost(config.embedding.baseUrl || '') + const endpoint = baseUrl + ? `${baseUrl}/v1/embeddings` + : endpoints[config.embedding.provider] ?? '' + + if (!endpoint) { + return c.json({ success: false, error: 'No endpoint configured' }, 400) + } + + if (!config.embedding.apiKey) { + return c.json({ success: false, error: 'API key not configured. 
Please save your API key first.' }, 400) + } + + const testBody = { + model: config.embedding.model, + input: ['test'], + } + + const headers: Record<string, string> = { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${config.embedding.apiKey}`, + } + + const response = await fetch(endpoint, { + method: 'POST', + headers, + body: JSON.stringify(testBody), + }) + + if (!response.ok) { + const errorText = await response.text() + return c.json({ + success: false, + error: `API error: ${response.status}`, + message: errorText, + }, 400) + } + + const data = await response.json() as { + data?: Array<{ embedding: number[] }> + embeddings?: Array<{ embedding: number[] }> + } + + const embeddings = data.data || data.embeddings + if (!embeddings || embeddings.length === 0 || !embeddings[0]) { + return c.json({ success: false, error: 'Invalid response from API' }, 400) + } + + const firstEmbedding = embeddings[0] + const actualDimensions = firstEmbedding.embedding.length + + return c.json({ + success: true, + message: `Embedding test successful. Generated ${actualDimensions}d embedding.`, + dimensions: actualDimensions, + }) + } catch (error) { + logger.error('Failed to test embedding config:', error) + return c.json({ + success: false, + error: 'Failed to test embedding configuration', + message: error instanceof Error ? 
error.message : 'Unknown error' + }, 500) + } + }) + + app.get('/:id', async (c) => { + const id = parseInt(c.req.param('id'), 10) + + if (isNaN(id)) { + return c.json({ error: 'Invalid memory ID' }, 400) + } + + const memory = pluginMemory.getById(id) + + if (!memory) { + return c.json({ error: 'Memory not found' }, 404) + } + + return c.json({ memory }) + }) + + app.put('/:id', async (c) => { + const id = parseInt(c.req.param('id'), 10) + + if (isNaN(id)) { + return c.json({ error: 'Invalid memory ID' }, 400) + } + + const body = await c.req.json() + const parsed = UpdateMemoryRequestSchema.safeParse(body) + + if (!parsed.success) { + return c.json({ error: 'Invalid request', details: parsed.error }, 400) + } + + try { + pluginMemory.update(id, parsed.data) + const memory = pluginMemory.getById(id) + return c.json({ memory }) + } catch (error) { + logger.error('Failed to update memory:', error) + return c.json({ error: 'Failed to update memory' }, 500) + } + }) + + app.delete('/:id', async (c) => { + const id = parseInt(c.req.param('id'), 10) + + if (isNaN(id)) { + return c.json({ error: 'Invalid memory ID' }, 400) + } + + try { + pluginMemory.delete(id) + return c.json({ success: true }) + } catch (error) { + logger.error('Failed to delete memory:', error) + return c.json({ error: 'Failed to delete memory' }, 500) + } + }) + + app.post('/reindex', async (c) => { + try { + const db = pluginMemory.getDb() + + if (!db) { + return c.json({ + error: 'Memory database not found. Make sure the memory plugin has been initialized.', + total: 0, + embedded: 0, + failed: 0 + }, 404) + } + + const memories = pluginMemory.listAll() + + if (memories.length === 0) { + return c.json({ + success: true, + message: 'No memories to reindex', + total: 0, + embedded: 0, + failed: 0 + }) + } + + try { + db.exec('DELETE FROM memory_embeddings') + } catch { + return c.json({ + success: true, + message: 'Cleared embeddings. 
Server restart required to regenerate embeddings with new model.', + total: memories.length, + embedded: 0, + failed: 0, + requiresRestart: true + }) + } + + return c.json({ + success: true, + message: `Cleared ${memories.length} embeddings. Server restart required to regenerate embeddings.`, + total: memories.length, + embedded: 0, + failed: 0, + requiresRestart: true + }) + } catch (error) { + logger.error('Failed to reindex memories:', error) + return c.json({ error: 'Failed to reindex memories', details: error instanceof Error ? error.message : 'Unknown error' }, 500) + } + }) + + return app +} diff --git a/backend/src/routes/repo-git.ts b/backend/src/routes/repo-git.ts index 070f0094..490f8ed2 100644 --- a/backend/src/routes/repo-git.ts +++ b/backend/src/routes/repo-git.ts @@ -303,98 +303,6 @@ export function createRepoGitRoutes(database: Database, gitAuthService: GitAuthS } }) - app.post('/:id/git/discard', async (c) => { - try { - const id = parseInt(c.req.param('id')) - const repo = db.getRepoById(database, id) - - if (!repo) { - return c.json({ error: 'Repo not found' }, 404) - } - - const body = await c.req.json() - const { paths, staged } = body - - if (!paths || !Array.isArray(paths)) { - return c.json({ error: 'paths is required and must be an array' }, 400) - } - - await git.discardChanges(id, paths, staged ?? 
false, database) - - const status = await git.getStatus(id, database) - return c.json(status) - } catch (error: unknown) { - logger.error('Failed to discard changes:', error) - const gitError = parseGitError(error) - return c.json( - { error: gitError.summary, detail: gitError.detail, code: gitError.code }, - gitError.statusCode as ContentfulStatusCode - ) - } - }) - - app.get('/:id/git/commit/:hash', async (c) => { - try { - const id = parseInt(c.req.param('id')) - const hash = c.req.param('hash') - const repo = db.getRepoById(database, id) - - if (!repo) { - return c.json({ error: 'Repo not found' }, 404) - } - - if (!hash) { - return c.json({ error: 'hash is required' }, 400) - } - - const commitDetails = await git.getCommitDetails(id, hash, database) - - if (!commitDetails) { - return c.json({ error: 'Commit not found' }, 404) - } - - return c.json(commitDetails) - } catch (error: unknown) { - logger.error('Failed to get commit details:', error) - const gitError = parseGitError(error) - return c.json( - { error: gitError.summary, detail: gitError.detail, code: gitError.code }, - gitError.statusCode as ContentfulStatusCode - ) - } - }) - - app.get('/:id/git/commit/:hash/diff', async (c) => { - try { - const id = parseInt(c.req.param('id')) - const hash = c.req.param('hash') - const filePath = c.req.query('path') - const repo = db.getRepoById(database, id) - - if (!repo) { - return c.json({ error: 'Repo not found' }, 404) - } - - if (!hash) { - return c.json({ error: 'hash is required' }, 400) - } - - if (!filePath) { - return c.json({ error: 'path query parameter is required' }, 400) - } - - const diff = await git.getCommitDiff(id, hash, filePath, database) - return c.json(diff) - } catch (error: unknown) { - logger.error('Failed to get commit diff:', error) - const gitError = parseGitError(error) - return c.json( - { error: gitError.summary, detail: gitError.detail, code: gitError.code }, - gitError.statusCode as ContentfulStatusCode - ) - } - }) - 
app.get('/:id/git/log', async (c) => { try { const id = parseInt(c.req.param('id')) diff --git a/backend/src/routes/settings.ts b/backend/src/routes/settings.ts index 91a843c1..6c082574 100644 --- a/backend/src/routes/settings.ts +++ b/backend/src/routes/settings.ts @@ -18,19 +18,7 @@ import { opencodeServerManager } from '../services/opencode-single-server' import { DEFAULT_AGENTS_MD } from '../constants' import { validateSSHPrivateKey } from '../utils/ssh-validation' import { encryptSecret } from '../utils/crypto' - -function compareVersions(v1: string, v2: string): number { - const parts1 = v1.split('.').map(s => Number(s)) - const parts2 = v2.split('.').map(s => Number(s)) - - for (let i = 0; i < Math.max(parts1.length, parts2.length); i++) { - const p1 = parts1[i] || 0 - const p2 = parts2[i] || 0 - if (p1 > p2) return 1 - if (p1 < p2) return -1 - } - return 0 -} +import { compareVersions } from '../utils/version-utils' function getOpenCodeInstallMethod(): string { const homePath = process.env.HOME || '' @@ -156,6 +144,19 @@ export function createSettingsRoutes(db: Database) { } }) + app.get('/memory-plugin-status', async (c) => { + try { + const userId = c.req.query('userId') || 'default' + const configs = settingsService.getOpenCodeConfigs(userId) + const defaultConfig = configs.configs.find((cfg: { isDefault: boolean }) => cfg.isDefault) + const isEnabled = defaultConfig?.content?.plugin?.includes('@opencode-manager/memory') ?? 
false + return c.json({ memoryPluginEnabled: isEnabled }) + } catch (error) { + logger.error('Failed to get memory plugin status:', error) + return c.json({ error: 'Failed to get memory plugin status' }, 500) + } + }) + app.patch('/', async (c) => { try { const userId = c.req.query('userId') || 'default' diff --git a/backend/src/services/git/GitService.ts b/backend/src/services/git/GitService.ts index 8c85d7e4..32c4b259 100644 --- a/backend/src/services/git/GitService.ts +++ b/backend/src/services/git/GitService.ts @@ -7,7 +7,7 @@ import { resolveGitIdentity, createGitIdentityEnv, isSSHUrl } from '../../utils/ import { isNoUpstreamError, parseBranchNameFromError } from '../../utils/git-errors' import { SettingsService } from '../settings' import type { Database } from 'bun:sqlite' -import type { GitBranch, GitCommit, FileDiffResponse, GitDiffOptions, GitStatusResponse, GitFileStatus, GitFileStatusType, CommitDetails, CommitFile } from '../../types/git' +import type { GitBranch, GitCommit, FileDiffResponse, GitDiffOptions, GitStatusResponse, GitFileStatus, GitFileStatusType } from '../../types/git' import type { GitCredential } from '@opencode-manager/shared' import path from 'path' @@ -269,252 +269,6 @@ export class GitService { } } - async discardChanges(repoId: number, paths: string[], staged: boolean, database: Database): Promise { - try { - const repo = getRepoById(database, repoId) - if (!repo) { - throw new Error(`Repository not found`) - } - - const repoPath = repo.fullPath - const env = this.gitAuthService.getGitEnvironment() - - if (paths.length === 0) { - return '' - } - - if (staged) { - const args = ['git', '-C', repoPath, 'restore', '--staged', '--worktree', '--source', 'HEAD', '--', ...paths] - return await executeCommand(args, { env }) - } - - const statusOutput = await executeCommand( - ['git', '-C', repoPath, 'status', '--porcelain', '-u', '--', ...paths], - { env } - ) - - const untrackedPaths: string[] = [] - const trackedPaths: string[] = [] - - 
for (const line of statusOutput.split('\n')) { - if (!line.trim()) continue - const statusCode = line.substring(0, 2) - const filePath = line.substring(3).trim() - - if (statusCode === '??') { - untrackedPaths.push(filePath) - } else { - trackedPaths.push(filePath) - } - } - - const results: string[] = [] - - if (trackedPaths.length > 0) { - const args = ['git', '-C', repoPath, 'checkout', '--', ...trackedPaths] - results.push(await executeCommand(args, { env })) - } - - if (untrackedPaths.length > 0) { - try { - const args = ['git', '-C', repoPath, 'clean', '-fd', '--', ...untrackedPaths] - results.push(await executeCommand(args, { env })) - } catch (error: unknown) { - logger.error(`Failed to clean untracked files for repo ${repoId}:`, error) - throw error - } - } - - return results.join('\n') - } catch (error: unknown) { - logger.error(`Failed to discard changes for repo ${repoId}:`, error) - throw error - } - } - - private normalizeRenamePath(path: string): string { - const renamePattern = /\{[^=]+=>\s*([^}]+)\}/ - let normalized = path - while (renamePattern.test(normalized)) { - normalized = normalized.replace(renamePattern, '$1') - } - return normalized.trim() - } - - private parseNumstatOutput(output: string): Map { - const map = new Map() - const lines = output.trim().split('\n') - - for (const line of lines) { - if (!line.trim()) continue - - const parts = line.split('\t') - if (parts.length >= 3) { - const additions = parts[0] - const deletions = parts[1] - const filePath = parts.slice(2).join('\t') - const normalizedPath = this.normalizeRenamePath(filePath) - - if ( - additions?.match(/^\d+$/) && - deletions?.match(/^\d+$/) && - normalizedPath - ) { - map.set(normalizedPath, { - additions: parseInt(additions, 10), - deletions: parseInt(deletions, 10) - }) - } - } - } - - return map - } - - private parseCommitFiles( - output: string, - numstatMap: Map - ): CommitFile[] { - const files: CommitFile[] = [] - const lines = output.trim().split('\n') - - for 
(const line of lines) { - if (!line.trim()) continue - - const parts = line.split('\t') - if (parts.length >= 2 && parts[0] && parts[0].match(/^[AMDRC]/)) { - const statusCode = parts[0] - const fromPath = parts[1] || '' - const toPath = parts[2] || parts[1] || '' - const isRename = statusCode.startsWith('R') - const isCopy = statusCode.startsWith('C') - - let status: GitFileStatusType = 'modified' - switch (statusCode.charAt(0)) { - case 'A': - status = 'added' - break - case 'D': - status = 'deleted' - break - case 'R': - status = 'renamed' - break - case 'C': - status = 'copied' - break - case 'M': - status = 'modified' - break - } - - const numstatData = numstatMap.get(toPath) - const additions = numstatData?.additions ?? 0 - const deletions = numstatData?.deletions ?? 0 - - files.push({ - path: toPath, - status, - oldPath: isRename || isCopy ? fromPath : undefined, - additions, - deletions - }) - } - } - - return files - } - - async getCommitDetails(repoId: number, hash: string, database: Database): Promise { - try { - const repo = getRepoById(database, repoId) - if (!repo) { - throw new Error(`Repository not found: ${repoId}`) - } - - const repoPath = path.resolve(repo.fullPath) - const env = this.gitAuthService.getGitEnvironment(true) - - const commitOutput = await executeCommand( - ['git', '-C', repoPath, 'log', '-1', '--format=%H%x00%an%x00%ae%x00%at%x00%B', hash], - { env } - ) - - if (!commitOutput.trim()) { - return null - } - - const parts = commitOutput.trim().split('\0') - const [commitHash, authorName, authorEmail, timestamp, message] = parts - - if (!commitHash) { - return null - } - - const filesOutput = await executeCommand( - ['git', '-C', repoPath, 'show', '-M', '--name-status', '--format=', hash], - { env } - ) - - const numstatOutput = await executeCommand( - ['git', '-C', repoPath, 'show', '-M', '--numstat', '--format=', hash], - { env } - ) - - const numstatMap = this.parseNumstatOutput(numstatOutput) - const files = 
this.parseCommitFiles(filesOutput, numstatMap) - - return { - hash: commitHash, - authorName: authorName || '', - authorEmail: authorEmail || '', - date: timestamp || '', - message: message || '', - unpushed: await this.isCommitUnpushed(repoPath, commitHash, env), - files - } - } catch (error: unknown) { - logger.error(`Failed to get commit details for repo ${repoId}:`, error) - throw new Error(`Failed to get commit details: ${getErrorMessage(error)}`) - } - } - - async getCommitDiff(repoId: number, hash: string, filePath: string, database: Database): Promise { - try { - const repo = getRepoById(database, repoId) - if (!repo) { - throw new Error(`Repository not found: ${repoId}`) - } - - const repoPath = path.resolve(repo.fullPath) - const env = this.gitAuthService.getGitEnvironment(true) - - const diff = await executeCommand( - ['git', '-C', repoPath, 'show', '--format=', hash, '--', filePath], - { env } - ) - - const status = this.detectDiffStatus(diff) - return this.parseDiffOutput(diff, status, filePath) - } catch (error: unknown) { - logger.error(`Failed to get commit diff for repo ${repoId}:`, error) - throw new Error(`Failed to get commit diff: ${getErrorMessage(error)}`) - } - } - - private detectDiffStatus(diff: string): GitFileStatusType { - if (diff.includes('new file mode')) { - return 'added' - } - if (diff.includes('deleted file mode')) { - return 'deleted' - } - if (diff.includes('rename from') || diff.includes('rename to')) { - return 'renamed' - } - return 'modified' - } - private async setupSSHIfNeeded(repoUrl: string | undefined, database: Database): Promise { await this.gitAuthService.setupSSHForRepoUrl(repoUrl, database) } @@ -915,7 +669,6 @@ export class GitService { let additions = 0 let deletions = 0 let isBinary = false - const MAX_DIFF_SIZE = 500 * 1024 if (typeof diff === 'string') { if (diff.includes('Binary files') || diff.includes('GIT binary patch')) { @@ -929,21 +682,13 @@ export class GitService { } } - let diffOutput = typeof diff 
=== 'string' ? diff : '' - let truncated = false - if (diffOutput.length > MAX_DIFF_SIZE) { - diffOutput = diffOutput.substring(0, MAX_DIFF_SIZE) + '\n\n... (diff truncated due to size)' - truncated = true - } - return { path: filePath || '', status: status as GitFileStatusType, - diff: diffOutput, + diff: typeof diff === 'string' ? diff : '', additions, deletions, - isBinary, - truncated + isBinary } } @@ -956,11 +701,6 @@ export class GitService { } } - private async isCommitUnpushed(repoPath: string, commitHash: string, env: Record): Promise { - const unpushedHashes = await this.getUnpushedCommitHashes(repoPath, env) - return unpushedHashes.has(commitHash) - } - private async getUnpushedCommitHashes(repoPath: string, env: Record): Promise> { try { const output = await executeCommand( diff --git a/backend/src/services/opencode-single-server.ts b/backend/src/services/opencode-single-server.ts index de8b1b12..e679bf78 100644 --- a/backend/src/services/opencode-single-server.ts +++ b/backend/src/services/opencode-single-server.ts @@ -17,6 +17,7 @@ import { decryptSecret } from '../utils/crypto' import { SettingsService } from './settings' import { getWorkspacePath, getOpenCodeConfigFilePath, ENV } from '@opencode-manager/shared/config/env' import type { Database } from 'bun:sqlite' +import { compareVersions } from '../utils/version-utils' const OPENCODE_SERVER_PORT = ENV.OPENCODE.PORT const OPENCODE_SERVER_HOST = ENV.OPENCODE.HOST @@ -25,19 +26,6 @@ const OPENCODE_CONFIG_PATH = getOpenCodeConfigFilePath() const MIN_OPENCODE_VERSION = '1.0.137' const MAX_STDERR_SIZE = 10240 -function compareVersions(v1: string, v2: string): number { - const parts1 = v1.split('.').map(Number) - const parts2 = v2.split('.').map(Number) - - for (let i = 0; i < Math.max(parts1.length, parts2.length); i++) { - const p1 = parts1[i] || 0 - const p2 = parts2[i] || 0 - if (p1 > p2) return 1 - if (p1 < p2) return -1 - } - return 0 -} - class OpenCodeServerManager { private static instance: 
OpenCodeServerManager private serverProcess: ReturnType | null = null diff --git a/backend/src/services/plugin-memory.ts b/backend/src/services/plugin-memory.ts new file mode 100644 index 00000000..f3227d5f --- /dev/null +++ b/backend/src/services/plugin-memory.ts @@ -0,0 +1,242 @@ +import { Database } from 'bun:sqlite' +import path from 'path' +import { existsSync } from 'node:fs' +import { getWorkspacePath } from '@opencode-manager/shared/config/env' + +export interface PluginMemory { + id: number + projectId: string + scope: 'convention' | 'decision' | 'context' + content: string + filePath: string | null + accessCount: number + lastAccessedAt: number | null + createdAt: number + updatedAt: number +} + +interface DbMemoryRow { + id: number + project_id: string + scope: string + content: string + file_path: string | null + access_count: number + last_accessed_at: number | null + created_at: number + updated_at: number +} + +interface MemoryFilters { + scope?: 'convention' | 'decision' | 'context' + content?: string + limit?: number + offset?: number +} + +function getPluginDbPath(): string { + return path.join(getWorkspacePath(), '.opencode', 'state', 'opencode', 'memory', 'memory.db') +} + +function mapRowToMemory(row: DbMemoryRow): PluginMemory { + return { + id: row.id, + projectId: row.project_id, + scope: row.scope as PluginMemory['scope'], + content: row.content, + filePath: row.file_path, + accessCount: row.access_count, + lastAccessedAt: row.last_accessed_at, + createdAt: row.created_at, + updatedAt: row.updated_at, + } +} + +export class PluginMemoryService { + private db: Database | null = null + + getDb(): Database | null { + if (this.db) return this.db + + const dbPath = getPluginDbPath() + + if (!existsSync(dbPath)) { + return null + } + + try { + this.db = new Database(dbPath) + this.db.exec('PRAGMA journal_mode = WAL') + return this.db + } catch { + return null + } + } + + list(projectId: string, filters?: MemoryFilters): PluginMemory[] { + const 
db = this.getDb() + if (!db) return [] + + let sql = 'SELECT * FROM memories WHERE project_id = ?' + const params: (string | number)[] = [projectId] + + if (filters?.scope) { + sql += ' AND scope = ?' + params.push(filters.scope) + } + + if (filters?.content) { + sql += ' AND content LIKE ?' + params.push(`%${filters.content}%`) + } + + sql += ' ORDER BY updated_at DESC' + + if (filters?.limit) { + sql += ' LIMIT ?' + params.push(filters.limit) + } + + if (filters?.offset) { + sql += ' OFFSET ?' + params.push(filters.offset) + } + + const stmt = db.prepare(sql) + const rows = stmt.all(...params) as DbMemoryRow[] + return rows.map(mapRowToMemory) + } + + listAll(filters?: { projectId?: string; scope?: string; limit?: number; offset?: number }): PluginMemory[] { + const db = this.getDb() + if (!db) return [] + + let sql = 'SELECT * FROM memories WHERE 1=1' + const params: (string | number)[] = [] + + if (filters?.projectId) { + sql += ' AND project_id = ?' + params.push(filters.projectId) + } + + if (filters?.scope) { + sql += ' AND scope = ?' + params.push(filters.scope) + } + + sql += ' ORDER BY updated_at DESC' + + if (filters?.limit) { + sql += ' LIMIT ?' + params.push(filters.limit) + } + + if (filters?.offset) { + sql += ' OFFSET ?' + params.push(filters.offset) + } + + const stmt = db.prepare(sql) + const rows = stmt.all(...params) as DbMemoryRow[] + return rows.map(mapRowToMemory) + } + + getById(id: number): PluginMemory | undefined { + const db = this.getDb() + if (!db) return undefined + + const stmt = db.prepare('SELECT * FROM memories WHERE id = ?') + const row = stmt.get(id) as DbMemoryRow | undefined + return row ? 
mapRowToMemory(row) : undefined + } + + create(input: { projectId: string; scope: string; content: string }): number { + const db = this.getDb() + if (!db) throw new Error('Plugin database not available') + + const now = Date.now() + const stmt = db.prepare(` + INSERT INTO memories (project_id, scope, content, access_count, created_at, updated_at) + VALUES (?, ?, ?, 0, ?, ?) + `) + const result = stmt.run(input.projectId, input.scope, input.content, now, now) + return result.lastInsertRowid as number + } + + update(id: number, input: { content?: string; scope?: string }): void { + const db = this.getDb() + if (!db) throw new Error('Plugin database not available') + + const updates: string[] = [] + const params: (string | number)[] = [] + + if (input.content !== undefined) { + updates.push('content = ?') + params.push(input.content) + } + + if (input.scope !== undefined) { + updates.push('scope = ?') + params.push(input.scope) + } + + if (updates.length === 0) return + + updates.push('updated_at = ?') + params.push(Date.now()) + params.push(id) + + const sql = `UPDATE memories SET ${updates.join(', ')} WHERE id = ?` + const stmt = db.prepare(sql) + stmt.run(...params) + + try { + const deleteEmbeddings = db.prepare('DELETE FROM memory_embeddings WHERE memory_id = ?') + deleteEmbeddings.run(id) + } catch { + // table may not exist + } + } + + delete(id: number): void { + const db = this.getDb() + if (!db) throw new Error('Plugin database not available') + + try { + const deleteEmbeddings = db.prepare('DELETE FROM memory_embeddings WHERE memory_id = ?') + deleteEmbeddings.run(id) + } catch { + // table may not exist + } + + const stmt = db.prepare('DELETE FROM memories WHERE id = ?') + stmt.run(id) + } + + getStats(projectId: string): { projectId: string; total: number; byScope: Record } { + const db = this.getDb() + if (!db) { + return { projectId, total: 0, byScope: {} } + } + + const totalStmt = db.prepare('SELECT COUNT(*) as count FROM memories WHERE project_id = 
?') + const totalResult = totalStmt.get(projectId) as { count: number } + const total = totalResult.count + + const byScopeStmt = db.prepare('SELECT scope, COUNT(*) as count FROM memories WHERE project_id = ? GROUP BY scope') + const byScopeRows = byScopeStmt.all(projectId) as { scope: string; count: number }[] + const byScope: Record = {} + for (const row of byScopeRows) { + byScope[row.scope] = row.count + } + + return { projectId, total, byScope } + } + + close(): void { + if (this.db) { + this.db.close() + this.db = null + } + } +} diff --git a/backend/src/services/project-id-resolver.ts b/backend/src/services/project-id-resolver.ts new file mode 100644 index 00000000..69d0c7b1 --- /dev/null +++ b/backend/src/services/project-id-resolver.ts @@ -0,0 +1,68 @@ +import { exec } from 'child_process' +import { readFile } from 'fs/promises' +import { fileExists } from './file-operations' + +const projectIdCache = new Map() + +async function executeGitCommand(cwd: string, args: string[]): Promise { + return new Promise((resolve, reject) => { + exec('git ' + args.join(' '), { cwd }, (error, stdout) => { + if (error) { + reject(error) + } else { + resolve(stdout.trim()) + } + }) + }) +} + +export async function resolveProjectId(repoFullPath: string): Promise { + if (projectIdCache.has(repoFullPath)) { + return projectIdCache.get(repoFullPath) ?? 
null + } + + const cacheFile = `${repoFullPath}/.git/opencode` + const cacheExists = await fileExists(cacheFile) + + if (cacheExists) { + try { + const cachedId = (await readFile(cacheFile, 'utf-8')).trim() + if (cachedId) { + projectIdCache.set(repoFullPath, cachedId) + return cachedId + } + } catch { + // cache file may not exist or be readable + } + } + + try { + const gitDir = `${repoFullPath}/.git` + const gitDirExists = await fileExists(gitDir) + if (!gitDirExists) { + return null + } + + const output = await executeGitCommand(repoFullPath, [ + 'rev-list', + '--max-parents=0', + '--all', + ]) + + if (!output) { + return null + } + + const commits = output.split('\n').filter(Boolean).sort() + const projectId = commits[0] + + if (!projectId) { + return null + } + + projectIdCache.set(repoFullPath, projectId) + return projectId + } catch { + return null + } +} diff --git a/backend/src/services/repo.ts b/backend/src/services/repo.ts index 84cc4abd..fbcefd6d 100644 --- a/backend/src/services/repo.ts +++ b/backend/src/services/repo.ts @@ -9,6 +9,7 @@ import type { GitAuthService } from './git-auth' import { isGitHubHttpsUrl, isSSHUrl, normalizeSSHUrl } from '../utils/git-auth' import path from 'path' import { parseSSHHost } from '../utils/ssh-key-manager' +import { getErrorMessage } from '../utils/error-utils' const GIT_CLONE_TIMEOUT = 300000 @@ -38,29 +39,6 @@ function enhanceCloneError(error: unknown, repoUrl: string, originalMessage: str return error instanceof Error ? 
error : new Error(originalMessage) } -interface ErrorWithMessage { - message: string -} - -function isErrorWithMessage(error: unknown): error is ErrorWithMessage { - return ( - typeof error === 'object' && - error !== null && - 'message' in error && - typeof (error as ErrorWithMessage).message === 'string' - ) -} - -function getErrorMessage(error: unknown): string { - if (isErrorWithMessage(error)) { - return error.message - } - if (error instanceof Error) { - return error.message - } - return String(error) -} - async function hasCommits(repoPath: string, env: Record): Promise { try { await executeCommand(['git', '-C', repoPath, 'rev-parse', 'HEAD'], { env, silent: true }) @@ -197,7 +175,7 @@ export async function initLocalRepo( throw new Error(`Directory exists but is not a valid Git repository. Please provide either a Git repository path or a simple directory name to create a new empty repository.`) } } catch (error: unknown) { - if (isErrorWithMessage(error) && getErrorMessage(error).includes('No such file or directory')) { + if (getErrorMessage(error).includes('No such file or directory')) { throw error } throw new Error(`Failed to process absolute path '${normalizedInputPath}': ${getErrorMessage(error)}`) @@ -393,12 +371,12 @@ export async function cloneRepo( try { await executeCommand(['git', 'clone', '-b', branch, normalizedRepoUrl, worktreeDirName], { cwd: getReposPath(), env, timeout: GIT_CLONE_TIMEOUT }) } catch (error: unknown) { - if (isErrorWithMessage(error) && getErrorMessage(error).includes('destination path') && getErrorMessage(error).includes('already exists')) { + if (getErrorMessage(error).includes('destination path') && getErrorMessage(error).includes('already exists')) { logger.error(`Clone failed: directory still exists after cleanup attempt`) throw new Error(`Workspace directory ${worktreeDirName} already exists. 
Please delete it manually or contact support.`) } - if (branch && isErrorWithMessage(error) && (getErrorMessage(error).includes('Remote branch') || getErrorMessage(error).includes('not found'))) { + if (branch && (getErrorMessage(error).includes('Remote branch') || getErrorMessage(error).includes('not found'))) { logger.info(`Branch '${branch}' not found, cloning default branch and creating branch locally`) try { await executeCommand(['git', 'clone', normalizedRepoUrl, worktreeDirName], { cwd: getReposPath(), env, timeout: GIT_CLONE_TIMEOUT }) @@ -496,12 +474,12 @@ export async function cloneRepo( await executeCommand(cloneCmd, { cwd: getReposPath(), env, timeout: GIT_CLONE_TIMEOUT }) } catch (error: unknown) { - if (isErrorWithMessage(error) && getErrorMessage(error).includes('destination path') && getErrorMessage(error).includes('already exists')) { + if (getErrorMessage(error).includes('destination path') && getErrorMessage(error).includes('already exists')) { logger.error(`Clone failed: directory still exists after cleanup attempt`) throw new Error(`Workspace directory ${worktreeDirName} already exists. 
Please delete it manually or contact support.`) } - if (branch && isErrorWithMessage(error) && (getErrorMessage(error).includes('Remote branch') || getErrorMessage(error).includes('not found'))) { + if (branch && (getErrorMessage(error).includes('Remote branch') || getErrorMessage(error).includes('not found'))) { logger.info(`Branch '${branch}' not found, cloning default branch and creating branch locally`) try { await executeCommand(['git', 'clone', normalizedRepoUrl, worktreeDirName], { cwd: getReposPath(), env, timeout: GIT_CLONE_TIMEOUT }) diff --git a/backend/src/types/git.ts b/backend/src/types/git.ts index effe8fd1..324cf53d 100644 --- a/backend/src/types/git.ts +++ b/backend/src/types/git.ts @@ -31,7 +31,6 @@ export interface FileDiffResponse { additions: number deletions: number isBinary: boolean - truncated?: boolean } export interface GitDiffOptions { @@ -49,15 +48,3 @@ export interface GitBranch { behind?: number isWorktree?: boolean } - -export interface CommitFile { - path: string - status: GitFileStatusType - oldPath?: string - additions: number - deletions: number -} - -export interface CommitDetails extends GitCommit { - files: CommitFile[] -} diff --git a/backend/src/utils/git-auth.ts b/backend/src/utils/git-auth.ts index 56e8b34b..27f9e46b 100644 --- a/backend/src/utils/git-auth.ts +++ b/backend/src/utils/git-auth.ts @@ -103,10 +103,6 @@ export function createGitEnv(credentials: GitCredential[]): Record { - return createGitEnv([{ name: 'GitHub', host: 'https://github.com/', token: gitToken, type: 'pat' }]) -} - export function findGitHubCredential(credentials: GitCredential[]): GitCredential | null { if (!credentials || credentials.length === 0) return null diff --git a/backend/src/utils/version-utils.ts b/backend/src/utils/version-utils.ts new file mode 100644 index 00000000..d34ed61b --- /dev/null +++ b/backend/src/utils/version-utils.ts @@ -0,0 +1,12 @@ +export function compareVersions(v1: string, v2: string): number { + const parts1 = 
v1.replace(/^v/, '').split('.').map(Number) + const parts2 = v2.replace(/^v/, '').split('.').map(Number) + + for (let i = 0; i < Math.max(parts1.length, parts2.length); i++) { + const p1 = parts1[i] ?? 0 + const p2 = parts2[i] ?? 0 + if (p1 > p2) return 1 + if (p1 < p2) return -1 + } + return 0 +} diff --git a/backend/test/routes/repo-git.test.ts b/backend/test/routes/repo-git.test.ts index 2ec70ee1..103a6fce 100644 --- a/backend/test/routes/repo-git.test.ts +++ b/backend/test/routes/repo-git.test.ts @@ -299,121 +299,4 @@ describe('Repo Git Routes', () => { expect(body).toHaveProperty('error') }) }) - - describe('POST /:id/git/discard', () => { - it('should return 404 when repo does not exist', async () => { - ;(db.getRepoById as MockedFunction).mockReturnValue(null) - const response = await app.request('/999/git/discard', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ paths: ['file1.ts'] }), - }) - const body = await response.json() - - expect(response.status).toBe(404) - expect(body).toHaveProperty('error', 'Repo not found') - }) - - it('should return 400 when paths is not an array', async () => { - ;(db.getRepoById as MockedFunction).mockReturnValue({ id: 1 } as any) - const response = await app.request('/1/git/discard', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ paths: 'not-an-array' }), - }) - const body = await response.json() - - expect(response.status).toBe(400) - expect(body).toHaveProperty('error', 'paths is required and must be an array') - }) - - it('should return 400 when paths is missing', async () => { - ;(db.getRepoById as MockedFunction).mockReturnValue({ id: 1 } as any) - const response = await app.request('/1/git/discard', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({}), - }) - const body = await response.json() - - expect(response.status).toBe(400) - expect(body).toHaveProperty('error', 'paths is 
required and must be an array') - }) - - it('should return 500 when git operation fails', async () => { - ;(db.getRepoById as MockedFunction).mockReturnValue({ id: 1, fullPath: '/path/to/repo' } as any) - const response = await app.request('/1/git/discard', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ paths: ['file1.ts'], staged: false }), - }) - const body = await response.json() - - expect(response.status).toBe(500) - expect(body).toHaveProperty('error') - }) - }) - - describe('GET /:id/git/commit/:hash', () => { - it('should return 404 when repo does not exist', async () => { - ;(db.getRepoById as MockedFunction).mockReturnValue(null) - const response = await app.request('/999/git/commit/abc123') - - expect(response.status).toBe(404) - const body = await response.json() - expect(body).toHaveProperty('error', 'Repo not found') - }) - - it('should return 400 when hash is missing', async () => { - ;(db.getRepoById as MockedFunction).mockReturnValue({ id: 1 } as any) - const response = await app.request('/1/git/commit/') - - expect(response.status).toBeGreaterThanOrEqual(400) - }) - - it('should return 500 when git operation fails', async () => { - ;(db.getRepoById as MockedFunction).mockReturnValue({ id: 1, fullPath: '/path/to/repo' } as any) - const response = await app.request('/1/git/commit/abc123') - - expect(response.status).toBe(500) - const body = await response.json() - expect(body).toHaveProperty('error') - }) - }) - - describe('GET /:id/git/commit/:hash/diff', () => { - it('should return 404 when repo does not exist', async () => { - ;(db.getRepoById as MockedFunction).mockReturnValue(null) - const response = await app.request('/999/git/commit/abc123/diff?path=file.ts') - - expect(response.status).toBe(404) - const body = await response.json() - expect(body).toHaveProperty('error', 'Repo not found') - }) - - it('should return 400 when hash is missing', async () => { - ;(db.getRepoById as 
MockedFunction).mockReturnValue({ id: 1 } as any) - const response = await app.request('/1/git/commit//diff?path=file.ts') - - expect(response.status).toBeGreaterThanOrEqual(400) - }) - - it('should return 400 when path query parameter is missing', async () => { - ;(db.getRepoById as MockedFunction).mockReturnValue({ id: 1 } as any) - const response = await app.request('/1/git/commit/abc123/diff') - const body = await response.json() - - expect(response.status).toBe(400) - expect(body).toHaveProperty('error', 'path query parameter is required') - }) - - it('should return 500 when git operation fails', async () => { - ;(db.getRepoById as MockedFunction).mockReturnValue({ id: 1, fullPath: '/path/to/repo' } as any) - const response = await app.request('/1/git/commit/abc123/diff?path=file.ts') - - expect(response.status).toBe(500) - const body = await response.json() - expect(body).toHaveProperty('error') - }) - }) }) diff --git a/backend/test/services/git/GitService.test.ts b/backend/test/services/git/GitService.test.ts index d096a605..783853f1 100644 --- a/backend/test/services/git/GitService.test.ts +++ b/backend/test/services/git/GitService.test.ts @@ -29,7 +29,6 @@ vi.mock('../../../src/utils/git-auth', () => ({ resolveGitIdentity: vi.fn().mockResolvedValue(null), createGitIdentityEnv: vi.fn().mockReturnValue({}), createSilentGitEnv: vi.fn(), - filterGitCredentials: vi.fn().mockReturnValue([]), })) vi.mock('../../../src/utils/git-errors', () => ({ @@ -776,624 +775,4 @@ describe('GitService', () => { expect(result).toBe("Switched to branch 'main'") }) }) - - describe('discardChanges', () => { - it('discards staged changes using restore --staged --worktree', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockResolvedValue('Changes discarded') - - const result = await service.discardChanges(1, ['file.ts', 'dir/file2.ts'], true, database) - - 
expect(getRepoByIdMock).toHaveBeenCalledWith(database, 1) - expect(executeCommandMock).toHaveBeenCalledWith( - ['git', '-C', mockRepo.fullPath, 'restore', '--staged', '--worktree', '--source', 'HEAD', '--', 'file.ts', 'dir/file2.ts'], - { env: expect.any(Object) } - ) - expect(result).toBe('Changes discarded') - }) - - it('discards unstaged tracked changes using checkout', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockImplementation((args) => { - if (args.includes('status')) { - return Promise.resolve('M file.ts\n') - } - if (args.includes('checkout')) { - return Promise.resolve('Updated 1 path') - } - return Promise.resolve('') - }) - - const result = await service.discardChanges(1, ['file.ts'], false, database) - - expect(executeCommandMock).toHaveBeenCalledWith( - ['git', '-C', mockRepo.fullPath, 'status', '--porcelain', '-u', '--', 'file.ts'], - { env: expect.any(Object) } - ) - expect(executeCommandMock).toHaveBeenCalledWith( - ['git', '-C', mockRepo.fullPath, 'checkout', '--', 'file.ts'], - { env: expect.any(Object) } - ) - expect(result).toBe('Updated 1 path') - }) - - it('removes unstaged untracked files using git clean', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockImplementation((args) => { - if (args.includes('status')) { - return Promise.resolve('?? 
untracked.ts\n') - } - if (args.includes('clean')) { - return Promise.resolve('Removed untracked.ts') - } - return Promise.resolve('') - }) - - const result = await service.discardChanges(1, ['untracked.ts'], false, database) - - expect(executeCommandMock).toHaveBeenCalledWith( - ['git', '-C', mockRepo.fullPath, 'clean', '-fd', '--', 'untracked.ts'], - { env: expect.any(Object) } - ) - expect(result).toContain('Removed untracked.ts') - }) - - it('handles mixed tracked and untracked files', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockImplementation((args) => { - if (args.includes('status')) { - return Promise.resolve('M modified.ts\n?? untracked.ts\n') - } - if (args.includes('checkout')) { - return Promise.resolve('Updated 1 path') - } - if (args.includes('clean')) { - return Promise.resolve('Removed untracked.ts') - } - return Promise.resolve('') - }) - - const result = await service.discardChanges(1, ['modified.ts', 'untracked.ts'], false, database) - - expect(executeCommandMock).toHaveBeenCalledWith( - ['git', '-C', mockRepo.fullPath, 'checkout', '--', 'modified.ts'], - { env: expect.any(Object) } - ) - expect(executeCommandMock).toHaveBeenCalledWith( - ['git', '-C', mockRepo.fullPath, 'clean', '-fd', '--', 'untracked.ts'], - { env: expect.any(Object) } - ) - expect(result).toContain('Updated 1 path') - expect(result).toContain('Removed untracked.ts') - }) - - it('returns early when no paths provided', async () => { - const result = await service.discardChanges(1, [], false, database) - - expect(executeCommandMock).not.toHaveBeenCalled() - expect(result).toBe('') - }) - - it('throws error when repository not found', async () => { - getRepoByIdMock.mockReturnValue(null) - - await expect(service.discardChanges(1, ['file.ts'], false, database)).rejects.toThrow('Repository not found') - }) - - it('logs and throws error on git command failure', async () => { - 
const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - const error = new Error('Permission denied') - executeCommandMock.mockImplementation((args) => { - if (args.includes('status')) { - return Promise.resolve('M file.ts\n') - } - if (args.includes('checkout')) { - return Promise.reject(error) - } - return Promise.resolve('') - }) - - await expect(service.discardChanges(1, ['file.ts'], false, database)).rejects.toThrow() - }) - - it('handles directories in untracked clean operation', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockImplementation((args) => { - if (args.includes('status')) { - return Promise.resolve('?? dir/\n') - } - if (args.includes('clean')) { - return Promise.resolve('Removing dir/') - } - return Promise.resolve('') - }) - - const result = await service.discardChanges(1, ['dir/'], false, database) - - expect(executeCommandMock).toHaveBeenCalledWith( - ['git', '-C', mockRepo.fullPath, 'clean', '-fd', '--', 'dir/'], - { env: expect.any(Object) } - ) - expect(result).toContain('Removing dir/') - }) - }) - - describe('getCommitDetails', () => { - it('returns full commit details with files', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockImplementation((args) => { - if (args.includes('log') && !args.includes('--not')) { - return Promise.resolve('abc123\x00John Doe\x00john@example.com\x001609459200\x00Initial commit') - } - if (args.includes('show') && args.includes('--name-status')) { - return Promise.resolve('A\tfile1.ts\nM\tfile2.ts\nD\tfile3.ts\n') - } - if (args.includes('show') && args.includes('--numstat')) { - return Promise.resolve('10\t5\tfile1.ts\n20\t15\tfile2.ts\n0\t30\tfile3.ts\n') - } - if (args.includes('--not') && args.includes('--remotes')) { - return 
Promise.resolve('') - } - return Promise.resolve('') - }) - - const result = await service.getCommitDetails(1, 'abc123', database) - - expect(result).not.toBeNull() - expect(result?.hash).toBe('abc123') - expect(result?.authorName).toBe('John Doe') - expect(result?.authorEmail).toBe('john@example.com') - expect(result?.date).toBe('1609459200') - expect(result?.message).toBe('Initial commit') - expect(result?.files).toHaveLength(3) - expect(result?.files[0]).toEqual({ - path: 'file1.ts', - status: 'added', - additions: 10, - deletions: 5 - }) - expect(result?.files[1]).toEqual({ - path: 'file2.ts', - status: 'modified', - additions: 20, - deletions: 15 - }) - expect(result?.files[2]).toEqual({ - path: 'file3.ts', - status: 'deleted', - oldPath: undefined, - additions: 0, - deletions: 30 - }) - }) - - it('handles renamed files in commit details', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockImplementation((args) => { - if (args.includes('log') && !args.includes('--not')) { - return Promise.resolve('abc123\x00John Doe\x00john@example.com\x001609459200\x00Rename file') - } - if (args.includes('show') && args.includes('--name-status')) { - return Promise.resolve('R\told.ts\tnew.ts\n') - } - if (args.includes('show') && args.includes('--numstat')) { - return Promise.resolve('0\t0\tnew.ts\n') - } - if (args.includes('--not') && args.includes('--remotes')) { - return Promise.resolve('') - } - return Promise.resolve('') - }) - - const result = await service.getCommitDetails(1, 'abc123', database) - - expect(result?.files).toHaveLength(1) - expect(result?.files[0]).toEqual({ - path: 'new.ts', - status: 'renamed', - oldPath: 'old.ts', - additions: 0, - deletions: 0 - }) - }) - - it('handles copied files in commit details', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - 
executeCommandMock.mockImplementation((args) => { - if (args.includes('log') && !args.includes('--not')) { - return Promise.resolve('abc123\x00John Doe\x00john@example.com\x001609459200\x00Copy file') - } - if (args.includes('show') && args.includes('--name-status')) { - return Promise.resolve('C\toriginal.ts\tcopy.ts\n') - } - if (args.includes('show') && args.includes('--numstat')) { - return Promise.resolve('0\t0\tcopy.ts\n') - } - if (args.includes('--not') && args.includes('--remotes')) { - return Promise.resolve('') - } - return Promise.resolve('') - }) - - const result = await service.getCommitDetails(1, 'abc123', database) - - expect(result?.files).toHaveLength(1) - expect(result?.files[0]).toEqual({ - path: 'copy.ts', - status: 'copied', - oldPath: 'original.ts', - additions: 0, - deletions: 0 - }) - }) - - it('returns empty files array for empty commit', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockImplementation((args) => { - if (args.includes('log') && !args.includes('--not')) { - return Promise.resolve('abc123\x00John Doe\x00john@example.com\x001609459200\x00Empty commit') - } - if (args.includes('show') && args.includes('--name-status')) { - return Promise.resolve('') - } - if (args.includes('show') && args.includes('--numstat')) { - return Promise.resolve('') - } - if (args.includes('--not') && args.includes('--remotes')) { - return Promise.resolve('') - } - return Promise.resolve('') - }) - - const result = await service.getCommitDetails(1, 'abc123', database) - - expect(result?.files).toHaveLength(0) - }) - - it('returns null when commit hash not found', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockResolvedValue('') - - const result = await service.getCommitDetails(1, 'nonexistent', database) - - expect(result).toBeNull() - }) - - 
it('throws error when repository not found', async () => { - getRepoByIdMock.mockReturnValue(null) - - await expect(service.getCommitDetails(1, 'abc123', database)).rejects.toThrow('Repository not found') - }) - - it('marks unpushed commits correctly', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockImplementation((args) => { - if (args.includes('log') && !args.includes('--not')) { - return Promise.resolve('abc123\x00John Doe\x00john@example.com\x001609459200\x00Local commit') - } - if (args.includes('show') && args.includes('--name-status')) { - return Promise.resolve('M\tfile.ts\n') - } - if (args.includes('show') && args.includes('--numstat')) { - return Promise.resolve('1\t1\tfile.ts\n') - } - if (args.includes('--not') && args.includes('--remotes')) { - return Promise.resolve('abc123\n') - } - return Promise.resolve('') - }) - - const result = await service.getCommitDetails(1, 'abc123', database) - - expect(result?.unpushed).toBe(true) - }) - - it('handles commit message with pipes correctly', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockImplementation((args) => { - if (args.includes('log') && !args.includes('--not')) { - return Promise.resolve('abc123\x00John Doe\x00john@example.com\x001609459200\x00Fix: merge|conflict|handling') - } - if (args.includes('show') && args.includes('--name-status')) { - return Promise.resolve('') - } - if (args.includes('show') && args.includes('--numstat')) { - return Promise.resolve('') - } - if (args.includes('--not') && args.includes('--remotes')) { - return Promise.resolve('') - } - return Promise.resolve('') - }) - - const result = await service.getCommitDetails(1, 'abc123', database) - - expect(result?.message).toBe('Fix: merge|conflict|handling') - }) - - it('throws error on git command failure', async () => { - 
const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - const error = new Error('Git error') - executeCommandMock.mockRejectedValue(error) - - await expect(service.getCommitDetails(1, 'abc123', database)).rejects.toThrow('Failed to get commit details') - }) - }) - - describe('getCommitDiff', () => { - it('returns diff for specific file in commit', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - const diffOutput = `diff --git a/file.ts b/file.ts -index abc123..def456 100644 ---- a/file.ts -+++ b/file.ts -@@ -1,3 +1,4 @@ -+new line - existing line 1 - existing line 2 - existing line 3` - executeCommandMock.mockResolvedValue(diffOutput) - - const result = await service.getCommitDiff(1, 'abc123', 'file.ts', database) - - expect(getRepoByIdMock).toHaveBeenCalledWith(database, 1) - expect(executeCommandMock).toHaveBeenCalledWith( - ['git', '-C', expect.stringContaining('/path/to/repo'), 'show', '--format=', 'abc123', '--', 'file.ts'], - { env: expect.any(Object) } - ) - expect(result.path).toBe('file.ts') - expect(result.status).toBe('modified') - expect(result.diff).toContain('new line') - expect(result.additions).toBe(1) - expect(result.deletions).toBe(0) - expect(result.isBinary).toBe(false) - }) - - it('detects binary files', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - executeCommandMock.mockResolvedValue('Binary files a/image.png and b/image.png differ') - - const result = await service.getCommitDiff(1, 'abc123', 'image.png', database) - - expect(result.isBinary).toBe(true) - expect(result.diff).toContain('Binary files') - }) - - it('throws error when repository not found', async () => { - getRepoByIdMock.mockReturnValue(null) - - await expect(service.getCommitDiff(1, 'abc123', 'file.ts', database)).rejects.toThrow('Repository not found') - }) 
- - it('handles deleted files in commit', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - const diffOutput = `diff --git a/deleted.ts b/deleted.ts -deleted file mode 100644 -index abc123..0000000 ---- a/deleted.ts -+++ /dev/null -@@ -1,3 +0,0 @@ --line 1 --line 2 --line 3` - executeCommandMock.mockResolvedValue(diffOutput) - - const result = await service.getCommitDiff(1, 'abc123', 'deleted.ts', database) - - expect(result.deletions).toBe(3) - expect(result.additions).toBe(0) - }) - - it('throws error on git command failure', async () => { - const mockRepo = { - id: 1, - fullPath: '/path/to/repo', - } - getRepoByIdMock.mockReturnValue(mockRepo as any) - const error = new Error('Fatal error') - executeCommandMock.mockRejectedValue(error) - - await expect(service.getCommitDiff(1, 'abc123', 'file.ts', database)).rejects.toThrow('Failed to get commit diff') - }) - }) - - describe('parseDiffOutput', () => { - it('parses small diff and counts additions/deletions', () => { - const diffOutput = `diff --git a/file.ts b/file.ts -index abc123..def456 100644 ---- a/file.ts -+++ b/file.ts -@@ -1,3 +1,4 @@ -+added line - existing line 1 - existing line 2 --removed line - existing line 3` - - const result = (service as any).parseDiffOutput(diffOutput, 'modified', 'file.ts') - - expect(result).toEqual({ - path: 'file.ts', - status: 'modified', - diff: diffOutput, - additions: 1, - deletions: 1, - isBinary: false, - truncated: false - }) - }) - - it('detects binary files correctly', () => { - const diffOutput = 'GIT binary patch\nliteral 1234\nzcmeAS@N?(olHy`uVBq!ia0vp0{ow;2j9U=!1jd#uLplI' - - const result = (service as any).parseDiffOutput(diffOutput, 'modified', 'binary.bin') - - expect(result.isBinary).toBe(true) - }) - - it('detects "Binary files" indicator', () => { - const diffOutput = 'Binary files a/image.png and b/image.png differ' - - const result = (service as 
any).parseDiffOutput(diffOutput, 'modified', 'image.png') - - expect(result.isBinary).toBe(true) - }) - - it('truncates diff when exceeding MAX_DIFF_SIZE (500KB)', () => { - const largeContent = '+' + 'x'.repeat(500 * 1024 + 1000) - const diffOutput = `diff --git a/large.ts b/large.ts\n${largeContent}` - - const result = (service as any).parseDiffOutput(diffOutput, 'modified', 'large.ts') - - expect(result.truncated).toBe(true) - expect(result.diff.length).toBe(500 * 1024 + '\n\n... (diff truncated due to size)'.length) - expect(result.diff).toContain('... (diff truncated due to size)') - }) - - it('does not truncate diff under MAX_DIFF_SIZE', () => { - const diffOutput = '+' + 'x'.repeat(100 * 1024) - - const result = (service as any).parseDiffOutput(diffOutput, 'modified', 'file.ts') - - expect(result.truncated).toBe(false) - expect(result.diff.length).toBeLessThan(500 * 1024) - }) - - it('handles empty diff', () => { - const diffOutput = '' - - const result = (service as any).parseDiffOutput(diffOutput, 'modified', 'file.ts') - - expect(result).toEqual({ - path: 'file.ts', - status: 'modified', - diff: '', - additions: 0, - deletions: 0, - isBinary: false, - truncated: false - }) - }) - - it('correctly counts multiple additions and deletions', () => { - const diffOutput = `diff --git a/file.ts b/file.ts -@@ -1,5 +1,7 @@ -+added 1 - context -+added 2 --removed 1 - context -+added 3 --removed 2 --removed 3` - - const result = (service as any).parseDiffOutput(diffOutput, 'modified', 'file.ts') - - expect(result.additions).toBe(3) - expect(result.deletions).toBe(3) - }) - - it('ignores diff headers (+++/---) when counting', () => { - const diffOutput = `diff --git a/file.ts b/file.ts ---- a/file.ts -+++ b/file.ts -@@ -1,1 +1,2 @@ -+added` - - const result = (service as any).parseDiffOutput(diffOutput, 'modified', 'file.ts') - - expect(result.additions).toBe(1) - expect(result.deletions).toBe(0) - }) - - it('handles different status types in parseDiffOutput', () => { 
- const diffOutput = '+new line' - - const resultAdded = (service as any).parseDiffOutput(diffOutput, 'added', 'new.ts') - const resultDeleted = (service as any).parseDiffOutput(diffOutput, 'deleted', 'old.ts') - const resultRenamed = (service as any).parseDiffOutput(diffOutput, 'renamed', 'moved.ts') - - expect(resultAdded.status).toBe('added') - expect(resultDeleted.status).toBe('deleted') - expect(resultRenamed.status).toBe('renamed') - }) - - it('handles diff with large change counts at truncation', () => { - const addedLines = '+' + 'x'.repeat(501 * 1024) - const diffOutput = `diff --git a/file.ts b/file.ts\n${addedLines}` - - const result = (service as any).parseDiffOutput(diffOutput, 'modified', 'file.ts') - - expect(result.truncated).toBe(true) - expect(result.diff).toContain('... (diff truncated due to size)') - }) - }) }) diff --git a/backend/test/utils/git-errors.test.ts b/backend/test/utils/git-errors.test.ts index 5de0b49a..e2c99514 100644 --- a/backend/test/utils/git-errors.test.ts +++ b/backend/test/utils/git-errors.test.ts @@ -1,4 +1,4 @@ -import { describe, it, expect } from 'bun:test' +import { describe, it, expect } from 'vitest' import { parseGitError } from '../../src/utils/git-errors' describe('parseGitError', () => { diff --git a/docs/features/memory.md b/docs/features/memory.md new file mode 100644 index 00000000..8af3a654 --- /dev/null +++ b/docs/features/memory.md @@ -0,0 +1,222 @@ +# Memory Plugin + +`@opencode-manager/memory` is an OpenCode plugin that stores and recalls project knowledge across sessions using vector embeddings and semantic search. + +## Installation + +```bash +pnpm add @opencode-manager/memory +``` + +The local embedding model (`all-MiniLM-L6-v2`) is downloaded automatically via the `postinstall` script. For API-based embeddings (OpenAI or Voyage), skip the local model and set your provider and API key in the configuration instead. 
+ +Then register the plugin in your `opencode.json`: + +```json +{ + "plugin": ["@opencode-manager/memory"] +} +``` + +## Configuration + +On first run, the plugin writes a default config to: + +- `~/.local/share/opencode/memory/config.json` (the default location) + +- `$XDG_DATA_HOME/opencode/memory/config.json` when `$XDG_DATA_HOME` is set + +The file is only created if it does not already exist. + +```json +{ + "embedding": { + "provider": "local", + "model": "all-MiniLM-L6-v2", + "dimensions": 384, + "baseUrl": "", + "apiKey": "" + }, + "dataDir": "~/.local/share/opencode/memory", + "dedupThreshold": 0.25, + "logging": { + "enabled": false, + "file": "~/.local/share/opencode/memory/logs/memory.log" + }, + "compaction": { + "customPrompt": true, + "inlinePlanning": true, + "maxContextTokens": 4000, + "snapshotToKV": true + } +} +``` + +For API-based embeddings: + +```json +{ + "embedding": { + "provider": "openai", + "model": "text-embedding-3-small", + "apiKey": "sk-..." + } +} +``` + +### Embedding Providers + +| Provider | Models | API Key Required | +|----------|--------|-----------------| +| `local` | `all-MiniLM-L6-v2` (384d) | No | +| `openai` | `text-embedding-3-small` (1536d), `text-embedding-3-large` (3072d), `text-embedding-ada-002` (1536d) | Yes | +| `voyage` | `voyage-code-3` (1024d), `voyage-2` (1536d) | Yes | + +Set `baseUrl` to point at any OpenAI-compatible self-hosted service (vLLM, Ollama, LocalAI, LiteLLM, text-embeddings-inference). The URL is automatically normalized — providing `http://localhost:11434` appends `/v1/embeddings`. 
+ +### Options + +| Key | Description | Default | +|-----|-------------|---------| +| `embedding.provider` | `local`, `openai`, or `voyage` | `local` | +| `embedding.model` | Model name | `all-MiniLM-L6-v2` | +| `embedding.dimensions` | Vector dimensions (auto-detected for known models) | — | +| `embedding.apiKey` | API key for OpenAI/Voyage | — | +| `embedding.baseUrl` | Custom endpoint for self-hosted services | — | +| `dataDir` | SQLite database directory | `~/.local/share/opencode/memory` | +| `dedupThreshold` | Similarity threshold for deduplication (0.05–0.40) | `0.25` | +| `logging.enabled` | Write logs to file | `false` | +| `logging.file` | Log file path (10MB limit, auto-rotated) | `…/logs/memory.log` | +| `compaction.customPrompt` | Use optimized compaction prompt | `true` | +| `compaction.inlinePlanning` | Include planning state in compaction context | `true` | +| `compaction.maxContextTokens` | Max tokens for injected memory context | `4000` | +| `compaction.snapshotToKV` | Save pre-compaction snapshot for recovery | `true` | + +## Memory Model + +### Scopes + +| Scope | Description | +|-------|-------------| +| `convention` | Coding style rules, naming patterns, workflow preferences | +| `decision` | Architectural choices and their rationale | +| `context` | Project structure, key file locations, domain knowledge, known issues | + +### Statuses + +| Status | Description | +|--------|-------------| +| `active` | Available for injection and search | +| `archived` | Preserved but excluded from injection | +| `deleted` | Soft-deleted, not returned | + +## Tools + +The plugin registers seven tools that the AI agent can call directly: + +| Tool | Description | +|------|-------------| +| `memory-read` | Search memories by semantic query or list by scope | +| `memory-write` | Store a new memory with a scope | +| `memory-edit` | Update the content or scope of an existing memory | +| `memory-delete` | Soft-delete a memory by ID | +| `memory-health` | Check 
plugin health or reindex all embeddings | +| `memory-planning-update` | Update session planning state (phases, objectives, progress) | +| `memory-planning-get` | Get the current planning state for a session | + +### memory-read + +``` +query (optional) - Semantic search query +scope (optional) - Filter by convention | decision | context +limit (optional) - Max results (default: 10) +``` + +When `query` is provided, results are ranked by vector similarity. Without `query`, memories are listed in order. + +### memory-write + +``` +content - The memory content to store +scope - convention | decision | context +``` + +Deduplication runs automatically — if a semantically similar memory already exists, the write is skipped and the existing ID is returned. + +### memory-edit + +``` +id - Memory ID to update +content - New content +scope (optional) - New scope +``` + +### memory-delete + +``` +id - Memory ID to delete +``` + +### memory-health + +``` +action - check (default) | reindex +``` + +Use `check` to view embedding provider status, database health, memory count, and whether a reindex is needed. Use `reindex` to regenerate all embeddings after changing the model or dimensions. + +!!! warning "Model Changes Require Reindex" + If you change `embedding.model` or `embedding.dimensions`, existing embeddings will have mismatched dimensions and search will fail. Run `memory-health` with `action: reindex` after any model change. + +### memory-planning-update + +``` +sessionID - The session ID to update +objective (optional) - The main task/goal +current (optional) - Current phase or activity +next (optional) - What comes next +phases (optional) - Phase list with title, status, and optional notes +findings (optional) - Key discoveries (appended to existing) +errors (optional) - Errors to avoid (appended to existing) +``` + +Merges new fields with existing state. Findings and errors are deduplicated and appended rather than replaced. 
+ +### memory-planning-get + +``` +sessionID - The session ID to retrieve planning state for +``` + +Returns the current planning state including objective, phases, findings, and errors. + +## Planning State + +Planning state is separate from memories. It stores temporary session data — objectives, phase progress, findings, and errors — with a **7-day TTL**. After expiry, planning state is automatically cleaned up. + +Use planning state to track multi-step tasks within a session. The plugin injects active planning state into compaction context so progress survives context window resets. + +Memories, by contrast, are persisted indefinitely and retrieved via semantic search across all sessions. + +## Automatic Extraction + +After a session is compacted, the plugin automatically invokes the `ocm - Memory` agent to review the compaction summary and extract durable knowledge using `memory-write`. It checks for duplicates before writing by calling `memory-read` first. + +Only persistent knowledge is stored — ephemeral task progress and session-specific notes are skipped. + +## Compaction Awareness + +When a session compaction is triggered, the plugin injects context into the compaction prompt: + +- **Project memories** — up to 10 conventions and 10 decisions are included under a `## Project Memory` section so the AI's summary preserves them +- **Planning state** — active objective, current phase, next steps, and findings are prepended if present +- **Custom compaction prompt** — replaces the default prompt with one optimized for continuation context + +A pre-compaction snapshot is also saved to key-value storage for recovery if needed. + +## Deduplication + +Before storing a new memory, the plugin: + +1. Checks for exact content matches +2. Computes vector similarity against existing memories +3. 
Skips the write if similarity exceeds `dedupThreshold` diff --git a/docs/features/overview.md b/docs/features/overview.md index db4bf759..3f147556 100644 --- a/docs/features/overview.md +++ b/docs/features/overview.md @@ -51,6 +51,16 @@ OpenCode Manager provides a comprehensive web interface for managing OpenCode AI [Learn more →](mcp.md) +### Memory Plugin + +- **Semantic Search** - Store and retrieve project knowledge using vector embeddings +- **Memory Scopes** - Categorize as convention, decision, or context +- **Planning State** - Track objectives, phases, and progress across compactions +- **Automatic Extraction** - Durable knowledge extracted after session compaction +- **Compaction Awareness** - Injects project memories and planning state into compaction context + +[Learn more →](memory.md) + ### Text-to-Speech - **Browser TTS** - Built-in Web Speech API support diff --git a/docs/index.md b/docs/index.md index d71ccd8c..7f9e0622 100644 --- a/docs/index.md +++ b/docs/index.md @@ -34,6 +34,7 @@ OpenCode Manager provides a web-based interface for OpenCode AI agents, allowing - **Push Notifications** - Get background alerts for agent events when app is closed - **AI Configuration** - Configure models, providers, OAuth, and custom agents - **MCP Servers** - Add local or remote MCP servers with OAuth support +- **Memory Plugin** - Persistent project knowledge with semantic search and planning state ## Next Steps diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index e0ba81bb..c6a4a69d 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -6,6 +6,7 @@ import { Toaster } from 'sonner' import { Repos } from './pages/Repos' import { RepoDetail } from './pages/RepoDetail' import { SessionDetail } from './pages/SessionDetail' +import { Memories } from './pages/Memories' import { Login } from './pages/Login' import { Register } from './pages/Register' import { Setup } from './pages/Setup' @@ -130,6 +131,11 @@ const router = createBrowserRouter([ 
element: , loader: protectedLoader, }, + { + path: '/repos/:id/memories', + element: , + loader: protectedLoader, + }, ], }, ]) diff --git a/frontend/src/api/git.ts b/frontend/src/api/git.ts index 66ec61d3..40aa0bed 100644 --- a/frontend/src/api/git.ts +++ b/frontend/src/api/git.ts @@ -1,7 +1,7 @@ import { useQuery } from '@tanstack/react-query' import { fetchWrapper, FetchError } from './fetchWrapper' import { API_BASE_URL } from '@/config' -import type { GitStatusResponse, FileDiffResponse, GitCommit, CommitDetails } from '@/types/git' +import type { GitStatusResponse, FileDiffResponse, GitCommit } from '@/types/git' export async function fetchGitStatus(repoId: number): Promise { return fetchWrapper(`${API_BASE_URL}/api/repos/${repoId}/git/status`) @@ -22,12 +22,6 @@ export async function fetchFileDiff(repoId: number, path: string, includeStaged? }) } -export async function fetchCommitFileDiff(repoId: number, commitHash: string, path: string): Promise { - return fetchWrapper(`${API_BASE_URL}/api/repos/${repoId}/git/commit/${commitHash}/diff`, { - params: { path }, - }) -} - export async function fetchGitDiff(repoId: number, path: string): Promise<{ diff: string }> { const data = await fetchWrapper(`${API_BASE_URL}/api/repos/${repoId}/git/diff`, { params: { path }, @@ -41,10 +35,6 @@ export async function fetchGitLog(repoId: number, limit?: number): Promise<{ com }) } -export async function fetchCommitDetails(repoId: number, hash: string): Promise { - return fetchWrapper(`${API_BASE_URL}/api/repos/${repoId}/git/commit/${hash}`) -} - export async function gitFetch(repoId: number): Promise { return fetchWrapper(`${API_BASE_URL}/api/repos/${repoId}/git/fetch`, { method: 'POST', @@ -89,14 +79,6 @@ export async function gitUnstageFiles(repoId: number, paths: string[]): Promise< }) } -export async function gitDiscardFiles(repoId: number, paths: string[], staged: boolean): Promise { - return fetchWrapper(`${API_BASE_URL}/api/repos/${repoId}/git/discard`, { - method: 
'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ paths, staged }), - }) -} - export async function gitReset(repoId: number, commitHash: string): Promise { return fetchWrapper(`${API_BASE_URL}/api/repos/${repoId}/git/reset`, { method: 'POST', @@ -122,14 +104,6 @@ export function useFileDiff(repoId: number | undefined, path: string | undefined }) } -export function useCommitFileDiff(repoId: number | undefined, commitHash: string | undefined, path: string | undefined) { - return useQuery({ - queryKey: ['commitFileDiff', repoId, commitHash, path], - queryFn: () => (repoId && commitHash && path) ? fetchCommitFileDiff(repoId, commitHash, path) : Promise.reject(new Error('Missing params')), - enabled: !!repoId && !!commitHash && !!path, - }) -} - export function useGitLog(repoId: number | undefined, limit?: number) { return useQuery({ queryKey: ['gitLog', repoId, limit], @@ -138,14 +112,6 @@ export function useGitLog(repoId: number | undefined, limit?: number) { }) } -export function useCommitDetails(repoId: number | undefined, hash: string | undefined) { - return useQuery({ - queryKey: ['commitDetails', repoId, hash], - queryFn: () => (repoId && hash) ? fetchCommitDetails(repoId, hash) : Promise.reject(new Error('Missing params')), - enabled: !!repoId && !!hash, - }) -} - function parseGitErrorMessage(message: string): string { if (message.includes('no upstream') || message.includes('does not have any commits yet')) { return 'No upstream branch configured. Push with --set-upstream or create commits first.' 
diff --git a/frontend/src/api/memory.ts b/frontend/src/api/memory.ts new file mode 100644 index 00000000..f52979cf --- /dev/null +++ b/frontend/src/api/memory.ts @@ -0,0 +1,96 @@ +import { fetchWrapper, fetchWrapperVoid } from './fetchWrapper' +import { API_BASE_URL } from '@/config' +import type { Memory, MemoryStats, CreateMemoryRequest, UpdateMemoryRequest, PluginConfig } from '@opencode-manager/shared/types' + +export async function listMemories(filters?: { + projectId?: string + scope?: string + content?: string + limit?: number + offset?: number +}): Promise<{ memories: Memory[] }> { + const params = new URLSearchParams() + if (filters?.projectId) params.set('projectId', filters.projectId) + if (filters?.scope) params.set('scope', filters.scope) + if (filters?.content) params.set('content', filters.content) + if (filters?.limit) params.set('limit', String(filters.limit)) + if (filters?.offset) params.set('offset', String(filters.offset)) + + const query = params.toString() + return fetchWrapper(`${API_BASE_URL}/api/memory${query ? 
`?${query}` : ''}`) +} + +export async function createMemory(data: CreateMemoryRequest): Promise<{ memory: Memory }> { + return fetchWrapper(`${API_BASE_URL}/api/memory`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(data), + }) +} + +export async function getMemory(id: number): Promise<{ memory: Memory }> { + return fetchWrapper(`${API_BASE_URL}/api/memory/${id}`) +} + +export async function updateMemory(id: number, data: UpdateMemoryRequest): Promise<{ memory: Memory }> { + return fetchWrapper(`${API_BASE_URL}/api/memory/${id}`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(data), + }) +} + +export async function deleteMemory(id: number): Promise { + return fetchWrapperVoid(`${API_BASE_URL}/api/memory/${id}`, { + method: 'DELETE', + }) +} + +export async function getProjectSummary( + repoId: number +): Promise<{ projectId: string | null; stats: MemoryStats; error?: string }> { + return fetchWrapper(`${API_BASE_URL}/api/memory/project-summary?repoId=${repoId}`) +} + +export async function getPluginConfig(): Promise<{ config: PluginConfig }> { + return fetchWrapper(`${API_BASE_URL}/api/memory/plugin-config`) +} + +export async function updatePluginConfig(config: PluginConfig): Promise<{ success: boolean; config: PluginConfig }> { + return fetchWrapper(`${API_BASE_URL}/api/memory/plugin-config`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(config), + }) +} + +export interface ReindexResult { + success: boolean + message: string + total: number + embedded: number + failed: number + requiresRestart?: boolean +} + +export async function reindexMemories(): Promise { + return fetchWrapper(`${API_BASE_URL}/api/memory/reindex`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + }) +} + +export interface TestEmbeddingResult { + success: boolean + error?: string + message?: string + dimensions?: number +} + 
+export async function testEmbeddingConfig(): Promise { + return fetchWrapper(`${API_BASE_URL}/api/memory/test-embedding`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({}), + }) +} diff --git a/frontend/src/api/repos.ts b/frontend/src/api/repos.ts index 1aad635e..f6df024c 100644 --- a/frontend/src/api/repos.ts +++ b/frontend/src/api/repos.ts @@ -1,5 +1,5 @@ import type { Repo } from './types' -import { FetchError, fetchWrapper, fetchWrapperText, fetchWrapperVoid, fetchWrapperBlob } from './fetchWrapper' +import { FetchError, fetchWrapper, fetchWrapperVoid, fetchWrapperBlob } from './fetchWrapper' import { API_BASE_URL } from '@/config' export async function createRepo( @@ -51,10 +51,6 @@ export async function pullRepo(id: number): Promise { }) } -export async function getServerLogs(id: number): Promise { - return fetchWrapperText(`${API_BASE_URL}/api/repos/${id}/server/logs`) -} - export async function switchRepoConfig(id: number, configName: string): Promise { return fetchWrapper(`${API_BASE_URL}/api/repos/${id}/config/switch`, { method: 'POST', diff --git a/frontend/src/api/settings.ts b/frontend/src/api/settings.ts index d853a7ab..a29ed889 100644 --- a/frontend/src/api/settings.ts +++ b/frontend/src/api/settings.ts @@ -210,6 +210,10 @@ export const settingsApi = { getVersionInfo: async (): Promise => { return fetchWrapper(`${API_BASE_URL}/api/health/version`) }, + + getMemoryPluginStatus: async (): Promise<{ memoryPluginEnabled: boolean }> => { + return fetchWrapper(`${API_BASE_URL}/api/settings/memory-plugin-status`) + }, } export interface VersionInfo { diff --git a/frontend/src/api/ssh.ts b/frontend/src/api/ssh.ts index 2ca45907..3ab6bdb9 100644 --- a/frontend/src/api/ssh.ts +++ b/frontend/src/api/ssh.ts @@ -1,14 +1,15 @@ -import { API_BASE_URL } from '@/config' import { fetchWrapper } from './fetchWrapper' +import { API_BASE_URL } from '@/config' + +interface SSHHostKeyResponse { + success: boolean + error?: 
string +} -export function respondSSHHostKey(requestId: string, approved: boolean): Promise<{ success: boolean; error?: string }> { +export async function respondSSHHostKey(requestId: string, approved: boolean): Promise { return fetchWrapper(`${API_BASE_URL}/api/ssh/host-key/respond`, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ requestId, response: approved ? 'accept' : 'reject' }), }) } - -export function getSSHHostKeyStatus(): Promise<{ success: boolean; pendingCount?: number; error?: string }> { - return fetchWrapper(`${API_BASE_URL}/api/ssh/host-key/status`) -} diff --git a/frontend/src/api/types/settings.ts b/frontend/src/api/types/settings.ts index 982c839b..bc393cf0 100644 --- a/frontend/src/api/types/settings.ts +++ b/frontend/src/api/types/settings.ts @@ -19,12 +19,6 @@ export interface CustomCommand { promptTemplate: string } -export interface CustomAgent { - name: string - description: string - config: Record -} - export interface GitCredential { name: string host: string @@ -55,13 +49,13 @@ export interface UserPreferences { directShortcuts?: string[] keyboardShortcuts: Record customCommands: CustomCommand[] - customAgents: CustomAgent[] gitCredentials?: GitCredential[] gitIdentity?: GitIdentity tts?: TTSConfig stt?: STTConfig notifications?: NotificationPreferences repoOrder?: number[] + memoryDedupThreshold?: number } export interface SettingsResponse { diff --git a/frontend/src/components/file-browser/FileDiffView.tsx b/frontend/src/components/file-browser/FileDiffView.tsx index eaca2544..7440497a 100644 --- a/frontend/src/components/file-browser/FileDiffView.tsx +++ b/frontend/src/components/file-browser/FileDiffView.tsx @@ -1,4 +1,4 @@ -import { useFileDiff, useCommitFileDiff } from "@/api/git"; +import { useFileDiff } from "@/api/git"; import { Loader2, FileText, @@ -10,7 +10,6 @@ import { Minus, ArrowLeft, ExternalLink, - X, } from "lucide-react"; import { Button } from "@/components/ui/button"; 
import { CopyButton } from "@/components/ui/copy-button"; @@ -22,9 +21,7 @@ interface FileDiffViewProps { repoId: number; filePath: string; includeStaged?: boolean; - commitHash?: string; onBack?: () => void; - onClose?: () => void; onOpenFile?: (path: string, lineNumber?: number) => void; isMobile?: boolean; } @@ -139,7 +136,7 @@ function DiffLineComponent({ }) { if (line.type === "header") { return ( -
+
{line.content}
); @@ -147,7 +144,7 @@ function DiffLineComponent({ if (line.type === "hunk") { return ( -
+
{line.content}
); @@ -173,7 +170,7 @@ function DiffLineComponent({ return (
@@ -218,15 +215,11 @@ export function FileDiffView({
   repoId,
   filePath,
   includeStaged,
-  commitHash,
   onBack,
-  onClose,
   onOpenFile,
   isMobile = false,
 }: FileDiffViewProps) {
-  const workingDiff = useFileDiff(repoId, filePath, includeStaged);
-  const commitDiff = useCommitFileDiff(repoId, commitHash, filePath);
-  const { data: diffData, isLoading, error } = commitHash ? commitDiff : workingDiff;
+  const { data: diffData, isLoading, error } = useFileDiff(repoId, filePath, includeStaged);
 
   const fileName = filePath.split("/").pop() || filePath;
   const dirPath = filePath.includes("/")
@@ -266,7 +259,7 @@ export function FileDiffView({
   const diffLines = diffData.diff ? parseDiff(diffData.diff) : [];
 
   return (
-    
+
)} - {onClose && ( - - )}
-
+
{diffData.isBinary ? (

Binary file - cannot display diff

diff --git a/frontend/src/components/memory/MemoryFormDialog.tsx b/frontend/src/components/memory/MemoryFormDialog.tsx new file mode 100644 index 00000000..2b38f8d4 --- /dev/null +++ b/frontend/src/components/memory/MemoryFormDialog.tsx @@ -0,0 +1,147 @@ +import { useEffect } from 'react' +import { useForm } from 'react-hook-form' +import { zodResolver } from '@hookform/resolvers/zod' +import { z } from 'zod' +import { useCreateMemory, useUpdateMemory } from '@/hooks/useMemories' +import type { Memory, CreateMemoryRequest, UpdateMemoryRequest } from '@opencode-manager/shared/types' +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from '@/components/ui/dialog' +import { Button } from '@/components/ui/button' +import { Textarea } from '@/components/ui/textarea' +import { Label } from '@/components/ui/label' +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select' + +const memorySchema = z.object({ + content: z.string().min(1, 'Content is required').max(10000), + scope: z.enum(['convention', 'decision', 'context']), +}) + +type MemoryFormData = z.infer + +interface MemoryFormDialogProps { + memory?: Memory + projectId?: string + open: boolean + onOpenChange: (open: boolean) => void +} + +export function MemoryFormDialog({ memory, projectId, open, onOpenChange }: MemoryFormDialogProps) { + const createMutation = useCreateMemory() + const updateMutation = useUpdateMemory() + + const { + register, + handleSubmit, + setValue, + watch, + reset, + formState: { errors }, + } = useForm({ + resolver: zodResolver(memorySchema), + defaultValues: { + content: '', + scope: 'context', + }, + }) + + const selectedScope = watch('scope') + + useEffect(() => { + if (open) { + if (memory) { + reset({ + content: memory.content, + scope: memory.scope, + }) + } else { + reset({ + content: '', + scope: 'context', + }) + } + } + }, [open, memory, reset]) + + const onSubmit = async (data: 
MemoryFormData) => { + if (memory) { + const updateData: UpdateMemoryRequest = { + content: data.content, + scope: data.scope, + } + await updateMutation.mutateAsync({ id: memory.id, data: updateData }) + } else if (projectId) { + const createData: CreateMemoryRequest = { + projectId, + content: data.content, + scope: data.scope, + } + await createMutation.mutateAsync(createData) + } + onOpenChange(false) + } + + const isLoading = createMutation.isPending || updateMutation.isPending + + return ( + + + + {memory ? 'Edit Memory' : 'Create Memory'} + + {memory + ? 'Update the memory content and scope.' + : 'Add a new memory to store project knowledge.'} + + + +
+
+ + +
+ +
+ +