diff --git a/.gitignore b/.gitignore index 3490412..5f963cb 100644 --- a/.gitignore +++ b/.gitignore @@ -30,8 +30,8 @@ pnpm-debug.log* # Test coverage coverage/ -# Warden logs -.warden/logs/ +# Warden artifacts (logs, sessions, etc.) +.warden/ # Temporary files *.tmp diff --git a/src/action/triggers/executor.ts b/src/action/triggers/executor.ts index 1cca603..4d7e581 100644 --- a/src/action/triggers/executor.ts +++ b/src/action/triggers/executor.ts @@ -140,6 +140,10 @@ export async function executeTrigger( batchDelayMs: config.defaults?.batchDelayMs, pathToClaudeCodeExecutable: claudePath, auxiliaryMaxRetries: config.defaults?.auxiliaryMaxRetries, + session: { + enabled: config.sessions?.enabled ?? true, + directory: config.sessions?.directory, + }, }, }; diff --git a/src/cli/commands/init.ts b/src/cli/commands/init.ts index f555dd1..70cfa44 100644 --- a/src/cli/commands/init.ts +++ b/src/cli/commands/init.ts @@ -144,20 +144,23 @@ export async function runInit(options: CLIOptions, reporter: Reporter): Promise< filesCreated++; } - // Ensure .warden/logs/ is in .gitignore + // Ensure .warden/ is in .gitignore const gitignorePath = join(repoRoot, '.gitignore'); if (existsSync(gitignorePath)) { const gitignoreContent = readFileSync(gitignorePath, 'utf-8'); - const hasEntry = gitignoreContent.split('\n').some((line) => line.trim() === '.warden/logs/'); - if (!hasEntry) { + const hasWardenEntry = gitignoreContent.split('\n').some((line) => { + const trimmed = line.trim(); + return trimmed === '.warden/' || trimmed === '.warden'; + }); + if (!hasWardenEntry) { const newline = gitignoreContent.endsWith('\n') ? 
'' : '\n'; - writeFileSync(gitignorePath, gitignoreContent + newline + '.warden/logs/\n', 'utf-8'); - reporter.created('.gitignore entry for .warden/logs/'); + writeFileSync(gitignorePath, gitignoreContent + newline + '.warden/\n', 'utf-8'); + reporter.created('.gitignore entry for .warden/'); filesCreated++; } } else { - writeFileSync(gitignorePath, '.warden/logs/\n', 'utf-8'); - reporter.created('.gitignore with .warden/logs/'); + writeFileSync(gitignorePath, '.warden/\n', 'utf-8'); + reporter.created('.gitignore with .warden/'); filesCreated++; } diff --git a/src/cli/log-cleanup.test.ts b/src/cli/log-cleanup.test.ts index 113585b..57b45c0 100644 --- a/src/cli/log-cleanup.test.ts +++ b/src/cli/log-cleanup.test.ts @@ -2,7 +2,7 @@ import { describe, it, expect, beforeEach, afterEach } from 'vitest'; import { existsSync, mkdirSync, rmSync, writeFileSync, utimesSync } from 'node:fs'; import { join } from 'node:path'; import { tmpdir } from 'node:os'; -import { findExpiredLogs, cleanupLogs } from './log-cleanup.js'; +import { findExpiredArtifacts, cleanupArtifacts } from './log-cleanup.js'; import { Reporter } from './output/reporter.js'; import { detectOutputMode } from './output/tty.js'; import { Verbosity } from './output/verbosity.js'; @@ -20,7 +20,7 @@ function createLogFile(dir: string, name: string, daysOld: number): string { return filePath; } -describe('findExpiredLogs', () => { +describe('findExpiredArtifacts', () => { let testDir: string; beforeEach(() => { @@ -35,13 +35,13 @@ describe('findExpiredLogs', () => { }); it('returns empty array when directory does not exist', () => { - const result = findExpiredLogs('/nonexistent/path', 30); + const result = findExpiredArtifacts('/nonexistent/path', 30); expect(result).toEqual([]); }); it('returns empty array when no files are expired', () => { createLogFile(testDir, 'recent.jsonl', 1); - const result = findExpiredLogs(testDir, 30); + const result = findExpiredArtifacts(testDir, 30); expect(result).toEqual([]); 
}); @@ -49,7 +49,7 @@ describe('findExpiredLogs', () => { createLogFile(testDir, 'old.jsonl', 45); createLogFile(testDir, 'recent.jsonl', 1); - const result = findExpiredLogs(testDir, 30); + const result = findExpiredArtifacts(testDir, 30); expect(result).toHaveLength(1); expect(result[0]).toContain('old.jsonl'); }); @@ -60,7 +60,7 @@ describe('findExpiredLogs', () => { const mtime = new Date(Date.now() - 45 * 24 * 60 * 60 * 1000); utimesSync(nonJsonl, mtime, mtime); - const result = findExpiredLogs(testDir, 30); + const result = findExpiredArtifacts(testDir, 30); expect(result).toEqual([]); }); @@ -68,13 +68,13 @@ describe('findExpiredLogs', () => { createLogFile(testDir, 'a.jsonl', 10); createLogFile(testDir, 'b.jsonl', 3); - expect(findExpiredLogs(testDir, 7)).toHaveLength(1); - expect(findExpiredLogs(testDir, 2)).toHaveLength(2); - expect(findExpiredLogs(testDir, 15)).toHaveLength(0); + expect(findExpiredArtifacts(testDir, 7)).toHaveLength(1); + expect(findExpiredArtifacts(testDir, 2)).toHaveLength(2); + expect(findExpiredArtifacts(testDir, 15)).toHaveLength(0); }); }); -describe('cleanupLogs', () => { +describe('cleanupArtifacts', () => { let testDir: string; beforeEach(() => { @@ -91,8 +91,8 @@ describe('cleanupLogs', () => { it('does nothing in "never" mode', async () => { createLogFile(testDir, 'old.jsonl', 45); - const deleted = await cleanupLogs({ - logsDir: testDir, + const deleted = await cleanupArtifacts({ + dir: testDir, retentionDays: 30, mode: 'never', isTTY: false, @@ -107,8 +107,8 @@ describe('cleanupLogs', () => { createLogFile(testDir, 'old.jsonl', 45); createLogFile(testDir, 'recent.jsonl', 1); - const deleted = await cleanupLogs({ - logsDir: testDir, + const deleted = await cleanupArtifacts({ + dir: testDir, retentionDays: 30, mode: 'auto', isTTY: false, @@ -123,8 +123,8 @@ describe('cleanupLogs', () => { it('does nothing in "ask" mode when not TTY', async () => { createLogFile(testDir, 'old.jsonl', 45); - const deleted = await cleanupLogs({ - 
logsDir: testDir, + const deleted = await cleanupArtifacts({ + dir: testDir, retentionDays: 30, mode: 'ask', isTTY: false, @@ -138,8 +138,8 @@ describe('cleanupLogs', () => { it('returns 0 when no expired files exist', async () => { createLogFile(testDir, 'recent.jsonl', 1); - const deleted = await cleanupLogs({ - logsDir: testDir, + const deleted = await cleanupArtifacts({ + dir: testDir, retentionDays: 30, mode: 'auto', isTTY: false, @@ -149,9 +149,9 @@ describe('cleanupLogs', () => { expect(deleted).toBe(0); }); - it('returns 0 when logsDir does not exist', async () => { - const deleted = await cleanupLogs({ - logsDir: '/nonexistent/path', + it('returns 0 when dir does not exist', async () => { + const deleted = await cleanupArtifacts({ + dir: '/nonexistent/path', retentionDays: 30, mode: 'auto', isTTY: false, diff --git a/src/cli/log-cleanup.ts b/src/cli/log-cleanup.ts index 8136eea..b4d0692 100644 --- a/src/cli/log-cleanup.ts +++ b/src/cli/log-cleanup.ts @@ -5,14 +5,14 @@ import type { Reporter } from './output/reporter.js'; import { readSingleKey } from './input.js'; /** - * Find .jsonl log files in a directory that are older than retentionDays. + * Find .jsonl files in a directory that are older than retentionDays. 
*/ -export function findExpiredLogs(logsDir: string, retentionDays: number): string[] { +export function findExpiredArtifacts(dir: string, retentionDays: number): string[] { const cutoff = Date.now() - retentionDays * 24 * 60 * 60 * 1000; let entries: string[]; try { - entries = readdirSync(logsDir); + entries = readdirSync(dir); } catch { return []; } @@ -20,7 +20,7 @@ export function findExpiredLogs(logsDir: string, retentionDays: number): string[ const expired: string[] = []; for (const entry of entries) { if (!entry.endsWith('.jsonl')) continue; - const fullPath = join(logsDir, entry); + const fullPath = join(dir, entry); try { const stat = statSync(fullPath); if (stat.mtimeMs < cutoff) { @@ -35,28 +35,29 @@ export function findExpiredLogs(logsDir: string, retentionDays: number): string[ } /** - * Clean up expired log files based on the configured mode. + * Clean up expired .jsonl artifact files based on the configured mode. + * Works for both log and session directories. * Returns the number of files deleted. */ -export async function cleanupLogs(opts: { - logsDir: string; +export async function cleanupArtifacts(opts: { + dir: string; retentionDays: number; mode: LogCleanupMode; isTTY: boolean; reporter: Reporter; }): Promise<number> { - const { logsDir, retentionDays, mode, isTTY, reporter } = opts; + const { dir, retentionDays, mode, isTTY, reporter } = opts; if (mode === 'never') return 0; - const expired = findExpiredLogs(logsDir, retentionDays); + const expired = findExpiredArtifacts(dir, retentionDays); if (expired.length === 0) return 0; if (mode === 'ask') { if (!isTTY || !process.stdin.isTTY) return 0; process.stderr.write( - `Found ${expired.length} log ${expired.length === 1 ? 'file' : 'files'} older than ${retentionDays} days. Remove? [y/N] ` + `Found ${expired.length} expired ${expired.length === 1 ? 'file' : 'files'} older than ${retentionDays} days. Remove? 
[y/N] ` ); const key = await readSingleKey(); process.stderr.write(key + '\n'); @@ -75,7 +76,7 @@ export async function cleanupLogs(opts: { } if (deleted > 0) { - reporter.debug(`Cleaned up ${deleted} expired log ${deleted === 1 ? 'file' : 'files'}`); + reporter.debug(`Cleaned up ${deleted} expired ${deleted === 1 ? 'file' : 'files'}`); } return deleted; diff --git a/src/cli/main.ts b/src/cli/main.ts index f667d94..7802a50 100644 --- a/src/cli/main.ts +++ b/src/cli/main.ts @@ -27,7 +27,8 @@ import { generateRunId, type SkillTaskOptions, } from './output/index.js'; -import { cleanupLogs } from './log-cleanup.js'; +import { cleanupArtifacts } from './log-cleanup.js'; +import { resolveSessionsDir } from '../sdk/session.js'; import { collectFixableFindings, applyAllFixes, @@ -192,9 +193,10 @@ async function outputResultsAndHandleFixes( // Always write repo-local JSONL log (non-fatal — don't lose analysis output) const logPath = getRepoLogPath(repoPath, runId, timestamp); + let logWritten = false; try { writeJsonlContent(logPath, jsonlContent); - reporter.debug(`Run log: ${logPath}`); + logWritten = true; } catch (err) { reporter.warning(`Failed to write run log: ${err instanceof Error ? err.message : String(err)}`); } @@ -243,6 +245,11 @@ async function outputResultsAndHandleFixes( reporter.blank(); reporter.renderSummary(filteredReports, totalDuration, { traceId }); + // Show log file path after summary (only if write succeeded) + if (!options.json && logWritten) { + reporter.dim(`Log: ${logPath}`); + } + // Handle fixes: --fix (automatic) always runs, interactive step-through in TTY mode if (fixableFindings.length > 0) { if (options.fix) { @@ -363,6 +370,7 @@ async function runSkills( batchDelayMs: config?.defaults?.batchDelayMs, maxContextFiles: config?.defaults?.chunking?.maxContextFiles, auxiliaryMaxRetries: config?.defaults?.auxiliaryMaxRetries, + session: config?.sessions ?? 
{ enabled: true }, }; const tasks: SkillTaskOptions[] = skillsToRun.map(({ skill, remote, filters }) => ({ name: skill, @@ -644,6 +652,7 @@ async function runConfigMode(options: CLIOptions, reporter: Reporter): Promise { }, ); - // Run log cleanup after all output is complete (covers all exit paths) + // Run log and session cleanup after all output is complete (covers all exit paths) try { - let logsRoot: string; + let cleanupRoot: string; try { - logsRoot = getRepoRoot(cwd); + cleanupRoot = getRepoRoot(cwd); } catch { - logsRoot = cwd; + cleanupRoot = cwd; } - const cfgPath = resolve(logsRoot, 'warden.toml'); - const logsConfig = existsSync(cfgPath) ? loadWardenConfig(dirname(cfgPath)).logs : undefined; - await cleanupLogs({ - logsDir: join(logsRoot, '.warden', 'logs'), - retentionDays: logsConfig?.retentionDays ?? 30, - mode: logsConfig?.cleanup ?? 'ask', + const cfgPath = resolve(cleanupRoot, 'warden.toml'); + const cfg = existsSync(cfgPath) ? loadWardenConfig(dirname(cfgPath)) : undefined; + await cleanupArtifacts({ + dir: join(cleanupRoot, '.warden', 'logs'), + retentionDays: cfg?.logs?.retentionDays ?? 30, + mode: cfg?.logs?.cleanup ?? 'ask', + isTTY: reporter.mode.isTTY, + reporter, + }); + // Session cleanup mirrors log cleanup + await cleanupArtifacts({ + dir: resolveSessionsDir(cleanupRoot, cfg?.sessions?.directory), + retentionDays: cfg?.sessions?.retentionDays ?? 7, + mode: cfg?.sessions?.cleanup ?? 
'auto', isTTY: reporter.mode.isTTY, reporter, }); diff --git a/src/cli/output/jsonl.test.ts b/src/cli/output/jsonl.test.ts index d37e953..41a78c3 100644 --- a/src/cli/output/jsonl.test.ts +++ b/src/cli/output/jsonl.test.ts @@ -428,13 +428,13 @@ describe('getRepoLogPath', () => { it('returns path under .warden/logs/', () => { const timestamp = new Date('2026-02-18T14:32:15.123Z'); const result = getRepoLogPath('/path/to/repo', 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', timestamp); - expect(result).toBe('/path/to/repo/.warden/logs/2026-02-18T14-32-15.123Z-a1b2c3d4.jsonl'); + expect(result).toBe('/path/to/repo/.warden/logs/a1b2c3d4-2026-02-18T14-32-15-123Z.jsonl'); }); - it('replaces colons in timestamp with hyphens', () => { + it('replaces colons and dots in timestamp with hyphens', () => { const timestamp = new Date('2026-02-18T10:05:30.000Z'); const result = getRepoLogPath('/repo', 'abcdef12-3456-7890-abcd-ef1234567890', timestamp); - expect(result).toMatch(/2026-02-18T10-05-30\.000Z-abcdef12\.jsonl$/); + expect(result).toMatch(/abcdef12-2026-02-18T10-05-30-000Z\.jsonl$/); }); it('uses different runId for different paths', () => { diff --git a/src/cli/output/jsonl.ts b/src/cli/output/jsonl.ts index 50c2d22..610c1c4 100644 --- a/src/cli/output/jsonl.ts +++ b/src/cli/output/jsonl.ts @@ -30,11 +30,11 @@ export function shortRunId(runId: string): string { /** * Get the repo-local log file path. 
- * Returns: {repoRoot}/.warden/logs/{ISO-datetime}-{runId8}.jsonl + * Returns: {repoRoot}/.warden/logs/{runId8}-{ISO-datetime}.jsonl */ export function getRepoLogPath(repoRoot: string, runId: string, timestamp: Date = new Date()): string { - const ts = timestamp.toISOString().replace(/:/g, '-'); - return join(repoRoot, '.warden', 'logs', `${ts}-${shortRunId(runId)}.jsonl`); + const ts = timestamp.toISOString().replace(/[:.]/g, '-'); + return join(repoRoot, '.warden', 'logs', `${shortRunId(runId)}-${ts}.jsonl`); } /** diff --git a/src/cli/output/reporter.ts b/src/cli/output/reporter.ts index 6d792b6..20b91bb 100644 --- a/src/cli/output/reporter.ts +++ b/src/cli/output/reporter.ts @@ -404,6 +404,21 @@ export class Reporter { // No tips in CI mode } + /** + * Log dim/subtle text (visible at normal verbosity, hidden in quiet mode). + */ + dim(message: string): void { + if (this.verbosity === Verbosity.Quiet) { + return; + } + + if (this.mode.isTTY) { + this.log(chalk.dim(message)); + } else { + this.logPlain(message); + } + } + /** * Log plain text (no prefix). */ diff --git a/src/cli/output/tasks.ts b/src/cli/output/tasks.ts index 4445e91..1cb54ed 100644 --- a/src/cli/output/tasks.ts +++ b/src/cli/output/tasks.ts @@ -22,6 +22,7 @@ import { type PreparedFile, type PRPromptContext, } from '../../sdk/runner.js'; +import { snapshotSessionFiles, moveNewSessions, resolveSessionsDir } from '../../sdk/session.js'; import chalk from 'chalk'; import figures from 'figures'; import { Verbosity } from './verbosity.js'; @@ -358,6 +359,12 @@ export async function runSkillTask( return { findings: [], durationMs: 0, failedHunks: 0, failedExtractions: 0 }; }; + // Snapshot session files before any SDK calls so we can capture new ones after + const sessionsDir = runnerOptions.session?.enabled + ? resolveSessionsDir(context.repoPath, runnerOptions.session.directory) + : undefined; + const sessionSnapshot = sessionsDir ? 
snapshotSessionFiles(context.repoPath) : undefined; + // Process files with sliding-window concurrency pool const batchDelayMs = runnerOptions.batchDelayMs ?? 0; const shouldAbort = () => runnerOptions.abortController?.signal.aborted ?? false; @@ -390,6 +397,18 @@ export async function runSkillTask( } } + // Move new session files now that all SDK processes have exited and flushed + if (sessionsDir && sessionSnapshot) { + try { + moveNewSessions(context.repoPath, sessionSnapshot, sessionsDir, displayName); + } catch (err) { + logger.warn('Failed to move session files', { + error: err instanceof Error ? err.message : String(err), + skill: displayName, + }); + } + } + // Build report const duration = Date.now() - startTime; const allFindings = allResults.flatMap((r) => r.findings); diff --git a/src/config/schema.ts b/src/config/schema.ts index c2ba4e2..ee7fb64 100644 --- a/src/config/schema.ts +++ b/src/config/schema.ts @@ -195,6 +195,19 @@ export const LogsConfigSchema = z.object({ }); export type LogsConfig = z.infer<typeof LogsConfigSchema>; +// Sessions configuration +export const SessionsConfigSchema = z.object({ + /** Enable session storage (default: true). Sessions are moved from Claude SDK's internal storage to .warden/sessions/ after each run. */ + enabled: z.boolean().default(true), + /** Directory to store sessions relative to the repo root (default: .warden/sessions) */ + directory: z.string().optional(), + /** How to handle expired session files: 'auto' (default, silently delete), 'ask' (prompt in TTY), 'never' (keep all) */ + cleanup: LogCleanupModeSchema.default('auto'), + /** Number of days to retain session files before considering them expired. 
Default: 7 */ + retentionDays: z.number().int().positive().default(7), +}); +export type SessionsConfig = z.infer<typeof SessionsConfigSchema>; + // Main warden.toml configuration export const WardenConfigSchema = z .object({ @@ -203,6 +216,7 @@ skills: z.array(SkillConfigSchema).default([]), runner: RunnerConfigSchema.optional(), logs: LogsConfigSchema.optional(), + sessions: SessionsConfigSchema.optional(), }) .superRefine((config, ctx) => { const names = config.skills.map((s) => s.name); diff --git a/src/sdk/analyze.ts b/src/sdk/analyze.ts index 511e9fd..9323a25 100644 --- a/src/sdk/analyze.ts +++ b/src/sdk/analyze.ts @@ -2,12 +2,13 @@ import { query, type SDKResultMessage } from '@anthropic-ai/claude-agent-sdk'; import type { SkillDefinition } from '../config/schema.js'; import type { Finding, RetryConfig } from '../types/index.js'; import { getHunkLineRange, type HunkWithContext } from '../diff/index.js'; -import { Sentry, emitExtractionMetrics, emitRetryMetric, emitDedupMetrics } from '../sentry.js'; +import { Sentry, logger, emitExtractionMetrics, emitRetryMetric, emitDedupMetrics } from '../sentry.js'; import { SkillRunnerError, WardenAuthenticationError, isRetryableError, isAuthenticationError, isAuthenticationErrorMessage } from './errors.js'; import { DEFAULT_RETRY_CONFIG, calculateRetryDelay, sleep } from './retry.js'; import { extractUsage, aggregateUsage, emptyUsage, estimateTokens, aggregateAuxiliaryUsage } from './usage.js'; import { buildHunkSystemPrompt, buildHunkUserPrompt, type PRPromptContext } from './prompt.js'; import { extractFindingsJson, extractFindingsWithLLM, validateFindings, deduplicateFindings, mergeCrossLocationFindings } from './extract.js'; +import { snapshotSessionFiles, moveNewSessions, resolveSessionsDir } from './session.js'; import { LARGE_PROMPT_THRESHOLD_CHARS, DEFAULT_FILE_CONCURRENCY, @@ -842,6 +843,14 @@ export async function runSkill( // Collect results in input order (Promise.all preserves order) const 
fileResults: { filename: string; result: FileAnalysisResult; durationMs: number }[] = []; + // Snapshot session files before any SDK calls so we can capture new ones after all analysis completes. + // This is done at the runSkill level (not per-hunk) to ensure all SDK processes have fully exited + // and flushed their session data before we copy files. + const sessionsDir = options.session?.enabled + ? resolveSessionsDir(context.repoPath, options.session.directory) + : undefined; + const sessionSnapshot = sessionsDir ? snapshotSessionFiles(context.repoPath) : undefined; + // Process files - parallel or sequential based on options if (parallel) { // Process files with sliding-window concurrency pool @@ -879,6 +888,18 @@ export async function runSkill( } } + // Move any new session files now that all SDK processes have exited + if (sessionsDir && sessionSnapshot) { + try { + moveNewSessions(context.repoPath, sessionSnapshot, sessionsDir, skill.name); + } catch (err) { + logger.warn('Failed to move session files', { + error: err instanceof Error ? 
err.message : String(err), + skill: skill.name, + }); + } + } + // Check if all analysis failed (indicates a systemic problem like auth failure) if (totalFailedHunks > 0 && totalFailedHunks === totalHunks && allFindings.length === 0) { throw new SkillRunnerError( diff --git a/src/sdk/runner.ts b/src/sdk/runner.ts index 28dfbbd..7251026 100644 --- a/src/sdk/runner.ts +++ b/src/sdk/runner.ts @@ -62,3 +62,14 @@ export type { FileAnalysisCallbacks, FileAnalysisResult, } from './types.js'; + +// Re-export session storage utilities +export { + snapshotSessionFiles, + moveNewSessions, + ensureSessionsDir, + resolveSessionsDir, + getClaudeProjectDir, + DEFAULT_SESSIONS_DIR, +} from './session.js'; +export type { SessionStorageOptions } from './session.js'; diff --git a/src/sdk/session.test.ts b/src/sdk/session.test.ts new file mode 100644 index 0000000..409ff77 --- /dev/null +++ b/src/sdk/session.test.ts @@ -0,0 +1,107 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdirSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { + snapshotSessionFiles, + moveNewSessions, + ensureSessionsDir, + getClaudeProjectDir, + resolveSessionsDir, + DEFAULT_SESSIONS_DIR, +} from './session.js'; + +describe('session storage', () => { + let tempDir: string; + + beforeEach(() => { + tempDir = join(tmpdir(), `warden-session-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + mkdirSync(tempDir, { recursive: true }); + }); + + afterEach(() => { + rmSync(tempDir, { recursive: true, force: true }); + }); + + describe('DEFAULT_SESSIONS_DIR', () => { + it('has expected default value', () => { + expect(DEFAULT_SESSIONS_DIR).toBe('.warden/sessions'); + }); + }); + + describe('getClaudeProjectDir', () => { + it('maps repo path to Claude project directory', () => { + const result = getClaudeProjectDir('/home/user/myproject'); + 
expect(result).toContain('.claude/projects/-home-user-myproject'); + }); + + it('replaces all slashes with dashes', () => { + const result = getClaudeProjectDir('/a/b/c'); + expect(result).toContain('-a-b-c'); + }); + }); + + describe('resolveSessionsDir', () => { + it('uses default when no directory specified', () => { + const result = resolveSessionsDir('/repo'); + expect(result).toBe('/repo/.warden/sessions'); + }); + + it('resolves relative directory against repo path', () => { + const result = resolveSessionsDir('/repo', 'custom/sessions'); + expect(result).toBe('/repo/custom/sessions'); + }); + + it('uses absolute directory as-is', () => { + const result = resolveSessionsDir('/repo', '/absolute/path'); + expect(result).toBe('/absolute/path'); + }); + }); + + describe('ensureSessionsDir', () => { + it('creates directory if it does not exist', () => { + const dir = join(tempDir, 'new', 'nested', 'sessions'); + expect(existsSync(dir)).toBe(false); + + ensureSessionsDir(dir); + + expect(existsSync(dir)).toBe(true); + }); + + it('does nothing if directory already exists', () => { + const dir = join(tempDir, 'existing'); + mkdirSync(dir, { recursive: true }); + + ensureSessionsDir(dir); + + expect(existsSync(dir)).toBe(true); + }); + }); + + describe('snapshotSessionFiles', () => { + it('returns empty set for non-existent directory', () => { + const result = snapshotSessionFiles('/nonexistent/repo/path'); + expect(result).toEqual(new Set()); + }); + }); + + describe('moveNewSessions', () => { + it('returns empty array when source dir does not exist', () => { + const result = moveNewSessions('/nonexistent', new Set(), join(tempDir, 'sessions')); + expect(result).toEqual([]); + }); + + it('returns empty array when no new files since snapshot', () => { + const result = moveNewSessions('/nonexistent', new Set(['existing.jsonl']), join(tempDir, 'sessions')); + expect(result).toEqual([]); + }); + + it('skips files already moved by another caller', () => { + // This tests 
the race condition guard: if the source file + // no longer exists (another concurrent hunk moved it), we skip it + const result = moveNewSessions('/nonexistent', new Set(), join(tempDir, 'sessions')); + expect(result).toEqual([]); + }); + }); + +}); diff --git a/src/sdk/session.ts b/src/sdk/session.ts new file mode 100644 index 0000000..d9c683a --- /dev/null +++ b/src/sdk/session.ts @@ -0,0 +1,126 @@ +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; + +/** Default directory for session storage relative to repo root */ +export const DEFAULT_SESSIONS_DIR = '.warden/sessions'; + +/** Options for session storage */ +export interface SessionStorageOptions { + /** Enable session storage (default: true) */ + enabled?: boolean; + /** Directory to store sessions (default: .warden/sessions) */ + directory?: string; +} + +/** + * Derive the directory key Claude Code uses for a given project path. + * Claude Code maps /abs/path/to/project → -abs-path-to-project + */ +export function getClaudeProjectHash(projectPath: string): string { + return projectPath.replace(/\//g, '-'); +} + +/** + * Return the directory where Claude Code stores session files for a given repo path. + * Sessions are stored as .jsonl files inside this directory. + */ +export function getClaudeProjectDir(repoPath: string): string { + const homeDir = os.homedir(); + const hash = getClaudeProjectHash(repoPath); + return path.join(homeDir, '.claude', 'projects', hash); +} + +/** + * Ensure the sessions directory exists. + * Creates the directory and any parent directories if they don't exist. + */ +export function ensureSessionsDir(dir: string): void { + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } +} + +/** + * Snapshot the set of .jsonl files in Claude's project directory for a given repo. + * Call before analysis, then use moveNewSessions after to capture any new files. 
+ */ +export function snapshotSessionFiles(repoPath: string): Set<string> { + const projectDir = getClaudeProjectDir(repoPath); + try { + return new Set( + fs.readdirSync(projectDir).filter(f => f.endsWith('.jsonl')) + ); + } catch { + return new Set(); + } +} + +/** + * Move any new session files that appeared since the snapshot. + * Files are named <prefix>-<shortUuid>-<timestamp>.jsonl where prefix identifies the warden run + * (e.g. "notseer-a049e7f7") and shortUuid is the first segment of the Claude session ID. + * + * Safe to call concurrently -- skips files already moved by another caller. + * Returns paths of moved files. + */ +export function moveNewSessions( + repoPath: string, + before: Set<string>, + targetDir: string, + prefix?: string +): string[] { + const projectDir = getClaudeProjectDir(repoPath); + let current: string[]; + try { + current = fs.readdirSync(projectDir).filter(f => f.endsWith('.jsonl')); + } catch { + return []; + } + + const newFiles = current.filter(f => !before.has(f)); + if (newFiles.length === 0) return []; + + ensureSessionsDir(targetDir); + const moved: string[] = []; + + for (const file of newFiles) { + const sourceFile = path.join(projectDir, file); + // Guard against race: another concurrent hunk may have already moved this file + if (!fs.existsSync(sourceFile)) continue; + + // Skip empty files (SDK may not have flushed yet) + try { + const stat = fs.statSync(sourceFile); + if (stat.size === 0) continue; + } catch { + continue; + } + + const uuid = file.replace('.jsonl', ''); + // Short UUID: first 8 chars of the session ID + const shortUuid = uuid.split('-')[0] || uuid.slice(0, 8); + const ts = new Date().toISOString().replace(/[:.]/g, '-'); + const targetName = prefix ? 
`${prefix}-${shortUuid}-${ts}.jsonl` : `${shortUuid}-${ts}.jsonl`; + const targetFile = path.join(targetDir, targetName); + try { + // Use copy+delete instead of rename to handle cross-device moves (EXDEV) + fs.copyFileSync(sourceFile, targetFile); + fs.unlinkSync(sourceFile); + moved.push(targetFile); + } catch { + // Non-fatal: file may have been moved by a concurrent hunk + } + } + + return moved; +} + +/** + * Resolve the absolute sessions directory from options and repo path. + */ +export function resolveSessionsDir(repoPath: string, directory?: string): string { + const dir = directory ?? DEFAULT_SESSIONS_DIR; + return path.isAbsolute(dir) ? dir : path.join(repoPath, dir); +} + diff --git a/src/sdk/types.ts b/src/sdk/types.ts index ab98d61..8e9d437 100644 --- a/src/sdk/types.ts +++ b/src/sdk/types.ts @@ -1,6 +1,7 @@ import type { Finding, UsageStats, SkippedFile, RetryConfig } from '../types/index.js'; import type { HunkWithContext } from '../diff/index.js'; import type { ChunkingConfig } from '../config/schema.js'; +import type { SessionStorageOptions } from './session.js'; /** A single auxiliary usage entry, keyed by agent name (e.g. 'extraction', 'dedup'). */ export interface AuxiliaryUsageEntry { @@ -81,6 +82,8 @@ export interface SkillRunnerOptions { maxContextFiles?: number; /** Max retries for auxiliary Haiku calls (extraction repair, merging, dedup, fix evaluation). Default: 5 */ auxiliaryMaxRetries?: number; + /** Session storage options for capturing SDK conversations */ + session?: SessionStorageOptions; } /**