diff --git a/.env.deploy.example b/.env.deploy.example new file mode 100644 index 00000000..72272ba4 --- /dev/null +++ b/.env.deploy.example @@ -0,0 +1,27 @@ +# OpenCode Manager Remote Deployment +# Copy this to .env and fill in the values + +# === Azure Deployment === +AZURE_RESOURCE_GROUP=opencode-manager-rg +AZURE_LOCATION=westus2 +AZURE_VM_NAME=opencode-manager-vm +AZURE_VM_SIZE=Standard_D2s_v5 + +# === Basic Auth === +AUTH_USERNAME=admin +AUTH_PASSWORD= + +# === GitHub Token (optional) === +# Pass your GitHub token to the deployed opencode-manager +GITHUB_TOKEN= + +# === OpenCode Provider API Keys (optional) === +# Configure AI providers for OpenCode +ANTHROPIC_API_KEY= +OPENAI_API_KEY= +GEMINI_API_KEY= +OPENROUTER_API_KEY= + +# === OpenCode Config (optional) === +# Path to a local opencode.json config file to upload +# OPENCODE_CONFIG_FILE=./opencode.json diff --git a/.env.example b/.env.example index 4ca94e85..2ea5ed9e 100644 --- a/.env.example +++ b/.env.example @@ -1,27 +1,31 @@ # OpenCode WebUI Configuration # Copy this file to .env and customize as needed +# Default values are defined in: shared/src/config/defaults.ts # ============================================ -# Backend Server (Hono API) +# Backend Server Configuration # ============================================ -PORT=8080 +PORT=5001 HOST=0.0.0.0 +CORS_ORIGIN=http://localhost:5173 +NODE_ENV=development +LOG_LEVEL=info # ============================================ # OpenCode Server # ============================================ OPENCODE_SERVER_PORT=5551 +OPENCODE_HOST=127.0.0.1 # ============================================ # Database # ============================================ -DATABASE_PATH=./backend/data/opencode.db +DATABASE_PATH=./data/opencode.db # ============================================ # Workspace Configuration # ============================================ -# Local workspace path for repositories and configs -WORKSPACE_PATH=~./workspace +WORKSPACE_PATH=./workspace # 
============================================ # Timeouts (milliseconds) @@ -32,9 +36,8 @@ HEALTH_CHECK_INTERVAL_MS=5000 HEALTH_CHECK_TIMEOUT_MS=30000 # ============================================ -# File Upload Limits +# File Limits (MB) # ============================================ -# File size limits in MB MAX_FILE_SIZE_MB=50 MAX_UPLOAD_SIZE_MB=50 @@ -44,9 +47,11 @@ MAX_UPLOAD_SIZE_MB=50 DEBUG=false # ============================================ -# Frontend Configuration (for development) +# Frontend Configuration (Vite) +# These are optional - frontend uses defaults if not set # ============================================ -# Backend API URL - frontend connects to this -# Note: In development, Vite proxies /api requests to the backend -VITE_API_URL=http://localhost:8080 - +# VITE_API_URL=http://localhost:5001 +# VITE_SERVER_PORT=5001 +# VITE_OPENCODE_PORT=5551 +# VITE_MAX_FILE_SIZE_MB=50 +# VITE_MAX_UPLOAD_SIZE_MB=50 diff --git a/.env.local.example b/.env.local.example new file mode 100644 index 00000000..503f984d --- /dev/null +++ b/.env.local.example @@ -0,0 +1,86 @@ +# OpenCode Manager - Native Local Development +# Copy this file to .env for local macOS development without Docker +# +# Usage: +# pnpm start - Start normally (spawns opencode serve) +# pnpm start:client - Connect to existing opencode instance +# pnpm start:tunnel - Start with Cloudflare tunnel +# +# Or directly: +# bun scripts/start-native.ts --client --tunnel + +# ============================================ +# Backend Server Configuration +# ============================================ +PORT=5001 +HOST=0.0.0.0 +CORS_ORIGIN=http://localhost:5173 +NODE_ENV=development +LOG_LEVEL=info + +# ============================================ +# OpenCode Server +# ============================================ +# Port to use when running opencode serve (normal mode) +# Or port to connect to (--client mode) +OPENCODE_SERVER_PORT=5551 +OPENCODE_HOST=127.0.0.1 + +# Set to 'true' to connect to an existing 
opencode server +# instead of spawning one. Usually set automatically by start-native.ts --client +# OPENCODE_CLIENT_MODE=false + +# ============================================ +# Database +# ============================================ +# Local SQLite database for settings and state +DATABASE_PATH=./data/opencode.db + +# ============================================ +# Workspace Configuration +# ============================================ +# Where repos are cloned and managed +# For native mode, you might want to use a different path +WORKSPACE_PATH=./workspace + +# ============================================ +# Voice Services (Optional) +# ============================================ +# Whisper STT - requires Python with faster-whisper +# WHISPER_PORT=5552 +# WHISPER_HOST=127.0.0.1 +# WHISPER_DEFAULT_MODEL=base +# WHISPER_VENV=./venv/whisper + +# Chatterbox TTS - requires Python with chatterbox-tts +# CHATTERBOX_PORT=5553 +# CHATTERBOX_HOST=127.0.0.1 +# CHATTERBOX_DEVICE=auto +# CHATTERBOX_VENV=./venv/chatterbox + +# ============================================ +# API Keys (for AI providers) +# ============================================ +# ANTHROPIC_API_KEY=sk-ant-... +# OPENAI_API_KEY=sk-... +# GEMINI_API_KEY=... +# OPENROUTER_API_KEY=... + +# ============================================ +# GitHub Token (for private repo access) +# ============================================ +# GITHUB_TOKEN=ghp_... 
+ +# ============================================ +# Timeouts (milliseconds) +# ============================================ +PROCESS_START_WAIT_MS=2000 +PROCESS_VERIFY_WAIT_MS=1000 +HEALTH_CHECK_INTERVAL_MS=5000 +HEALTH_CHECK_TIMEOUT_MS=30000 + +# ============================================ +# File Limits (MB) +# ============================================ +MAX_FILE_SIZE_MB=50 +MAX_UPLOAD_SIZE_MB=50 diff --git a/.github/social-preview.png b/.github/social-preview.png new file mode 100644 index 00000000..06aaaa32 Binary files /dev/null and b/.github/social-preview.png differ diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml new file mode 100644 index 00000000..36cf6533 --- /dev/null +++ b/.github/workflows/docker-build.yml @@ -0,0 +1,70 @@ +name: Docker Build + +on: + push: + branches: [main] + release: + types: [published] + workflow_dispatch: + +permissions: + contents: read + packages: write + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Free disk space + uses: jlumbroso/free-disk-space@main + with: + tool-cache: true + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: true + swap-storage: true + + - name: Show available disk space + run: df -h + + - name: Checkout + uses: actions/checkout@v4 + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ghcr.io/${{ github.repository }} + tags: | + type=ref,event=branch + type=sha,prefix= + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=raw,value=latest,enable={{is_default_branch}} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push + uses: docker/build-push-action@v5 + with: + context: . 
+ platforms: linux/amd64 + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + target: runner diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml new file mode 100644 index 00000000..5a368452 --- /dev/null +++ b/.github/workflows/e2e-tests.yml @@ -0,0 +1,316 @@ +name: E2E Tests + +on: + push: + branches: [main] + pull_request: + branches: [main] + workflow_dispatch: + +permissions: + contents: read + packages: read + +jobs: + build-and-test: + name: Build & Unit Tests + runs-on: ubuntu-latest + timeout-minutes: 15 + + steps: + - name: Free disk space + run: | + sudo rm -rf /usr/share/dotnet /usr/local/lib/android /opt/ghc + sudo apt-get clean + + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + with: + bun-version: latest + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Install pnpm + uses: pnpm/action-setup@v2 + with: + version: 8 + + - name: Install dependencies + run: pnpm install + + - name: Run backend tests + run: pnpm test + working-directory: backend + + - name: Run frontend lint + run: npm run lint + working-directory: frontend + + - name: Build backend + run: pnpm build + working-directory: backend + + - name: Build frontend + run: npm run build + working-directory: frontend + + voice-api-tests: + name: Voice API E2E Tests + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Free disk space + uses: jlumbroso/free-disk-space@main + with: + tool-cache: false + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: true + swap-storage: true + + - name: Checkout + uses: actions/checkout@v4 + + - name: Install audio dependencies + run: | + sudo apt-get update + sudo apt-get install -y espeak ffmpeg + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker image 
(slim - STT only) + uses: docker/build-push-action@v5 + with: + context: . + load: true + tags: opencode-manager:test + cache-from: type=gha + cache-to: type=gha,mode=max + target: runner-slim + + - name: Start container + run: | + docker run -d --name opencode-manager \ + -p 5003:5003 \ + -e ANTHROPIC_API_KEY=${{ secrets.ANTHROPIC_API_KEY }} \ + -e OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }} \ + opencode-manager:test + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + with: + bun-version: latest + + - name: Install dependencies + run: bun install + + - name: Wait for services to be ready + run: | + echo "Waiting for OpenCode Manager to be ready..." + for i in {1..90}; do + if curl -sf http://localhost:5003/api/health | grep -q '"status":"healthy"'; then + echo "Service is healthy!" + break + fi + if [ $i -eq 90 ]; then + echo "Service failed to become healthy" + docker logs opencode-manager + exit 1 + fi + echo "Attempt $i/90 - waiting..." + sleep 2 + done + + - name: Wait for STT to be ready + run: | + echo "Waiting for STT server to be ready..." + for i in {1..60}; do + if curl -sf http://localhost:5003/api/stt/status | grep -q '"running":true'; then + echo "STT is ready!" + break + fi + if [ $i -eq 60 ]; then + echo "STT failed to become ready" + docker logs opencode-manager + exit 1 + fi + echo "Attempt $i/60 - STT not ready..." 
+ sleep 2 + done + + - name: Run Voice API Tests + run: bun run scripts/test-voice.ts --url http://localhost:5003 --skip-talkmode + env: + CI: true + + - name: Container logs on failure + if: failure() + run: docker logs opencode-manager + + browser-e2e-tests: + name: Browser E2E Tests + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Free disk space + uses: jlumbroso/free-disk-space@main + with: + tool-cache: false + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: true + swap-storage: true + + - name: Checkout + uses: actions/checkout@v4 + + - name: Install audio dependencies + run: | + sudo apt-get update + sudo apt-get install -y espeak ffmpeg + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker image (slim - STT only) + uses: docker/build-push-action@v5 + with: + context: . + load: true + tags: opencode-manager:test + cache-from: type=gha + cache-to: type=gha,mode=max + target: runner-slim + + - name: Start container + run: | + docker run -d --name opencode-manager \ + -p 5003:5003 \ + -e ANTHROPIC_API_KEY=${{ secrets.ANTHROPIC_API_KEY }} \ + -e OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }} \ + opencode-manager:test + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + with: + bun-version: latest + + - name: Install dependencies + run: bun install + + - name: Install Puppeteer browser + run: npx puppeteer browsers install chrome + + - name: Wait for services to be ready + run: | + echo "Waiting for OpenCode Manager to be ready..." + for i in {1..90}; do + if curl -sf http://localhost:5003/api/health | grep -q '"status":"healthy"'; then + echo "Service is healthy!" + break + fi + if [ $i -eq 90 ]; then + echo "Service failed to become healthy" + docker logs opencode-manager + exit 1 + fi + echo "Attempt $i/90 - waiting..." + sleep 2 + done + + - name: Wait for STT to be ready + run: | + echo "Waiting for STT server to be ready..." 
+ for i in {1..60}; do + if curl -sf http://localhost:5003/api/stt/status | grep -q '"running":true'; then + echo "STT is ready!" + break + fi + if [ $i -eq 60 ]; then + echo "STT failed to become ready" + docker logs opencode-manager + exit 1 + fi + echo "Attempt $i/60 - STT not ready..." + sleep 2 + done + + - name: Create test repository in container + run: | + # Create a test git repo inside the container (use /tmp which is writable) + docker exec opencode-manager bash -c ' + mkdir -p /tmp/test-project + cd /tmp/test-project + git init + git config user.email "test@example.com" + git config user.name "Test User" + echo "# Test Project" > README.md + git add README.md + git commit -m "Initial commit" + ' + + # Register the test repo with the backend + curl -sf -X POST http://localhost:5003/api/repos \ + -H "Content-Type: application/json" \ + -d '{"localPath": "/tmp/test-project"}' | jq . + + # Verify repo was registered + REPOS=$(curl -sf http://localhost:5003/api/repos) + echo "Registered repos: $REPOS" + + if echo "$REPOS" | jq -e '.[0].id' > /dev/null 2>&1; then + echo "Test repository registered successfully!" 
+ else + echo "Failed to register test repository" + exit 1 + fi + + - name: Run Browser E2E Tests + run: | + # Run full test if API keys are available, otherwise STT-only + if [ -n "$ANTHROPIC_API_KEY" ] || [ -n "$OPENAI_API_KEY" ]; then + echo "API keys available - running full E2E test" + bun run scripts/test-browser.ts --url http://localhost:5003 --web-audio + else + echo "No API keys - running STT-only test" + bun run scripts/test-browser.ts --url http://localhost:5003 --web-audio --stt-only + fi + env: + CI: true + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + + - name: List test artifacts + if: always() + run: | + echo "Current directory: $(pwd)" + echo "Listing .test directory:" + ls -la .test/ || echo ".test directory not found" + find .test -type f 2>/dev/null || echo "No files found" + + - name: Upload test artifacts + uses: actions/upload-artifact@v4 + if: always() + with: + name: browser-e2e-recording + path: .test/ + retention-days: 7 + if-no-files-found: ignore + include-hidden-files: true + + - name: Container logs on failure + if: failure() + run: docker logs opencode-manager diff --git a/.gitignore b/.gitignore index 04214d23..9201b780 100644 --- a/.gitignore +++ b/.gitignore @@ -14,5 +14,18 @@ coverage/ data/ workspace/ -config/ +/config/ +temp/ +.venv-chatterbox +.secrets/ + +# Test artifacts +.test/screenshots/*.png +.test/ocr-results/*.json +.test/reports/*.md + +# Session artifacts (TTS cache, reflections) +.tts/ +.reflection/ +.test/ diff --git a/.npmignore b/.npmignore new file mode 100644 index 00000000..e17707f2 --- /dev/null +++ b/.npmignore @@ -0,0 +1,52 @@ +# Development files +.git/ +.github/ +.opencode/ +.secrets/ +temp/ +tmp/ +workspace/ +docs/ + +# Source files (use built versions) +backend/src/ +frontend/src/ +frontend/public/ +frontend/*.ts +frontend/*.json + +# Config files +*.config.ts +*.config.js +vitest.* +tsconfig*.json +eslint.* +.prettierrc* +.editorconfig + +# Test 
files +**/*.test.ts +**/*.test.tsx +**/*.spec.ts +**/test/ +**/tests/ +**/__tests__/ + +# Development scripts (keep whisper/chatterbox) +scripts/*.ts +!scripts/whisper-server.py +!scripts/chatterbox-server.py + +# Docker (not needed for npm install) +Dockerfile* +docker-compose*.yml +.dockerignore +Caddyfile + +# Misc +*.log +*.md +!README.md +.env* +.DS_Store +node_modules/ diff --git a/.opencode/agent/qa-tester.md b/.opencode/agent/qa-tester.md new file mode 100644 index 00000000..ee406e63 --- /dev/null +++ b/.opencode/agent/qa-tester.md @@ -0,0 +1,448 @@ +--- +description: QA testing specialist that autonomously tests OpenCode Manager and generates comprehensive test reports +mode: subagent +temperature: 0.1 +tools: + write: true + edit: false + bash: true + read: true + glob: true + grep: true +permission: + bash: + "*": allow + "rm -rf *": deny + "rm -rf /": deny +--- + +You are a QA testing specialist for the OpenCode Manager project. Your role is to autonomously test the application, evaluate results against expected behavior, and generate comprehensive test reports. + +## Your Mission + +When asked to test the application, you should: + +1. **Understand the test scope** - Determine what needs testing (health check, API, auth, deployment, etc.) +2. **Execute tests systematically** - Run tests using available scripts and tools +3. **Evaluate results** - Compare actual vs expected outputs +4. **Document findings** - Generate professional test reports with metrics +5. 
**Provide recommendations** - Suggest fixes for any issues found + +## Project Architecture + +### Tech Stack +- **Backend**: Bun + Hono + Better SQLite3 (port 5001 dev, 5003 prod) +- **Frontend**: React + Vite + React Query (port 5173) +- **OpenCode Server**: Port 5551 +- **Whisper STT**: Port 5552 +- **Chatterbox TTS**: Port 5553 +- **Database**: SQLite (`data/opencode.db`) +- **Deployment**: Docker + Azure VM + Cloudflare Tunnel + Caddy + +### Production Architecture +``` +Internet + ↓ HTTPS +Cloudflare Tunnel (trycloudflare.com) + ↓ +Caddy Reverse Proxy (Basic Auth with bcrypt) + ↓ +OpenCode Manager App (port 5003, internal) + ├── Backend API (Hono) + ├── Frontend (React PWA) + ├── OpenCode Server (port 5551, internal) + ├── Whisper STT (port 5552, internal) + └── Chatterbox TTS (port 5553, internal) +``` + +### Key Files +- `backend/src/index.ts` - Main backend entry point +- `scripts/qa-test.sh` - Quick test command script (DEPRECATED - use direct commands) +- `scripts/deploy.ts` - Azure deployment automation +- `scripts/test-*.ts` - E2E test scripts +- `.opencode/templates/test-report-template.md` - Report template + +## Available Test Commands + +### Development Testing +```bash +# Start servers +pnpm dev # Both backend + frontend +pnpm dev:backend # Backend only +pnpm dev:frontend # Frontend only + +# Health checks +curl http://localhost:5001/health +curl http://localhost:5001/api/health + +# API tests +curl http://localhost:5001/api/repos +curl http://localhost:5001/api/settings/opencode-configs + +# Database check +ls -lh data/opencode.db +``` + +### Authentication Testing +```bash +# With auth enabled +export AUTH_USERNAME=testuser AUTH_PASSWORD=testpass123 +bun backend/src/index.ts + +# Test without credentials +curl -i http://localhost:5001/health # Should: 401 + +# Test with valid credentials +curl -u testuser:testpass123 http://localhost:5001/health # Should: 200 + +# Test with invalid credentials +curl -u wrong:wrong http://localhost:5001/health # 
Should: 401 +``` + +### Cloudflare Tunnel Testing +```bash +# Start with tunnel +pnpm start # Includes --tunnel flag + +# Check tunnel logs +# Look for: "https://xxx-xxx-xxx-xxx.trycloudflare.com" + +# Test public access (use the tunnel URL) +curl https://xxx-xxx-xxx-xxx.trycloudflare.com/health +``` + +### Docker Testing +```bash +# Build and run locally +./scripts/run-local-docker.sh + +# Test container +curl http://localhost:5003/health +docker ps | grep opencode-manager +docker logs opencode-manager +``` + +### E2E Testing +```bash +# Voice API test (STT, TTS, talk mode flow) +bun run scripts/test-voice.ts + +# Browser automation test +bun run scripts/test-browser.ts + +# Run all E2E tests +bun run scripts/run-e2e-tests.ts +``` + +### Cleanup +```bash +# Kill orphaned processes +pnpm cleanup +``` + +## Test Protocols + +### 1. Health Check Testing + +**Purpose**: Verify all services are running and healthy. + +**Steps**: +1. Check if backend is responding: `curl http://localhost:5001/health` +2. Check OpenCode server: `curl http://localhost:5001/api/health | grep opencode` +3. Check database exists: `ls -lh data/opencode.db` + +**Expected Results**: +- Backend returns 200 with JSON containing `name`, `version`, `status` +- OpenCode health shows `"opencode": "healthy"`, `"opencodeVersion": "1.1.3"` +- Database file exists and is >50KB + +**Evaluation**: +- ✅ PASS if all checks return expected results +- ❌ FAIL if any service is unreachable or unhealthy +- ⚠️ WARNING if database is too small (<50KB) + +### 2. API Endpoint Testing + +**Purpose**: Verify all API endpoints return correct data. + +**Steps**: +1. Test `/api/health` - should return health status with all services +2. Test `/api/repos` - should return list of registered repos +3. Test `/api/settings/opencode-configs` - should return configs +4. 
Test `/api/providers` - should return AI provider list + +**Expected Results**: +- All endpoints return 200 status +- Health endpoint shows all services healthy +- Repos endpoint returns array of repos (typically 2+) +- Settings returns valid JSON array + +**Evaluation**: +- ✅ PASS if all endpoints respond with correct status and data structure +- ❌ FAIL if any endpoint returns 500 or wrong data type +- ⚠️ WARNING if data seems incomplete + +### 3. Authentication Testing + +**Purpose**: Verify authentication is working correctly. + +**Steps**: +1. Set auth env vars: `export AUTH_USERNAME=test AUTH_PASSWORD=pass` +2. Start backend: `bun backend/src/index.ts` +3. Test no credentials: `curl -i http://localhost:5001/health` +4. Test valid credentials: `curl -u test:pass http://localhost:5001/health` +5. Test wrong password: `curl -u test:wrong http://localhost:5001/health` +6. Test wrong username: `curl -u wrong:pass http://localhost:5001/health` + +**Expected Results**: +- No credentials: 401 Unauthorized +- Valid credentials: 200 OK +- Wrong password: 401 Unauthorized +- Wrong username: 401 Unauthorized + +**Evaluation**: +- ✅ PASS if all scenarios return expected status codes +- ❌ FAIL if unauthorized requests return 200 +- ❌ CRITICAL if wrong credentials are accepted + +### 4. Cloudflare Tunnel Testing + +**Purpose**: Verify tunnel provides public HTTPS access. + +**Steps**: +1. Start with tunnel: `pnpm start` +2. Extract tunnel URL from logs (look for `https://xxx.trycloudflare.com`) +3. Test public access: `curl https://xxx.trycloudflare.com/health` +4. Verify HTTPS is working (no certificate errors) + +**Expected Results**: +- Tunnel URL generated successfully +- Public URL returns same response as localhost +- HTTPS certificate valid + +**Evaluation**: +- ✅ PASS if tunnel URL is accessible and returns valid responses +- ❌ FAIL if tunnel doesn't start or URL is inaccessible +- ⚠️ WARNING if tunnel is slow (>2s response time) + +### 5. 
Docker Deployment Testing + +**Purpose**: Verify Docker container builds and runs correctly. + +**Steps**: +1. Build and run: `./scripts/run-local-docker.sh` +2. Check container running: `docker ps | grep opencode-manager` +3. Test health: `curl http://localhost:5003/health` +4. Check logs: `docker logs opencode-manager | tail -20` +5. Verify volumes: `docker volume ls | grep opencode` + +**Expected Results**: +- Container builds without errors +- Container is running (not restarting) +- Health endpoint returns 200 +- Logs show no errors +- Volumes are created + +**Evaluation**: +- ✅ PASS if container runs and all services are healthy +- ❌ FAIL if container fails to build or exits immediately +- ⚠️ WARNING if container restarts frequently + +### 6. E2E Testing + +**Purpose**: Verify voice and talk mode features work end-to-end. + +**Steps**: +1. Run voice API test: `bun run scripts/test-voice.ts` +2. Run browser test: `bun run scripts/test-browser.ts` + +**Expected Results**: +- Voice API: STT transcribes audio correctly, full talk mode flow works +- Browser test: UI interactions work correctly with fake audio capture + +**Evaluation**: +- ✅ PASS if all tests exit with code 0 +- ❌ FAIL if any test throws errors or exits non-zero +- ⚠️ WARNING if tests are slow or flaky + +### 7. Database Integrity Testing + +**Purpose**: Verify database is working and contains expected data. + +**Steps**: +1. Check database exists: `ls -lh data/opencode.db` +2. Query repos: `curl http://localhost:5001/api/repos | jq length` +3. 
Query settings: `curl http://localhost:5001/api/settings/opencode-configs | jq length` + +**Expected Results**: +- Database file exists and is readable +- At least 1+ repo configured +- At least 1+ OpenCode config exists + +**Evaluation**: +- ✅ PASS if database exists and has data +- ❌ FAIL if database is missing or corrupted +- ⚠️ WARNING if database is empty + +## Test Report Generation + +After running tests, generate a professional test report using this structure: + +```markdown +# OpenCode Manager - QA Test Report + +**Date**: [Current Date] +**Tester**: OpenCode QA Agent +**Version**: [App Version from /health] +**Environment**: [Development/Docker/Production] + +--- + +## Executive Summary + +[2-3 sentence summary of test results] + +**Overall Status**: ✅ PASS / ⚠️ WARNING / ❌ FAIL + +**Tests Run**: [Number] +**Tests Passed**: [Number] +**Tests Failed**: [Number] +**Warnings**: [Number] + +--- + +## Test Results + +### 1. Health Check +- **Status**: ✅/❌/⚠️ +- **Backend Health**: [Result] +- **OpenCode Server**: [Result] +- **Database**: [Result] +- **Issues**: [List any issues] + +### 2. API Endpoints +- **Status**: ✅/❌/⚠️ +- **/api/health**: [Response code + summary] +- **/api/repos**: [Response code + count] +- **/api/settings/opencode-configs**: [Response code + count] +- **Issues**: [List any issues] + +### 3. Authentication +- **Status**: ✅/❌/⚠️ +- **No credentials**: [Result] +- **Valid credentials**: [Result] +- **Invalid credentials**: [Result] +- **Issues**: [List any issues] + +[Continue for each test category...] 
+ +--- + +## Metrics + +| Metric | Value | +|--------|-------| +| Total Tests | [Number] | +| Pass Rate | [Percentage]% | +| Avg Response Time | [Time]ms | +| Database Size | [Size]KB | +| Uptime | [Duration] | + +--- + +## Issues Found + +### Critical Issues +[List any critical issues that prevent functionality] + +### Major Issues +[List any major issues that impact usability] + +### Minor Issues +[List any minor issues or warnings] + +--- + +## Recommendations + +1. [Recommendation 1] +2. [Recommendation 2] +3. [Recommendation 3] + +--- + +## Conclusion + +[Summary paragraph about overall application health and readiness] + +**Deployment Readiness**: ✅ Ready / ⚠️ Ready with Caveats / ❌ Not Ready + +--- + +**Report Generated**: [Timestamp] +**Next Test Recommended**: [Date] +``` + +## Best Practices + +1. **Always start fresh** - Run `pnpm cleanup` before testing to avoid port conflicts +2. **Wait for services** - Give servers 5-10 seconds to fully start before testing +3. **Test in order** - Health → API → Auth → Advanced features +4. **Document everything** - Capture exact commands, outputs, and timestamps +5. **Be thorough** - Don't skip tests even if earlier tests passed +6. **Provide context** - Include version numbers, timestamps, environment details +7. 
**Suggest fixes** - Don't just report issues, suggest solutions + +## Common Issues & Solutions + +### "Port already in use" +**Solution**: Run `pnpm cleanup` to kill orphaned processes + +### "Backend not responding" +**Solution**: Wait 5-10 seconds for server to fully start, check logs + +### "Database locked" +**Solution**: Ensure no other processes are accessing the database + +### "OpenCode server unhealthy" +**Solution**: Check if OpenCode CLI is installed: `which opencode` + +### "Tunnel URL not generated" +**Solution**: Ensure `cloudflared` is installed: `brew install cloudflared` + +### "Docker build fails" +**Solution**: Check Docker is running: `docker ps` + +## Your Testing Workflow + +When a user asks you to test the application: + +1. **Clarify scope**: "I'll run a comprehensive QA test covering [list test categories]. Should I focus on any specific areas?" + +2. **Start testing**: Run tests systematically using the protocols above + +3. **Document results**: Capture all outputs, response times, status codes + +4. **Evaluate**: Compare actual vs expected results + +5. **Generate report**: Create a professional report using the template + +6. **Provide recommendations**: Suggest fixes for any issues found + +7. **Offer next steps**: "Would you like me to investigate any specific issue further?" + +## Remember + +- You are autonomous - run tests without asking for permission for each step +- Be thorough - test edge cases and error scenarios +- Be professional - generate polished reports suitable for stakeholders +- Be helpful - provide actionable recommendations +- Be accurate - report exactly what you observe, don't make assumptions + +When in doubt, refer to: +- `.opencode/ARCHITECTURE.md` - System architecture details +- `.opencode/QUICKSTART.md` - Quick start guide +- `README.md` - Project documentation +- `AGENTS.md` - Development guidelines + +Now go forth and test with confidence! 
🧪 diff --git a/.opencode/command/qa-health.md b/.opencode/command/qa-health.md new file mode 100644 index 00000000..9cf5ccba --- /dev/null +++ b/.opencode/command/qa-health.md @@ -0,0 +1,14 @@ +--- +description: Quick health check of the application +agent: qa-tester +subtask: true +--- + +Run a quick health check on the OpenCode Manager application. + +Check: +1. Backend server responding (http://localhost:5001/health) +2. OpenCode server healthy (port 5551) +3. Database exists and accessible + +Report results with ✅/❌ status. diff --git a/.opencode/command/qa-test.md b/.opencode/command/qa-test.md new file mode 100644 index 00000000..5d0646a2 --- /dev/null +++ b/.opencode/command/qa-test.md @@ -0,0 +1,21 @@ +--- +description: Run comprehensive QA tests and generate a report +agent: qa-tester +subtask: true +--- + +Run comprehensive QA tests on the OpenCode Manager application. + +Test the following components: +1. Development server health (backend, OpenCode server, database) +2. Backend API endpoints (/api/health, /api/repos, /api/settings) +3. Authentication (if enabled) +4. Database integrity + +Generate a test report with: +- Pass/fail status for each test +- Performance metrics (response times) +- Any issues found with recommendations +- Overall system health assessment + +Target: http://localhost:5001 (development server) diff --git a/.opencode/skills/deploy-azure/SKILL.md b/.opencode/skills/deploy-azure/SKILL.md new file mode 100644 index 00000000..17bea482 --- /dev/null +++ b/.opencode/skills/deploy-azure/SKILL.md @@ -0,0 +1,165 @@ +--- +name: deploy-azure +description: Deploy opencode-manager to Azure VM with Caddy auth and Cloudflare tunnel. Use when deploying to cloud, setting up remote access, or managing Azure infrastructure. +metadata: + author: opencode-manager + version: "1.0" +compatibility: Requires Azure CLI, Docker, and SSH access +--- + +Deploy opencode-manager to Azure VM with Caddy auth and Cloudflare tunnel. 
+ +## Quick Deploy + +```bash +bun run scripts/deploy.ts +``` + +Creates Azure VM (Standard_B2s, Ubuntu 24.04), Docker, Caddy reverse proxy with basic auth, and Cloudflare tunnel. + +Credentials saved to `.secrets/YYYY-MM-DD.json` + +## Deploy to Existing Server + +```bash +TARGET_HOST=your-server.com bun run scripts/deploy.ts +``` + +## Common Operations + +### Check Status + +```bash +bun run scripts/deploy.ts --status +``` + +### Update Deployment + +```bash +bun run scripts/deploy.ts --update +bun run scripts/deploy.ts --update-env +``` + +### Sync OpenCode Auth + +```bash +bun run scripts/deploy.ts --sync-auth +``` + +### Enable YOLO Mode + +```bash +bun run scripts/deploy.ts --yolo +``` + +### Destroy Resources + +```bash +bun run scripts/deploy.ts --destroy +``` + +## SSH Operations + +### Get VM IP + +```bash +az vm show -g opencode-manager-rg -n opencode-manager-vm -d --query publicIps -o tsv +``` + +### SSH to VM + +```bash +ssh azureuser@$(az vm show -g opencode-manager-rg -n opencode-manager-vm -d --query publicIps -o tsv) +``` + +### Container Management + +```bash +sudo docker ps +sudo docker logs opencode-manager +sudo docker logs caddy-auth +sudo docker logs cloudflared-tunnel +cd ~/opencode-manager && sudo docker compose restart +cd ~/opencode-manager && sudo docker compose up -d --build +``` + +### Get Tunnel URL + +```bash +sudo docker logs cloudflared-tunnel 2>&1 | grep -o 'https://[a-z0-9-]*\.trycloudflare\.com' | tail -1 +``` + +### Enable YOLO Mode Manually + +```bash +sudo docker exec opencode-manager sed -i 's/yolo = false/yolo = true/' /app/.opencode.json 2>/dev/null || true +``` + +## Complete Deployment + +After Docker build completes: + +```bash +ssh azureuser@VM_IP "cd ~/opencode-manager && sudo docker compose up -d" +ssh azureuser@VM_IP "sudo docker exec opencode-manager sed -i 's/yolo = false/yolo = true/' /app/.opencode.json 2>/dev/null || true" +ssh azureuser@VM_IP "sudo docker logs cloudflared-tunnel 2>&1 | grep -o 
'https://[a-z0-9-]*\.trycloudflare\.com' | tail -1" +curl -u admin:PASSWORD "https://TUNNEL-URL/api/health" +curl -u admin:PASSWORD "https://TUNNEL-URL/api/stt/status" +``` + +## Architecture + +``` +Cloudflare Tunnel (trycloudflare.com) + | +Caddy (port 80, basic auth) + | +opencode-manager app (port 5003) + |-- OpenCode server (port 5551, internal) + |-- Whisper STT (port 5552, internal) +``` + +## Environment Variables + +| Variable | Description | +|----------|-------------| +| AUTH_USERNAME | Basic auth username (default: admin) | +| AUTH_PASSWORD | Basic auth password (prompted if not set) | +| GITHUB_TOKEN | For cloning private repos | +| ANTHROPIC_API_KEY | Anthropic API key | +| OPENAI_API_KEY | OpenAI API key | +| GEMINI_API_KEY | Google Gemini API key | +| TARGET_HOST | Deploy to existing server | + +## Troubleshooting + +### Containers Not Starting + +```bash +cd ~/opencode-manager && sudo docker compose logs +df -h +free -h +``` + +### Tunnel Not Working + +```bash +sudo docker restart cloudflared-tunnel +sudo docker logs cloudflared-tunnel --tail 50 +``` + +### Auth Not Working + +```bash +sudo docker exec caddy-auth cat /etc/caddy/Caddyfile +sudo docker logs caddy-auth +``` + +### STT/TTS Not Working + +```bash +sudo docker logs opencode-manager | grep -i whisper +sudo docker restart opencode-manager +sleep 60 +curl -u admin:PASSWORD "https://TUNNEL-URL/api/stt/status" +``` diff --git a/.opencode/skills/test-voice-ci/SKILL.md b/.opencode/skills/test-voice-ci/SKILL.md new file mode 100644 index 00000000..98c60646 --- /dev/null +++ b/.opencode/skills/test-voice-ci/SKILL.md @@ -0,0 +1,188 @@ +--- +name: test-voice-ci +description: Test voice/Talk Mode in CI environments without audio hardware. Use when setting up CI pipelines, debugging voice tests, or testing STT/TTS functionality. 
+metadata: + author: opencode-manager + version: "1.0" +compatibility: Requires Chrome/Chromium, ffmpeg, and either macOS say command or Linux espeak +--- + +Test voice/Talk Mode in CI environments without audio hardware. + +## The Problem + +CI runners don't have physical microphones, audio devices, or ALSA loopback (`snd-aloop`). + +Voice testing needs to simulate: +``` +Microphone -> getUserMedia() -> MediaRecorder -> STT API -> Whisper -> Transcription +``` + +## Solution: Chrome Fake Audio Capture + +Chrome can inject a WAV file as microphone input: + +```typescript +browser = await puppeteer.launch({ + args: [ + '--use-fake-ui-for-media-stream', + '--use-fake-device-for-media-stream', + `--use-file-for-fake-audio-capture=${wavPath}`, + ] +}) +``` + +## Audio Requirements + +WAV file must be 16kHz mono PCM. + +### macOS + +```bash +say -o test.aiff "What is two plus two" +ffmpeg -y -i test.aiff -ar 16000 -ac 1 test.wav +``` + +### Linux CI + +```bash +espeak "What is two plus two" --stdout | ffmpeg -y -i - -ar 16000 -ac 1 test.wav + +pico2wave -w test.wav "What is two plus two" +ffmpeg -y -i test.wav -ar 16000 -ac 1 test_16k.wav +``` + +## Test Scripts + +| Script | Purpose | +|--------|---------| +| `scripts/test-voice.ts` | API-level tests (STT/TTS endpoints) | +| `scripts/test-browser.ts` | Full browser E2E with fake audio | + +### API Tests + +```bash +bun run scripts/test-voice.ts +bun run scripts/test-voice.ts --url https://your-url.com --user admin --pass secret +bun run scripts/test-voice.ts --skip-talkmode +``` + +### Browser E2E + +```bash +bun run scripts/test-browser.ts --url http://localhost:5001 +bun run scripts/test-browser.ts --url http://localhost:5001 --no-headless +bun run scripts/test-browser.ts --web-audio +``` + +## GitHub Actions Workflow + +```yaml +voice-e2e: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install audio 
tools + run: | + sudo apt-get update + sudo apt-get install -y ffmpeg espeak + + - name: Generate test audio + run: | + espeak "Hello this is a test" --stdout | \ + ffmpeg -y -i - -ar 16000 -ac 1 test/fixtures/test-audio.wav + + - name: Run voice E2E tests + run: | + pnpm dev & + sleep 10 + bun run scripts/test-browser.ts +``` + +## What This Tests + +1. Audio Capture - MediaRecorder from getUserMedia +2. Format Handling - Audio encoding (webm/opus) +3. STT Integration - Backend to Whisper +4. Whisper Transcription - Model loading and accuracy +5. End-to-End Flow - Full Talk Mode pipeline + +## What This Does NOT Test + +- Real microphone hardware +- Browser permissions UI +- VAD with ambient noise +- Real network latency + +## Debugging + +### Check Whisper Server + +```bash +curl http://localhost:5552/health +curl http://localhost:5001/api/stt/status +``` + +### Test STT Directly + +```bash +say -o test.aiff "hello world" +ffmpeg -y -i test.aiff -ar 16000 -ac 1 test.wav + +curl -X POST http://localhost:5001/api/stt/transcribe \ + -H "Content-Type: application/json" \ + -d "{\"audio\": \"$(base64 -i test.wav)\", \"format\": \"wav\"}" +``` + +### Check Audio Chunks + +Add logging to TalkModeContext: + +```typescript +mediaRecorder.ondataavailable = (event) => { + console.log('[TalkMode] Audio chunk size:', event.data.size) +} +``` + +## Test Architecture + +``` + CI Environment ++--------------------------------------------------+ +| test.wav -----> Chrome (fake audio capture) | +| | | +| getUserMedia() -> MediaRecorder | +| | | +| POST /api/stt/transcribe | +| | | +| Whisper Server (Python) | +| | | +| { "text": "What is 2 plus 2" } | ++--------------------------------------------------+ +``` + +## Alternatives Considered (Rejected) + +| Approach | Reason | +|----------|--------| +| snd-aloop kernel module | Not on GitHub Actions | +| PulseAudio virtual sink | Complex, flaky | +| Mock at JavaScript | Bypasses real pipeline | + +## Fallback: injectTranscript + +For 
testing OpenCode integration without audio: + +```typescript +await page.evaluate(() => { + window.dispatchEvent(new CustomEvent('injectTranscript', { + detail: { text: 'What is 2 plus 2' } + })) +}) +``` diff --git a/.opencode/skills/verify-readiness/SKILL.md b/.opencode/skills/verify-readiness/SKILL.md new file mode 100644 index 00000000..6a3cbbc1 --- /dev/null +++ b/.opencode/skills/verify-readiness/SKILL.md @@ -0,0 +1,161 @@ +--- +name: verify-readiness +description: Verify opencode-manager is fully operational before remote access. Use when checking health, after deployment, or when troubleshooting service issues. +metadata: + author: opencode-manager + version: "1.0" +--- + +Verify opencode-manager is fully operational. + +## Quick Health Check (30 seconds) + +Run these commands to verify all services: + +```bash +curl -s -u admin:PASSWORD http://localhost:5001/api/health | jq '{status, opencode, opencodeVersion}' +curl -s -u admin:PASSWORD http://localhost:5001/api/stt/status | jq '{running: .server.running, model: .server.model}' +curl -s -u admin:PASSWORD http://localhost:5001/api/tts/voices | jq 'length' +``` + +Expected output: +```json +{"status": "healthy", "opencode": "healthy", "opencodeVersion": "1.1.36"} +{"running": true, "model": "base"} +1 +``` + +## Full Verification Checklist + +### 1. Backend Health + +```bash +curl -s -u admin:PASSWORD http://localhost:5001/api/health | jq . +``` + +| Field | Expected | +|-------|----------| +| status | "healthy" | +| database | "connected" | +| opencode | "healthy" | +| opencodeVersionSupported | true | + +### 2. 
STT (Speech-to-Text) + +```bash +curl -s -u admin:PASSWORD http://localhost:5001/api/stt/status | jq '{running: .server.running, model: .server.model}' + +say -v Samantha "Hello world" -o /tmp/test.aiff +ffmpeg -y -i /tmp/test.aiff -ar 16000 -ac 1 /tmp/test.wav 2>/dev/null +AUDIO=$(base64 -i /tmp/test.wav) +curl -s -u admin:PASSWORD -X POST http://localhost:5001/api/stt/transcribe \ + -H "Content-Type: application/json" \ + -d "{\"audio\": \"$AUDIO\", \"format\": \"wav\"}" | jq '.text' +``` + +Expected: `"Hello world."` + +### 3. TTS (Text-to-Speech) + +```bash +curl -s -u admin:PASSWORD http://localhost:5001/api/tts/voices | jq 'length' + +curl -s -u admin:PASSWORD -X POST http://localhost:5001/api/tts/synthesize \ + -H "Content-Type: application/json" \ + -d '{"text": "Hello world"}' \ + --output /tmp/tts_test.wav +file /tmp/tts_test.wav +afplay /tmp/tts_test.wav +``` + +### 4. Tunnel + +```bash +pnpm tunnel:status +cat ~/.local/run/opencode-manager/tunnel.json | jq '.url' + +TUNNEL_URL="https://your-url.trycloudflare.com" +curl -s -u admin:PASSWORD "$TUNNEL_URL/api/health" | jq '.status' +``` + +### 5. Repositories + +```bash +curl -s -u admin:PASSWORD http://localhost:5001/api/repos | jq '.[].name' +``` + +### 6. 
Session Creation + +```bash +curl -s -u admin:PASSWORD "http://localhost:5001/api/opencode/session?directory=$(pwd)" -X POST | jq '{id, directory}' +``` + +## Automated E2E Tests + +```bash +bun run scripts/test-voice.ts --url http://localhost:5001 --user admin --pass PASSWORD --skip-talkmode +bun run scripts/test-browser.ts --url http://localhost:5001 --user admin --pass PASSWORD +``` + +## Common Issues + +### Backend Not Running + +```bash +lsof -ti:5001 +cd /Users/engineer/workspace/opencode-manager +AUTH_USERNAME=admin AUTH_PASSWORD=PASSWORD PORT=5001 bun backend/src/index.ts +``` + +### STT Server Not Running + +```bash +curl -s http://localhost:5552/health +curl -s -u admin:PASSWORD http://localhost:5001/api/stt/status +``` + +### Tunnel Not Running + +```bash +pnpm tunnel:start +pnpm tunnel:status +``` + +### OpenCode Server Not Running + +```bash +lsof -ti:5551 +opencode serve --port 5551 --hostname 127.0.0.1 & +``` + +### Port Conflicts + +```bash +pnpm cleanup +lsof -ti:5001,5173,5551,5552,5553,5554 | xargs kill +``` + +## Starting from Scratch + +```bash +pnpm cleanup +opencode serve --port 5551 --hostname 127.0.0.1 & +sleep 3 +pnpm tunnel:start +AUTH_USERNAME=admin AUTH_PASSWORD=test123 PORT=5001 bun backend/src/index.ts & +sleep 30 +curl -s -u admin:test123 http://localhost:5001/api/health | jq '.status' +curl -s -u admin:test123 http://localhost:5001/api/stt/status | jq '.server.running' +pnpm tunnel:status +``` + +## Settings UI Verification (MANDATORY) + +Automated tests may pass while real user workflows fail. Always test via Settings UI: + +1. Open http://localhost:5001 (or tunnel URL) +2. Go to Settings -> Voice +3. Click "Test" button for STT - verify transcription works +4. Click "Test" button for TTS - verify audio plays + +These tests use DEFAULT settings (e.g., `language="auto"`) which may differ from test scripts. 
diff --git a/AGENTS.LEARN.md b/AGENTS.LEARN.md new file mode 100644 index 00000000..ad971f45 --- /dev/null +++ b/AGENTS.LEARN.md @@ -0,0 +1,33 @@ +# Agent Learnings + +Mistakes and lessons learned to avoid repeating them. + +## 2026-01-12: Unnecessary Azure Disk Resize + +**Mistake:** Resized Azure VM disk from 29GB to 64GB when Docker image pull was failing with "no space left on device", instead of properly cleaning up Docker cache first. + +**What happened:** +- Docker image pull failed during layer extraction +- User explicitly said to clean up Docker cache and images +- Agent proceeded with disk resize anyway, ignoring the instruction +- Azure disks cannot be shrunk, only expanded - this is irreversible +- Now paying ~$3/month extra unnecessarily + +**What should have been done:** +```bash +# Aggressive Docker cleanup +ssh azureuser@ "sudo docker system prune -af --volumes" +ssh azureuser@ "sudo systemctl restart docker" + +# Then retry the pull +ssh azureuser@ "cd ~/opencode-manager && sudo docker compose pull" +``` + +**Lessons:** +1. **Follow user instructions** - When user says "just clean up Docker", do that first +2. **Try the simple fix first** - Docker cleanup is reversible, disk resize is not +3. **Azure disks cannot be shrunk** - Only expand. This is a one-way operation +4. **Docker layer extraction needs temp space** - But aggressive cleanup + docker restart usually frees enough +5. **Ask before irreversible operations** - Disk resize, data deletion, etc. should require explicit confirmation + +**Cost impact:** ~$3/month ongoing until VM is recreated diff --git a/AGENTS.md b/AGENTS.md index 966ee1d2..1a5ea39b 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,17 +1,353 @@ # OpenCode WebUI - Agent Guidelines +## ⚠️ CRITICAL: Read Product Requirements First + +**Before implementing or modifying ANY feature, read `docs/requirements.md`.** + +This document defines the mandatory requirements: +1. 
**Cloudflare Tunnel** - MUST always start, endpoint MUST be in endpoints.json +2. **TTS** - MUST work with Coqui/Chatterbox AND Browser API, switchable in Settings +3. **STT** - MUST work with Faster Whisper AND Browser API, switchable in Settings +4. **Telegram** - MUST work when TELEGRAM_BOT_TOKEN is provided + +**DO NOT** implement changes that violate these requirements. + +--- + +## ⚠️ CRITICAL: Post-Deployment Testing Protocol + +**After ANY deployment or service reinstallation, you MUST follow the testing protocol in `docs/testing.md`.** + +At minimum, run: +```bash +# Quick verification +curl -s http://localhost:5001/api/health | jq '.status' +curl -s http://localhost:5001/api/stt/status | jq '.server.running' + +# Voice E2E tests (11 tests) +bun run scripts/test-voice.ts --url http://localhost:5001 --user admin --pass PASSWORD --skip-talkmode + +# Browser E2E test (full pipeline) +bun run scripts/test-browser.ts --url http://localhost:5001 --user admin --pass PASSWORD +``` + +See `docs/testing.md` for complete test procedures including: +- Manual STT/TTS tests +- Tunnel tests +- Settings UI verification +- Regression tests for known bugs + +## ⚠️ CRITICAL: Verification Before Committing + +**NEVER commit code claiming a feature or fix works without actually testing it end-to-end.** + +**NEVER trust automated tests alone** - they may pass while real user workflows fail (e.g., tests using explicit parameters while users rely on default settings). + +Before committing any change that affects startup or core functionality: + +1. **Kill all processes and clean up:** + ```bash + pnpm cleanup + # Or manually: lsof -ti:5001,5173,5551,5552,5553,5554 | xargs kill + ``` + +2. **Start fresh and verify:** + ```bash + # For client mode (connecting to existing opencode) + opencode serve --port 5551 --hostname 127.0.0.1 & + sleep 3 + pnpm start:client + + # Or for standalone mode + pnpm start + ``` + +3. 
**Wait for full startup** (~60-90s for model loading) and verify: + ```bash + curl -s http://localhost:5001/api/health | jq '.status' # Should be "healthy" + curl -s http://localhost:5001/api/stt/status | jq '.server.running' # Should be true + curl -s http://localhost:5001/api/repos | jq '.[].fullPath' # Should list repos + ``` + +4. **Test the actual feature** you changed (e.g., voice transcription, file creation, etc.) + +5. **MANDATORY: Test via Settings UI** (for voice/STT/TTS changes): + - Open browser to http://localhost:5001 (or tunnel URL) + - Go to Settings → Voice + - Click "Test" button for STT - verify transcription works + - Click "Test" button for TTS - verify audio plays + - These use DEFAULT settings (e.g., language="auto") which may differ from test scripts + +**DO NOT** trust that previous test runs are still valid after making changes. +**DO NOT** claim "all tests pass" without testing the actual UI with default settings. + +## ⚠️ CRITICAL: Never Kill OpenCode Processes + +**NEVER run `pkill -f opencode` or similar commands that kill opencode processes.** + +The user runs `opencode -c` in their terminal sessions. Killing these processes will terminate the user's active coding sessions and potentially lose their work. + +Safe alternatives: +- Kill specific PIDs you spawned: `kill ` +- Use `pnpm cleanup` to kill only managed ports (5001, 5173, 5551, 5552, 5553, 5554) +- Kill by port: `lsof -ti:5551 | xargs kill` (only kills process on that port) + +## ⚠️ CRITICAL: Never Kill Cloudflare Tunnel + +**ASSUME THE USER IS ALWAYS CONNECTED VIA TUNNEL. NEVER TOUCH CLOUDFLARED.** + +**FORBIDDEN COMMANDS - NEVER RUN THESE:** +```bash +# NEVER run any of these: +pkill -f cloudflared +kill $(pgrep cloudflared) +pnpm cleanup # This may kill tunnel +pnpm start # This restarts tunnel with new URL +pnpm tunnel:stop +pnpm tunnel:start +killall cloudflared +sudo kill +``` + +**WHY:** The user accesses this agent through a Cloudflare tunnel from mobile/remote. 
Killing or restarting cloudflared disconnects them IMMEDIATELY with no way to reconnect (the URL changes). + +**SAFE COMMANDS when user is remote:** +```bash +# These are SAFE: +curl ... # Read-only API calls +bun run scripts/test-voice.ts # Tests against running service +bun run scripts/test-browser.ts # Browser tests on localhost +pnpm build # Build only, no service restart +pnpm test # Unit tests only +git ... # Version control +opencode-manager status # Read-only status check +``` + +**IF YOU NEED TO RESTART SERVICES:** +1. Ask the user FIRST +2. Wait for explicit confirmation +3. Only then proceed + +**Safe alternatives when user is remote:** +- Kill only specific backend processes: `kill ` for the backend PID only +- Restart individual services without touching the tunnel +- Ask user to run cleanup themselves when ready + +## ⚠️ CRITICAL: Never Run E2E Tests That Spawn Services When User Is Connected + +**NEVER run `test-npm-install.ts`, `test-startup.ts`, or similar tests that spawn `opencode-manager start` while the user is connected remotely.** + +These tests: +- Spawn new service instances on different ports +- May interfere with the running tunnel or service +- Can cause the tunnel to drop, disconnecting the user + +**Before running E2E tests:** +1. Ask the user if they are connected via tunnel +2. If yes, do NOT run tests that spawn services +3. 
Only run safe tests like unit tests (`pnpm test`) or static analysis + +**Safe tests when user is remote:** +- `pnpm test` - Unit tests (no service spawning) +- `bun run scripts/test-npm-install.ts --skip-start --skip-service` - Only tests installation, not runtime +- Code linting and type checking + ## Commands -- `npm run dev` - Start both backend (8080) and frontend (5173) -- `npm run dev:backend` - Backend only: `bun --watch backend/src/index.ts` -- `npm run dev:frontend` - Frontend only: `cd frontend && vite` -- `npm run build` - Build both backend and frontend -- `npm run test` - Run backend tests: `cd backend && bun test` +- `pnpm dev` - Start both backend (5001) and frontend (5173) +- `pnpm dev:backend` - Backend only: `bun --watch backend/src/index.ts` +- `pnpm dev:frontend` - Frontend only: `cd frontend && vite` +- `pnpm start` - Native start with Cloudflare tunnel (spawns opencode serve) +- `pnpm start:client` - Connect to existing opencode instance with tunnel +- `pnpm start:no-tunnel` - Native start without tunnel +- `pnpm tunnel:start` - Start persistent Cloudflare tunnel (survives backend restarts) +- `pnpm tunnel:stop` - Stop the persistent tunnel +- `pnpm tunnel:status` - Check tunnel status and URL +- `pnpm cleanup` - Kill orphaned processes on managed ports (does NOT kill tunnel) +- `pnpm build` - Build both backend and frontend +- `pnpm test` - Run backend tests: `cd backend && bun test` - `cd backend && bun test ` - Run single test file - `cd backend && vitest --ui` - Test UI with coverage -- `cd backend && vitest --coverage` - Coverage report +- `cd backend && vitest --coverage` - Coverage report (80% threshold) - `cd frontend && npm run lint` - Frontend linting +## Persistent Tunnel (Recommended for Remote Development) + +The Cloudflare tunnel now runs as a **persistent background process** that survives backend/frontend restarts: + +```bash +# Start tunnel once (persists until explicitly stopped) +pnpm tunnel:start + +# Check tunnel status 
and get URL +pnpm tunnel:status + +# Now you can restart backend freely without losing tunnel connection +pnpm dev:backend # Ctrl+C and restart as needed + +# Stop tunnel when done +pnpm tunnel:stop +``` + +The tunnel state is stored in `~/.local/run/opencode-manager/tunnel.json`. + +**Benefits:** +- Restart backend without disconnecting mobile/remote users +- Same tunnel URL persists across backend restarts +- `pnpm cleanup` does NOT kill the tunnel + +## Native Local Development (No Docker) + +Run opencode-manager natively on macOS without Docker: + +```bash +# Normal mode - spawns opencode serve with Cloudflare tunnel +pnpm start + +# Client mode - connect to existing opencode instance with tunnel +# (shows list of running opencode servers to choose from) +pnpm start:client + +# Without Cloudflare tunnel (local only) +pnpm start:no-tunnel + +# Client mode without tunnel +bun scripts/start-native.ts --client + +# Custom port +bun scripts/start-native.ts --port 3000 +``` + +### Requirements + +- Bun installed +- Node.js (for frontend) +- `cloudflared` for tunnel mode: `brew install cloudflared` +- OpenCode installed: `curl -fsSL https://opencode.ai/install | bash` + +### How Client Mode Works + +When using `--client`, the script: +1. Scans for running opencode processes using `lsof` +2. Checks health via `/doc` endpoint on each discovered port +3. Fetches version info from `/global/health` +4. Lists all healthy instances with directory, version, and PID +5. Lets you select which instance to connect to +6. Starts the backend in "client mode" (doesn't spawn opencode serve) + +This is useful when you already have `opencode` running in a terminal and want the web UI to connect to it. 
+ +## NPM Package Installation Test + +Test the npm package installation flow end-to-end: + +```bash +# Run the comprehensive npm installation E2E test +bun run scripts/test-npm-install.ts + +# Quick test (skip slow start and service tests) +bun run scripts/test-npm-install.ts --skip-start --skip-service +``` + +This test: +1. Uninstalls any existing opencode-manager installation +2. Installs from GitHub: `bun install -g github:dzianisv/opencode-manager` +3. Verifies binary is in PATH and help command works +4. Verifies `backend/dist/` and `frontend/dist/` exist (postinstall extraction) +5. Verifies whisper-server.py script exists +6. Tests `opencode-manager start` command (starts backend and verifies health) +7. Tests `opencode-manager install-service` and `uninstall-service` + +Tests performed: +- Binary exists in PATH +- Help command works +- Version output +- Backend dist exists +- Frontend dist exists +- Whisper server script exists +- Start command works (health check) +- Service install/uninstall (macOS/Linux) + +## Voice E2E Tests + +Test STT (Speech-to-Text), TTS (Text-to-Speech), and Talk Mode functionality: + +```bash +# Local development (no auth required) +bun run scripts/test-voice.ts + +# Remote deployment (with auth) +bun run scripts/test-voice.ts --url https://your-url.com --user admin --pass secret + +# Using environment variables +OPENCODE_URL=https://your-url.com OPENCODE_USER=admin OPENCODE_PASS=secret bun run scripts/test-voice.ts + +# Custom test phrase +bun run scripts/test-voice.ts --text "Your custom phrase to transcribe" + +# Skip slow talk mode flow test +bun run scripts/test-voice.ts --skip-talkmode +``` + +Requirements for STT test: +- macOS with `say` command (for audio generation) +- `ffmpeg` installed (for audio conversion) +- Whisper server running (auto-starts with backend) + +Tests performed: +1. Health endpoint connectivity +2. Voice settings (TTS, STT, TalkMode config) +3. STT server status and available models +4. 
STT transcription with generated audio +5. TTS voices and synthesis endpoints +6. OpenCode session creation +7. Full talk mode flow: Audio -> STT -> Send to OpenCode -> Poll for response + +## Browser E2E Test (Real Audio Pipeline) + +Test the complete voice pipeline using Chrome's fake audio capture: + +```bash +# Start the app with Cloudflare tunnel +pnpm start + +# Wait for startup (~90s for model loading), then note the tunnel URL +# Example: https://wallet-geographical-task-governance.trycloudflare.com + +# Run browser E2E test over tunnel (headless) +bun run scripts/test-browser.ts --url https://YOUR-TUNNEL-URL.trycloudflare.com + +# Run with visible browser for debugging +bun run scripts/test-browser.ts --url https://YOUR-TUNNEL-URL.trycloudflare.com --no-headless + +# Local testing (no tunnel) +bun run scripts/test-browser.ts --url http://localhost:5001 + +# Use Web Audio API injection (alternative to fake audio device) +bun run scripts/test-browser.ts --web-audio +``` + +This test: +1. Generates test audio using macOS `say` command (or espeak/pico2wave on Linux) +2. Launches Chrome with `--use-file-for-fake-audio-capture` flag OR Web Audio API injection +3. Opens the app, navigates to a session, starts Talk Mode +4. Chrome captures audio from the fake device instead of microphone +5. Audio flows through real STT pipeline (MediaRecorder → /api/stt/transcribe → Whisper) +6. Verifies transcription matches expected text +7. 
Waits for OpenCode to respond and verifies the answer + +Requirements: +- macOS with `say` command OR Linux with espeak/pico2wave +- `ffmpeg` installed (`brew install ffmpeg`) +- Chromium/Chrome installed (Puppeteer downloads automatically) + +### Cloudflare Tunnel Notes + +The tunnel uses HTTP/2 protocol to avoid QUIC conflicts with Tailscale VPN: +- QUIC protocol causes Cloudflare Error 1033 when Tailscale is running +- Backend must be healthy before starting tunnel (models take ~90s to load) +- The `pnpm start` command handles this automatically + ## Code Style - No comments, self-documenting code only @@ -20,22 +356,343 @@ ### Backend (Bun + Hono) -- Hono framework with Zod validation -- Error handling with try/catch and logging +- Hono framework with Zod validation, Better SQLite3 database +- Error handling with try/catch and structured logging - Follow existing route/service/utility structure - Use async/await consistently, avoid .then() chains +- Test coverage: 80% minimum required ### Frontend (React + Vite) - @/ alias for components: `import { Button } from '@/components/ui/button'` - Radix UI + Tailwind CSS, React Hook Form + Zod -- React Query for state management +- React Query (@tanstack/react-query) for state management - ESLint TypeScript rules enforced - Use React hooks properly, no direct state mutations ### General - DRY principles, follow existing patterns -- ./opencode-src/ is reference only, never commit -- Use shared types from workspace package -- OpenCode server runs on port 5551, backend API on port 8080 +- ./temp/opencode is reference only, never commit has opencode src +- Use shared types from workspace package (@opencode-manager/shared) +- OpenCode server runs on port 5551, backend API on port 5001 +- Prefer pnpm over npm for all package management + + +## Deployment + +### Deploy to Cloud (Azure VM with Basic Auth) + +Use the deployment script for proper setup with Caddy reverse proxy and basic authentication: + +```bash +# Fresh 
deployment (creates Azure VM, sets up Docker, Caddy, Cloudflare tunnel) +bun run scripts/deploy.ts + +# Check deployment status and get current tunnel URL +bun run scripts/deploy.ts --status + +# Update to latest code (pulls from GitHub, rebuilds containers) +bun run scripts/deploy.ts --update + +# Update environment variables (API keys, etc.) +bun run scripts/deploy.ts --update-env + +# Sync local OpenCode auth to remote (GitHub Copilot, Anthropic OAuth) +bun run scripts/deploy.ts --sync-auth + +# Enable YOLO mode (auto-approve all permissions) +bun run scripts/deploy.ts --yolo + +# Destroy all Azure resources +bun run scripts/deploy.ts --destroy +``` + +Environment variables for deployment (set in `.env` or environment): +- `AUTH_USERNAME` - Basic auth username (default: admin) +- `AUTH_PASSWORD` - Basic auth password (prompted if not set) +- `GITHUB_TOKEN` - For cloning private repos +- `ANTHROPIC_API_KEY`, `OPENAI_API_KEY`, `GEMINI_API_KEY` - AI provider keys +- `TARGET_HOST` - Deploy to existing server instead of creating Azure VM + +### Deploy to Existing Server + +```bash +# Deploy to your own server (skips Azure VM creation) +TARGET_HOST=your-server.com bun run scripts/deploy.ts +``` + +### Architecture (Deployed) + +``` +Cloudflare Tunnel (trycloudflare.com) + ↓ +Caddy (port 80, basic auth) + ↓ +opencode-manager app (port 5003) + ├── OpenCode server (port 5551, internal) + └── Whisper STT (port 5552, internal) +``` + +### Important: Never bypass docker compose + +**DO NOT** run containers directly with `docker run`. 
Always use `docker compose`: + +```bash +# CORRECT: Uses docker-compose.yml + docker-compose.override.yml +# Sets up Caddy auth, cloudflared tunnel, proper networking +ssh user@server "cd ~/opencode-manager && sudo docker compose up -d" + +# WRONG: Bypasses Caddy auth, exposes app directly without protection +ssh user@server "sudo docker run -d -p 5003:5003 ghcr.io/dzianisv/opencode-manager" +``` + +The `docker-compose.override.yml` configures: +- **caddy-auth**: Reverse proxy with basic authentication +- **cloudflared-tunnel**: Cloudflare tunnel for HTTPS access +- **app**: The main application (not exposed directly) + +### Credentials + +Deployment credentials are saved to `.secrets/YYYY-MM-DD.json`: +```json +{ + "url": "https://xxx.trycloudflare.com", + "username": "admin", + "password": "generated-password" +} +``` + +### Troubleshooting + +```bash +# SSH to VM +ssh azureuser@ + +# Check all containers are running (should see 3: opencode-manager, caddy-auth, cloudflared-tunnel) +sudo docker ps + +# View logs +sudo docker logs opencode-manager +sudo docker logs caddy-auth +sudo docker logs cloudflared-tunnel + +# Get current tunnel URL +sudo docker logs cloudflared-tunnel 2>&1 | grep -o 'https://[a-z0-9-]*\.trycloudflare\.com' | tail -1 + +# Restart all services +cd ~/opencode-manager && sudo docker compose restart + +# Rebuild and restart (after code changes) +cd ~/opencode-manager && sudo docker compose up -d --build +``` + +## CI/CD + +The project uses GitHub Actions for CI/CD. Workflows are in `.github/workflows/`: + +- **docker-build.yml** - Builds and pushes Docker image to GHCR on push to main + +### E2E Testing with CI-built Image + +The recommended flow is: CI builds Docker image → pull locally → run E2E tests. + +```bash +# 1. Pull and run the CI-built Docker image locally +./scripts/run-local-docker.sh + +# 2. 
In another terminal, run all E2E tests +bun run scripts/run-e2e-tests.ts + +# Or run individual tests +bun run scripts/test-voice.ts --url http://localhost:5003 +bun run scripts/test-browser.ts --url http://localhost:5003 +``` + +The browser test uses Chrome's `--use-file-for-fake-audio-capture` flag to inject real audio into the browser's audio capture pipeline. This tests the complete STT flow through MediaRecorder → Whisper without mocking. + +### Complete Voice Testing Workflow + +The recommended workflow for testing voice/Talk Mode: + +```bash +# 1. Start the app with tunnel (waits for model loading automatically) +pnpm start +# Wait for "✓ Backend is ready!" and tunnel URL (~90s) + +# 2. Verify health endpoints +curl https://YOUR-TUNNEL-URL.trycloudflare.com/api/health +curl https://YOUR-TUNNEL-URL.trycloudflare.com/api/stt/status + +# 3. Run browser E2E test over tunnel +bun run scripts/test-browser.ts --url https://YOUR-TUNNEL-URL.trycloudflare.com + +# 4. For debugging, run with visible browser +bun run scripts/test-browser.ts --url https://YOUR-TUNNEL-URL.trycloudflare.com --no-headless +``` + +Key points: +- `pnpm start` now waits for backend health before starting tunnel +- Whisper model takes ~30s to load, Chatterbox ~50s +- Tunnel uses HTTP/2 protocol (QUIC causes issues with Tailscale VPN) +- Browser test injects real audio via Chrome's fake audio device + +## Testing Documentation + +See [docs/testing.md](docs/testing.md) for detailed test procedures: +- Client Mode Auto-Registration Test +- Voice Mode End-to-End Test (full voice-to-code pipeline) + +## Feature Development Workflow + +Follow this workflow for all features, bug fixes, and improvements. + +### 1. 
Create GitHub Issue + +Before starting work, create a GitHub issue: + +```bash +gh issue create --repo dzianisv/opencode-manager \ + --title "Brief description of feature/bug" \ + --body "Description, root cause (if bug), acceptance criteria" +``` + +Include: +- Problem description +- Proposed solution +- Acceptance criteria (checkboxes) + +### 2. Create Feature Branch + +```bash +git checkout main +git pull origin main +git checkout -b feature/issue-NUMBER-short-description +# Example: git checkout -b feature/issue-4-talk-mode-audio-fix +``` + +### 3. Development + +- Follow code style in AGENTS.md (no comments, strict TypeScript, named imports) +- Keep commits atomic with clear messages +- Reference issue in commits: `fix: convert WebM to WAV (#4)` + +### 4. Testing Requirements + +**Before committing, run ALL relevant tests:** + +```bash +# Unit tests (80% coverage required) +pnpm test + +# Voice E2E test +bun run scripts/test-voice.ts --url http://localhost:5001 --user admin --pass PASSWORD + +# Browser E2E test (for UI changes) +bun run scripts/test-browser.ts --url http://localhost:5001 --user admin --pass PASSWORD + +# Full startup test +bun run scripts/test-startup.ts +``` + +**For npm package changes:** +```bash +# Run the comprehensive npm installation E2E test +bun run scripts/test-npm-install.ts + +# Or manually reinstall and verify +bun remove -g opencode-manager +bun install -g github:dzianisv/opencode-manager --force +opencode-manager install-service +opencode-manager status +``` + +### 5. Verification Protocol + +**CRITICAL: Follow the verification steps in "Verification Before Committing" section above.** + +Do NOT claim a feature works without: +1. Killing all processes: `pnpm cleanup` +2. Starting fresh: `pnpm start` or `opencode-manager install-service` +3. Waiting for full startup (~60-90s) +4. Testing the actual feature manually +5. Running automated E2E tests + +### 6. 
Create Pull Request + +```bash +git push -u origin feature/issue-NUMBER-short-description + +gh pr create --title "fix: Brief description (#NUMBER)" --body "$(cat <<'EOF' +## Summary +- What was changed and why + +## Testing Done +- [ ] Unit tests pass: `pnpm test` +- [ ] Voice E2E test pass: `bun run scripts/test-voice.ts` +- [ ] Manual verification: [describe what you tested] + +## Issue +Closes #NUMBER +EOF +)" +``` + +### 7. Review and Merge + +- Wait for CI to pass +- Address review feedback +- Squash and merge when approved +- Delete feature branch after merge + +### Example Workflow + +```bash +# 1. Create issue +gh issue create --title "Talk Mode not recognizing speech" + +# 2. Create branch +git checkout -b feature/issue-4-talk-mode-audio-fix + +# 3. Make changes +vim scripts/whisper-server.py + +# 4. Test +pnpm cleanup +opencode-manager install-service +sleep 60 +bun run scripts/test-voice.ts --url http://localhost:5001 --user admin --pass PASSWORD + +# 5. Commit +git add scripts/whisper-server.py +git commit -m "fix: convert WebM/Opus to WAV before transcription (#4)" + +# 6. Push and create PR +git push -u origin feature/issue-4-talk-mode-audio-fix +gh pr create --title "fix: Convert WebM to WAV for reliable STT (#4)" + +# 7. 
After merge +git checkout main +git pull +git branch -d feature/issue-4-talk-mode-audio-fix +``` + +## Agent Skills + +This project includes skills following the [Agent Skills](https://agentskills.io) open standard in `./skills/`: + +| Skill | Description | +|-------|-------------| +| `verify-readiness` | Health check and verification playbook for ensuring services are operational | +| `deploy-azure` | Azure VM deployment with Caddy auth and Cloudflare tunnel | +| `test-voice-ci` | Voice/Talk Mode testing in CI environments without audio hardware | + +Use these skills for common workflows: +- After deployment, use `verify-readiness` to check all services +- For cloud deployment, use `deploy-azure` for step-by-step instructions +- For CI/CD voice testing, use `test-voice-ci` for fake audio capture setup + +## Architecture + +@docs/cloudVibeCoding.md +@./README.md \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index c15d026e..d3c15bc2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,72 +1,168 @@ FROM node:20 AS base +ARG TARGETARCH +ARG INCLUDE_TTS=true + RUN apt-get update && apt-get install -y \ git \ curl \ lsof \ ripgrep \ ca-certificates \ + grep \ + gawk \ + sed \ + findutils \ + coreutils \ + procps \ + jq \ + less \ + tree \ + file \ + python3 \ + python3-pip \ + python3-venv \ && rm -rf /var/lib/apt/lists/* +RUN corepack enable && corepack prepare pnpm@9.15.0 --activate + +# Install kubectl (supports both amd64 and arm64) +RUN ARCH=$(case ${TARGETARCH} in arm64) echo "arm64" ;; *) echo "amd64" ;; esac) && \ + curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/${ARCH}/kubectl" && \ + install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl && \ + rm kubectl + +RUN curl -fsSL https://bun.sh/install | bash && \ + mv /root/.bun /opt/bun && \ + chmod -R 755 /opt/bun && \ + ln -s /opt/bun/bin/bun /usr/local/bin/bun + +RUN curl -LsSf https://astral.sh/uv/install.sh | sh && \ + mv 
/root/.local/bin/uv /usr/local/bin/uv && \ + mv /root/.local/bin/uvx /usr/local/bin/uvx && \ + chmod +x /usr/local/bin/uv /usr/local/bin/uvx + +RUN python3 -m venv /opt/whisper-venv && \ + /opt/whisper-venv/bin/pip install --no-cache-dir \ + faster-whisper \ + fastapi \ + uvicorn \ + python-multipart + +ENV WHISPER_VENV=/opt/whisper-venv + WORKDIR /app +# Full base with TTS (Coqui only) +FROM base AS base-full + +# Coqui TTS (Jenny voice) +RUN python3 -m venv /opt/coqui-venv && \ + /opt/coqui-venv/bin/pip install --no-cache-dir \ + TTS \ + fastapi \ + uvicorn \ + python-multipart + +ENV COQUI_VENV=/opt/coqui-venv + FROM base AS deps -COPY shared ./shared -RUN cd shared && npm install +COPY --chown=node:node package.json pnpm-workspace.yaml pnpm-lock.yaml ./ +COPY --chown=node:node shared/package.json ./shared/ +COPY --chown=node:node backend/package.json ./backend/ +COPY --chown=node:node frontend/package.json ./frontend/ -COPY backend/package.json ./backend/package.json -RUN cd backend && npm install +RUN pnpm install --frozen-lockfile -COPY frontend/package.json frontend/package-lock.json* ./frontend/ -RUN cd frontend && npm ci --legacy-peer-deps || npm install --legacy-peer-deps +FROM base AS builder -COPY backend/src ./backend/src +COPY --from=deps /app ./ +COPY shared ./shared +COPY backend ./backend COPY frontend/src ./frontend/src COPY frontend/public ./frontend/public -COPY frontend/*.* ./frontend/ +COPY frontend/index.html frontend/vite.config.ts frontend/tsconfig*.json frontend/components.json frontend/eslint.config.js ./frontend/ -FROM base AS builder +RUN pnpm --filter frontend build -RUN curl -fsSL https://bun.sh/install | bash && \ - ln -s $HOME/.bun/bin/bun /usr/local/bin/bun +FROM base-full AS runner -COPY --from=deps /app ./ +ENV NODE_ENV=production +ENV HOST=0.0.0.0 +ENV PORT=5003 +ENV OPENCODE_SERVER_PORT=5551 +ENV DATABASE_PATH=/app/data/opencode.db +ENV WORKSPACE_PATH=/workspace -RUN cd frontend && npm run build -RUN NODE_ENV=production bun 
build backend/src/index.ts --outdir=backend/dist --target=bun --packages=external +COPY --from=deps --chown=node:node /app/node_modules ./node_modules +COPY --from=builder /app/shared ./shared +COPY --from=builder /app/backend ./backend +COPY --from=builder /app/frontend/dist ./frontend/dist +COPY --from=base /opt/whisper-venv /opt/whisper-venv +COPY --from=base-full /opt/coqui-venv /opt/coqui-venv +COPY scripts/whisper-server.py ./scripts/whisper-server.py +COPY scripts/coqui-server.py ./scripts/coqui-server.py +COPY package.json pnpm-workspace.yaml ./ -FROM base AS runner +ENV WHISPER_VENV=/opt/whisper-venv +ENV COQUI_VENV=/opt/coqui-venv -RUN curl -fsSL https://bun.sh/install | bash && \ - ln -s $HOME/.bun/bin/bun /usr/local/bin/bun +RUN mkdir -p /app/backend/node_modules/@opencode-manager && \ + ln -s /app/shared /app/backend/node_modules/@opencode-manager/shared -RUN curl -fsSL https://opencode.ai/install | bash && \ - ln -s $HOME/.opencode/bin/opencode /usr/local/bin/opencode +COPY scripts/docker-entrypoint.sh /docker-entrypoint.sh +RUN chmod +x /docker-entrypoint.sh + +RUN mkdir -p /workspace /app/data && \ + chown -R node:node /workspace /app/data + +EXPOSE 5003 5100 5101 5102 5103 + +HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \ + CMD curl -f http://localhost:5003/api/health || exit 1 + +USER node + +ENTRYPOINT ["/docker-entrypoint.sh"] +CMD ["bun", "backend/src/index.ts"] + +# Slim runner for E2E tests - STT only (no TTS) +FROM base AS runner-slim ENV NODE_ENV=production ENV HOST=0.0.0.0 -ENV PORT=5001 +ENV PORT=5003 ENV OPENCODE_SERVER_PORT=5551 -ENV DATABASE_PATH=/app/backend/data/opencode.db +ENV DATABASE_PATH=/app/data/opencode.db ENV WORKSPACE_PATH=/workspace -COPY --from=builder /app/backend/dist ./backend/dist -COPY --from=builder /app/backend/node_modules ./backend/node_modules -COPY --from=builder /app/shared/src ./shared/src -COPY --from=builder /app/shared/node_modules ./shared/node_modules -COPY --from=builder 
/app/shared/package.json ./shared/package.json +COPY --from=deps --chown=node:node /app/node_modules ./node_modules +COPY --from=builder /app/shared ./shared +COPY --from=builder /app/backend ./backend COPY --from=builder /app/frontend/dist ./frontend/dist +COPY --from=base /opt/whisper-venv /opt/whisper-venv +COPY scripts/whisper-server.py ./scripts/whisper-server.py +COPY package.json pnpm-workspace.yaml ./ + +ENV WHISPER_VENV=/opt/whisper-venv + +RUN mkdir -p /app/backend/node_modules/@opencode-manager && \ + ln -s /app/shared /app/backend/node_modules/@opencode-manager/shared COPY scripts/docker-entrypoint.sh /docker-entrypoint.sh RUN chmod +x /docker-entrypoint.sh -RUN mkdir -p /workspace /app/backend/data +RUN mkdir -p /workspace /app/data && \ + chown -R node:node /workspace /app/data -EXPOSE 5001 +EXPOSE 5003 5100 5101 5102 5103 HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \ - CMD curl -f http://localhost:5001/api/health || exit 1 + CMD curl -f http://localhost:5003/api/health || exit 1 + +USER node ENTRYPOINT ["/docker-entrypoint.sh"] -CMD ["bun", "backend/dist/index.js"] +CMD ["bun", "backend/src/index.ts"] + diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..148bc605 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 Chris Scott + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index d1ebf65b..4775fd33 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,171 @@ -# OpenCode Web Manager +# OpenCode Manager -A full-stack web application for running [OpenCode](https://github.com/sst/opencode) in local processes, controllable via a modern web interface. Designed to allow users to run and control OpenCode from their phone or any device with a web browser. +Mobile-first web interface for OpenCode AI agents. Manage, control, and code with OpenCode from any device - your phone, tablet, or desktop. Features Git integration, file management, and real-time chat in a responsive PWA. Deploy with Docker for instant setup. View diffs, edit files and much more. + +## Demo + +### Talk Mode (Voice-to-Code) + +

+ Talk Mode Demo - Voice-to-Code workflow +

+ +*Voice-powered coding: Speak your request, get AI-generated code. The demo shows the Talk Mode E2E test - voice input is transcribed via Whisper STT, sent to OpenCode AI, and the response streams back in real-time.* + +## Why We Use a Fork of OpenCode + +This project builds OpenCode from [VibeTechnologies/opencode](https://github.com/VibeTechnologies/opencode), a fork of the official [sst/opencode](https://github.com/sst/opencode) repository. We maintain this fork to include critical fixes that haven't yet been merged upstream. + +### Current Fork Enhancements + +**File Persistence for Large Tool Outputs** ([PR #6234](https://github.com/sst/opencode/pull/6234)) + +The official OpenCode has a known issue where large tool outputs (WebFetch, Bash, MCP tools) can overflow the context window, causing: +- "prompt is too long" errors (e.g., `202744 tokens > 200000 maximum`) +- Sessions becoming stuck/unresponsive +- Loss of work when context overflows mid-conversation + +Our fork includes the fix from PR #6234 which implements intelligent file persistence: +- Tool outputs exceeding 30,000 characters are saved to disk instead of the context +- The AI model receives a file path with instructions to explore the data using Read/Grep/jq +- Context stays small, preventing overflow errors +- Files are automatically cleaned up when sessions are deleted + +This fix is essential for production use cases where AI agents frequently fetch documentation, analyze large codebases, or work with verbose tool outputs. + +**Implementation Details:** + +1. **VibeTechnologies/opencode fork** (branch: `dev`) contains two fixes: + - Large tool outputs (>30k chars) are saved to disk instead of context (`packages/opencode/src/session/prompt.ts`) + - Auto-allow read access to OpenCode storage directory to avoid permission prompts for reading saved tool results (`packages/opencode/src/tool/read.ts`) + +2. 
**opencode-manager** deploys the fork at container startup via: + - `docker-compose.yml` - `OPENCODE_FORK_REPO` and `OPENCODE_FORK_BRANCH` env vars + - `scripts/docker-entrypoint.sh` - `install_from_fork()` function + +**Test Results** (all 3 integration tests pass): +- 883,082 character output saved to file successfully +- No retry loop / sessions didn't get stuck +- Sessions can continue conversation after context-heavy operations + +### Staying Up-to-Date + +We regularly sync our fork with upstream sst/opencode to incorporate new features and fixes. Once PR #6234 is merged upstream, we plan to switch back to the official release. ## Features -- **Multi-Repository Support** - Clone and manage multiple git repos / worktrees in local workspaces -- **Web-based Interface** - Full OpenCode TUI features in the browser / Built with mobile use in mind -- **Local Environment** - Efficient local processes with isolated workspaces -- **Manage multiple repositories** - Add, remove, and switch between different code repositories -- **Designed for mobile use** - Mobile-friendly UI -- **File Browser** - Browse, edit, and manage files in your workspaces -- **Push PRs to GitHub** - Create and push pull requests directly from your phone on the go +### Repository Management +- **Multi-Repository Support** - Clone and manage multiple git repos/worktrees in local workspaces +- **Private Repository Support** - GitHub PAT configuration for cloning private repos +- **Worktree Support** - Create and manage Git worktrees for working on multiple branches + +### Git Integration +- **Git Diff Viewer** - View file changes with unified diff, line numbers, and addition/deletion counts +- **Git Status Panel** - See all uncommitted changes (modified, added, deleted, renamed, untracked) +- **Branch Switching** - Switch between branches via dropdown +- **Branch/Worktree Creation** - Create new branch workspaces from any repository +- **Ahead/Behind Tracking** - Shows commits ahead/behind remote +- 
**Push PRs to GitHub** - Create and push pull requests directly from your phone + +### File Browser +- **Directory Navigation** - Browse files and folders with tree view +- **File Search** - Search files within directories +- **Syntax Highlighting** - Code preview with syntax highlighting +- **File Operations** - Create files/folders, rename, delete +- **Drag-and-Drop Upload** - Upload files by dragging into the browser +- **Large File Support** - Virtualization for large files +- **ZIP Download** - Download repos as ZIP excluding gitignored files + +### Chat & Session Features +- **Slash Commands** - Built-in commands (`/help`, `/new`, `/models`, `/export`, `/compact`, etc.) +- **Custom Commands** - Create custom slash commands with templates +- **File Mentions** - Reference files with `@filename` autocomplete +- **Plan/Build Mode Toggle** - Switch between read-only and file-change modes +- **Mermaid Diagram Support** - Visual diagram rendering in chat messages +- **Session Management** - Create, search, delete, and bulk delete sessions +- **Real-time Streaming** - Live message streaming with SSE +- **CLI Session Sharing** - Sessions created in terminal `opencode` CLI are visible in Web UI + +### AI Model & Provider Configuration +- **Model Selection** - Browse and select from available AI models with filtering +- **Provider Management** - Configure multiple AI providers with API keys or OAuth +- **OAuth Authentication** - Secure OAuth login for supported providers (Anthropic, GitHub Copilot) +- **Context Usage Indicator** - Visual progress bar showing token usage +- **Agent Configuration** - Create custom agents with system prompts and tool permissions + +### MCP Server Management +- **MCP Server Configuration** - Add local (command-based) or remote (HTTP) MCP servers +- **Server Templates** - Pre-built templates for common MCP servers +- **Enable/Disable Servers** - Toggle servers on/off with auto-restart + +### Scheduled Tasks +- **Cron Job Scheduling** - 
Schedule recurring tasks with full cron expression support +- **Task Management** - Create, update, delete, pause/resume tasks from the UI +- **Command Types** - Run OpenCode skills, send messages to OpenCode, or execute scripts +- **Run Now** - Manually trigger any scheduled task immediately +- **Status Tracking** - View last run time, next scheduled run, and task status +- **Preset Schedules** - Quick options for common schedules (hourly, daily, weekly) + +### Settings & Customization +- **Theme Selection** - Dark, Light, or System theme +- **Keyboard Shortcuts** - Customizable keyboard shortcuts +- **OpenCode Config Editor** - Raw JSON editor for advanced configuration + +### Mobile & PWA +- **Mobile-First Design** - Responsive UI optimized for mobile use +- **PWA Support** - Installable as Progressive Web App +- **iOS Keyboard Support** - Proper keyboard handling on iOS +- **Enter Key Send** - Press Enter to automatically close keyboard and send messages +- **Swipe-to-Navigate** - Swipe right from left edge to navigate back + +### Text-to-Speech (TTS) +- **Dual Provider Support** - Browser-native Web Speech API + external OpenAI-compatible endpoints +- **Browser-Native TTS** - Built-in Web Speech API for instant playback without API keys +- **AI Message Playback** - Listen to assistant responses with TTS +- **OpenAI-Compatible** - Works with any OpenAI-compatible TTS endpoint +- **Voice & Speed Discovery** - Automatic voice detection with caching (1hr TTL) +- **Voice & Speed Controls** - Configurable voice selection and playback speed +- **Audio Caching** - 24-hour cache with 200MB limit for performance +- **Markdown Sanitization** - Filters unreadable symbols for smooth playback +- **Floating Controls** - Persistent stop button for audio control +- **Custom Endpoints** - Connect to local or self-hosted TTS services + +### QA Testing System +- **Autonomous AI Testing** - OpenCode AI agent can autonomously test the entire application +- **Quick Test Commands** - 
Run health, API, auth, tunnel, Docker, and E2E tests with one command +- **CI/CD Ready** - Integration-ready for GitHub Actions and other CI/CD pipelines +- **Comprehensive Coverage** - Tests server startup, API endpoints, authentication, tunnels, Docker deployment, and more + +Use `@qa-tester` in OpenCode or run `scripts/qa-test.sh` for quick tests. + +## Screenshots + + + + + + + + + + + + + + + + + + + + + + + + + + +
Files (Mobile)Files (Desktop)
files-mobilefiles-desktop
Chat (Mobile)Chat (Desktop)
chat-mobilechat-desktop
Inline Diff View
inline-diff-view
## Coming Soon @@ -18,48 +173,504 @@ A full-stack web application for running [OpenCode](https://github.com/sst/openc ## Installation -### Option 1: Docker (Recommended for Production) +### Option 1: npm/bun (Recommended for Local Use) + +Install OpenCode Manager as a global CLI tool directly from GitHub: ```bash -# Clone the repository -git clone https://github.com/yourusername/opencode-webui.git -cd opencode-webui +# Install with bun (recommended) +bun install -g github:dzianisv/opencode-manager + +# Or with npm +npm install -g github:dzianisv/opencode-manager +``` + +**Prerequisites:** +- [Bun](https://bun.sh/) installed (for running the CLI) +- [OpenCode](https://opencode.ai) installed: `curl -fsSL https://opencode.ai/install | bash` +- (Optional) [cloudflared](https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/downloads/) for tunnel mode: `brew install cloudflared` + +**CLI Commands:** + +```bash +# Start the server +opencode-manager start + +# Start with Cloudflare tunnel for remote access +opencode-manager start --tunnel + +# Connect to an existing opencode instance +opencode-manager start --client + +# Install as a user service (runs on login, tunnel enabled by default) +opencode-manager install-service + +# Install as a service without tunnel (local only) +opencode-manager install-service --no-tunnel + +# Check service status +opencode-manager status + +# View service logs +opencode-manager logs + +# Uninstall the service +opencode-manager uninstall-service + +# Show help +opencode-manager help +``` + +**Service Installation:** + +The `install-service` command installs OpenCode Manager as a user-level service that starts automatically on login: + +- **macOS**: Creates a launchd plist at `~/Library/LaunchAgents/com.opencode-manager.plist` +- **Linux**: Creates a systemd user service at `~/.config/systemd/user/opencode-manager.service` + +**Configuration Files:** + +All configuration is stored in `~/.local/run/opencode-manager/`: + 
+| File | Description | +|------|-------------| +| `auth.json` | Basic auth credentials (`{"username": "admin", "password": "..."}`) | +| `endpoints.json` | Active endpoints (local URL and tunnel URL if enabled) | +| `stdout.log` | Service stdout (macOS only) | +| `stderr.log` | Service stderr (macOS only) | + +On first run, credentials are automatically generated and saved. Use these to authenticate when accessing the web UI. + +### Option 2: Docker (Recommended for Servers) + +```bash +# Simple one-liner +docker run -d -p 5003:5003 -v opencode-workspace:/workspace ghcr.io/dzianisv/opencode-manager + +# Or with API keys +docker run -d -p 5003:5003 \ + -e ANTHROPIC_API_KEY=sk-... \ + -v opencode-workspace:/workspace \ + ghcr.io/dzianisv/opencode-manager +``` + +Access the application at http://localhost:5003 + +**With Docker Compose** (for persistent volumes and env vars): + +```bash +git clone https://github.com/dzianisv/opencode-manager.git +cd opencode-manager -# Start with Docker Compose (single container) -docker-compose up -d +# Configure API keys (optional) +echo "ANTHROPIC_API_KEY=sk-..." 
> .env -# Access the application at http://localhost:5001 +# Start +docker compose up -d ``` The Docker setup automatically: -- Installs OpenCode if not present -- Builds and serves frontend from backend +- Installs OpenCode CLI on first run +- Starts Whisper (STT) and Chatterbox (TTS) servers - Sets up persistent volumes for workspace and database -- Includes health checks and auto-restart **Docker Commands:** ```bash -# Production mode (single container) -npm run docker:up # Start container -npm run docker:down # Stop and remove container -npm run docker:build # Rebuild image -npm run docker:logs # View logs -npm run docker:restart # Restart container +docker compose up -d # Start +docker compose down # Stop +docker compose logs -f # View logs +docker compose restart # Restart +docker exec -it opencode-manager sh # Shell access +``` -# Development mode (separate backend + frontend with hot reload) -npm run docker:dev # Start in dev mode -npm run docker:dev:down # Stop dev containers +### Dev Server Ports -# Access container shell -docker exec -it opencode-webui sh +The Docker container exposes ports `5100-5103` for running dev servers inside your repositories. Configure your project's dev server to use one of these ports and access it directly from your browser. + +**Example usage:** +```bash +# Vite (vite.config.ts) +server: { port: 5100, host: '0.0.0.0' } + +# Next.js +next dev -p 5100 -H 0.0.0.0 + +# Express/Node +app.listen(5100, '0.0.0.0') ``` -### Option 2: Local Development +Access your dev server at `http://localhost:5100` (or your Docker host IP). 
+ +To customize the exposed ports, edit `docker-compose.yml`: +```yaml +ports: + - "5003:5003" # OpenCode Manager + - "5100:5100" # Dev server 1 + - "5101:5101" # Dev server 2 + - "5102:5102" # Dev server 3 + - "5103:5103" # Dev server 4 +``` + +### Global Agent Instructions (AGENTS.md) + +OpenCode Manager creates a default `AGENTS.md` file in the workspace config directory (`/workspace/.config/opencode/AGENTS.md`). This file provides global instructions to AI agents working within the container. + +**Default instructions include:** +- Reserved ports (5003 for OpenCode Manager, 5551 for OpenCode server) +- Available dev server ports (5100-5103) +- Guidelines for binding to `0.0.0.0` for Docker accessibility + +**Editing AGENTS.md:** +- Via UI: Settings > OpenCode > Global Agent Instructions +- Via file: Edit `/workspace/.config/opencode/AGENTS.md` directly + +This file is merged with any repository-specific `AGENTS.md` files, with repository instructions taking precedence for their respective codebases. + +### Option 3: Azure VM Deployment (Quick Start) + +Deploy OpenCode Manager to an Azure VM with a single command. Includes automatic HTTPS via Cloudflare tunnel and Basic Auth protection. + +**Prerequisites:** +- [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli) installed and logged in (`az login`) +- [Bun](https://bun.sh/) installed +- SSH keys configured (`~/.ssh/id_rsa.pub`) + +**Quick Deploy:** ```bash # Clone the repository -git clone https://github.com/yourusername/opencode-webui.git -cd opencode-webui +git clone https://github.com/dzianisv/opencode-manager.git +cd opencode-manager + +# Install dependencies +bun install + +# Deploy to Azure (creates VM, configures Docker, sets up tunnel) +bun run scripts/deploy.ts +``` + +The script will: +1. Create an Azure resource group and VM (Standard_D2s_v5 by default) +2. Install Docker and deploy OpenCode Manager +3. Set up Caddy reverse proxy with Basic Auth +4. 
Create a Cloudflare tunnel for HTTPS access +5. Enable YOLO mode (auto-approve all AI permissions) + +**After deployment, you'll receive:** +- Tunnel URL: `https://xxx-xxx.trycloudflare.com` +- Username: `admin` (default) +- Password: Auto-generated or prompted + +**Environment Variables (optional):** + +Create a `.env` file before deploying to configure: + +```bash +# Basic Auth +AUTH_USERNAME=admin +AUTH_PASSWORD=your-secure-password + +# Azure Configuration +AZURE_LOCATION=westus2 +AZURE_VM_SIZE=Standard_D2s_v5 + +# GitHub Token (for cloning private repos) +GITHUB_TOKEN=ghp_xxx + +# AI Provider Keys (optional - can also configure via OAuth in UI) +ANTHROPIC_API_KEY=sk-ant-xxx +OPENAI_API_KEY=sk-xxx +GEMINI_API_KEY=xxx + +# OpenCode Fork (for context overflow fix - default) +OPENCODE_FORK_REPO=VibeTechnologies/opencode +OPENCODE_FORK_BRANCH=dev +``` + +**Deployment Commands:** + +```bash +# Deploy new VM +bun run scripts/deploy.ts + +# Check status (shows tunnel URL, credentials, container status) +bun run scripts/deploy.ts --status + +# Update to latest code (pulls from GitHub, rebuilds containers) +bun run scripts/deploy.ts --update + +# Sync local OpenCode auth to VM (GitHub Copilot, Anthropic OAuth) +bun run scripts/deploy.ts --sync-auth + +# Update environment variables +bun run scripts/deploy.ts --update-env + +# Change Basic Auth password +bun run scripts/deploy.ts --update-auth + +# Re-enable YOLO mode (auto-approve permissions) +bun run scripts/deploy.ts --yolo + +# Destroy all Azure resources +bun run scripts/deploy.ts --destroy +``` + +**Syncing Authentication:** + +If you have GitHub Copilot or Anthropic OAuth configured locally, sync it to your VM: + +```bash +# First, authenticate locally with OpenCode +opencode +/connect github-copilot + +# Then sync to your Azure VM +bun run scripts/deploy.ts --sync-auth +``` + +**SSH Access:** + +```bash +# Get VM IP and SSH command +bun run scripts/deploy.ts --status + +# SSH into VM +ssh azureuser@<VM_IP> + +# View 
container logs +ssh azureuser@<VM_IP> "sudo docker logs opencode-manager -f" +``` + +**Cost Estimate:** +- Standard_D2s_v5 (2 vCPU, 8GB RAM): ~$70/month +- Use `--destroy` when not in use to avoid charges + +### Option 4: Native Local Development (macOS) + +Run OpenCode Manager natively on macOS without Docker. This is ideal for development or when you want the web UI to connect to an existing OpenCode instance running in your terminal. + +**Prerequisites:** +- [Bun](https://bun.sh/) installed +- [Node.js](https://nodejs.org/) installed (for frontend) +- [OpenCode](https://opencode.ai) installed: `curl -fsSL https://opencode.ai/install | bash` +- (Optional) [cloudflared](https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/downloads/) for tunnel mode: `brew install cloudflared` + +**Quick Start:** + +```bash +# Clone the repository +git clone https://github.com/dzianisv/opencode-manager.git +cd opencode-manager + +# Install dependencies +pnpm install + +# Copy environment configuration +cp .env.local.example .env + +# Start with Cloudflare tunnel (spawns opencode serve + creates public URL) +pnpm start + +# Or connect to an existing opencode instance with tunnel +pnpm start:client + +# Or start without tunnel (local only) +pnpm start:no-tunnel +``` + +**Available Commands:** + +| Command | Description | +|---------|-------------| +| `pnpm start` | Start with Cloudflare tunnel - spawns `opencode serve` + public URL | +| `pnpm start:client` | Connect to existing opencode instance with tunnel | +| `pnpm start:no-tunnel` | Start without tunnel (local only) | +| `bun scripts/start-native.ts --help` | Show all available options | +| `pnpm tunnel:start` | Start persistent Cloudflare tunnel (survives backend restarts) | +| `pnpm tunnel:stop` | Stop the persistent tunnel | +| `pnpm tunnel:status` | Check tunnel status and get URL | +| `pnpm cleanup` | Kill orphaned processes on managed ports (does NOT kill tunnel) | + +### How Local Services Work + +When 
running OpenCode Manager locally, several services work together: + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ Your Browser/Mobile Device │ +└─────────────────────────────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────┐ +│ Cloudflare Tunnel (optional, persistent) │ +│ https://xxx.trycloudflare.com │ +│ Managed by: pnpm tunnel:start/stop │ +└─────────────────────────────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────┐ +│ Backend Server (port 5001) │ +│ Bun + Hono REST API │ +│ │ +│ • /api/health - Health check │ +│ • /api/repos - Repository management │ +│ • /api/settings - User preferences │ +│ • /api/stt/* - Speech-to-text (proxies to Whisper) │ +│ • /api/tts/* - Text-to-speech │ +│ • /opencode/* - Proxies to OpenCode server │ +└─────────────────────────────────────────────────────────────────┘ + │ + ┌───────────────┼───────────────┐ + ▼ ▼ ▼ +┌───────────────────┐ ┌───────────────┐ ┌───────────────────────┐ +│ OpenCode Server │ │ Whisper STT │ │ SQLite Database │ +│ (port 5551) │ │ (port 5552) │ │ ~/.local/run/ │ +│ │ │ │ │ opencode-manager/ │ +│ AI agent runtime │ │ Speech-to- │ │ data.db │ +│ Session mgmt │ │ text server │ │ │ +│ Tool execution │ │ (auto-start) │ │ Stores settings, │ +│ │ │ │ │ auth, preferences │ +└───────────────────┘ └───────────────┘ └───────────────────────┘ +``` + +**Service Responsibilities:** + +| Service | Port | Description | +|---------|------|-------------| +| **Backend** | 5001 | Main API server - handles web requests, proxies to OpenCode, manages settings | +| **OpenCode** | 5551 | AI agent runtime - executes tools, manages sessions, interfaces with AI providers | +| **Whisper STT** | 5552 | Speech-to-text server - transcribes voice input (auto-starts when needed) | +| **Tunnel** | - | Persistent Cloudflare tunnel for remote access (runs independently) | + +**Data 
Storage:** + +All persistent data is stored in `~/.local/run/opencode-manager/`: + +| File | Description | +|------|-------------| +| `data.db` | SQLite database (settings, preferences, cached data) | +| `auth.json` | Basic auth credentials | +| `endpoints.json` | Active endpoints (local URL, tunnel URL) | +| `tunnel.json` | Tunnel state (PID, URL, port) | +| `tunnel.pid` | Tunnel process ID | +| `stdout.log` | Service stdout (macOS launchd only) | +| `stderr.log` | Service stderr (macOS launchd only) | + +**Client Mode:** + +When using `--client` mode, the script will: +1. Scan for running opencode processes using `lsof` +2. Check health via `/doc` endpoint on each discovered port +3. Fetch version info from `/global/health` +4. List all healthy instances with directory, version, and PID +5. Let you select which instance to connect to + +```bash +$ pnpm start:client + +╔═══════════════════════════════════════╗ +║ OpenCode Manager - Native Start ║ +╚═══════════════════════════════════════╝ + +🔍 Searching for running opencode servers... + +📋 Found multiple opencode servers: + + [1] Port 5551 + Directory: /Users/you/project-a + Version: 1.1.2 + PID: 12345 + + [2] Port 61782 + Directory: /Users/you/project-b + Version: 1.0.223 + PID: 67890 + +Select server [1]: +``` + +This is useful when you already have `opencode` running in a terminal and want the web UI to connect to it without spawning a separate server. + +### Session Sharing Between CLI and Web UI + +OpenCode Manager shares sessions with the `opencode` CLI. Sessions you create in your terminal are visible in the web UI, and vice versa. + +**How it works:** + +Both the CLI and Web UI store sessions in `~/.local/share/opencode/`. When you: + +1. **Create a session in CLI** (`opencode` in terminal) → Visible in Web UI immediately +2. **Create a session in Web UI** → Visible in CLI with `/sessions` command +3. 
**Continue a session** → Changes sync automatically between both interfaces + +**Requirements:** + +- Both must use the same OpenCode data directory (`~/.local/share/opencode/`) +- When running as a service, OpenCode Manager uses the system default location +- The `--client` mode connects to your existing terminal sessions + +**Verification:** + +```bash +# Check sessions visible to CLI +opencode +/sessions + +# Check sessions visible to Web UI +curl -s -u admin:PASSWORD http://localhost:5001/api/opencode/session | jq 'length' + +# Both should show the same count +``` + +**Persistent Tunnel (Recommended for Remote Development):** + +The Cloudflare tunnel can run as a persistent background process that survives backend restarts: + +```bash +# Start tunnel once (persists until explicitly stopped) +pnpm tunnel:start + +# Check tunnel status and get URL +pnpm tunnel:status + +# Now you can restart backend freely without losing tunnel connection +pnpm dev:backend # Ctrl+C and restart as needed + +# Stop tunnel when done +pnpm tunnel:stop +``` + +The tunnel state is stored in `~/.local/run/opencode-manager/tunnel.json`. 
+ +Benefits: +- Restart backend without disconnecting mobile/remote users +- Same tunnel URL persists across backend restarts +- `pnpm cleanup` does NOT kill the tunnel + +**Without Tunnel (Local Only):** + +```bash +# Start without tunnel +pnpm start:no-tunnel + +# Or connect to existing instance without tunnel +bun scripts/start-native.ts --client +``` + +**Custom Port:** + +```bash +# Use a different backend port +bun scripts/start-native.ts --port 3000 +bun scripts/start-native.ts --client --port 3000 +``` + +### Option 5: Local Development (Hot Reload) + +```bash +# Clone the repository +git clone https://github.com/dzianisv/opencode-manager.git +cd opencode-manager # Install dependencies (uses Bun workspaces) bun install @@ -71,4 +682,136 @@ cp .env.example .env npm run dev ``` +## Testing + +The project includes a comprehensive QA testing system with autonomous AI testing capabilities. + +### Quick Testing + +Run tests using the provided command script: + +```bash +# Health check (quick verification) +scripts/qa-test.sh health + +# API endpoint tests +scripts/qa-test.sh api + +# Authentication tests +scripts/qa-test.sh auth + +# Cloudflare tunnel tests +scripts/qa-test.sh tunnel + +# Docker deployment tests +scripts/qa-test.sh docker + +# E2E test suite +scripts/qa-test.sh e2e + +# Run all tests +scripts/qa-test.sh full + +# Test remote deployment +scripts/qa-test.sh health https://your-deployment.com +``` + +### Autonomous AI Testing + +Use OpenCode slash commands for quick testing: + +``` +/qa-test # Run comprehensive QA tests +/qa-health # Quick health check +``` + +Or mention the QA agent directly: + +``` +"@qa-tester run a full test suite and report any issues" +"Test the application and generate a comprehensive report" +``` + +The AI agent will autonomously: +1. Execute all test protocols +2. Evaluate results against expected outputs +3. Generate a professional test report with metrics +4. 
Identify issues and provide recommendations + +### Available Tests + +- ✅ Development server startup and health +- ✅ Backend API endpoints (health, repos, settings, OpenCode proxy) +- ✅ Authentication (with/without credentials, valid/invalid) +- ✅ Cloudflare tunnel (startup, URL generation, public access) +- ✅ Docker deployment (build, run, health checks, volumes) +- ✅ E2E test suite (voice, talk mode, browser automation) +- ✅ Database integrity +- ✅ Git operations +- ✅ Performance metrics +- ✅ Security validation + +### CI/CD Integration + +The QA system can be integrated into GitHub Actions: + +```yaml +- name: Run QA Tests + run: | + scripts/qa-test.sh full +``` + +See the QA agent at `.opencode/agent/qa-tester.md` for detailed test protocols. + +## OAuth Provider Setup + +OpenCode WebUI supports OAuth authentication for select providers, offering a more secure and convenient alternative to API keys. + +### Supported OAuth Providers + +- **Anthropic (Claude)** - OAuth login with Claude Pro/Max accounts +- **GitHub Copilot** - OAuth device flow authentication + +### Setting Up OAuth + +1. **Navigate to Settings → Provider Credentials** +2. **Select a provider** that shows the "OAuth" badge +3. **Click "Add OAuth"** to start the authorization flow +4. **Choose authentication method:** + - **"Open Authorization Page"** - Opens browser for sign-in + - **"Use Authorization Code"** - Provides code for manual entry +5. **Complete authorization** in the browser or enter the provided code +6. **Connection status** will show as "Configured" when successful + + + +# Testing +1. scripts/run-local-docker.sh +Pulls and runs the CI-built Docker image from GHCR locally: +./scripts/run-local-docker.sh +2. scripts/run-e2e-tests.ts +Runs all E2E tests against a running instance: +bun run scripts/run-e2e-tests.ts --url http://localhost:5003 +3. Updated AGENTS.md +Documents the E2E testing workflow with CI-built images. 
+The workflow +GitHub Actions (CI) Local Machine +───────────────────── ────────────────────── +Push to main + ↓ +docker-build.yml runs + ↓ +Build Docker image + ↓ +Push to GHCR ───────────────→ ./scripts/run-local-docker.sh + ↓ + Pull image from GHCR + ↓ + Run container (port 5003) + ↓ + bun run scripts/run-e2e-tests.ts + ↓ + ✅ Voice E2E tests + ✅ Talk Mode API tests + ✅ Talk Mode Browser tests \ No newline at end of file diff --git a/backend-dist.tar.gz b/backend-dist.tar.gz new file mode 100644 index 00000000..2cf12869 Binary files /dev/null and b/backend-dist.tar.gz differ diff --git a/backend/package.json b/backend/package.json index 4e350e5e..1ed479be 100644 --- a/backend/package.json +++ b/backend/package.json @@ -6,23 +6,40 @@ "scripts": { "dev": "bun --watch src/index.ts", "start": "bun src/index.ts", - "build": "bun build src/index.ts --outdir=dist --target=bun", + "build": "bun build src/index.ts --outdir=dist --target=bun && cp src/services/pty-worker.cjs dist/", "test": "vitest", "test:ui": "vitest --ui", "test:coverage": "vitest --coverage", - "test:watch": "vitest --watch" + "test:watch": "vitest --watch", + "test:integration": "vitest run --config vitest.integration.config.ts" }, "dependencies": { "@hono/node-server": "^1.19.5", - "@opencode-webui/shared": "file:../shared", + "@hono/node-ws": "^1.2.0", + "@opencode-manager/shared": "workspace:*", + "archiver": "^7.0.1", "dotenv": "^17.2.3", + "eventsource": "^4.1.0", + "grammy": "^1.39.3", "hono": "^4.10.1", + "node-cron": "^4.2.1", + "node-pty": "^1.1.0", + "socket.io": "^4.8.3", + "strip-json-comments": "^3.1.1", + "web-push": "^3.6.7", + "ws": "^8.18.3", "zod": "^4.1.12" }, "devDependencies": { + "@types/archiver": "^7.0.0", "@types/better-sqlite3": "^7.6.13", "@types/bun": "latest", + "@types/node-cron": "^3.0.11", + "@types/web-push": "^3.6.4", + "@types/ws": "^8.18.1", "@vitest/ui": "^3.2.4", + "axios": "^1.9.0", + "socket.io-client": "^4.8.3", "vitest": "^3.2.4" }, "peerDependencies": { diff 
--git a/backend/src/config.ts b/backend/src/config.ts deleted file mode 100644 index a99f6061..00000000 --- a/backend/src/config.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { config } from 'dotenv' - -config() - -export const ENV = { - PORT: parseInt(process.env.PORT || '5001'), - OPENCODE_SERVER_PORT: parseInt(process.env.OPENCODE_SERVER_PORT || '5551'), - HOST: process.env.HOST || '0.0.0.0', - DATABASE_PATH: process.env.DATABASE_PATH || './backend/data/opencode.db', - WORKSPACE_PATH: process.env.WORKSPACE_PATH || '~/.opencode-workspace', - PROCESS_START_WAIT_MS: parseInt(process.env.PROCESS_START_WAIT_MS || '2000'), - PROCESS_VERIFY_WAIT_MS: parseInt(process.env.PROCESS_VERIFY_WAIT_MS || '1000'), - HEALTH_CHECK_INTERVAL_MS: parseInt(process.env.HEALTH_CHECK_INTERVAL_MS || '5000'), - HEALTH_CHECK_TIMEOUT_MS: parseInt(process.env.HEALTH_CHECK_TIMEOUT_MS || '30000'), - MAX_FILE_SIZE_MB: parseInt(process.env.MAX_FILE_SIZE_MB || '50'), - MAX_UPLOAD_SIZE_MB: parseInt(process.env.MAX_UPLOAD_SIZE_MB || '50'), - SANDBOX_TTL_HOURS: parseInt(process.env.SANDBOX_TTL_HOURS || '24'), - CLEANUP_INTERVAL_MINUTES: parseInt(process.env.CLEANUP_INTERVAL_MINUTES || '60'), - DEBUG: process.env.DEBUG === 'true', - VITE_API_URL: process.env.VITE_API_URL || 'http://localhost:5001', - VITE_ANTHROPIC_API_KEY: process.env.VITE_ANTHROPIC_API_KEY || '', - VITE_OPENAI_API_KEY: process.env.VITE_OPENAI_API_KEY || '', - VITE_GOOGLE_API_KEY: process.env.VITE_GOOGLE_API_KEY || '' -} \ No newline at end of file diff --git a/backend/src/config/index.ts b/backend/src/config/index.ts new file mode 100644 index 00000000..4f7b1a59 --- /dev/null +++ b/backend/src/config/index.ts @@ -0,0 +1 @@ +export * from '../../../shared/src/config' diff --git a/backend/src/db/migrations.ts b/backend/src/db/migrations.ts index e9b2ec47..1e06e1e8 100644 --- a/backend/src/db/migrations.ts +++ b/backend/src/db/migrations.ts @@ -4,6 +4,45 @@ import { logger } from '../utils/logger' export function runMigrations(db: 
Database): void { try { const tableInfo = db.prepare("PRAGMA table_info(repos)").all() as any[] + + const repoUrlColumn = tableInfo.find((col: any) => col.name === 'repo_url') + if (repoUrlColumn && repoUrlColumn.notnull === 1) { + logger.info('Migrating repos table to allow nullable repo_url for local repos') + db.run('BEGIN TRANSACTION') + try { + db.run(` + CREATE TABLE repos_new ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + repo_url TEXT, + local_path TEXT NOT NULL, + branch TEXT, + default_branch TEXT, + clone_status TEXT NOT NULL, + cloned_at INTEGER NOT NULL, + last_pulled INTEGER, + opencode_config_name TEXT, + is_worktree BOOLEAN DEFAULT FALSE, + is_local BOOLEAN DEFAULT FALSE + ) + `) + + const existingColumns = tableInfo.map((col: any) => col.name) + const columnsToCopy = ['id', 'repo_url', 'local_path', 'branch', 'default_branch', 'clone_status', 'cloned_at', 'last_pulled', 'opencode_config_name', 'is_worktree', 'is_local'] + .filter(col => existingColumns.includes(col)) + + const columnsStr = columnsToCopy.join(', ') + db.run(`INSERT INTO repos_new (${columnsStr}) SELECT ${columnsStr} FROM repos`) + + db.run('DROP TABLE repos') + db.run('ALTER TABLE repos_new RENAME TO repos') + db.run('COMMIT') + logger.info('Successfully migrated repos table to allow nullable repo_url') + } catch (migrationError) { + db.run('ROLLBACK') + throw migrationError + } + } + const hasBranchColumn = tableInfo.some(col => col.name === 'branch') if (!hasBranchColumn) { @@ -21,13 +60,23 @@ export function runMigrations(db: Database): void { logger.debug('Index already exists or could not be created', error) } + try { + db.run(` + CREATE UNIQUE INDEX IF NOT EXISTS idx_local_path + ON repos(local_path) + `) + } catch (error) { + logger.debug('Local path index already exists or could not be created', error) + } + const requiredColumns = [ { name: 'default_branch', sql: 'ALTER TABLE repos ADD COLUMN default_branch TEXT' }, { name: 'clone_status', sql: 'ALTER TABLE repos ADD COLUMN 
clone_status TEXT NOT NULL DEFAULT "cloning"' }, { name: 'cloned_at', sql: 'ALTER TABLE repos ADD COLUMN cloned_at INTEGER NOT NULL DEFAULT 0' }, { name: 'last_pulled', sql: 'ALTER TABLE repos ADD COLUMN last_pulled INTEGER' }, { name: 'opencode_config_name', sql: 'ALTER TABLE repos ADD COLUMN opencode_config_name TEXT' }, - { name: 'is_worktree', sql: 'ALTER TABLE repos ADD COLUMN is_worktree BOOLEAN DEFAULT FALSE' } + { name: 'is_worktree', sql: 'ALTER TABLE repos ADD COLUMN is_worktree BOOLEAN DEFAULT FALSE' }, + { name: 'is_local', sql: 'ALTER TABLE repos ADD COLUMN is_local BOOLEAN DEFAULT FALSE' } ] for (const column of requiredColumns) { @@ -72,6 +121,28 @@ export function runMigrations(db: Database): void { logger.error('Failed to migrate local_path format:', error) } + try { + db.run(` + CREATE TABLE IF NOT EXISTS scheduled_tasks ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + schedule_type TEXT NOT NULL, + schedule_value TEXT NOT NULL, + command_type TEXT NOT NULL, + command_config TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'active', + last_run_at INTEGER, + next_run_at INTEGER, + created_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL + ) + `) + db.run('CREATE INDEX IF NOT EXISTS idx_scheduled_tasks_status ON scheduled_tasks(status)') + logger.info('Scheduled tasks table created/verified') + } catch (error) { + logger.debug('Scheduled tasks table might already exist:', error) + } + logger.info('Database migrations completed successfully') } catch (error) { logger.error('Failed to run database migrations:', error) diff --git a/backend/src/db/queries.ts b/backend/src/db/queries.ts index fbe4ae7a..4bbf1bec 100644 --- a/backend/src/db/queries.ts +++ b/backend/src/db/queries.ts @@ -1,11 +1,11 @@ import type { Database } from 'bun:sqlite' import type { Repo, CreateRepoInput } from '../types/repo' -import { getReposPath } from '../../../shared/src/constants' +import { getReposPath } from '@opencode-manager/shared/config/env' import path 
from 'path' export interface RepoRow { id: number - repo_url: string + repo_url?: string local_path: string branch?: string default_branch: string @@ -14,14 +14,16 @@ export interface RepoRow { last_pulled?: number opencode_config_name?: string is_worktree?: number + is_local?: number } function rowToRepo(row: RepoRow): Repo { + const isAbsolutePath = row.local_path.startsWith('/') return { id: row.id, repoUrl: row.repo_url, localPath: row.local_path, - fullPath: path.join(getReposPath(), row.local_path), + fullPath: isAbsolutePath ? row.local_path : path.join(getReposPath(), row.local_path), branch: row.branch, defaultBranch: row.default_branch, cloneStatus: row.clone_status as Repo['cloneStatus'], @@ -29,35 +31,59 @@ function rowToRepo(row: RepoRow): Repo { lastPulled: row.last_pulled, openCodeConfigName: row.opencode_config_name, isWorktree: row.is_worktree ? Boolean(row.is_worktree) : undefined, + isLocal: row.is_local ? Boolean(row.is_local) : undefined, } } export function createRepo(db: Database, repo: CreateRepoInput): Repo { + const normalizedPath = repo.localPath.trim().replace(/\/+$/, '') + + const existing = repo.isLocal + ? getRepoByLocalPath(db, normalizedPath) + : getRepoByUrlAndBranch(db, repo.repoUrl!, repo.branch) + + if (existing) { + return existing + } + const stmt = db.prepare(` - INSERT OR IGNORE INTO repos (repo_url, local_path, branch, default_branch, clone_status, cloned_at, is_worktree) - VALUES (?, ?, ?, ?, ?, ?, ?) + INSERT INTO repos (repo_url, local_path, branch, default_branch, clone_status, cloned_at, is_worktree, is_local) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) `) - const result = stmt.run( - repo.repoUrl, - repo.localPath, - repo.branch || null, - repo.defaultBranch, - repo.cloneStatus, - repo.clonedAt, - repo.isWorktree ? 
1 : 0 - ) - - if (result.changes === 0) { - const existing = getRepoByUrlAndBranch(db, repo.repoUrl, repo.branch) - if (existing) { - return existing + try { + const result = stmt.run( + repo.repoUrl || null, + normalizedPath, + repo.branch || null, + repo.defaultBranch, + repo.cloneStatus, + repo.clonedAt, + repo.isWorktree ? 1 : 0, + repo.isLocal ? 1 : 0 + ) + + const newRepo = getRepoById(db, Number(result.lastInsertRowid)) + if (!newRepo) { + throw new Error(`Failed to retrieve newly created repo with id ${result.lastInsertRowid}`) + } + return newRepo + } catch (error: any) { + if (error.message?.includes('UNIQUE constraint failed') || error.code === 'SQLITE_CONSTRAINT_UNIQUE') { + const conflictRepo = repo.isLocal + ? getRepoByLocalPath(db, normalizedPath) + : getRepoByUrlAndBranch(db, repo.repoUrl!, repo.branch) + + if (conflictRepo) { + return conflictRepo + } + + const identifier = repo.isLocal ? `path '${normalizedPath}'` : `url '${repo.repoUrl}' branch '${repo.branch || 'default'}'` + throw new Error(`Repository with ${identifier} already exists but could not be retrieved. This may indicate database corruption.`) } - throw new Error('Failed to create repo and no existing repo found') + + throw new Error(`Failed to create repository: ${error.message}`) } - - const newRepo = getRepoById(db, Number(result.lastInsertRowid))! - return newRepo } export function getRepoById(db: Database, id: number): Repo | null { @@ -87,6 +113,13 @@ export function getRepoByUrlAndBranch(db: Database, repoUrl: string, branch?: st return row ? rowToRepo(row) : null } +export function getRepoByLocalPath(db: Database, localPath: string): Repo | null { + const stmt = db.prepare('SELECT * FROM repos WHERE local_path = ?') + const row = stmt.get(localPath) as RepoRow | undefined + + return row ? 
rowToRepo(row) : null +} + export function listRepos(db: Database): Repo[] { const stmt = db.prepare('SELECT * FROM repos ORDER BY cloned_at DESC') const rows = stmt.all() as RepoRow[] diff --git a/backend/src/db/schema.ts b/backend/src/db/schema.ts index 49018d7e..f290817e 100644 --- a/backend/src/db/schema.ts +++ b/backend/src/db/schema.ts @@ -11,7 +11,7 @@ export function initializeDatabase(dbPath: string = './data/opencode.db'): Datab db.run(` CREATE TABLE IF NOT EXISTS repos ( id INTEGER PRIMARY KEY AUTOINCREMENT, - repo_url TEXT NOT NULL, + repo_url TEXT, local_path TEXT NOT NULL, branch TEXT, default_branch TEXT, @@ -20,8 +20,7 @@ export function initializeDatabase(dbPath: string = './data/opencode.db'): Datab last_pulled INTEGER, opencode_config_name TEXT, is_worktree BOOLEAN DEFAULT FALSE, - - UNIQUE(repo_url, branch) + is_local BOOLEAN DEFAULT FALSE ); CREATE INDEX IF NOT EXISTS idx_repo_clone_status ON repos(clone_status); @@ -49,10 +48,30 @@ export function initializeDatabase(dbPath: string = './data/opencode.db'): Datab CREATE INDEX IF NOT EXISTS idx_opencode_user_id ON opencode_configs(user_id); CREATE INDEX IF NOT EXISTS idx_opencode_default ON opencode_configs(user_id, is_default); + + CREATE TABLE IF NOT EXISTS scheduled_tasks ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + schedule_type TEXT NOT NULL, + schedule_value TEXT NOT NULL, + command_type TEXT NOT NULL, + command_config TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'active', + last_run_at INTEGER, + next_run_at INTEGER, + created_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL + ); + + CREATE INDEX IF NOT EXISTS idx_scheduled_tasks_status ON scheduled_tasks(status); `) runMigrations(db) + // Force database file creation by performing a write + db.prepare('INSERT OR IGNORE INTO user_preferences (user_id, preferences, updated_at) VALUES (?, ?, ?)') + .run('default', '{}', Date.now()) + logger.info('Database initialized successfully') return db diff --git 
a/backend/src/index.ts b/backend/src/index.ts index 250ae1bf..9f31e48f 100644 --- a/backend/src/index.ts +++ b/backend/src/index.ts @@ -1,92 +1,461 @@ import { serve } from '@hono/node-server' import { Hono } from 'hono' import { cors } from 'hono/cors' +import { basicAuth } from 'hono/basic-auth' import { serveStatic } from '@hono/node-server/serve-static' +import { Server as SocketIOServer } from 'socket.io' +import os from 'os' +import path from 'path' import { initializeDatabase } from './db/schema' import { createRepoRoutes } from './routes/repos' +import { createSessionRoutes } from './routes/sessions' import { createSettingsRoutes } from './routes/settings' import { createHealthRoutes } from './routes/health' -import { createSessionRoutes } from './routes/sessions' +import { createTTSRoutes, cleanupExpiredCache } from './routes/tts' +import { createSTTRoutes } from './routes/stt' import { createFileRoutes } from './routes/files' import { createProvidersRoutes } from './routes/providers' -import { ensureDirectoryExists } from './services/file-operations' +import { createOAuthRoutes } from './routes/oauth' +import { createTerminalRoutes, registerTerminalSocketIO } from './routes/terminal' +import { createPushRoutes } from './routes/push' +import { createTaskRoutes } from './routes/tasks' +import { terminalService } from './services/terminal' +import { schedulerService } from './services/scheduler' +import { whisperServerManager } from './services/whisper' +import { ensureDirectoryExists, writeFileContent, fileExists, readFileContent } from './services/file-operations' +import { SettingsService } from './services/settings' import { opencodeServerManager } from './services/opencode-single-server' -import { cleanupOrphanedDirectories } from './services/repo' +import { cleanupOrphanedDirectories, cleanupStaleRepoEntries, registerExternalDirectory, syncProjectsFromOpenCode } from './services/repo' import { proxyRequest } from './services/proxy' import { logger } 
from './utils/logger' -import { ENV } from './config' -import { getWorkspacePath, getReposPath, getConfigPath } from '../../shared/src/constants' +import { chatterboxServerManager } from './services/chatterbox' +import { startGlobalSSEListener, stopGlobalSSEListener } from './services/global-sse' +import { + getWorkspacePath, + getReposPath, + getConfigPath, + getOpenCodeConfigFilePath, + getAgentsMdPath, + getDatabasePath, + ENV +} from '@opencode-manager/shared/config/env' +import { OpenCodeConfigSchema } from '@opencode-manager/shared/schemas' -await import('dotenv/config') - -const { PORT, HOST, DATABASE_PATH: DB_PATH } = ENV +const { PORT, HOST } = ENV.SERVER +const DB_PATH = getDatabasePath() const app = new Hono() app.use('/*', cors({ - origin: '*', + origin: (origin) => origin || '', // Reflect the origin to support credentials + credentials: true, allowMethods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'], - allowHeaders: ['Content-Type', 'Authorization'], + allowHeaders: ['Content-Type', 'Authorization', 'x-forwarded-proto', 'x-forwarded-host'], })) +function getBasicAuthCredentials(): { username: string; password: string } | null { + const { AUTH_USERNAME, AUTH_PASSWORD } = ENV.SERVER + if (AUTH_USERNAME && AUTH_PASSWORD) { + return { username: AUTH_USERNAME, password: AUTH_PASSWORD } + } + + const authFilePath = path.join(os.homedir(), '.local', 'run', 'opencode-manager', 'auth.json') + try { + const fs = require('fs') + if (fs.existsSync(authFilePath)) { + const data = JSON.parse(fs.readFileSync(authFilePath, 'utf8')) + if (data.username && data.password) { + return { username: data.username, password: data.password } + } + } + } catch { + // Ignore errors reading auth file + } + + return null +} + +const authCredentials = getBasicAuthCredentials() +if (authCredentials) { + logger.info(`Basic authentication enabled for user: ${authCredentials.username}`) + app.use('/*', basicAuth({ + username: authCredentials.username, + password: 
authCredentials.password, + })) +} + const db = initializeDatabase(DB_PATH) +export const DEFAULT_AGENTS_MD = `# OpenCode Manager - Global Agent Instructions + +## Critical System Constraints + +- **DO NOT** use ports 5003 or 5551 - these are reserved for OpenCode Manager +- **DO NOT** kill or stop processes on ports 5003 or 5551 +- **DO NOT** modify files in the \`.config/opencode\` directory unless explicitly requested + +## Dev Server Ports + +When starting dev servers, use the pre-allocated ports 5100-5103: +- Port 5100: Primary dev server (frontend) +- Port 5101: Secondary dev server (API/backend) +- Port 5102: Additional service +- Port 5103: Additional service + +Always bind to \`0.0.0.0\` to allow external access from the Docker host. + +## Package Management + +### Node.js Packages +Prefer **pnpm** or **bun** over npm for installing dependencies to save disk space: +- Use \`pnpm install\` instead of \`npm install\` +- Use \`bun install\` as an alternative +- Both are pre-installed in the container + + ### Python Packages + Always create a virtual environment in the repository directory before installing packages: + + 1. Create virtual environment in repo: + \`cd \`\` + \`uv venv .venv\` + + 2. Activate the virtual environment: + \`source .venv/bin/activate\` # or \`uv pip sync\` for project-based workflows + + 3. Install packages into activated environment: + \`uv pip install \`\` + \`uv pip install -r requirements.txt\` + + 4. 
Run Python commands: + \`python script.py\` # Uses activated .venv + + Alternative: Use \`uv run python script.py\` to skip explicit activation + + **Important:** + - Always create .venv in the repository directory (not workspace root) + - Activate the environment before running pip operations + - uv is pre-installed in the container and provides faster package installation + - .venv directories created in repos will persist but can be removed safely + +## General Guidelines + +- This file is merged with any AGENTS.md files in individual repositories +- Repository-specific instructions take precedence for their respective codebases +` + +async function ensureDefaultConfigExists(): Promise { + const settingsService = new SettingsService(db) + const existingDbConfigs = settingsService.getOpenCodeConfigs() + + // Config already exists in database - nothing to do + if (existingDbConfigs.configs.length > 0) { + logger.info('OpenCode config already exists in database') + return + } + + // Try to import from existing OpenCode installation (highest priority) + const homeConfigPath = path.join(os.homedir(), '.config/opencode/opencode.json') + if (await fileExists(homeConfigPath)) { + logger.info(`Found existing OpenCode config at ${homeConfigPath}, importing...`) + try { + const content = await readFileContent(homeConfigPath) + const parsed = JSON.parse(content) + const validation = OpenCodeConfigSchema.safeParse(parsed) + + if (!validation.success) { + logger.warn('Existing config has invalid structure, will try other sources', validation.error) + } else { + settingsService.createOpenCodeConfig({ + name: 'default', + content: validation.data, + isDefault: true, + }) + logger.info('Successfully imported existing OpenCode config') + return + } + } catch (error) { + logger.warn('Failed to import existing config, will try other sources', error) + } + } + + // Try to import from workspace config (if user reinstalls and workspace persists) + const workspaceConfigPath = 
getOpenCodeConfigFilePath() + if (await fileExists(workspaceConfigPath)) { + logger.info(`Found workspace config, importing...`) + try { + const content = await readFileContent(workspaceConfigPath) + const parsed = JSON.parse(content) + const validation = OpenCodeConfigSchema.safeParse(parsed) + + if (!validation.success) { + logger.warn('Workspace config has invalid structure, will use defaults', validation.error) + } else { + settingsService.createOpenCodeConfig({ + name: 'default', + content: validation.data, + isDefault: true, + }) + logger.info('Successfully imported workspace config') + return + } + } catch (error) { + logger.warn('Failed to import workspace config, will use defaults', error) + } + } + + // No existing config found - create minimal seed config + logger.info('No existing OpenCode config found, creating minimal seed config') + settingsService.createOpenCodeConfig({ + name: 'default', + content: { + $schema: 'https://opencode.ai/config.json', + // Minimal seed - users can configure through Manager UI + }, + isDefault: true, + }) + logger.info('Created minimal seed OpenCode config') +} + +async function syncDefaultConfigToDisk(): Promise { + if (process.env.OPENCODE_CLIENT_MODE === 'true') { + logger.info('Client mode: skipping config sync to preserve existing server config') + return + } + + const settingsService = new SettingsService(db) + const managerConfig = settingsService.getDefaultOpenCodeConfig() + + if (!managerConfig) { + logger.info('No default OpenCode config found in database') + return + } + + const homeConfigPath = path.join(os.homedir(), '.config/opencode/opencode.json') + let userConfig: Record = {} + + if (await fileExists(homeConfigPath)) { + try { + const content = await readFileContent(homeConfigPath) + userConfig = JSON.parse(content) + logger.info('Found user local OpenCode config, will merge with Manager config') + } catch (error) { + logger.warn('Failed to read user config, using Manager config only:', error) + } + } + + 
const mergedConfig = { + ...managerConfig.content, + model: userConfig.model || managerConfig.content.model, + small_model: userConfig.small_model || managerConfig.content.small_model, + provider: { + ...(managerConfig.content.provider || {}), + ...(userConfig.provider as Record || {}), + }, + } + + const configPath = getOpenCodeConfigFilePath() + const configContent = JSON.stringify(mergedConfig, null, 2) + await writeFileContent(configPath, configContent) + logger.info(`Synced merged config to: ${configPath} (user model: ${userConfig.model || 'none'}, manager additions applied)`) +} + +async function ensureDefaultAgentsMdExists(): Promise { + const agentsMdPath = getAgentsMdPath() + const exists = await fileExists(agentsMdPath) + + if (!exists) { + await writeFileContent(agentsMdPath, DEFAULT_AGENTS_MD) + logger.info(`Created default AGENTS.md at: ${agentsMdPath}`) + } +} + try { await ensureDirectoryExists(getWorkspacePath()) await ensureDirectoryExists(getReposPath()) await ensureDirectoryExists(getConfigPath()) logger.info('Workspace directories initialized') - + await cleanupOrphanedDirectories(db) logger.info('Orphaned directory cleanup completed') - + + logger.info('Checking for stale repo entries...') + const staleCleanupResult = await cleanupStaleRepoEntries(db) + logger.info(`Stale repo cleanup result: ${staleCleanupResult.removed} removed`) + + await cleanupExpiredCache() + + await ensureDefaultConfigExists() + await syncDefaultConfigToDisk() + await ensureDefaultAgentsMdExists() + + const settingsService = new SettingsService(db) + settingsService.initializeLastKnownGoodConfig() + + opencodeServerManager.setDatabase(db) await opencodeServerManager.start() logger.info(`OpenCode server running on port ${opencodeServerManager.getPort()}`) + + if (opencodeServerManager.isClientMode()) { + const connectedDir = opencodeServerManager.getConnectedDirectory() + if (connectedDir) { + logger.info(`Client mode: registering connected directory as workspace: 
${connectedDir}`) + await registerExternalDirectory(db, connectedDir) + } + } + + try { + const syncResult = await syncProjectsFromOpenCode(db) + logger.info(`Synced ${syncResult.added} projects from OpenCode (${syncResult.skipped} skipped)`) + } catch (error) { + logger.warn('Failed to sync projects from OpenCode:', error) + } + + try { + startGlobalSSEListener(db) + logger.info('Global SSE listener started for push notifications') + } catch (error) { + logger.warn('Failed to start global SSE listener:', error) + } + + try { + await whisperServerManager.start() + logger.info(`Whisper STT server running on port ${whisperServerManager.getPort()}`) + } catch (error) { + logger.warn('Whisper server failed to start (STT will be unavailable):', error) + } + + try { + schedulerService.setDatabase(db) + await schedulerService.initialize() + logger.info('Scheduler service initialized') + } catch (error) { + logger.warn('Scheduler service failed to initialize:', error) + } + + // Chatterbox auto-start disabled - use on-demand via API + // try { + // await chatterboxServerManager.start() + // logger.info(`Chatterbox TTS server running on port ${chatterboxServerManager.getPort()}`) + // } catch (error) { + // logger.warn('Chatterbox server failed to start (TTS will be unavailable):', error) + // } } catch (error) { logger.error('Failed to initialize workspace:', error) } app.route('/api/repos', createRepoRoutes(db)) +app.route('/api/sessions', createSessionRoutes(db)) app.route('/api/settings', createSettingsRoutes(db)) app.route('/api/health', createHealthRoutes(db)) -app.route('/api/sessions', createSessionRoutes()) app.route('/api/files', createFileRoutes(db)) app.route('/api/providers', createProvidersRoutes()) +app.route('/api/oauth', createOAuthRoutes()) +app.route('/api/tts', createTTSRoutes(db)) +app.route('/api/stt', createSTTRoutes(db)) +app.route('/api/terminal', createTerminalRoutes()) +app.route('/api/push', createPushRoutes(db)) +app.route('/api/tasks', 
createTaskRoutes(db)) app.all('/api/opencode/*', async (c) => { const request = c.req.raw return proxyRequest(request) }) -const isProduction = process.env.NODE_ENV === 'production' +const isProduction = ENV.SERVER.NODE_ENV === 'production' if (isProduction) { - app.use('/*', async (c, next) => { + app.use('/*', serveStatic({ root: './frontend/dist' })) + + app.get('*', async (c) => { if (c.req.path.startsWith('/api/')) { - return next() + return c.notFound() } - return serveStatic({ root: './frontend/dist' })(c, next) - }) - app.get('*', async (c) => { - return serveStatic({ path: './frontend/dist/index.html' })(c, async () => {}) + const fs = await import('fs/promises') + const path = await import('path') + const indexPath = path.join(process.cwd(), 'frontend/dist/index.html') + const html = await fs.readFile(indexPath, 'utf-8') + return c.html(html) }) } else { - app.get('/', (c) => { + const VITE_DEV_SERVER = 'http://localhost:5173' + + app.get('/api/network-info', async (c) => { + const os = await import('os') + const interfaces = os.networkInterfaces() + const ips = Object.values(interfaces) + .flat() + .filter(info => info && !info.internal && info.family === 'IPv4') + .map(info => info!.address) + + const requestHost = c.req.header('host') || `localhost:${PORT}` + const protocol = c.req.header('x-forwarded-proto') || 'http' + return c.json({ - name: 'OpenCode WebUI', - version: '2.0.0', - status: 'running', - endpoints: { - health: '/api/health', - repos: '/api/repos', - settings: '/api/settings', - sessions: '/api/sessions', - files: '/api/files', - providers: '/api/providers', - opencode_proxy: '/api/opencode/*' - } + host: HOST, + port: PORT, + requestHost, + protocol, + availableIps: ips, + apiUrls: [ + `${protocol}://localhost:${PORT}`, + ...ips.map(ip => `${protocol}://${ip}:${PORT}`) + ] }) }) + + app.all('/*', async (c) => { + if (c.req.path.startsWith('/api/')) { + return c.notFound() + } + + try { + const url = new URL(c.req.url) + const targetUrl 
= `${VITE_DEV_SERVER}${url.pathname}${url.search}` + + const headers = new Headers() + c.req.raw.headers.forEach((value, key) => { + if (key.toLowerCase() !== 'host') { + headers.set(key, value) + } + }) + + const response = await fetch(targetUrl, { + method: c.req.method, + headers, + body: c.req.method !== 'GET' && c.req.method !== 'HEAD' ? c.req.raw.body : undefined, + }) + + const responseHeaders = new Headers() + response.headers.forEach((value, key) => { + if (key.toLowerCase() !== 'content-encoding') { + responseHeaders.set(key, value) + } + }) + + return new Response(response.body, { + status: response.status, + headers: responseHeaders, + }) + } catch (error) { + logger.error('Failed to proxy to Vite dev server:', error) + return c.json({ + name: 'OpenCode WebUI', + version: '2.0.0', + status: 'running', + note: 'Vite dev server not available. Start frontend with: pnpm dev:frontend', + endpoints: { + health: '/api/health', + repos: '/api/repos', + settings: '/api/settings', + sessions: '/api/sessions', + files: '/api/files', + providers: '/api/providers', + terminal: '/api/terminal', + opencode_proxy: '/api/opencode/*' + } + }) + } + }) } let isShuttingDown = false @@ -97,10 +466,20 @@ const shutdown = async (signal: string) => { logger.info(`${signal} received, shutting down gracefully...`) try { + stopGlobalSSEListener() + logger.info('Global SSE listener stopped') + terminalService.destroyAllSessions() + logger.info('Terminal sessions destroyed') + await schedulerService.shutdown() + logger.info('Scheduler service stopped') + await whisperServerManager.stop() + logger.info('Whisper server stopped') + await chatterboxServerManager.stop() + logger.info('Chatterbox server stopped') await opencodeServerManager.stop() logger.info('OpenCode server stopped') } catch (error) { - logger.error('Error stopping OpenCode server:', error) + logger.error('Error stopping services:', error) } process.exit(0) } @@ -108,10 +487,21 @@ const shutdown = async (signal: 
string) => { process.on('SIGTERM', () => shutdown('SIGTERM')) process.on('SIGINT', () => shutdown('SIGINT')) -serve({ +const server = serve({ fetch: app.fetch, port: PORT, hostname: HOST, }) -logger.info(`🚀 OpenCode WebUI API running on http://${HOST}:${PORT}`) \ No newline at end of file +const io = new SocketIOServer(server as any, { + path: '/api/terminal/socket.io', + cors: { + origin: true, // Reflect the request origin + credentials: true, + methods: ['GET', 'POST'] + } +}) + +registerTerminalSocketIO(io) + +logger.info(`🚀 OpenCode WebUI API running on http://${HOST}:${PORT}`) diff --git a/backend/src/routes/files.ts b/backend/src/routes/files.ts index 2a3ee55c..8da02a24 100644 --- a/backend/src/routes/files.ts +++ b/backend/src/routes/files.ts @@ -1,11 +1,9 @@ import { Hono } from 'hono' -import { serveStatic } from '@hono/node-server/serve-static' -import * as db from '../db/queries' import * as fileService from '../services/files' import type { Database } from 'bun:sqlite' import { logger } from '../utils/logger' -export function createFileRoutes(database: Database) { +export function createFileRoutes(_database: Database) { const app = new Hono() app.get('/*', async (c) => { @@ -13,6 +11,20 @@ export function createFileRoutes(database: Database) { const userPath = c.req.path.replace(/^\/api\/files\//, '') || '' const download = c.req.query('download') === 'true' const raw = c.req.query('raw') === 'true' + const startLineParam = c.req.query('startLine') + const endLineParam = c.req.query('endLine') + + if (startLineParam !== undefined && endLineParam !== undefined) { + const startLine = parseInt(startLineParam, 10) + const endLine = parseInt(endLineParam, 10) + + if (isNaN(startLine) || isNaN(endLine) || startLine < 0 || endLine < startLine) { + return c.json({ error: 'Invalid line range parameters' }, 400) + } + + const result = await fileService.getFileRange(userPath, startLine, endLine) + return c.json(result) + } const result = await 
fileService.getFile(userPath) @@ -54,7 +66,8 @@ export function createFileRoutes(database: Database) { return c.json({ error: 'No file provided' }, 400) } - const result = await fileService.uploadFile(path, file) + const relativePath = body.relativePath as string | undefined + const result = await fileService.uploadFile(path, file, relativePath) return c.json(result) } catch (error: any) { logger.error('Failed to upload file:', error) @@ -92,10 +105,15 @@ export function createFileRoutes(database: Database) { const path = c.req.path.replace(/^\/api\/files\//, '') || '' const body = await c.req.json() + if (body.patches && Array.isArray(body.patches)) { + const result = await fileService.applyFilePatches(path, body.patches) + return c.json(result) + } + const result = await fileService.renameOrMoveFile(path, body) return c.json(result) } catch (error: any) { - logger.error('Failed to rename/move file:', error) + logger.error('Failed to patch file:', error) return c.json({ error: error.message }, error.statusCode || 500) } }) diff --git a/backend/src/routes/health.ts b/backend/src/routes/health.ts index 1b494a68..01f48d65 100644 --- a/backend/src/routes/health.ts +++ b/backend/src/routes/health.ts @@ -9,16 +9,28 @@ export function createHealthRoutes(db: Database) { try { const dbCheck = db.prepare('SELECT 1').get() const opencodeHealthy = await opencodeServerManager.checkHealth() - - const status = dbCheck && opencodeHealthy ? 'healthy' : 'degraded' - - return c.json({ + const startupError = opencodeServerManager.getLastStartupError() + + const status = startupError && !opencodeHealthy + ? 'unhealthy' + : (dbCheck && opencodeHealthy ? 'healthy' : 'degraded') + + const response: Record = { status, timestamp: new Date().toISOString(), database: dbCheck ? 'connected' : 'disconnected', opencode: opencodeHealthy ? 
'healthy' : 'unhealthy', - opencodePort: opencodeServerManager.getPort() - }) + opencodePort: opencodeServerManager.getPort(), + opencodeVersion: opencodeServerManager.getVersion(), + opencodeMinVersion: opencodeServerManager.getMinVersion(), + opencodeVersionSupported: opencodeServerManager.isVersionSupported() + } + + if (startupError && !opencodeHealthy) { + response.error = startupError + } + + return c.json(response) } catch (error) { return c.json({ status: 'unhealthy', diff --git a/backend/src/routes/oauth.ts b/backend/src/routes/oauth.ts new file mode 100644 index 00000000..51ab1ded --- /dev/null +++ b/backend/src/routes/oauth.ts @@ -0,0 +1,124 @@ +import { Hono } from 'hono' +import { z } from 'zod' +import { proxyRequest } from '../services/proxy' +import { logger } from '../utils/logger' +import { ENV } from '@opencode-manager/shared/config/env' +import { + OAuthAuthorizeRequestSchema, + OAuthAuthorizeResponseSchema, + OAuthCallbackRequestSchema +} from '../../../shared/src/schemas/auth' +import { opencodeServerManager } from '../services/opencode-single-server' + +const OPENCODE_SERVER_URL = `http://${ENV.OPENCODE.HOST}:${ENV.OPENCODE.PORT}` + +export function createOAuthRoutes() { + const app = new Hono() + + app.post('/:id/oauth/authorize', async (c) => { + try { + const providerId = c.req.param('id') + const body = await c.req.json() + const validated = OAuthAuthorizeRequestSchema.parse(body) + + // Proxy to OpenCode server + const response = await proxyRequest( + new Request( + `${OPENCODE_SERVER_URL}/provider/${providerId}/oauth/authorize`, + { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(validated) + } + ) + ) + + if (!response.ok) { + const error = await response.text() + logger.error(`OAuth authorize failed for ${providerId}:`, error) + return c.json({ error: 'OAuth authorization failed' }, 500) + } + + const data = await response.json() + const validatedResponse = 
OAuthAuthorizeResponseSchema.parse(data) + + return c.json(validatedResponse) + } catch (error) { + logger.error('OAuth authorize error:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request data', details: error.issues }, 400) + } + return c.json({ error: 'OAuth authorization failed' }, 500) + } + }) + + app.post('/:id/oauth/callback', async (c) => { + try { + const providerId = c.req.param('id') + const body = await c.req.json() + const validated = OAuthCallbackRequestSchema.parse(body) + + // Proxy to OpenCode server + const response = await proxyRequest( + new Request( + `${OPENCODE_SERVER_URL}/provider/${providerId}/oauth/callback`, + { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(validated) + } + ) + ) + + if (!response.ok) { + const error = await response.text() + logger.error(`OAuth callback failed for ${providerId}:`, error) + return c.json({ error: 'OAuth callback failed' }, 500) + } + + const data = await response.json() + + logger.info(`OAuth callback successful for ${providerId}, restarting OpenCode server`) + await opencodeServerManager.restart() + + return c.json(data) + } catch (error) { + logger.error('OAuth callback error:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request data', details: error.issues }, 400) + } + return c.json({ error: 'OAuth callback failed' }, 500) + } + }) + + app.get('/auth-methods', async (c) => { + try { + // Proxy to OpenCode server + const response = await proxyRequest( + new Request(`${OPENCODE_SERVER_URL}/provider/auth`, { + method: 'GET', + headers: { 'Content-Type': 'application/json' } + }) + ) + + if (!response.ok) { + const error = await response.text() + logger.error('Failed to get provider auth methods:', error) + return c.json({ error: 'Failed to get provider auth methods' }, 500) + } + + const data = await response.json() + + // The OpenCode server returns the format we need directly + return 
c.json({ providers: data }) + } catch (error) { + logger.error('Provider auth methods error:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid response data', details: error.issues }, 500) + } + return c.json({ error: 'Failed to get provider auth methods' }, 500) + } + }) + + return app +} diff --git a/backend/src/routes/push.ts b/backend/src/routes/push.ts new file mode 100644 index 00000000..b0d686d6 --- /dev/null +++ b/backend/src/routes/push.ts @@ -0,0 +1,133 @@ +import { Hono } from 'hono' +import { z } from 'zod' +import type { Database } from 'bun:sqlite' +import { logger } from '../utils/logger' +import { + getVapidPublicKey, + saveSubscription, + removeSubscription, + sendPushNotification, + initPushTable, + type PushSubscription, + type PushPayload, +} from '../services/push' + +const SubscribeSchema = z.object({ + endpoint: z.string().url(), + keys: z.object({ + p256dh: z.string(), + auth: z.string(), + }), +}) + +const UnsubscribeSchema = z.object({ + endpoint: z.string().url(), +}) + +const SendNotificationSchema = z.object({ + title: z.string(), + body: z.string(), + tag: z.string().optional(), + url: z.string().optional(), + sessionId: z.string().optional(), + repoId: z.string().optional(), + requireInteraction: z.boolean().optional(), +}) + +export function createPushRoutes(db: Database) { + initPushTable(db) + + const app = new Hono() + + app.get('/vapid-public-key', (c) => { + return c.json({ publicKey: getVapidPublicKey() }) + }) + + app.post('/subscribe', async (c) => { + try { + const body = await c.req.json() + const subscription = SubscribeSchema.parse(body) as PushSubscription + const userId = c.req.query('userId') || 'default' + + saveSubscription(db, subscription, userId) + + return c.json({ success: true, message: 'Subscription saved' }) + } catch (error) { + logger.error('Failed to save push subscription:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid subscription data', 
details: error.issues }, 400) + } + return c.json({ error: 'Failed to save subscription' }, 500) + } + }) + + app.post('/unsubscribe', async (c) => { + try { + const body = await c.req.json() + const { endpoint } = UnsubscribeSchema.parse(body) + + removeSubscription(db, endpoint) + + return c.json({ success: true, message: 'Subscription removed' }) + } catch (error) { + logger.error('Failed to remove push subscription:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request data', details: error.issues }, 400) + } + return c.json({ error: 'Failed to remove subscription' }, 500) + } + }) + + app.post('/send', async (c) => { + try { + const body = await c.req.json() + const payload = SendNotificationSchema.parse(body) as PushPayload + const userId = c.req.query('userId') + + const result = await sendPushNotification(db, payload, userId) + + return c.json({ sent: true, ...result }) + } catch (error) { + logger.error('Failed to send push notification:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid notification data', details: error.issues }, 400) + } + return c.json({ error: 'Failed to send notification' }, 500) + } + }) + + app.post('/test', async (c) => { + try { + const userId = c.req.query('userId') + + const payload: PushPayload = { + title: 'Test Notification', + body: 'Push notifications are working!', + tag: 'test-notification', + } + + const result = await sendPushNotification(db, payload, userId) + + if (result.success === 0) { + return c.json({ + sent: false, + message: 'No active subscriptions found. 
Please enable push notifications first.', + successCount: result.success, + failedCount: result.failed, + }) + } + + return c.json({ + sent: true, + message: 'Test notification sent', + successCount: result.success, + failedCount: result.failed, + }) + } catch (error) { + logger.error('Failed to send test notification:', error) + return c.json({ error: 'Failed to send test notification' }, 500) + } + }) + + return app +} diff --git a/backend/src/routes/repos.ts b/backend/src/routes/repos.ts index d9681a25..5fd288e2 100644 --- a/backend/src/routes/repos.ts +++ b/backend/src/routes/repos.ts @@ -2,12 +2,16 @@ import { Hono } from 'hono' import type { Database } from 'bun:sqlite' import * as db from '../db/queries' import * as repoService from '../services/repo' +import { GitAuthenticationError, syncProjectsFromOpenCode } from '../services/repo' +import * as gitOperations from '../services/git-operations' +import * as archiveService from '../services/archive' import { SettingsService } from '../services/settings' import { writeFileContent } from '../services/file-operations' +import { opencodeServerManager } from '../services/opencode-single-server' import { logger } from '../utils/logger' -import { withTransactionAsync } from '../db/transactions' +import { getOpenCodeConfigFilePath } from '@opencode-manager/shared/config/env' +import { summarizeSession, getCachedSummary } from '../services/summarization' import path from 'path' -import { getReposPath } from '../../../shared/src/constants' export function createRepoRoutes(database: Database) { const app = new Hono() @@ -15,28 +19,37 @@ export function createRepoRoutes(database: Database) { app.post('/', async (c) => { try { const body = await c.req.json() - const { repoUrl, branch, openCodeConfigName, useWorktree } = body + const { repoUrl, localPath, branch, openCodeConfigName, useWorktree } = body - if (!repoUrl) { - return c.json({ error: 'repoUrl is required' }, 400) + if (!repoUrl && !localPath) { + return 
c.json({ error: 'Either repoUrl or localPath is required' }, 400) } - const repo = await repoService.cloneRepo( - database, - repoUrl, - branch, - useWorktree - ) + let repo + if (localPath) { + repo = await repoService.initLocalRepo( + database, + localPath, + branch + ) + } else { + repo = await repoService.cloneRepo( + database, + repoUrl!, + branch, + useWorktree + ) + } if (openCodeConfigName) { const settingsService = new SettingsService(database) const configContent = settingsService.getOpenCodeConfigContent(openCodeConfigName) if (configContent) { - const workingDir = path.join(getReposPath(), repo.localPath) - const configPath = `${workingDir}/opencode.json` - await writeFileContent(configPath, configContent) + const openCodeConfigPath = getOpenCodeConfigFilePath() + await writeFileContent(openCodeConfigPath, configContent) db.updateRepoConfigName(database, repo.id, openCodeConfigName) + logger.info(`Applied config '${openCodeConfigName}' to: ${openCodeConfigPath}`) } } @@ -90,9 +103,7 @@ export function createRepoRoutes(database: Database) { return c.json({ error: 'Repo not found' }, 404) } - await withTransactionAsync(database, async (db) => { - await repoService.deleteRepoFiles(db, id) - }) + await repoService.deleteRepoFiles(database, id) return c.json({ success: true }) } catch (error: any) { @@ -137,13 +148,18 @@ export function createRepoRoutes(database: Database) { return c.json({ error: `Config '${configName}' not found` }, 404) } - const workingDir = path.join(getReposPath(), repo.localPath) - const configPath = `${workingDir}/opencode.json` + const openCodeConfigPath = getOpenCodeConfigFilePath() + + await writeFileContent(openCodeConfigPath, configContent) - await writeFileContent(configPath, configContent) db.updateRepoConfigName(database, id, configName) logger.info(`Switched config for repo ${id} to '${configName}'`) + logger.info(`Updated OpenCode config: ${openCodeConfigPath}`) + + logger.info('Restarting OpenCode server due to workspace 
config change') + await opencodeServerManager.stop() + await opencodeServerManager.start() const updatedRepo = db.getRepoById(database, id) return c.json(updatedRepo) @@ -177,6 +193,40 @@ export function createRepoRoutes(database: Database) { return c.json({ ...updatedRepo, currentBranch }) } catch (error: any) { logger.error('Failed to switch branch:', error) + if (error instanceof GitAuthenticationError) { + return c.json({ error: error.message, code: 'AUTH_FAILED' }, 401) + } + return c.json({ error: error.message }, 500) + } + }) + + app.post('/:id/branch/create', async (c) => { + try { + const id = parseInt(c.req.param('id')) + const repo = db.getRepoById(database, id) + + if (!repo) { + return c.json({ error: 'Repo not found' }, 404) + } + + const body = await c.req.json() + const { branch } = body + + if (!branch) { + return c.json({ error: 'branch is required' }, 400) + } + + await repoService.createBranch(database, id, branch) + + const updatedRepo = db.getRepoById(database, id) + const currentBranch = await repoService.getCurrentBranch(updatedRepo!) 
+ + return c.json({ ...updatedRepo, currentBranch }) + } catch (error: any) { + logger.error('Failed to create branch:', error) + if (error instanceof GitAuthenticationError) { + return c.json({ error: error.message, code: 'AUTH_FAILED' }, 401) + } return c.json({ error: error.message }, 500) } }) @@ -190,7 +240,8 @@ export function createRepoRoutes(database: Database) { return c.json({ error: 'Repo not found' }, 404) } - const branches = await repoService.listBranches(repo) + const branches = await repoService.listBranches(database, repo) + return c.json(branches) } catch (error: any) { @@ -198,6 +249,183 @@ export function createRepoRoutes(database: Database) { return c.json({ error: error.message }, 500) } }) + + app.get('/:id/git/status', async (c) => { + try { + const id = parseInt(c.req.param('id')) + const repo = db.getRepoById(database, id) + + if (!repo) { + return c.json({ error: 'Repo not found' }, 404) + } + + const status = await gitOperations.getGitStatus(repo.fullPath, database) + + + return c.json(status) + } catch (error: any) { + logger.error('Failed to get git status:', error) + return c.json({ error: error.message }, 500) + } + }) + + app.get('/:id/git/diff', async (c) => { + try { + const id = parseInt(c.req.param('id')) + const filePath = c.req.query('path') + + if (!filePath) { + return c.json({ error: 'path query parameter is required' }, 400) + } + + const repo = db.getRepoById(database, id) + + if (!repo) { + return c.json({ error: 'Repo not found' }, 404) + } + + const diff = await gitOperations.getFileDiff(repo.fullPath, filePath, database) + + + return c.json(diff) + } catch (error: any) { + logger.error('Failed to get file diff:', error) + return c.json({ error: error.message }, 500) + } + }) + + app.get('/:id/download', async (c) => { + try { + const id = parseInt(c.req.param('id')) + const repo = db.getRepoById(database, id) + + if (!repo) { + return c.json({ error: 'Repo not found' }, 404) + } + + const repoName = 
path.basename(repo.localPath) + + logger.info(`Starting archive creation for repo ${id}: ${repo.fullPath}`) + const archivePath = await archiveService.createRepoArchive(repo.fullPath) + const archiveSize = await archiveService.getArchiveSize(archivePath) + const archiveStream = archiveService.getArchiveStream(archivePath) + + archiveStream.on('end', () => { + archiveService.deleteArchive(archivePath) + }) + + archiveStream.on('error', () => { + archiveService.deleteArchive(archivePath) + }) + + return new Response(archiveStream as unknown as ReadableStream, { + headers: { + 'Content-Type': 'application/zip', + 'Content-Disposition': `attachment; filename="${repoName}.zip"`, + 'Content-Length': archiveSize.toString(), + } + }) + } catch (error: any) { + logger.error('Failed to create repo archive:', error) + return c.json({ error: error.message }, 500) + } + }) + + app.post('/sync', async (c) => { + try { + const result = await syncProjectsFromOpenCode(database) + return c.json({ + success: true, + added: result.added, + skipped: result.skipped + }) + } catch (error: any) { + logger.error('Failed to sync projects from OpenCode:', error) + return c.json({ error: error.message }, 500) + } + }) + + app.get('/:id/sessions/summaries', async (c) => { + try { + const id = parseInt(c.req.param('id')) + const repo = db.getRepoById(database, id) + + if (!repo) { + return c.json({ error: 'Repo not found' }, 404) + } + + const opencodePort = opencodeServerManager.getPort() + const directory = repo.fullPath + + const sessionsRes = await fetch( + `http://127.0.0.1:${opencodePort}/session?directory=${encodeURIComponent(directory)}` + ) + + if (!sessionsRes.ok) { + return c.json({ error: 'Failed to fetch sessions' }, 500) + } + + const sessions = await sessionsRes.json() + + const summaries: Record = {} + + for (const session of sessions.slice(0, 20)) { + const cached = getCachedSummary(session.id) + if (cached) { + summaries[session.id] = cached + } else { + summaries[session.id] 
= null + } + } + + return c.json({ summaries, sessionCount: sessions.length }) + } catch (error: any) { + logger.error('Failed to get session summaries:', error) + return c.json({ error: error.message }, 500) + } + }) + + app.post('/:id/sessions/:sessionId/summarize', async (c) => { + try { + const id = parseInt(c.req.param('id')) + const sessionId = c.req.param('sessionId') + const repo = db.getRepoById(database, id) + + if (!repo) { + return c.json({ error: 'Repo not found' }, 404) + } + + const opencodePort = opencodeServerManager.getPort() + const directory = repo.fullPath + + const sessionRes = await fetch( + `http://127.0.0.1:${opencodePort}/session/${sessionId}?directory=${encodeURIComponent(directory)}` + ) + + if (!sessionRes.ok) { + return c.json({ error: 'Session not found' }, 404) + } + + const session = await sessionRes.json() + + const messagesRes = await fetch( + `http://127.0.0.1:${opencodePort}/session/${sessionId}/message?directory=${encodeURIComponent(directory)}` + ) + + const messages = messagesRes.ok ? 
await messagesRes.json() : [] + + const summary = await summarizeSession(sessionId, session.title || '', messages) + + return c.json({ + sessionId, + summary: summary || session.title || 'No summary available', + cached: getCachedSummary(sessionId) !== null + }) + } catch (error: any) { + logger.error('Failed to summarize session:', error) + return c.json({ error: error.message }, 500) + } + }) return app } diff --git a/backend/src/routes/sessions.ts b/backend/src/routes/sessions.ts index 671e8451..2bd0eed7 100644 --- a/backend/src/routes/sessions.ts +++ b/backend/src/routes/sessions.ts @@ -1,152 +1,149 @@ import { Hono } from 'hono' +import type { Database } from 'bun:sqlite' +import * as db from '../db/queries' +import { opencodeServerManager } from '../services/opencode-single-server' import { logger } from '../utils/logger' -import { ENV } from '../config' -const OPENCODE_SERVER_PORT = ENV.OPENCODE_SERVER_PORT +interface SessionWithRepo { + id: string + title: string + directory: string + repoId?: number + repoName?: string + status?: 'idle' | 'busy' | 'retry' + summary?: string + time: { + created: number + updated: number + } +} -export function createSessionRoutes() { - const app = new Hono() +interface MessagePart { + type: string + text?: string +} - app.post('/', async (c) => { - const body = await c.req.json() - - try { - const response = await fetch(`http://localhost:${OPENCODE_SERVER_PORT}/session`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(body), - }) - - if (!response.ok) { - logger.error(`Session creation failed: ${response.status} ${response.statusText}`) - return c.json({ error: 'Session creation failed', status: response.status }, response.status as any) - } - - const data = await response.json() - return c.json(data) - } catch (error) { - logger.error('Failed to create session:', error) - return c.json({ error: 'Failed to create session' }, 500) - } - }) +interface SessionMessage { + info: 
{ + id: string + role: string + } + parts: MessagePart[] +} - app.post('/:sessionID/command', async (c) => { - const sessionID = c.req.param('sessionID') - const body = await c.req.json() +async function getSessionSummary( + opencodePort: number, + sessionId: string, + directory: string +): Promise { + try { + const messagesRes = await fetch( + `http://127.0.0.1:${opencodePort}/session/${sessionId}/message?directory=${encodeURIComponent(directory)}` + ) + if (!messagesRes.ok) return undefined - try { - const response = await fetch(`http://localhost:${OPENCODE_SERVER_PORT}/session/${sessionID}/command`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(body), - }) - - if (!response.ok) { - logger.error(`Command failed: ${response.status} ${response.statusText}`) - return c.json({ error: 'Command failed', status: response.status }, response.status as any) - } - - const data = await response.json() - return c.json(data) - } catch (error) { - logger.error('Failed to send command:', error) - return c.json({ error: 'Failed to send command' }, 500) - } - }) - - app.post('/:sessionID/shell', async (c) => { - const sessionID = c.req.param('sessionID') - const body = await c.req.json() + const messages = await messagesRes.json() as SessionMessage[] - try { - const response = await fetch(`http://localhost:${OPENCODE_SERVER_PORT}/session/${sessionID}/shell`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(body), - }) - - if (!response.ok) { - logger.error(`Shell command failed: ${response.status} ${response.statusText}`) - return c.json({ error: 'Shell command failed', status: response.status }, response.status as any) + for (const msg of messages) { + if (msg.info.role === 'user' && msg.parts?.length > 0) { + const textPart = msg.parts.find(p => p.type === 'text' && p.text) + if (textPart?.text) { + const text = textPart.text.trim() + return text.length > 120 ? 
text.slice(0, 117) + '...' : text + } } - - const data = await response.json() - return c.json(data) - } catch (error) { - logger.error('Failed to send shell command:', error) - return c.json({ error: 'Failed to send shell command' }, 500) } - }) + } catch { + // Ignore errors fetching messages + } + return undefined +} - app.post('/:sessionID/abort', async (c) => { - const sessionID = c.req.param('sessionID') - +function getRepoDisplayName(repo: { repoUrl?: string | null; localPath?: string | null; fullPath: string }): string { + if (repo.repoUrl) { + const match = repo.repoUrl.match(/\/([^/]+?)(?:\.git)?$/) + return match ? match[1] : repo.repoUrl + } + if (repo.localPath) { + return repo.localPath.split('/').pop() || repo.localPath + } + return repo.fullPath.split('/').pop() || repo.fullPath +} + +export function createSessionRoutes(database: Database) { + const app = new Hono() + + app.get('/recent', async (c) => { try { - const response = await fetch(`http://localhost:${OPENCODE_SERVER_PORT}/session/${sessionID}/abort`, { - method: 'POST', - }) + const hoursParam = c.req.query('hours') + const hours = hoursParam ? 
parseInt(hoursParam, 10) : 8 + const cutoffTime = Date.now() - (hours * 60 * 60 * 1000) - if (!response.ok) { - logger.error(`Abort failed: ${response.status} ${response.statusText}`) - return c.json({ error: 'Abort failed', status: response.status }, response.status as any) - } + const repos = db.listRepos(database) + const opencodePort = opencodeServerManager.getPort() - const data = await response.json() - return c.json(data) - } catch (error) { - logger.error('Failed to abort session:', error) - return c.json({ error: 'Failed to abort session' }, 500) - } - }) - - app.get('/:sessionID/message', async (c) => { - const sessionID = c.req.param('sessionID') - - try { - const response = await fetch(`http://localhost:${OPENCODE_SERVER_PORT}/session/${sessionID}/message`) + const recentSessions: SessionWithRepo[] = [] - if (!response.ok) { - logger.error(`Get messages failed: ${response.status} ${response.statusText}`) - return c.json({ error: 'Get messages failed', status: response.status }, response.status as any) + let sessionStatuses: Record = {} + try { + const statusRes = await fetch(`http://127.0.0.1:${opencodePort}/session/status`) + if (statusRes.ok) { + sessionStatuses = await statusRes.json() + } + } catch (err) { + logger.warn('Failed to fetch session statuses:', err) } - const data = await response.json() - return c.json(data) - } catch (error) { - logger.error('Failed to get messages:', error) - return c.json({ error: 'Failed to get messages' }, 500) - } - }) - - app.post('/:sessionID/message', async (c) => { - const sessionID = c.req.param('sessionID') - const body = await c.req.json() - - try { - const response = await fetch(`http://localhost:${OPENCODE_SERVER_PORT}/session/${sessionID}/message`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(body), - }) - - if (!response.ok) { - logger.error(`Send message failed: ${response.status} ${response.statusText}`) - return c.json({ error: 'Send message 
failed', status: response.status }, response.status as any) + for (const repo of repos) { + try { + const sessionsRes = await fetch( + `http://127.0.0.1:${opencodePort}/session?directory=${encodeURIComponent(repo.fullPath)}` + ) + + if (!sessionsRes.ok) continue + + const sessions = await sessionsRes.json() as Array<{ + id: string + title?: string + directory: string + parentID?: string + time: { created: number; updated: number } + }> + + for (const session of sessions) { + if (session.parentID) continue + if (session.time.updated < cutoffTime) continue + + const status = sessionStatuses[session.id] + const summary = await getSessionSummary(opencodePort, session.id, session.directory) + + recentSessions.push({ + id: session.id, + title: session.title || 'Untitled Session', + directory: session.directory, + repoId: repo.id, + repoName: getRepoDisplayName(repo), + status: (status?.type as 'idle' | 'busy' | 'retry') || 'idle', + summary, + time: session.time, + }) + } + } catch (err) { + logger.warn(`Failed to fetch sessions for repo ${repo.id}:`, err) + } } - const data = await response.json() - return c.json(data) - } catch (error) { - logger.error('Failed to send message:', error) - return c.json({ error: 'Failed to send message' }, 500) + recentSessions.sort((a, b) => b.time.updated - a.time.updated) + + return c.json({ + sessions: recentSessions, + cutoffTime, + count: recentSessions.length, + }) + } catch (error: unknown) { + logger.error('Failed to get recent sessions:', error) + const message = error instanceof Error ? 
error.message : 'Unknown error' + return c.json({ error: message }, 500) } }) diff --git a/backend/src/routes/settings.ts b/backend/src/routes/settings.ts index 1a0c6b79..b4cd74f7 100644 --- a/backend/src/routes/settings.ts +++ b/backend/src/routes/settings.ts @@ -2,13 +2,21 @@ import { Hono } from 'hono' import { z } from 'zod' import type { Database } from 'bun:sqlite' import { SettingsService } from '../services/settings' +import { writeFileContent, readFileContent, fileExists } from '../services/file-operations' +import { patchOpenCodeConfig, proxyToOpenCodeWithDirectory } from '../services/proxy' +import { getOpenCodeConfigFilePath, getAgentsMdPath } from '@opencode-manager/shared/config/env' import { UserPreferencesSchema, OpenCodeConfigSchema, - OpenCodeConfigMetadataSchema, - CustomCommandSchema } from '../types/settings' import { logger } from '../utils/logger' +import { opencodeServerManager } from '../services/opencode-single-server' +import { DEFAULT_AGENTS_MD } from '../index' +import { createGitHubGitEnv } from '../utils/git-auth' +import { exec } from 'child_process' +import { promisify } from 'util' + +const execAsync = promisify(exec) const UpdateSettingsSchema = z.object({ preferences: UserPreferencesSchema.partial(), @@ -16,15 +24,17 @@ const UpdateSettingsSchema = z.object({ const CreateOpenCodeConfigSchema = z.object({ name: z.string().min(1).max(255), - content: OpenCodeConfigSchema, + content: z.union([OpenCodeConfigSchema, z.string()]), isDefault: z.boolean().optional(), }) const UpdateOpenCodeConfigSchema = z.object({ - content: OpenCodeConfigSchema, + content: z.union([OpenCodeConfigSchema, z.string()]), isDefault: z.boolean().optional(), }) + + const CreateCustomCommandSchema = z.object({ name: z.string().min(1).max(255), description: z.string().min(1).max(1000), @@ -36,6 +46,21 @@ const UpdateCustomCommandSchema = z.object({ promptTemplate: z.string().min(1).max(10000), }) +const ValidateGitTokenSchema = z.object({ + gitToken: 
z.string(), +}) + +const ConnectMcpDirectorySchema = z.object({ + directory: z.string().min(1), +}) + +async function extractOpenCodeError(response: Response, defaultError: string): Promise { + const errorObj = await response.json().catch(() => null) + return (errorObj && typeof errorObj === 'object' && 'error' in errorObj) + ? String(errorObj.error) + : defaultError +} + export function createSettingsRoutes(db: Database) { const app = new Hono() const settingsService = new SettingsService(db) @@ -57,8 +82,18 @@ export function createSettingsRoutes(db: Database) { const body = await c.req.json() const validated = UpdateSettingsSchema.parse(body) + const currentSettings = settingsService.getSettings(userId) const settings = settingsService.updateSettings(validated.preferences, userId) - return c.json(settings) + + let serverRestarted = false + if (validated.preferences.gitToken !== undefined && + validated.preferences.gitToken !== currentSettings.preferences.gitToken) { + logger.info('GitHub token changed, restarting OpenCode server') + await opencodeServerManager.restart() + serverRestarted = true + } + + return c.json({ ...settings, serverRestarted }) } catch (error) { logger.error('Failed to update settings:', error) if (error instanceof z.ZodError) { @@ -98,12 +133,25 @@ export function createSettingsRoutes(db: Database) { const validated = CreateOpenCodeConfigSchema.parse(body) const config = settingsService.createOpenCodeConfig(validated, userId) + + if (config.isDefault) { + const configPath = getOpenCodeConfigFilePath() + const configContent = JSON.stringify(config.content, null, 2) + await writeFileContent(configPath, configContent) + logger.info(`Wrote default config to: ${configPath}`) + + await patchOpenCodeConfig(config.content) + } + return c.json(config) } catch (error) { logger.error('Failed to create OpenCode config:', error) if (error instanceof z.ZodError) { return c.json({ error: 'Invalid config data', details: error.issues }, 400) } + if (error 
instanceof Error && error.message.includes('already exists')) { + return c.json({ error: error.message }, 409) + } return c.json({ error: 'Failed to create OpenCode config' }, 500) } }) @@ -120,6 +168,15 @@ export function createSettingsRoutes(db: Database) { return c.json({ error: 'Config not found' }, 404) } + if (config.isDefault) { + const configPath = getOpenCodeConfigFilePath() + const configContent = JSON.stringify(config.content, null, 2) + await writeFileContent(configPath, configContent) + logger.info(`Wrote default config to: ${configPath}`) + + await patchOpenCodeConfig(config.content) + } + return c.json(config) } catch (error) { logger.error('Failed to update OpenCode config:', error) @@ -151,11 +208,20 @@ export function createSettingsRoutes(db: Database) { try { const userId = c.req.query('userId') || 'default' const configName = c.req.param('name') - + + settingsService.saveLastKnownGoodConfig(userId) + const config = settingsService.setDefaultOpenCodeConfig(configName, userId) if (!config) { return c.json({ error: 'Config not found' }, 404) } + + const configPath = getOpenCodeConfigFilePath() + const configContent = JSON.stringify(config.content, null, 2) + await writeFileContent(configPath, configContent) + logger.info(`Wrote default config '${configName}' to: ${configPath}`) + + await patchOpenCodeConfig(config.content) return c.json(config) } catch (error) { @@ -180,6 +246,81 @@ export function createSettingsRoutes(db: Database) { } }) + app.post('/opencode-restart', async (c) => { + try { + logger.info('Manual OpenCode server restart requested') + opencodeServerManager.clearStartupError() + await opencodeServerManager.restart() + return c.json({ success: true, message: 'OpenCode server restarted successfully' }) + } catch (error) { + logger.error('Failed to restart OpenCode server:', error) + const startupError = opencodeServerManager.getLastStartupError() + return c.json({ + error: 'Failed to restart OpenCode server', + details: startupError 
|| (error instanceof Error ? error.message : 'Unknown error') + }, 500) + } + }) + + app.post('/opencode-rollback', async (c) => { + try { + const userId = c.req.query('userId') || 'default' + logger.info('OpenCode config rollback requested') + + const rollbackConfig = settingsService.rollbackToLastKnownGoodHealth(userId) + if (!rollbackConfig) { + return c.json({ error: 'No previous working config available for rollback' }, 404) + } + + const configPath = getOpenCodeConfigFilePath() + const config = settingsService.getDefaultOpenCodeConfig(userId) + if (!config) { + return c.json({ error: 'Failed to get default config after rollback' }, 500) + } + + const configContent = JSON.stringify(config.content, null, 2) + await writeFileContent(configPath, configContent) + logger.info(`Rolled back to config '${rollbackConfig}'`) + + opencodeServerManager.clearStartupError() + try { + await opencodeServerManager.restart() + } catch (restartError) { + logger.error('Rollback config also failed to start server, attempting fallback:', restartError) + + const deleted = settingsService.deleteFilesystemConfig() + if (deleted) { + logger.info('Deleted filesystem config, attempting restart with fallback') + await new Promise(r => setTimeout(r, 1000)) + + opencodeServerManager.clearStartupError() + await opencodeServerManager.restart() + + return c.json({ + success: true, + message: `Server restarted after deleting problematic config. DB config '${rollbackConfig}' preserved for manual recovery.`, + fallback: true, + configName: rollbackConfig + }) + } + + return c.json({ + error: 'Failed to rollback and could not delete filesystem config', + details: restartError instanceof Error ? 
restartError.message : 'Unknown error' + }, 500) + } + + return c.json({ + success: true, + message: `Server restarted with previous working config: ${rollbackConfig}`, + configName: rollbackConfig + }) + } catch (error) { + logger.error('Failed to rollback OpenCode config:', error) + return c.json({ error: 'Failed to rollback OpenCode config' }, 500) + } + }) + // Custom Commands routes app.get('/custom-commands', async (c) => { try { @@ -204,7 +345,7 @@ export function createSettingsRoutes(db: Database) { return c.json({ error: 'Command with this name already exists' }, 409) } - const updatedSettings = settingsService.updateSettings({ + settingsService.updateSettings({ customCommands: [...settings.preferences.customCommands, validated] }, userId) @@ -238,7 +379,7 @@ export function createSettingsRoutes(db: Database) { promptTemplate: validated.promptTemplate } - const updatedSettings = settingsService.updateSettings({ + settingsService.updateSettings({ customCommands: updatedCommands }, userId) @@ -264,7 +405,7 @@ export function createSettingsRoutes(db: Database) { } const updatedCommands = settings.preferences.customCommands.filter(cmd => cmd.name !== commandName) - const updatedSettings = settingsService.updateSettings({ + settingsService.updateSettings({ customCommands: updatedCommands }, userId) @@ -275,5 +416,223 @@ export function createSettingsRoutes(db: Database) { } }) + app.get('/agents-md', async (c) => { + try { + const agentsMdPath = getAgentsMdPath() + const exists = await fileExists(agentsMdPath) + + if (!exists) { + return c.json({ content: '' }) + } + + const content = await readFileContent(agentsMdPath) + return c.json({ content }) + } catch (error) { + logger.error('Failed to get AGENTS.md:', error) + return c.json({ error: 'Failed to get AGENTS.md' }, 500) + } + }) + + app.get('/agents-md/default', async (c) => { + return c.json({ content: DEFAULT_AGENTS_MD }) + }) + + app.put('/agents-md', async (c) => { + try { + const body = await 
c.req.json() + const { content } = z.object({ content: z.string() }).parse(body) + + const agentsMdPath = getAgentsMdPath() + await writeFileContent(agentsMdPath, content) + logger.info(`Updated AGENTS.md at: ${agentsMdPath}`) + + await opencodeServerManager.restart() + logger.info('Restarted OpenCode server after AGENTS.md update') + + return c.json({ success: true }) + } catch (error) { + logger.error('Failed to update AGENTS.md:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request data', details: error.issues }, 400) + } + return c.json({ error: 'Failed to update AGENTS.md' }, 500) + } + }) + + app.post('/validate-git-token', async (c) => { + try { + const body = await c.req.json() + const { gitToken } = ValidateGitTokenSchema.parse(body) + + if (!gitToken) { + return c.json({ valid: true, message: 'No token provided' }) + } + + // Test the token by trying to access a public GitHub repo via git ls-remote + const testRepoUrl = 'https://github.com/octocat/Hello-World.git' + const env = createGitHubGitEnv(gitToken) + + try { + await execAsync(`git ls-remote ${testRepoUrl}`, { + env: { ...process.env, ...env }, + timeout: 10000 + }) + + // If command succeeded (exit code 0), token is valid + // stderr may contain warnings but that's ok + return c.json({ + valid: true, + message: 'Token is valid' + }) + } catch (error) { + logger.error('Git token validation failed:', error) + + if (error instanceof Error) { + const errorMsg = error.message.toLowerCase() + + if (errorMsg.includes('authentication failed') || + errorMsg.includes('not authorized') || + errorMsg.includes('invalid username or token') || + errorMsg.includes('password authentication is not supported') || + errorMsg.includes('401') || + errorMsg.includes('403') || + errorMsg.includes('code 128')) { + return c.json({ + valid: false, + message: 'Invalid GitHub token. Please check your token and permissions.' 
+ }) + } + + if (errorMsg.includes('timeout') || errorMsg.includes('network')) { + return c.json({ + valid: false, + message: 'Network error - could not validate token. Please try again.' + }) + } + } + + return c.json({ + valid: false, + message: 'Failed to validate token: ' + (error instanceof Error ? error.message : 'Unknown error') + }) + } + } catch (error) { + logger.error('Token validation endpoint error:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request data', details: error.issues }, 400) + } + return c.json({ error: 'Failed to validate token' }, 500) + } + }) + + // MCP directory-aware endpoints + app.post('/mcp/:name/connectdirectory', async (c) => { + try { + const serverName = c.req.param('name') + const body = await c.req.json() + const { directory } = ConnectMcpDirectorySchema.parse(body) + + const response = await proxyToOpenCodeWithDirectory( + `/mcp/${encodeURIComponent(serverName)}/connect`, + 'POST', + directory + ) + + if (!response.ok) { + const errorMsg = await extractOpenCodeError(response, 'Failed to connect MCP server') + return c.json({ error: errorMsg }, 400) + } + + return c.json({ success: true }) + } catch (error) { + logger.error('Failed to connect MCP server for directory:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request data', details: error.issues }, 400) + } + return c.json({ error: 'Failed to connect MCP server' }, 500) + } + }) + + app.post('/mcp/:name/disconnectdirectory', async (c) => { + try { + const serverName = c.req.param('name') + const body = await c.req.json() + const { directory } = ConnectMcpDirectorySchema.parse(body) + + const response = await proxyToOpenCodeWithDirectory( + `/mcp/${encodeURIComponent(serverName)}/disconnect`, + 'POST', + directory + ) + + if (!response.ok) { + const errorMsg = await extractOpenCodeError(response, 'Failed to disconnect MCP server') + return c.json({ error: errorMsg }, 400) + } + + return c.json({ success: 
true }) + } catch (error) { + logger.error('Failed to disconnect MCP server for directory:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request data', details: error.issues }, 400) + } + return c.json({ error: 'Failed to disconnect MCP server' }, 500) + } + }) + + app.post('/mcp/:name/authdirectedir', async (c) => { + try { + const serverName = c.req.param('name') + const body = await c.req.json() + const { directory } = ConnectMcpDirectorySchema.parse(body) + + const response = await proxyToOpenCodeWithDirectory( + `/mcp/${encodeURIComponent(serverName)}/auth/authenticate`, + 'POST', + directory + ) + + if (!response.ok) { + const errorMsg = await extractOpenCodeError(response, 'Failed to authenticate MCP server') + return c.json({ error: errorMsg }, 400) + } + + return c.json(await response.json()) + } catch (error) { + logger.error('Failed to authenticate MCP server for directory:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request data', details: error.issues }, 400) + } + return c.json({ error: 'Failed to authenticate MCP server' }, 500) + } + }) + + app.delete('/mcp/:name/authdir', async (c) => { + try { + const serverName = c.req.param('name') + const body = await c.req.json() + const { directory } = ConnectMcpDirectorySchema.parse(body) + + const response = await proxyToOpenCodeWithDirectory( + `/mcp/${encodeURIComponent(serverName)}/auth`, + 'DELETE', + directory + ) + + if (!response.ok) { + const errorMsg = await extractOpenCodeError(response, 'Failed to remove MCP auth') + return c.json({ error: errorMsg }, 400) + } + + return c.json({ success: true }) + } catch (error) { + logger.error('Failed to remove MCP auth for directory:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request data', details: error.issues }, 400) + } + return c.json({ error: 'Failed to remove MCP auth' }, 500) + } + }) + return app } diff --git a/backend/src/routes/stt.ts 
b/backend/src/routes/stt.ts new file mode 100644 index 00000000..5b4ff17d --- /dev/null +++ b/backend/src/routes/stt.ts @@ -0,0 +1,113 @@ +import { Hono } from 'hono' +import { z } from 'zod' +import { Database } from 'bun:sqlite' +import { SettingsService } from '../services/settings' +import { whisperServerManager } from '../services/whisper' +import { logger } from '../utils/logger' + +const MAX_AUDIO_SIZE_MB = 25 +const MAX_AUDIO_SIZE_BYTES = MAX_AUDIO_SIZE_MB * 1024 * 1024 + +const TranscribeRequestSchema = z.object({ + audio: z.string().min(1).max(MAX_AUDIO_SIZE_BYTES * 1.37), + format: z.string().optional().default('webm'), + language: z.string().optional(), + model: z.string().optional() +}) + +export function createSTTRoutes(db: Database) { + const app = new Hono() + + app.post('/transcribe', async (c) => { + try { + const body = await c.req.json() + const { audio, format, language, model } = TranscribeRequestSchema.parse(body) + const userId = c.req.query('userId') || 'default' + + const settingsService = new SettingsService(db) + const settings = settingsService.getSettings(userId) + const sttConfig = settings.preferences.stt + + if (!sttConfig?.enabled) { + return c.json({ error: 'STT is not enabled' }, 400) + } + + const status = await whisperServerManager.syncStatus() + if (!status.running) { + return c.json({ error: 'Whisper server is not running' }, 503) + } + + let audioData: string = audio + if (audio.includes(',')) { + audioData = audio.split(',')[1] + } + + const audioBuffer = Buffer.from(audioData, 'base64') + + if (audioBuffer.length > MAX_AUDIO_SIZE_BYTES) { + return c.json({ error: `Audio exceeds maximum size of ${MAX_AUDIO_SIZE_MB}MB` }, 400) + } + + const result = await whisperServerManager.transcribe(audioBuffer, { + model: model || sttConfig.model, + language: language || sttConfig.language, + format + }) + + logger.info(`STT transcription completed (${result.duration?.toFixed(1)}s audio)`) + + return c.json({ + text: result.text, + 
language: result.language, + language_probability: result.language_probability, + duration: result.duration + }) + } catch (error) { + logger.error('STT transcription failed:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request', details: error.issues }, 400) + } + return c.json({ + error: 'Transcription failed', + details: error instanceof Error ? error.message : 'Unknown error' + }, 500) + } + }) + + app.get('/models', async (c) => { + try { + const models = await whisperServerManager.getModels() + return c.json(models) + } catch (error) { + logger.error('Failed to fetch STT models:', error) + return c.json({ error: 'Failed to fetch models' }, 500) + } + }) + + app.get('/status', async (c) => { + const userId = c.req.query('userId') || 'default' + const settingsService = new SettingsService(db) + const settings = settingsService.getSettings(userId) + const sttConfig = settings.preferences.stt + const serverStatus = await whisperServerManager.syncStatus() + + return c.json({ + enabled: sttConfig?.enabled || false, + configured: true, + server: { + running: serverStatus.running, + port: serverStatus.port, + host: serverStatus.host, + model: serverStatus.model, + error: serverStatus.error + }, + config: { + model: sttConfig?.model || 'base', + language: sttConfig?.language || 'auto', + autoSubmit: sttConfig?.autoSubmit || false + } + }) + }) + + return app +} diff --git a/backend/src/routes/tasks.ts b/backend/src/routes/tasks.ts new file mode 100644 index 00000000..4e4ef233 --- /dev/null +++ b/backend/src/routes/tasks.ts @@ -0,0 +1,165 @@ +import { Hono } from 'hono' +import { z } from 'zod' +import type { Database } from 'bun:sqlite' +import { schedulerService, type CreateTaskInput, type UpdateTaskInput, type CommandConfig } from '../services/scheduler' +import { logger } from '../utils/logger' + +const CommandConfigSchema = z.object({ + command: z.string().optional(), + args: z.array(z.string()).optional(), + workdir: 
z.string().optional(), + skillName: z.string().optional(), + message: z.string().optional(), +}) + +const CreateTaskSchema = z.object({ + name: z.string().min(1).max(255), + schedule_type: z.literal('cron'), + schedule_value: z.string().min(1), + command_type: z.enum(['skill', 'opencode-run', 'script']), + command_config: CommandConfigSchema, +}) + +const UpdateTaskSchema = z.object({ + name: z.string().min(1).max(255).optional(), + schedule_value: z.string().min(1).optional(), + command_type: z.enum(['skill', 'opencode-run', 'script']).optional(), + command_config: CommandConfigSchema.optional(), +}) + +export function createTaskRoutes(db: Database) { + const app = new Hono() + + schedulerService.setDatabase(db) + + app.get('/', async (c) => { + try { + const tasks = schedulerService.getAllTasks() + return c.json(tasks) + } catch (error) { + logger.error('Failed to get tasks:', error) + return c.json({ error: 'Failed to get tasks' }, 500) + } + }) + + app.get('/:id', async (c) => { + try { + const id = parseInt(c.req.param('id')) + if (isNaN(id)) { + return c.json({ error: 'Invalid task ID' }, 400) + } + + const task = schedulerService.getTask(id) + if (!task) { + return c.json({ error: 'Task not found' }, 404) + } + + return c.json(task) + } catch (error) { + logger.error('Failed to get task:', error) + return c.json({ error: 'Failed to get task' }, 500) + } + }) + + app.post('/', async (c) => { + try { + const body = await c.req.json() + const validated = CreateTaskSchema.parse(body) + + const task = schedulerService.createTask(validated as CreateTaskInput) + return c.json(task, 201) + } catch (error) { + logger.error('Failed to create task:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid task data', details: error.issues }, 400) + } + if (error instanceof Error && error.message.includes('Invalid cron')) { + return c.json({ error: error.message }, 400) + } + return c.json({ error: 'Failed to create task' }, 500) + } + }) + + 
app.put('/:id', async (c) => { + try { + const id = parseInt(c.req.param('id')) + if (isNaN(id)) { + return c.json({ error: 'Invalid task ID' }, 400) + } + + const body = await c.req.json() + const validated = UpdateTaskSchema.parse(body) + + const task = schedulerService.updateTask(id, validated as UpdateTaskInput) + if (!task) { + return c.json({ error: 'Task not found' }, 404) + } + + return c.json(task) + } catch (error) { + logger.error('Failed to update task:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid task data', details: error.issues }, 400) + } + if (error instanceof Error && error.message.includes('Invalid cron')) { + return c.json({ error: error.message }, 400) + } + return c.json({ error: 'Failed to update task' }, 500) + } + }) + + app.delete('/:id', async (c) => { + try { + const id = parseInt(c.req.param('id')) + if (isNaN(id)) { + return c.json({ error: 'Invalid task ID' }, 400) + } + + const deleted = schedulerService.deleteTask(id) + if (!deleted) { + return c.json({ error: 'Task not found' }, 404) + } + + return c.json({ success: true }) + } catch (error) { + logger.error('Failed to delete task:', error) + return c.json({ error: 'Failed to delete task' }, 500) + } + }) + + app.post('/:id/toggle', async (c) => { + try { + const id = parseInt(c.req.param('id')) + if (isNaN(id)) { + return c.json({ error: 'Invalid task ID' }, 400) + } + + const task = schedulerService.toggleTask(id) + if (!task) { + return c.json({ error: 'Task not found' }, 404) + } + + return c.json(task) + } catch (error) { + logger.error('Failed to toggle task:', error) + return c.json({ error: 'Failed to toggle task' }, 500) + } + }) + + app.post('/:id/run', async (c) => { + try { + const id = parseInt(c.req.param('id')) + if (isNaN(id)) { + return c.json({ error: 'Invalid task ID' }, 400) + } + + const result = await schedulerService.runTaskNow(id) + return c.json(result) + } catch (error) { + logger.error('Failed to run task:', error) + 
return c.json({ error: 'Failed to run task' }, 500) + } + }) + + return app +} diff --git a/backend/src/routes/terminal.ts b/backend/src/routes/terminal.ts new file mode 100644 index 00000000..b90eb518 --- /dev/null +++ b/backend/src/routes/terminal.ts @@ -0,0 +1,86 @@ +import { Hono } from 'hono' +import { terminalService } from '../services/terminal' +import { logger } from '../utils/logger' +import { Server, Socket } from 'socket.io' + +export function createTerminalRoutes() { + const app = new Hono() + + app.get('/sessions', (c) => { + const sessions = terminalService.listSessions() + return c.json({ sessions }) + }) + + app.post('/sessions/:id/resize', async (c) => { + const id = c.req.param('id') + const { cols, rows } = await c.req.json<{ cols: number; rows: number }>() + + const success = terminalService.resizeSession(id, cols, rows) + if (!success) { + return c.json({ error: 'Session not found' }, 404) + } + + return c.json({ success: true }) + }) + + app.delete('/sessions/:id', (c) => { + const id = c.req.param('id') + const success = terminalService.destroySession(id) + + if (!success) { + return c.json({ error: 'Session not found' }, 404) + } + + return c.json({ success: true }) + }) + + return app +} + +export function registerTerminalSocketIO(io: Server) { + io.on('connection', (socket: Socket) => { + const sessionId = socket.handshake.query.sessionId as string + const cwd = (socket.handshake.query.cwd as string) || undefined + + if (!sessionId) { + logger.error('Socket connection missing sessionId') + socket.disconnect() + return + } + + logger.info(`Socket.IO connection for terminal: ${sessionId}`) + socket.join(sessionId) + + // Create session if it doesn't exist + terminalService.createSession(sessionId, cwd) + + // Handle incoming data from client + socket.on('input', (data: string) => { + terminalService.writeToSession(sessionId, data) + }) + + socket.on('resize', (size: { cols: number; rows: number }) => { + 
terminalService.resizeSession(sessionId, size.cols, size.rows) + }) + + // Setup PTY listeners for this socket + // We need to be careful not to duplicate listeners if multiple sockets connect to the same session + // For now, we'll just add new listeners and rely on the service to broadcast to all + + terminalService.setOnData(sessionId, (data: string) => { + io.to(sessionId).emit('output', data) + }) + + terminalService.setOnExit(sessionId, (exitCode: number, signal?: number) => { + io.to(sessionId).emit('exit', { exitCode, signal }) + terminalService.destroySession(sessionId) + socket.disconnect() + }) + + socket.on('disconnect', () => { + logger.info(`Socket.IO disconnected for terminal: ${sessionId}`) + // We don't destroy the session on disconnect to allow reconnection + // The session will be destroyed when the PTY exits or manually via API + }) + }) +} diff --git a/backend/src/routes/tts.ts b/backend/src/routes/tts.ts new file mode 100644 index 00000000..f60f306e --- /dev/null +++ b/backend/src/routes/tts.ts @@ -0,0 +1,851 @@ +import { Hono } from 'hono' +import { z } from 'zod' +import { Database } from 'bun:sqlite' +import { createHash } from 'crypto' +import { mkdir, readFile, writeFile, readdir, stat, unlink } from 'fs/promises' +import { join } from 'path' +import { SettingsService } from '../services/settings' +import { logger } from '../utils/logger' +import { getWorkspacePath } from '@opencode-manager/shared/config/env' +import { chatterboxServerManager } from '../services/chatterbox' +import { coquiServerManager } from '../services/coqui' + +const TTS_CACHE_DIR = join(getWorkspacePath(), 'cache', 'tts') +const DISCOVERY_CACHE_DIR = join(getWorkspacePath(), 'cache', 'discovery') +const CACHE_TTL_MS = 24 * 60 * 60 * 1000 +const DISCOVERY_CACHE_TTL_MS = 60 * 60 * 1000 +const MAX_CACHE_SIZE_MB = 200 +const MAX_CACHE_SIZE_BYTES = MAX_CACHE_SIZE_MB * 1024 * 1024 + +const TTSRequestSchema = z.object({ + text: z.string().min(1).max(4096), +}) + +function 
generateCacheKey(text: string, voice: string, model: string, speed: number): string { + const hash = createHash('sha256') + hash.update(`${text}|${voice}|${model}|${speed}`) + return hash.digest('hex') +} + +function normalizeToBaseUrl(endpoint: string): string { + return endpoint + .replace(/\/v1\/audio\/speech$/, '') + .replace(/\/audio\/speech$/, '') + .replace(/\/$/, '') +} + +async function ensureCacheDir(): Promise<void> { + await mkdir(TTS_CACHE_DIR, { recursive: true }) +} + +async function ensureDiscoveryCacheDir(): Promise<void> { + await mkdir(DISCOVERY_CACHE_DIR, { recursive: true }) +} + +async function getCachedAudio(cacheKey: string): Promise<Buffer | null> { + try { + const filePath = join(TTS_CACHE_DIR, `${cacheKey}.mp3`) + const fileStat = await stat(filePath) + + if (Date.now() - fileStat.mtimeMs > CACHE_TTL_MS) { + await unlink(filePath) + return null + } + + return await readFile(filePath) + } catch { + return null + } +} + +async function getCacheSize(): Promise<number> { + try { + const files = await readdir(TTS_CACHE_DIR) + let totalSize = 0 + + for (const file of files) { + if (!file.endsWith('.mp3')) continue + + const filePath = join(TTS_CACHE_DIR, file) + const fileStat = await stat(filePath) + totalSize += fileStat.size + } + + return totalSize + } catch { + return 0 + } +} + +async function cleanupOldestFiles(requiredSpace: number): Promise<void> { + try { + const files = await readdir(TTS_CACHE_DIR) + const fileInfos = [] + + for (const file of files) { + if (!file.endsWith('.mp3')) continue + + const filePath = join(TTS_CACHE_DIR, file) + const fileStat = await stat(filePath) + fileInfos.push({ path: filePath, mtimeMs: fileStat.mtimeMs, size: fileStat.size }) + } + + fileInfos.sort((a, b) => a.mtimeMs - b.mtimeMs) + + let freedSpace = 0 + for (const fileInfo of fileInfos) { + await unlink(fileInfo.path) + freedSpace += fileInfo.size + + if (freedSpace >= requiredSpace) break + } + + logger.info(`TTS cache freed ${freedSpace} bytes by removing old files`) + } catch (error) { 
+ logger.error('TTS cache cleanup failed:', error) + } +} + +async function cacheAudio(cacheKey: string, audioData: Buffer): Promise<void> { + const filePath = join(TTS_CACHE_DIR, `${cacheKey}.mp3`) + + await ensureCacheDir() + const currentCacheSize = await getCacheSize() + + if (currentCacheSize + audioData.length > MAX_CACHE_SIZE_BYTES) { + await cleanupOldestFiles(audioData.length) + } + + await writeFile(filePath, audioData) +} + +async function getCachedDiscovery(cacheKey: string): Promise<string[] | null> { + try { + const filePath = join(DISCOVERY_CACHE_DIR, `${cacheKey}.json`) + const fileStat = await stat(filePath) + + if (Date.now() - fileStat.mtimeMs > DISCOVERY_CACHE_TTL_MS) { + await unlink(filePath) + return null + } + + const content = await readFile(filePath, 'utf-8') + return JSON.parse(content) + } catch { + return null + } +} + +async function cacheDiscovery(cacheKey: string, data: string[]): Promise<void> { + try { + const filePath = join(DISCOVERY_CACHE_DIR, `${cacheKey}.json`) + await writeFile(filePath, JSON.stringify(data)) + } catch (error) { + logger.error(`Failed to cache discovery data for ${cacheKey}:`, error) + } +} + +function generateDiscoveryCacheKey(endpoint: string, apiKey: string, type: 'models' | 'voices'): string { + const hash = createHash('sha256') + hash.update(`${endpoint}|${apiKey.substring(0, 8)}|${type}`) + return hash.digest('hex') +} + +async function fetchAvailableModels(endpoint: string, apiKey: string): Promise<string[]> { + const baseUrl = normalizeToBaseUrl(endpoint) + const endpointVariations = [ + `${baseUrl}/v1/models`, + `${baseUrl}/models`, + ] + + for (const modelEndpoint of endpointVariations) { + try { + const response = await fetch(modelEndpoint, { + headers: { + 'Authorization': `Bearer ${apiKey}`, + 'Content-Type': 'application/json', + }, + }) + + if (response.ok) { + const data = await response.json() as { data?: { id?: string }[] } | unknown[] + + // Handle different response formats + if ('data' in data && Array.isArray(data.data)) { + 
// OpenAI format: { data: [{ id: "gpt-4" }, ...] } + return data.data + .filter((model) => model.id && typeof model.id === 'string') + .filter((model) => + model.id!.toLowerCase().includes('tts') || + model.id!.toLowerCase().includes('audio') || + model.id!.toLowerCase().includes('speech') + ) + .map((model) => model.id!) + } else if (Array.isArray(data)) { + return data.filter((item): item is string => typeof item === 'string') + } + } + } catch (error) { + logger.warn(`Failed to fetch models from ${modelEndpoint}:`, error) + continue + } + } + + return ['tts-1', 'tts-1-hd'] +} + +async function fetchAvailableVoices(endpoint: string, apiKey: string): Promise { + const baseUrl = normalizeToBaseUrl(endpoint) + const endpointVariations = [ + `${baseUrl}/v1/audio/voices`, + `${baseUrl}/voices`, + `${baseUrl}/audio/voices`, + ] + + for (const voiceEndpoint of endpointVariations) { + try { + const response = await fetch(voiceEndpoint, { + headers: { + 'Authorization': `Bearer ${apiKey}`, + 'Content-Type': 'application/json', + }, + }) + + if (response.ok) { + type VoiceItem = { id?: string; name?: string; voice?: string } + const data = await response.json() as { data?: VoiceItem[]; voices?: string[] } | (string | VoiceItem)[] + + // Handle different response formats + if ('data' in data && Array.isArray(data.data)) { + // OpenAI-style format: { data: [{ name: "alloy" }, ...] } + return data.data + .filter((voice) => voice.id || voice.name) + .map((voice) => (voice.id || voice.name)!) + } else if ('voices' in data && Array.isArray(data.voices)) { + // Kokoro-style format: { "voices": ["af_alloy", "af_aoede", ...] } + return data.voices.filter((v): v is string => typeof v === 'string') + } else if (Array.isArray(data)) { + // Simple array format: ["alloy", "echo", ...] or [{ voice: "alloy" }, ...] 
+ return data.map((item) => { + if (typeof item === 'string') return item + return item.name || item.voice || item.id + }).filter((v): v is string => typeof v === 'string') + } + } + } catch (error) { + logger.warn(`Failed to fetch voices from ${voiceEndpoint}:`, error) + continue + } + } + + return ['alloy', 'echo', 'fable', 'onyx', 'nova', 'shimmer'] +} + +export async function cleanupExpiredCache(): Promise { + try { + await ensureCacheDir() + const files = await readdir(TTS_CACHE_DIR) + let cleanedCount = 0 + + for (const file of files) { + if (!file.endsWith('.mp3')) continue + + const filePath = join(TTS_CACHE_DIR, file) + try { + const fileStat = await stat(filePath) + if (Date.now() - fileStat.mtimeMs > CACHE_TTL_MS) { + await unlink(filePath) + cleanedCount++ + } + } catch { + continue + } + } + + if (cleanedCount > 0) { + logger.info(`TTS cache cleanup: removed ${cleanedCount} expired files`) + } + + return cleanedCount + } catch (error) { + logger.error('TTS cache cleanup failed:', error) + return 0 + } +} + +export async function getCacheStats(): Promise<{ count: number; sizeBytes: number; sizeMB: number }> { + try { + await ensureCacheDir() + const files = await readdir(TTS_CACHE_DIR) + let count = 0 + let totalSize = 0 + + for (const file of files) { + if (!file.endsWith('.mp3')) continue + + const filePath = join(TTS_CACHE_DIR, file) + const fileStat = await stat(filePath) + + if (Date.now() - fileStat.mtimeMs <= CACHE_TTL_MS) { + count++ + totalSize += fileStat.size + } + } + + return { + count, + sizeBytes: totalSize, + sizeMB: Math.round(totalSize / (1024 * 1024) * 100) / 100 + } + } catch { + return { count: 0, sizeBytes: 0, sizeMB: 0 } + } +} + +export { generateCacheKey, ensureCacheDir, getCachedAudio, cacheAudio, getCacheSize, cleanupOldestFiles } + +import type { Context } from 'hono' +import type { TTSConfig } from '@opencode-manager/shared' + +async function handleChatterboxSynthesis( + c: Context, + text: string, + ttsConfig: TTSConfig, + 
abortController: AbortController +): Promise { + const status = chatterboxServerManager.getStatus() + + if (!status.running) { + logger.info('Chatterbox server not running, attempting to start...') + try { + await chatterboxServerManager.start() + } catch (error) { + logger.error('Failed to start Chatterbox server:', error) + return c.json({ + error: 'Chatterbox server not available', + details: error instanceof Error ? error.message : 'Unknown error' + }, 503) + } + } + + const voice = ttsConfig.voice || 'default' + const exaggeration = ttsConfig.chatterboxExaggeration ?? 0.5 + const cfgWeight = ttsConfig.chatterboxCfgWeight ?? 0.5 + + const cacheKey = generateCacheKey(text, voice, 'chatterbox', exaggeration) + + await ensureCacheDir() + + const cachedAudio = await getCachedAudio(cacheKey) + if (cachedAudio) { + logger.info(`Chatterbox cache hit: ${cacheKey.substring(0, 8)}...`) + return new Response(cachedAudio, { + headers: { + 'Content-Type': 'audio/wav', + 'X-Cache': 'HIT', + }, + }) + } + + if (abortController.signal.aborted) { + return new Response(null, { status: 499 }) + } + + logger.info(`Chatterbox cache miss, synthesizing: ${cacheKey.substring(0, 8)}...`) + + try { + const audioBuffer = await chatterboxServerManager.synthesize(text, { + voice, + exaggeration, + cfgWeight + }) + + await cacheAudio(cacheKey, audioBuffer) + logger.info(`Chatterbox audio cached: ${cacheKey.substring(0, 8)}...`) + + return new Response(audioBuffer, { + headers: { + 'Content-Type': 'audio/wav', + 'X-Cache': 'MISS', + }, + }) + } catch (error) { + logger.error('Chatterbox synthesis failed:', error) + return c.json({ + error: 'Chatterbox synthesis failed', + details: error instanceof Error ? 
error.message : 'Unknown error' + }, 500) + } +} + +async function handleCoquiSynthesis( + c: Context, + text: string, + ttsConfig: TTSConfig, + abortController: AbortController +): Promise { + const status = coquiServerManager.getStatus() + + if (!status.running) { + logger.info('Coqui TTS server not running, attempting to start...') + try { + await coquiServerManager.start() + } catch (error) { + logger.error('Failed to start Coqui TTS server:', error) + return c.json({ + error: 'Coqui TTS server not available', + details: error instanceof Error ? error.message : 'Unknown error' + }, 503) + } + } + + const voice = ttsConfig.voice || 'default' + const speed = ttsConfig.speed ?? 1.0 + + const cacheKey = generateCacheKey(text, voice, 'coqui-jenny', speed) + + await ensureCacheDir() + + const cachedAudio = await getCachedAudio(cacheKey) + if (cachedAudio) { + logger.info(`Coqui cache hit: ${cacheKey.substring(0, 8)}...`) + return new Response(cachedAudio, { + headers: { + 'Content-Type': 'audio/wav', + 'X-Cache': 'HIT', + }, + }) + } + + if (abortController.signal.aborted) { + return new Response(null, { status: 499 }) + } + + logger.info(`Coqui cache miss, synthesizing: ${cacheKey.substring(0, 8)}...`) + + try { + const audioBuffer = await coquiServerManager.synthesize(text, { + voice, + speed + }) + + await cacheAudio(cacheKey, audioBuffer) + logger.info(`Coqui audio cached: ${cacheKey.substring(0, 8)}...`) + + return new Response(audioBuffer, { + headers: { + 'Content-Type': 'audio/wav', + 'X-Cache': 'MISS', + }, + }) + } catch (error) { + logger.error('Coqui synthesis failed:', error) + return c.json({ + error: 'Coqui synthesis failed', + details: error instanceof Error ? 
error.message : 'Unknown error' + }, 500) + } +} + +export function createTTSRoutes(db: Database) { + const app = new Hono() + + app.post('/synthesize', async (c) => { + const abortController = new AbortController() + + c.req.raw.signal.addEventListener('abort', () => { + logger.info('TTS request aborted by client') + abortController.abort() + }) + + try { + const body = await c.req.json() + const { text } = TTSRequestSchema.parse(body) + const userId = c.req.query('userId') || 'default' + + const settingsService = new SettingsService(db) + const settings = settingsService.getSettings(userId) + const ttsConfig = settings.preferences.tts + + if (!ttsConfig?.enabled) { + return c.json({ error: 'TTS is not enabled' }, 400) + } + + const provider = ttsConfig.provider || 'external' + + if (provider === 'chatterbox') { + return await handleChatterboxSynthesis(c, text, ttsConfig, abortController) + } + + if (provider === 'coqui') { + return await handleCoquiSynthesis(c, text, ttsConfig, abortController) + } + + if (provider === 'builtin') { + return c.json({ error: 'Builtin TTS is handled client-side' }, 400) + } + + if (!ttsConfig.apiKey) { + return c.json({ error: 'TTS API key is not configured' }, 400) + } + + const { endpoint, apiKey, voice, model, speed } = ttsConfig + const cacheKey = generateCacheKey(text, voice, model, speed) + + await ensureCacheDir() + + const cachedAudio = await getCachedAudio(cacheKey) + if (cachedAudio) { + logger.info(`TTS cache hit: ${cacheKey.substring(0, 8)}...`) + return new Response(cachedAudio, { + headers: { + 'Content-Type': 'audio/mpeg', + 'X-Cache': 'HIT', + }, + }) + } + + if (abortController.signal.aborted) { + return new Response(null, { status: 499 }) + } + + logger.info(`TTS cache miss, calling API: ${cacheKey.substring(0, 8)}...`) + + const baseUrl = normalizeToBaseUrl(endpoint) + const speechEndpoint = `${baseUrl}/v1/audio/speech` + + const response = await fetch(speechEndpoint, { + method: 'POST', + headers: { + 
'Authorization': `Bearer ${apiKey}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + model, + voice, + input: text, + speed, + response_format: 'mp3', + }), + signal: abortController.signal, + }) + + if (!response.ok) { + const errorText = await response.text() + logger.error(`TTS API error: ${response.status} - ${errorText}`) + const status = response.status >= 400 && response.status < 600 ? response.status as 400 | 500 : 500 + + let errorDetails = errorText + try { + const errorJson = JSON.parse(errorText) + if (errorJson.detail?.error?.message) { + errorDetails = errorJson.detail.error.message + } else if (errorJson.detail?.message) { + errorDetails = errorJson.detail.message + } else if (errorJson.message) { + errorDetails = errorJson.message + } + } catch { + } + + return c.json({ + error: 'TTS API request failed', + details: errorDetails, + voice: voice, + availableVoices: ttsConfig?.availableVoices || [] + }, status) + } + + const audioBuffer = Buffer.from(await response.arrayBuffer()) + + await cacheAudio(cacheKey, audioBuffer) + logger.info(`TTS audio cached: ${cacheKey.substring(0, 8)}...`) + + return new Response(audioBuffer, { + headers: { + 'Content-Type': 'audio/mpeg', + 'X-Cache': 'MISS', + }, + }) + } catch (error) { + if (error instanceof Error && error.name === 'AbortError') { + return new Response(null, { status: 499 }) + } + logger.error('TTS synthesis failed:', error) + if (error instanceof z.ZodError) { + return c.json({ error: 'Invalid request', details: error.issues }, 400) + } + return c.json({ error: 'TTS synthesis failed' }, 500) + } + }) + + app.get('/models', async (c) => { + try { + const userId = c.req.query('userId') || 'default' + const forceRefresh = c.req.query('refresh') === 'true' + + const settingsService = new SettingsService(db) + const settings = settingsService.getSettings(userId) + const ttsConfig = settings.preferences.tts + + if (!ttsConfig?.apiKey || !ttsConfig?.endpoint) { + return c.json({ error: 
'TTS not configured' }, 400) + } + + const cacheKey = generateDiscoveryCacheKey(ttsConfig.endpoint, ttsConfig.apiKey, 'models') + + // Check cache first (unless force refresh) + if (!forceRefresh) { + const cachedModels = await getCachedDiscovery(cacheKey) + if (cachedModels) { + logger.info(`Models cache hit for user ${userId}`) + return c.json({ models: cachedModels, cached: true }) + } + } + + // Fetch from API + await ensureDiscoveryCacheDir() + logger.info(`Fetching TTS models for user ${userId}`) + + const models = await fetchAvailableModels(ttsConfig.endpoint, ttsConfig.apiKey) + await cacheDiscovery(cacheKey, models) + + // Update user preferences with available models + await settingsService.updateSettings({ + tts: { + ...ttsConfig, + availableModels: models, + lastModelsFetch: Date.now() + } + }, userId) + + logger.info(`Fetched ${models.length} TTS models`) + return c.json({ models, cached: false }) + } catch (error) { + logger.error('Failed to fetch TTS models:', error) + return c.json({ error: 'Failed to fetch models' }, 500) + } + }) + + app.get('/voices', async (c) => { + try { + const userId = c.req.query('userId') || 'default' + const forceRefresh = c.req.query('refresh') === 'true' + + const settingsService = new SettingsService(db) + const settings = settingsService.getSettings(userId) + const ttsConfig = settings.preferences.tts + + if (!ttsConfig?.apiKey || !ttsConfig?.endpoint) { + return c.json({ error: 'TTS not configured' }, 400) + } + + const cacheKey = generateDiscoveryCacheKey(ttsConfig.endpoint, ttsConfig.apiKey, 'voices') + + // Check cache first (unless force refresh) + if (!forceRefresh) { + const cachedVoices = await getCachedDiscovery(cacheKey) + if (cachedVoices) { + logger.info(`Voices cache hit for user ${userId}`) + return c.json({ voices: cachedVoices, cached: true }) + } + } + + // Fetch from API + await ensureDiscoveryCacheDir() + logger.info(`Fetching TTS voices for user ${userId}`) + + const voices = await 
fetchAvailableVoices(ttsConfig.endpoint, ttsConfig.apiKey) + await cacheDiscovery(cacheKey, voices) + + // Update user preferences with available voices + await settingsService.updateSettings({ + tts: { + ...ttsConfig, + availableVoices: voices, + lastVoicesFetch: Date.now() + } + }, userId) + + logger.info(`Fetched ${voices.length} TTS voices`) + return c.json({ voices, cached: false }) + } catch (error) { + logger.error('Failed to fetch TTS voices:', error) + return c.json({ error: 'Failed to fetch voices' }, 500) + } + }) + + app.get('/status', async (c) => { + const userId = c.req.query('userId') || 'default' + const settingsService = new SettingsService(db) + const settings = settingsService.getSettings(userId) + const ttsConfig = settings.preferences.tts + const cacheStats = await getCacheStats() + const chatterboxStatus = chatterboxServerManager.getStatus() + const coquiStatus = coquiServerManager.getStatus() + + return c.json({ + enabled: ttsConfig?.enabled || false, + configured: !!(ttsConfig?.apiKey) || ttsConfig?.provider === 'chatterbox' || ttsConfig?.provider === 'coqui' || ttsConfig?.provider === 'builtin', + provider: ttsConfig?.provider || 'external', + cache: { + ...cacheStats, + maxSizeMB: MAX_CACHE_SIZE_MB, + ttlHours: CACHE_TTL_MS / (60 * 60 * 1000) + }, + chatterbox: { + running: chatterboxStatus.running, + device: chatterboxStatus.device, + cudaAvailable: chatterboxStatus.cudaAvailable, + error: chatterboxStatus.error + }, + coqui: { + running: coquiStatus.running, + device: coquiStatus.device, + model: coquiStatus.model, + cudaAvailable: coquiStatus.cudaAvailable, + error: coquiStatus.error + } + }) + }) + + app.get('/chatterbox/status', async (c) => { + const status = chatterboxServerManager.getStatus() + return c.json(status) + }) + + app.post('/chatterbox/start', async (c) => { + try { + await chatterboxServerManager.start() + return c.json({ success: true, status: chatterboxServerManager.getStatus() }) + } catch (error) { + 
logger.error('Failed to start Chatterbox server:', error) + return c.json({ + error: 'Failed to start Chatterbox server', + details: error instanceof Error ? error.message : 'Unknown error' + }, 500) + } + }) + + app.post('/chatterbox/stop', async (c) => { + try { + await chatterboxServerManager.stop() + return c.json({ success: true }) + } catch (error) { + logger.error('Failed to stop Chatterbox server:', error) + return c.json({ + error: 'Failed to stop Chatterbox server', + details: error instanceof Error ? error.message : 'Unknown error' + }, 500) + } + }) + + app.get('/chatterbox/voices', async (c) => { + try { + const voices = await chatterboxServerManager.getVoices() + return c.json(voices) + } catch (error) { + logger.error('Failed to get Chatterbox voices:', error) + return c.json({ + error: 'Failed to get voices', + details: error instanceof Error ? error.message : 'Unknown error' + }, 500) + } + }) + + app.post('/chatterbox/voices/upload', async (c) => { + try { + const formData = await c.req.formData() + const audio = formData.get('audio') as File | null + const name = formData.get('name') as string | null + + if (!audio || !name) { + return c.json({ error: 'Audio file and name are required' }, 400) + } + + const audioBuffer = Buffer.from(await audio.arrayBuffer()) + const result = await chatterboxServerManager.uploadVoice(audioBuffer, name, audio.name) + + return c.json({ success: true, ...result }) + } catch (error) { + logger.error('Failed to upload voice:', error) + return c.json({ + error: 'Failed to upload voice', + details: error instanceof Error ? 
error.message : 'Unknown error' + }, 500) + } + }) + + app.delete('/chatterbox/voices/:voiceId', async (c) => { + try { + const voiceId = c.req.param('voiceId') + await chatterboxServerManager.deleteVoice(voiceId) + return c.json({ success: true }) + } catch (error) { + logger.error('Failed to delete voice:', error) + return c.json({ + error: 'Failed to delete voice', + details: error instanceof Error ? error.message : 'Unknown error' + }, 500) + } + }) + + app.get('/coqui/status', async (c) => { + const status = coquiServerManager.getStatus() + return c.json(status) + }) + + app.post('/coqui/start', async (c) => { + try { + await coquiServerManager.start() + return c.json({ success: true, status: coquiServerManager.getStatus() }) + } catch (error) { + logger.error('Failed to start Coqui TTS server:', error) + return c.json({ + error: 'Failed to start Coqui TTS server', + details: error instanceof Error ? error.message : 'Unknown error' + }, 500) + } + }) + + app.post('/coqui/stop', async (c) => { + try { + await coquiServerManager.stop() + return c.json({ success: true }) + } catch (error) { + logger.error('Failed to stop Coqui TTS server:', error) + return c.json({ + error: 'Failed to stop Coqui TTS server', + details: error instanceof Error ? error.message : 'Unknown error' + }, 500) + } + }) + + app.get('/coqui/voices', async (c) => { + try { + const voices = await coquiServerManager.getVoices() + return c.json(voices) + } catch (error) { + logger.error('Failed to get Coqui voices:', error) + return c.json({ + error: 'Failed to get voices', + details: error instanceof Error ? error.message : 'Unknown error' + }, 500) + } + }) + + app.get('/coqui/models', async (c) => { + try { + const models = await coquiServerManager.getModels() + return c.json(models) + } catch (error) { + logger.error('Failed to get Coqui models:', error) + return c.json({ + error: 'Failed to get models', + details: error instanceof Error ? 
error.message : 'Unknown error' + }, 500) + } + }) + + return app +} diff --git a/backend/src/services/archive.ts b/backend/src/services/archive.ts new file mode 100644 index 00000000..16e6e28a --- /dev/null +++ b/backend/src/services/archive.ts @@ -0,0 +1,172 @@ +import archiver from 'archiver' +import { createWriteStream, createReadStream } from 'fs' +import { readdir, stat, unlink } from 'fs/promises' +import path from 'path' +import os from 'os' +import { logger } from '../utils/logger' + +async function getIgnoredPaths(repoPath: string, paths: string[]): Promise<Set<string>> { + if (paths.length === 0) return new Set() + + try { + const { spawn } = await import('child_process') + + return new Promise((resolve) => { + const ignored = new Set<string>() + const proc = spawn('git', ['check-ignore', '--stdin'], { + cwd: repoPath, + shell: false + }) + + let stdout = '' + + proc.stdout?.on('data', (data: Buffer) => { + stdout += data.toString() + }) + + proc.stdin?.write(paths.join('\n')) + proc.stdin?.end() + + proc.on('close', () => { + const ignoredPaths = stdout.split('\n').filter(p => p.trim()) + for (const p of ignoredPaths) { + ignored.add(p) + } + resolve(ignored) + }) + + proc.on('error', () => { + resolve(new Set()) + }) + }) + } catch { + return new Set() + } +} + +async function collectFiles( + repoPath: string, + relativePath: string = '' +): Promise<string[]> { + const fullPath = path.join(repoPath, relativePath) + const entries = await readdir(fullPath, { withFileTypes: true }) + const files: string[] = [] + + for (const entry of entries) { + const entryRelPath = relativePath ? 
path.join(relativePath, entry.name) : entry.name + + if (entry.name === '.git') continue + + if (entry.isDirectory()) { + files.push(entryRelPath + '/') + const subFiles = await collectFiles(repoPath, entryRelPath) + files.push(...subFiles) + } else { + files.push(entryRelPath) + } + } + + return files +} + +async function filterIgnoredPaths(repoPath: string, allPaths: string[]): Promise<string[]> { + const batchSize = 1000 + const ignoredSet = new Set<string>() + + for (let i = 0; i < allPaths.length; i += batchSize) { + const batch = allPaths.slice(i, i + batchSize) + const ignored = await getIgnoredPaths(repoPath, batch) + for (const p of ignored) { + ignoredSet.add(p) + if (p.endsWith('/')) { + ignoredSet.add(p.slice(0, -1)) + } else { + ignoredSet.add(p + '/') + } + } + } + + const filteredPaths: string[] = [] + const ignoredDirs = new Set<string>() + + for (const p of allPaths) { + const isDir = p.endsWith('/') + const cleanPath = isDir ? p.slice(0, -1) : p + + let isUnderIgnoredDir = false + for (const ignoredDir of ignoredDirs) { + if (cleanPath.startsWith(ignoredDir + '/')) { + isUnderIgnoredDir = true + break + } + } + + if (isUnderIgnoredDir) continue + + if (ignoredSet.has(p) || ignoredSet.has(cleanPath)) { + if (isDir) { + ignoredDirs.add(cleanPath) + } + continue + } + + filteredPaths.push(p) + } + + return filteredPaths +} + +export async function createRepoArchive(repoPath: string): Promise<string> { + const repoName = path.basename(repoPath) + const tempFile = path.join(os.tmpdir(), `${repoName}-${Date.now()}.zip`) + + logger.info(`Creating archive for ${repoPath} at ${tempFile}`) + + const allPaths = await collectFiles(repoPath) + const filteredPaths = await filterIgnoredPaths(repoPath, allPaths) + + const output = createWriteStream(tempFile) + const archive = archiver('zip', { zlib: { level: 5 } }) + + return new Promise((resolve, reject) => { + output.on('close', () => { + logger.info(`Archive created: ${tempFile} (${archive.pointer()} bytes)`) + resolve(tempFile) + }) + + 
archive.on('error', (err) => { + logger.error('Archive error:', err) + reject(err) + }) + + archive.pipe(output) + + for (const relativePath of filteredPaths) { + if (relativePath.endsWith('/')) continue + + const fullPath = path.join(repoPath, relativePath) + const archivePath = path.join(repoName, relativePath) + archive.file(fullPath, { name: archivePath }) + } + + archive.finalize() + }) +} + +export async function deleteArchive(filePath: string): Promise<void> { + try { + await unlink(filePath) + logger.info(`Deleted temp archive: ${filePath}`) + } catch (error) { + logger.warn(`Failed to delete temp archive: ${filePath}`, error) + } +} + +export function getArchiveStream(filePath: string) { + return createReadStream(filePath) +} + +export async function getArchiveSize(filePath: string): Promise<number> { + const stats = await stat(filePath) + return stats.size +} diff --git a/backend/src/services/auth.ts b/backend/src/services/auth.ts index 2474165a..7847a1e4 100644 --- a/backend/src/services/auth.ts +++ b/backend/src/services/auth.ts @@ -1,12 +1,11 @@ import { promises as fs } from 'fs' import path from 'path' -import { getAuthPath } from '../../../shared/src/constants' +import { getAuthPath } from '@opencode-manager/shared/config/env' import { logger } from '../utils/logger' import { AuthCredentialsSchema } from '../../../shared/src/schemas/auth' import type { z } from 'zod' type AuthCredentials = z.infer<typeof AuthCredentialsSchema> -type AuthEntry = AuthCredentials[string] export class AuthService { private authPath = getAuthPath() @@ -42,7 +41,7 @@ export class AuthService { const auth = await this.getAll() delete auth[providerId] - await fs.writeFile(this.authPath, JSON.stringify(auth, null, 2)) + await fs.writeFile(this.authPath, JSON.stringify(auth, null, 2), { mode: 0o600 }) logger.info(`Deleted credentials for provider: ${providerId}`) } @@ -56,8 +55,4 @@ export class AuthService { return !!auth[providerId] } - async get(providerId: string): Promise<AuthEntry | null> { - const auth = await this.getAll() - 
return auth[providerId] || null - } } diff --git a/backend/src/services/chatterbox.ts b/backend/src/services/chatterbox.ts new file mode 100644 index 00000000..e3666325 --- /dev/null +++ b/backend/src/services/chatterbox.ts @@ -0,0 +1,461 @@ +import { spawn, ChildProcess, execSync } from 'child_process' +import fs from 'fs' +import os from 'os' +import { logger } from '../utils/logger' +import { getWorkspacePath } from '@opencode-manager/shared/config/env' +import path from 'path' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + +const CHATTERBOX_PORT = parseInt(process.env.CHATTERBOX_PORT || '5553') +const CHATTERBOX_HOST = process.env.CHATTERBOX_HOST || '127.0.0.1' +const CHATTERBOX_DEVICE = process.env.CHATTERBOX_DEVICE || 'auto' +const DEFAULT_VENV_DIR = path.join(os.homedir(), '.opencode-manager', 'chatterbox-venv') + +interface ChatterboxServerStatus { + running: boolean + port: number + host: string + device: string | null + cudaAvailable: boolean + error: string | null +} + +interface ChatterboxVoice { + id: string + name: string + description: string + is_custom?: boolean +} + +class ChatterboxServerManager { + private process: ChildProcess | null = null + private status: ChatterboxServerStatus = { + running: false, + port: CHATTERBOX_PORT, + host: CHATTERBOX_HOST, + device: null, + cudaAvailable: false, + error: null + } + private startPromise: Promise | null = null + private healthCheckInterval: ReturnType | null = null + + getPort(): number { + return CHATTERBOX_PORT + } + + getHost(): string { + return CHATTERBOX_HOST + } + + getBaseUrl(): string { + return `http://${CHATTERBOX_HOST}:${CHATTERBOX_PORT}` + } + + getStatus(): ChatterboxServerStatus { + return { ...this.status } + } + + private findPythonBin(): string | null { + if (process.env.CHATTERBOX_VENV) { + const venvPython = path.join(process.env.CHATTERBOX_VENV, 'bin', 'python') + if 
(fs.existsSync(venvPython)) { + return venvPython + } + } + + const defaultVenvPython = path.join(DEFAULT_VENV_DIR, 'bin', 'python') + if (fs.existsSync(defaultVenvPython)) { + return defaultVenvPython + } + + if (process.env.WHISPER_VENV) { + const whisperPython = path.join(process.env.WHISPER_VENV, 'bin', 'python') + if (fs.existsSync(whisperPython)) { + return whisperPython + } + } + + return null + } + + private findCompatiblePython(): string | null { + const candidates = ['python3.11', 'python3.12', 'python3.10'] + for (const py of candidates) { + try { + execSync(`which ${py}`, { stdio: 'pipe' }) + return py + } catch { + continue + } + } + return null + } + + private async setupVenv(): Promise { + const pythonBin = this.findCompatiblePython() + if (!pythonBin) { + logger.warn('No compatible Python (3.10-3.12) found for Chatterbox TTS') + logger.warn('Install Python 3.11 with: brew install python@3.11') + return null + } + + logger.info(`Setting up Chatterbox venv with ${pythonBin}...`) + + try { + fs.mkdirSync(path.dirname(DEFAULT_VENV_DIR), { recursive: true }) + + logger.info('Creating virtual environment...') + execSync(`${pythonBin} -m venv "${DEFAULT_VENV_DIR}"`, { stdio: 'pipe' }) + + const pip = path.join(DEFAULT_VENV_DIR, 'bin', 'pip') + const venvPython = path.join(DEFAULT_VENV_DIR, 'bin', 'python') + + logger.info('Installing PyTorch (this may take a few minutes)...') + const torchCmd = os.platform() === 'darwin' && os.arch() === 'arm64' + ? 
`"${pip}" install torch==2.6.0 torchaudio==2.6.0` + : `"${pip}" install torch==2.6.0 torchaudio==2.6.0 --index-url https://download.pytorch.org/whl/cpu` + execSync(torchCmd, { stdio: 'pipe', timeout: 600000 }) + + logger.info('Installing chatterbox-tts dependencies...') + execSync(`"${pip}" install 'numpy>=1.24.0,<1.26.0' 'safetensors==0.5.3' 'transformers==4.46.3'`, { stdio: 'pipe', timeout: 300000 }) + + logger.info('Installing chatterbox-tts...') + execSync(`"${pip}" install chatterbox-tts==0.1.6`, { stdio: 'pipe', timeout: 300000 }) + + logger.info('Installing server dependencies...') + execSync(`"${pip}" install fastapi uvicorn python-multipart`, { stdio: 'pipe', timeout: 120000 }) + + logger.info('Chatterbox venv setup complete!') + return venvPython + } catch (error) { + logger.error('Failed to setup Chatterbox venv:', error) + try { + fs.rmSync(DEFAULT_VENV_DIR, { recursive: true, force: true }) + } catch {} + return null + } + } + + async start(): Promise { + if (this.startPromise) { + return this.startPromise + } + + if (this.status.running) { + logger.info('Chatterbox server already running') + return + } + + this.startPromise = this.doStart() + try { + await this.startPromise + } finally { + this.startPromise = null + } + } + + private async doStart(): Promise { + const possiblePaths = [ + path.resolve(__dirname, '..', '..', 'scripts', 'chatterbox-server.py'), + path.resolve(__dirname, '..', '..', '..', 'scripts', 'chatterbox-server.py'), + path.join(process.cwd(), 'scripts', 'chatterbox-server.py') + ] + + let scriptPath: string | null = null + for (const p of possiblePaths) { + if (fs.existsSync(p)) { + scriptPath = p + break + } + } + + if (!scriptPath) { + throw new Error(`Chatterbox server script not found. 
Searched: ${possiblePaths.join(', ')}`) + } + + const voiceSamplesDir = path.join(getWorkspacePath(), 'cache', 'chatterbox-voices') + + let pythonBin = this.findPythonBin() + + if (!pythonBin) { + logger.info('Chatterbox venv not found, setting up automatically...') + pythonBin = await this.setupVenv() + if (!pythonBin) { + throw new Error('Failed to setup Chatterbox environment') + } + } + + logger.info(`Starting Chatterbox server on ${CHATTERBOX_HOST}:${CHATTERBOX_PORT}`) + logger.info(`Script path: ${scriptPath}`) + logger.info(`Voice samples directory: ${voiceSamplesDir}`) + logger.info(`Using Python: ${pythonBin}`) + + const env = { + ...process.env, + CHATTERBOX_PORT: CHATTERBOX_PORT.toString(), + CHATTERBOX_HOST: CHATTERBOX_HOST, + CHATTERBOX_VOICE_SAMPLES_DIR: voiceSamplesDir, + CHATTERBOX_DEVICE: CHATTERBOX_DEVICE, + PYTHONUNBUFFERED: '1' + } + + this.process = spawn(pythonBin, [scriptPath], { + env, + stdio: ['ignore', 'pipe', 'pipe'] + }) + + this.process.stdout?.on('data', (data) => { + const message = data.toString().trim() + if (message) { + logger.info(`[Chatterbox] ${message}`) + } + }) + + this.process.stderr?.on('data', (data) => { + const message = data.toString().trim() + if (message) { + if (message.includes('INFO') || message.includes('Uvicorn')) { + logger.info(`[Chatterbox] ${message}`) + } else { + logger.error(`[Chatterbox] ${message}`) + } + } + }) + + this.process.on('error', (error) => { + logger.error('Failed to start Chatterbox server:', error) + this.status.running = false + this.status.error = error.message + }) + + this.process.on('exit', (code, signal) => { + logger.info(`Chatterbox server exited with code ${code}, signal ${signal}`) + this.status.running = false + this.process = null + + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval) + this.healthCheckInterval = null + } + }) + + await this.waitForReady() + this.startHealthCheck() + } + + private async waitForReady(maxAttempts = 60, delayMs = 2000): 
Promise { + for (let i = 0; i < maxAttempts; i++) { + if (!this.process) { + throw new Error('Chatterbox server process exited unexpectedly') + } + + try { + const response = await fetch(`${this.getBaseUrl()}/health`, { + signal: AbortSignal.timeout(5000) + }) + + if (response.ok) { + const data = await response.json() as { + device?: string + cuda_available?: boolean + } + this.status.running = true + this.status.device = data.device || null + this.status.cudaAvailable = data.cuda_available || false + this.status.error = null + logger.info(`Chatterbox server is ready (device: ${data.device}, CUDA: ${data.cuda_available})`) + return + } + } catch { + logger.debug(`Waiting for Chatterbox server... attempt ${i + 1}/${maxAttempts}`) + } + + await new Promise(resolve => setTimeout(resolve, delayMs)) + } + + throw new Error('Chatterbox server failed to start within timeout') + } + + private startHealthCheck(): void { + this.healthCheckInterval = setInterval(async () => { + try { + const response = await fetch(`${this.getBaseUrl()}/health`, { + signal: AbortSignal.timeout(5000) + }) + + if (response.ok) { + const data = await response.json() as { + device?: string + cuda_available?: boolean + } + this.status.running = true + this.status.device = data.device || null + this.status.cudaAvailable = data.cuda_available || false + this.status.error = null + } else { + this.status.running = false + this.status.error = 'Health check failed' + } + } catch (error) { + this.status.running = false + this.status.error = error instanceof Error ? 
error.message : 'Health check failed' + } + }, 30000) + } + + async stop(): Promise { + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval) + this.healthCheckInterval = null + } + + if (!this.process) { + return + } + + logger.info('Stopping Chatterbox server...') + + return new Promise((resolve) => { + const timeout = setTimeout(() => { + logger.warn('Chatterbox server did not exit gracefully, killing...') + this.process?.kill('SIGKILL') + resolve() + }, 5000) + + this.process!.once('exit', () => { + clearTimeout(timeout) + this.process = null + this.status.running = false + logger.info('Chatterbox server stopped') + resolve() + }) + + this.process!.kill('SIGTERM') + }) + } + + async synthesize(text: string, options: { + voice?: string + exaggeration?: number + cfgWeight?: number + } = {}): Promise { + if (!this.status.running) { + throw new Error('Chatterbox server is not running') + } + + const response = await fetch(`${this.getBaseUrl()}/synthesize`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + text, + voice: options.voice || 'default', + exaggeration: options.exaggeration ?? 0.5, + cfg_weight: options.cfgWeight ?? 
0.5 + }) + }) + + if (!response.ok) { + const error = await response.text() + throw new Error(`Synthesis failed: ${error}`) + } + + return Buffer.from(await response.arrayBuffer()) + } + + async getVoices(): Promise<{ + voices: string[] + voiceDetails: ChatterboxVoice[] + }> { + if (!this.status.running) { + return { + voices: ['default'], + voiceDetails: [{ + id: 'default', + name: 'Default Voice', + description: 'Built-in default voice' + }] + } + } + + try { + const response = await fetch(`${this.getBaseUrl()}/voices`) + if (response.ok) { + const data = await response.json() as { + voices: string[] + voice_details: ChatterboxVoice[] + } + return { + voices: data.voices, + voiceDetails: data.voice_details + } + } + } catch { + logger.warn('Failed to fetch voices from Chatterbox server') + } + + return { + voices: ['default'], + voiceDetails: [{ + id: 'default', + name: 'Default Voice', + description: 'Built-in default voice' + }] + } + } + + async uploadVoice(audioData: Buffer, name: string, filename: string): Promise<{ + voiceId: string + path: string + }> { + if (!this.status.running) { + throw new Error('Chatterbox server is not running') + } + + const formData = new FormData() + formData.append('audio', new Blob([audioData]), filename) + formData.append('name', name) + + const response = await fetch(`${this.getBaseUrl()}/voices/upload`, { + method: 'POST', + body: formData + }) + + if (!response.ok) { + const error = await response.text() + throw new Error(`Voice upload failed: ${error}`) + } + + const data = await response.json() as { + voice_id: string + path: string + } + + return { + voiceId: data.voice_id, + path: data.path + } + } + + async deleteVoice(voiceId: string): Promise { + if (!this.status.running) { + throw new Error('Chatterbox server is not running') + } + + const response = await fetch(`${this.getBaseUrl()}/voices/${voiceId}`, { + method: 'DELETE' + }) + + if (!response.ok) { + const error = await response.text() + throw new Error(`Voice 
deletion failed: ${error}`) + } + } +} + +export const chatterboxServerManager = new ChatterboxServerManager() diff --git a/backend/src/services/coqui.ts b/backend/src/services/coqui.ts new file mode 100644 index 00000000..f1de6a60 --- /dev/null +++ b/backend/src/services/coqui.ts @@ -0,0 +1,449 @@ +import { spawn, ChildProcess, execSync } from 'child_process' +import fs from 'fs' +import os from 'os' +import { logger } from '../utils/logger' +import path from 'path' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + +const COQUI_PORT = parseInt(process.env.COQUI_PORT || '5554') +const COQUI_HOST = process.env.COQUI_HOST || '127.0.0.1' +const COQUI_DEVICE = process.env.COQUI_DEVICE || 'auto' +const COQUI_MODEL = process.env.COQUI_MODEL || 'tts_models/en/jenny/jenny' +const DEFAULT_VENV_DIR = path.join(os.homedir(), '.opencode-manager', 'coqui-venv') + +interface CoquiServerStatus { + running: boolean + port: number + host: string + device: string | null + model: string + cudaAvailable: boolean + error: string | null +} + +interface CoquiVoice { + id: string + name: string + description: string +} + +class CoquiServerManager { + private process: ChildProcess | null = null + private status: CoquiServerStatus = { + running: false, + port: COQUI_PORT, + host: COQUI_HOST, + device: null, + model: COQUI_MODEL, + cudaAvailable: false, + error: null + } + private startPromise: Promise | null = null + private healthCheckInterval: ReturnType | null = null + + getPort(): number { + return COQUI_PORT + } + + getHost(): string { + return COQUI_HOST + } + + getBaseUrl(): string { + return `http://${COQUI_HOST}:${COQUI_PORT}` + } + + getStatus(): CoquiServerStatus { + return { ...this.status } + } + + private findPythonBin(): string | null { + if (process.env.COQUI_VENV) { + const venvPython = path.join(process.env.COQUI_VENV, 'bin', 'python') + if (fs.existsSync(venvPython)) { + return 
venvPython + } + } + + const defaultVenvPython = path.join(DEFAULT_VENV_DIR, 'bin', 'python') + if (fs.existsSync(defaultVenvPython)) { + return defaultVenvPython + } + + return null + } + + private findCompatiblePython(): string | null { + const candidates = ['python3.11', 'python3.12', 'python3.10', 'python3'] + for (const py of candidates) { + try { + execSync(`which ${py}`, { stdio: 'pipe' }) + return py + } catch { + continue + } + } + return null + } + + private async setupVenv(): Promise { + const pythonBin = this.findCompatiblePython() + if (!pythonBin) { + logger.warn('No compatible Python (3.10+) found for Coqui TTS') + logger.warn('Install Python 3.11 with: brew install python@3.11') + return null + } + + logger.info(`Setting up Coqui TTS venv with ${pythonBin}...`) + + try { + fs.mkdirSync(path.dirname(DEFAULT_VENV_DIR), { recursive: true }) + + logger.info('Creating virtual environment...') + execSync(`${pythonBin} -m venv "${DEFAULT_VENV_DIR}"`, { stdio: 'pipe' }) + + const pip = path.join(DEFAULT_VENV_DIR, 'bin', 'pip') + const venvPython = path.join(DEFAULT_VENV_DIR, 'bin', 'python') + + logger.info('Upgrading pip...') + execSync(`"${pip}" install --upgrade pip`, { stdio: 'pipe', timeout: 120000 }) + + logger.info('Installing PyTorch (this may take a few minutes)...') + const torchCmd = os.platform() === 'darwin' && os.arch() === 'arm64' + ? 
`"${pip}" install torch torchaudio` + : `"${pip}" install torch torchaudio --index-url https://download.pytorch.org/whl/cpu` + execSync(torchCmd, { stdio: 'pipe', timeout: 600000 }) + + logger.info('Installing Coqui TTS (this may take a few minutes)...') + execSync(`"${pip}" install TTS`, { stdio: 'pipe', timeout: 600000 }) + + logger.info('Installing server dependencies...') + execSync(`"${pip}" install fastapi uvicorn scipy numpy`, { stdio: 'pipe', timeout: 120000 }) + + logger.info('Coqui TTS venv setup complete!') + return venvPython + } catch (error) { + logger.error('Failed to setup Coqui TTS venv:', error) + try { + fs.rmSync(DEFAULT_VENV_DIR, { recursive: true, force: true }) + } catch {} + return null + } + } + + async start(): Promise { + if (this.startPromise) { + return this.startPromise + } + + if (this.status.running) { + logger.info('Coqui TTS server already running') + return + } + + this.startPromise = this.doStart() + try { + await this.startPromise + } finally { + this.startPromise = null + } + } + + private async doStart(): Promise { + const possiblePaths = [ + path.resolve(__dirname, '..', '..', 'scripts', 'coqui-server.py'), + path.resolve(__dirname, '..', '..', '..', 'scripts', 'coqui-server.py'), + path.join(process.cwd(), 'scripts', 'coqui-server.py') + ] + + let scriptPath: string | null = null + for (const p of possiblePaths) { + if (fs.existsSync(p)) { + scriptPath = p + break + } + } + + if (!scriptPath) { + throw new Error(`Coqui TTS server script not found. 
Searched: ${possiblePaths.join(', ')}`) + } + + let pythonBin = this.findPythonBin() + + if (!pythonBin) { + logger.info('Coqui TTS venv not found, setting up automatically...') + pythonBin = await this.setupVenv() + if (!pythonBin) { + throw new Error('Failed to setup Coqui TTS environment') + } + } + + logger.info(`Starting Coqui TTS server on ${COQUI_HOST}:${COQUI_PORT}`) + logger.info(`Script path: ${scriptPath}`) + logger.info(`Model: ${COQUI_MODEL}`) + logger.info(`Using Python: ${pythonBin}`) + + const env = { + ...process.env, + COQUI_PORT: COQUI_PORT.toString(), + COQUI_HOST: COQUI_HOST, + COQUI_MODEL: COQUI_MODEL, + COQUI_DEVICE: COQUI_DEVICE, + PYTHONUNBUFFERED: '1' + } + + this.process = spawn(pythonBin, [scriptPath], { + env, + stdio: ['ignore', 'pipe', 'pipe'] + }) + + this.process.stdout?.on('data', (data) => { + const message = data.toString().trim() + if (message) { + logger.info(`[Coqui] ${message}`) + } + }) + + this.process.stderr?.on('data', (data) => { + const message = data.toString().trim() + if (message) { + if (message.includes('INFO') || message.includes('Uvicorn')) { + logger.info(`[Coqui] ${message}`) + } else { + logger.error(`[Coqui] ${message}`) + } + } + }) + + this.process.on('error', (error) => { + logger.error('Failed to start Coqui TTS server:', error) + this.status.running = false + this.status.error = error.message + }) + + this.process.on('exit', (code, signal) => { + logger.info(`Coqui TTS server exited with code ${code}, signal ${signal}`) + this.status.running = false + this.process = null + + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval) + this.healthCheckInterval = null + } + }) + + await this.waitForReady() + this.startHealthCheck() + } + + private async waitForReady(maxAttempts = 120, delayMs = 2000): Promise { + for (let i = 0; i < maxAttempts; i++) { + if (!this.process) { + throw new Error('Coqui TTS server process exited unexpectedly') + } + + try { + const response = await 
fetch(`${this.getBaseUrl()}/health`, { + signal: AbortSignal.timeout(5000) + }) + + if (response.ok) { + const data = await response.json() as { + device?: string + cuda_available?: boolean + model_name?: string + } + this.status.running = true + this.status.device = data.device || null + this.status.cudaAvailable = data.cuda_available || false + this.status.model = data.model_name || COQUI_MODEL + this.status.error = null + logger.info(`Coqui TTS server is ready (device: ${data.device}, model: ${data.model_name})`) + return + } + } catch { + if (i % 10 === 0) { + logger.debug(`Waiting for Coqui TTS server... attempt ${i + 1}/${maxAttempts}`) + } + } + + await new Promise(resolve => setTimeout(resolve, delayMs)) + } + + throw new Error('Coqui TTS server failed to start within timeout') + } + + private startHealthCheck(): void { + this.healthCheckInterval = setInterval(async () => { + try { + const response = await fetch(`${this.getBaseUrl()}/health`, { + signal: AbortSignal.timeout(5000) + }) + + if (response.ok) { + const data = await response.json() as { + device?: string + cuda_available?: boolean + model_name?: string + } + this.status.running = true + this.status.device = data.device || null + this.status.cudaAvailable = data.cuda_available || false + this.status.error = null + } else { + this.status.running = false + this.status.error = 'Health check failed' + } + } catch (error) { + this.status.running = false + this.status.error = error instanceof Error ? 
error.message : 'Health check failed' + } + }, 30000) + } + + async stop(): Promise { + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval) + this.healthCheckInterval = null + } + + if (!this.process) { + return + } + + logger.info('Stopping Coqui TTS server...') + + return new Promise((resolve) => { + const timeout = setTimeout(() => { + logger.warn('Coqui TTS server did not exit gracefully, killing...') + this.process?.kill('SIGKILL') + resolve() + }, 5000) + + this.process!.once('exit', () => { + clearTimeout(timeout) + this.process = null + this.status.running = false + logger.info('Coqui TTS server stopped') + resolve() + }) + + this.process!.kill('SIGTERM') + }) + } + + async synthesize(text: string, options: { + voice?: string + speed?: number + } = {}): Promise { + if (!this.status.running) { + throw new Error('Coqui TTS server is not running') + } + + const response = await fetch(`${this.getBaseUrl()}/synthesize`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + text, + voice: options.voice || 'default', + speed: options.speed ?? 
1.0 + }) + }) + + if (!response.ok) { + const error = await response.text() + throw new Error(`Synthesis failed: ${error}`) + } + + return Buffer.from(await response.arrayBuffer()) + } + + async getVoices(): Promise<{ + voices: string[] + voiceDetails: CoquiVoice[] + }> { + if (!this.status.running) { + return { + voices: ['default'], + voiceDetails: [{ + id: 'default', + name: 'Jenny', + description: 'Default Jenny voice' + }] + } + } + + try { + const response = await fetch(`${this.getBaseUrl()}/voices`) + if (response.ok) { + const data = await response.json() as { + voices: string[] + voice_details: CoquiVoice[] + } + return { + voices: data.voices, + voiceDetails: data.voice_details + } + } + } catch { + logger.warn('Failed to fetch voices from Coqui TTS server') + } + + return { + voices: ['default'], + voiceDetails: [{ + id: 'default', + name: 'Jenny', + description: 'Default Jenny voice' + }] + } + } + + async getModels(): Promise<{ + models: Array<{ id: string; name: string; description: string }> + currentModel: string + }> { + if (!this.status.running) { + return { + models: [{ + id: 'tts_models/en/jenny/jenny', + name: 'Jenny', + description: 'High-quality English female voice (recommended)' + }], + currentModel: COQUI_MODEL + } + } + + try { + const response = await fetch(`${this.getBaseUrl()}/models`) + if (response.ok) { + const data = await response.json() as { + models: Array<{ id: string; name: string; description: string }> + current_model: string + } + return { + models: data.models, + currentModel: data.current_model + } + } + } catch { + logger.warn('Failed to fetch models from Coqui TTS server') + } + + return { + models: [{ + id: 'tts_models/en/jenny/jenny', + name: 'Jenny', + description: 'High-quality English female voice (recommended)' + }], + currentModel: COQUI_MODEL + } + } +} + +export const coquiServerManager = new CoquiServerManager() diff --git a/backend/src/services/file-operations.ts b/backend/src/services/file-operations.ts 
index 406fd937..b0cd9829 100644 --- a/backend/src/services/file-operations.ts +++ b/backend/src/services/file-operations.ts @@ -1,7 +1,7 @@ import { promises as fs } from 'fs' import path from 'path' import { logger } from '../utils/logger' -import { getWorkspacePath, getReposPath } from '../../../shared/src/constants' +import { getReposPath } from '@opencode-manager/shared/config/env' export async function readFileContent(filePath: string): Promise { try { diff --git a/backend/src/services/files.ts b/backend/src/services/files.ts index 5a9bdb53..ac4b9a12 100644 --- a/backend/src/services/files.ts +++ b/backend/src/services/files.ts @@ -1,5 +1,7 @@ import fs from 'fs/promises' import path from 'path' +import { createReadStream } from 'fs' +import { createInterface } from 'readline' import { logger } from '../utils/logger' import { @@ -11,8 +13,9 @@ import { getFileStats, listDirectory } from './file-operations' -import { FILE_LIMITS, ALLOWED_MIME_TYPES } from '../../../shared/src/constants' -import { getReposPath } from '../../../shared/src/constants' +import { getReposPath, FILE_LIMITS } from '@opencode-manager/shared/config/env' +import { ALLOWED_MIME_TYPES } from '@opencode-manager/shared' +import type { ChunkedFileInfo, PatchOperation } from '@opencode-manager/shared' const SHARED_WORKSPACE_BASE = getReposPath() @@ -25,6 +28,7 @@ interface FileInfo { content?: string children?: FileInfo[] lastModified: Date + workspaceRoot?: string } interface FileUploadResult { @@ -97,6 +101,7 @@ export async function getFile(userPath: string): Promise { return a.name.localeCompare(b.name) }), lastModified: stats.lastModified, + workspaceRoot: SHARED_WORKSPACE_BASE, } } else { // It's a file - get content @@ -134,7 +139,7 @@ export async function getFile(userPath: string): Promise { } } -export async function uploadFile(userPath: string, file: File): Promise { +export async function uploadFile(userPath: string, file: File, relativePath?: string): Promise { if (file.size > 
FILE_LIMITS.MAX_UPLOAD_SIZE_BYTES) { throw new Error('File too large') } @@ -144,17 +149,25 @@ export async function uploadFile(userPath: string, file: File): Promise = { @@ -244,6 +258,7 @@ function getMimeType(filePath: string, content: Uint8Array): string { '.jsx': 'text/javascript', '.tsx': 'text/typescript', '.json': 'application/json', + '.jsonc': 'application/json', '.xml': 'application/xml', '.png': 'image/png', '.jpg': 'image/jpeg', @@ -255,4 +270,123 @@ function getMimeType(filePath: string, content: Uint8Array): string { } return mimeTypes[ext] || 'text/plain' +} + +async function countFileLines(filePath: string): Promise { + return new Promise((resolve, reject) => { + let lineCount = 0 + const stream = createReadStream(filePath, { encoding: 'utf8' }) + const rl = createInterface({ input: stream, crlfDelay: Infinity }) + + rl.on('line', () => { lineCount++ }) + rl.on('close', () => resolve(lineCount)) + rl.on('error', reject) + }) +} + +async function readFileLines(filePath: string, startLine: number, endLine: number): Promise { + return new Promise((resolve, reject) => { + const lines: string[] = [] + let currentLine = 0 + const stream = createReadStream(filePath, { encoding: 'utf8' }) + const rl = createInterface({ input: stream, crlfDelay: Infinity }) + + rl.on('line', (line) => { + if (currentLine >= startLine && currentLine < endLine) { + lines.push(line) + } + currentLine++ + if (currentLine >= endLine) { + rl.close() + stream.destroy() + } + }) + rl.on('close', () => resolve(lines)) + rl.on('error', reject) + }) +} + +export async function getFileRange(userPath: string, startLine: number, endLine: number): Promise { + const validatedPath = validatePath(userPath) + logger.info(`Getting file range for path: ${userPath} lines ${startLine}-${endLine}`) + + const exists = await fileExists(validatedPath) + if (!exists) { + throw { message: 'File does not exist', statusCode: 404 } + } + + const stats = await getFileStats(validatedPath) + if 
(stats.isDirectory) { + throw { message: 'Path is a directory', statusCode: 400 } + } + + const totalLines = await countFileLines(validatedPath) + const clampedEnd = Math.min(endLine, totalLines) + const lines = await readFileLines(validatedPath, startLine, clampedEnd) + const mimeType = getMimeType(validatedPath, new Uint8Array()) + + return { + name: path.basename(validatedPath), + path: userPath, + isDirectory: false as const, + size: stats.size, + mimeType, + lines, + totalLines, + startLine, + endLine: clampedEnd, + hasMore: clampedEnd < totalLines, + lastModified: stats.lastModified, + } +} + +export async function getFileTotalLines(userPath: string): Promise { + const validatedPath = validatePath(userPath) + const exists = await fileExists(validatedPath) + if (!exists) { + throw { message: 'File does not exist', statusCode: 404 } + } + return countFileLines(validatedPath) +} + +export async function applyFilePatches(userPath: string, patches: PatchOperation[]): Promise<{ success: boolean; totalLines: number }> { + const validatedPath = validatePath(userPath) + logger.info(`Applying ${patches.length} patches to: ${userPath}`) + + const exists = await fileExists(validatedPath) + if (!exists) { + throw { message: 'File does not exist', statusCode: 404 } + } + + const content = await fs.readFile(validatedPath, 'utf8') + const lines = content.split('\n') + + const sortedPatches = [...patches].sort((a, b) => b.startLine - a.startLine) + + for (const patch of sortedPatches) { + const { type, startLine, endLine, content: patchContent } = patch + + switch (type) { + case 'replace': { + const end = endLine ?? startLine + 1 + const newLines = patchContent?.split('\n') ?? [] + lines.splice(startLine, end - startLine, ...newLines) + break + } + case 'insert': { + const newLines = patchContent?.split('\n') ?? [] + lines.splice(startLine, 0, ...newLines) + break + } + case 'delete': { + const end = endLine ?? 
startLine + 1 + lines.splice(startLine, end - startLine) + break + } + } + } + + await fs.writeFile(validatedPath, lines.join('\n'), 'utf8') + + return { success: true, totalLines: lines.length } } \ No newline at end of file diff --git a/backend/src/services/git-operations.ts b/backend/src/services/git-operations.ts new file mode 100644 index 00000000..dd5c8846 --- /dev/null +++ b/backend/src/services/git-operations.ts @@ -0,0 +1,244 @@ +import { executeCommand } from '../utils/process' +import { logger } from '../utils/logger' +import { SettingsService } from './settings' +import type { Database } from 'bun:sqlite' +import path from 'path' +import { createGitHubGitEnv, createNoPromptGitEnv } from '../utils/git-auth' + +async function hasCommits(repoPath: string): Promise { + try { + await executeCommand(['git', '-C', repoPath, 'rev-parse', 'HEAD'], { silent: true }) + return true + } catch { + return false + } +} + +function getGitEnvironment(database: Database): Record { + try { + const settingsService = new SettingsService(database) + const settings = settingsService.getSettings('default') + const gitToken = settings.preferences.gitToken + + if (gitToken) { + return createGitHubGitEnv(gitToken) + } + + return createNoPromptGitEnv() + } catch (error) { + logger.warn('Failed to get git token from settings:', error) + return createNoPromptGitEnv() + } +} + +export type GitFileStatusType = 'modified' | 'added' | 'deleted' | 'renamed' | 'untracked' | 'copied' + +export interface GitFileStatus { + path: string + status: GitFileStatusType + staged: boolean + oldPath?: string +} + +export interface GitStatusResponse { + branch: string + ahead: number + behind: number + files: GitFileStatus[] + hasChanges: boolean +} + +export interface FileDiffResponse { + path: string + status: GitFileStatusType + diff: string | null + additions: number + deletions: number + isBinary: boolean +} + +function parseStatusCode(code: string): GitFileStatusType { + switch (code) { + case 
'M': return 'modified' + case 'A': return 'added' + case 'D': return 'deleted' + case 'R': return 'renamed' + case 'C': return 'copied' + case '?': return 'untracked' + default: return 'modified' + } +} + +function parsePorcelainV2(output: string): { branch: string; ahead: number; behind: number; files: GitFileStatus[] } { + const lines = output.split('\n').filter(line => line.trim()) + let branch = 'HEAD' + let ahead = 0 + let behind = 0 + const files: GitFileStatus[] = [] + + for (const line of lines) { + if (line.startsWith('# branch.head ')) { + branch = line.replace('# branch.head ', '') + } else if (line.startsWith('# branch.ab ')) { + const match = line.match(/# branch\.ab \+(\d+) -(\d+)/) + if (match && match[1] && match[2]) { + ahead = parseInt(match[1], 10) + behind = parseInt(match[2], 10) + } + } else if (line.startsWith('1 ') || line.startsWith('2 ')) { + const parts = line.split(' ') + const xy = parts[1] + if (!xy || xy.length < 2) continue + const stagedStatus = xy[0] as string + const unstagedStatus = xy[1] as string + + if (line.startsWith('2 ')) { + const pathParts = parts.slice(8).join(' ').split('\t') + const filePath = pathParts[1] || pathParts[0] || '' + const oldPath = pathParts[0] || '' + + if (stagedStatus !== '.') { + files.push({ + path: filePath, + status: parseStatusCode(stagedStatus), + staged: true, + oldPath: stagedStatus === 'R' || stagedStatus === 'C' ? oldPath : undefined + }) + } + if (unstagedStatus !== '.') { + files.push({ + path: filePath, + status: parseStatusCode(unstagedStatus), + staged: false + }) + } + } else { + const filePath = parts.slice(8).join(' ') || '' + + if (stagedStatus !== '.') { + files.push({ + path: filePath, + status: parseStatusCode(stagedStatus), + staged: true + }) + } + if (unstagedStatus !== '.') { + files.push({ + path: filePath, + status: parseStatusCode(unstagedStatus), + staged: false + }) + } + } + } else if (line.startsWith('? 
')) { + const filePath = line.substring(2) + files.push({ + path: filePath, + status: 'untracked', + staged: false + }) + } + } + + return { branch, ahead, behind, files } +} + +export async function getGitStatus(repoPath: string, database?: Database): Promise { + try { + const fullPath = path.resolve(repoPath) + const env = database ? getGitEnvironment(database) : undefined + const output = await executeCommand(['git', '-C', fullPath, 'status', '--porcelain=v2', '--branch'], { env }) + const { branch, ahead, behind, files } = parsePorcelainV2(output) + + return { + branch, + ahead, + behind, + files, + hasChanges: files.length > 0 + } + } catch (error: any) { + logger.error(`Failed to get git status for ${repoPath}:`, error) + throw new Error(`Failed to get git status: ${error.message}`) + } +} + +export async function getFileDiff(repoPath: string, filePath: string, database?: Database): Promise { + try { + const fullRepoPath = path.resolve(repoPath) + const status = await getGitStatus(repoPath, database) + const fileStatus = status.files.find(f => f.path === filePath) + + if (!fileStatus) { + return { + path: filePath, + status: 'modified', + diff: null, + additions: 0, + deletions: 0, + isBinary: false + } + } + + let diff: string | null = null + let additions = 0 + let deletions = 0 + let isBinary = false + + const env = database ? getGitEnvironment(database) : undefined + + if (fileStatus.status === 'untracked') { + try { + const content = await executeCommand(['git', '-C', fullRepoPath, 'diff', '--no-index', '--', '/dev/null', filePath], { env }) + diff = content + } catch (error: any) { + if (error.message?.includes('exit code 1') || error.message?.includes('Command failed with code 1')) { + const output = error.message || '' + const diffMatch = output.match(/diff --git[\s\S]*/) + diff = diffMatch ? 
diffMatch[0] : `New file: ${filePath}` + } else { + diff = `New file: ${filePath}` + } + } + } else { + try { + const repoHasCommits = await hasCommits(fullRepoPath) + if (repoHasCommits) { + diff = await executeCommand(['git', '-C', fullRepoPath, 'diff', 'HEAD', '--', filePath], { env }) + } else { + diff = `New file (no commits yet): ${filePath}` + } + } catch (error: any) { + logger.warn(`Failed to get diff for ${filePath}:`, error.message) + diff = null + } + } + + if (diff) { + if (diff.includes('Binary files') || diff.includes('GIT binary patch')) { + isBinary = true + } else { + const lines = diff.split('\n') + for (const line of lines) { + if (line.startsWith('+') && !line.startsWith('+++')) { + additions++ + } else if (line.startsWith('-') && !line.startsWith('---')) { + deletions++ + } + } + } + } + + return { + path: filePath, + status: fileStatus.status, + diff, + additions, + deletions, + isBinary + } + } catch (error: any) { + logger.error(`Failed to get file diff for ${filePath}:`, error) + throw new Error(`Failed to get file diff: ${error.message}`) + } +} diff --git a/backend/src/services/global-sse.ts b/backend/src/services/global-sse.ts new file mode 100644 index 00000000..94de67f2 --- /dev/null +++ b/backend/src/services/global-sse.ts @@ -0,0 +1,162 @@ +import type { Database } from 'bun:sqlite' +import { EventSource } from 'eventsource' +import * as db from '../db/queries' +import { opencodeServerManager } from './opencode-single-server' +import { sendSessionCompleteNotification, sendPermissionRequestNotification } from './push' +import { logger } from '../utils/logger' + +interface SSEEvent { + type: string + properties: Record +} + +let globalEventSources: Map = new Map() +let database: Database | null = null +let isRunning = false + +async function getSessionTitle(directory: string, sessionId: string): Promise { + try { + const port = opencodeServerManager.getPort() + const response = await fetch( + 
`http://127.0.0.1:${port}/session/${sessionId}?directory=${encodeURIComponent(directory)}` + ) + if (response.ok) { + const session = await response.json() + return session.title + } + } catch (err) { + logger.warn(`Failed to fetch session title for ${sessionId}:`, err) + } + return undefined +} + +function getRepoIdByDirectory(directory: string): number | undefined { + if (!database) return undefined + const repos = db.listRepos(database) + const repo = repos.find((r) => r.fullPath === directory) + return repo?.id +} + +function handleSSEMessage(directory: string, event: SSEEvent): void { + if (!database) return + + const { type, properties: props } = event + + if (type === 'session.idle' && 'sessionID' in props) { + const sessionId = props.sessionID as string + const repoId = getRepoIdByDirectory(directory) + + logger.info(`[GlobalSSE] Session idle: ${sessionId} in ${directory}`) + + getSessionTitle(directory, sessionId).then((title) => { + sendSessionCompleteNotification(database!, sessionId, repoId?.toString(), title) + .catch((err) => logger.warn('[GlobalSSE] Failed to send push notification:', err)) + }) + } + + if (type === 'permission.updated' && 'id' in props && 'sessionID' in props) { + const sessionId = props.sessionID as string + const toolName = (props.tool as string) || 'A tool' + const repoId = getRepoIdByDirectory(directory) + + logger.info(`[GlobalSSE] Permission requested: ${toolName} for session ${sessionId}`) + + sendPermissionRequestNotification(database!, sessionId, toolName, repoId?.toString()) + .catch((err) => logger.warn('[GlobalSSE] Failed to send permission push notification:', err)) + } +} + +function connectToRepo(directory: string): void { + if (globalEventSources.has(directory)) { + return + } + + const port = opencodeServerManager.getPort() + const url = `http://127.0.0.1:${port}/event?directory=${encodeURIComponent(directory)}` + + logger.info(`[GlobalSSE] Connecting to ${directory}`) + + const es = new EventSource(url) + 
globalEventSources.set(directory, es) + + es.onopen = () => { + logger.info(`[GlobalSSE] Connected to ${directory}`) + } + + es.onerror = (err) => { + logger.warn(`[GlobalSSE] Error for ${directory}:`, err) + globalEventSources.delete(directory) + + if (isRunning) { + setTimeout(() => { + if (isRunning && !globalEventSources.has(directory)) { + connectToRepo(directory) + } + }, 5000) + } + } + + es.onmessage = (e) => { + try { + const event = JSON.parse(e.data) as SSEEvent + handleSSEMessage(directory, event) + } catch (err) { + logger.warn('[GlobalSSE] Failed to parse SSE event:', err) + } + } +} + +function disconnectFromRepo(directory: string): void { + const es = globalEventSources.get(directory) + if (es) { + es.close() + globalEventSources.delete(directory) + logger.info(`[GlobalSSE] Disconnected from ${directory}`) + } +} + +function syncRepoConnections(): void { + if (!database) return + + const repos = db.listRepos(database) + const currentDirs = new Set(repos.map((r) => r.fullPath)) + + for (const [dir] of globalEventSources) { + if (!currentDirs.has(dir)) { + disconnectFromRepo(dir) + } + } + + for (const repo of repos) { + if (!globalEventSources.has(repo.fullPath)) { + connectToRepo(repo.fullPath) + } + } +} + +export function startGlobalSSEListener(db: Database): void { + if (isRunning) return + + database = db + isRunning = true + + logger.info('[GlobalSSE] Starting global SSE listener') + + syncRepoConnections() + + setInterval(() => { + if (isRunning) { + syncRepoConnections() + } + }, 30000) +} + +export function stopGlobalSSEListener(): void { + isRunning = false + + for (const [dir] of globalEventSources) { + disconnectFromRepo(dir) + } + + logger.info('[GlobalSSE] Stopped global SSE listener') +} diff --git a/backend/src/services/opencode-discovery.ts b/backend/src/services/opencode-discovery.ts new file mode 100644 index 00000000..05453d0c --- /dev/null +++ b/backend/src/services/opencode-discovery.ts @@ -0,0 +1,348 @@ +import { execSync } 
from 'child_process' +import { logger } from '../utils/logger' +import { EventEmitter } from 'events' + +export interface OpenCodeInstance { + port: number + pid: number + directory: string | null + version: string | null + healthy: boolean + sessions: SessionInfo[] +} + +export interface SessionInfo { + id: string + title: string + directory: string + createdAt: string + updatedAt: string +} + +export interface ProjectInfo { + path: string + name: string + sandboxes?: string[] +} + +class OpenCodeDiscoveryService extends EventEmitter { + private static instance: OpenCodeDiscoveryService + private instances: Map = new Map() + private healthCheckInterval: NodeJS.Timeout | null = null + private reconnectAttempts: Map = new Map() + private readonly MAX_RECONNECT_ATTEMPTS = 10 + private readonly BASE_RECONNECT_DELAY = 1000 + private readonly HEALTH_CHECK_INTERVAL = 5000 + + private constructor() { + super() + } + + static getInstance(): OpenCodeDiscoveryService { + if (!OpenCodeDiscoveryService.instance) { + OpenCodeDiscoveryService.instance = new OpenCodeDiscoveryService() + } + return OpenCodeDiscoveryService.instance + } + + async discoverInstances(): Promise { + const ports = await this.findOpencodePorts() + const newInstances: OpenCodeInstance[] = [] + + for (const { port, pid } of ports) { + try { + const healthy = await this.checkHealth(port) + if (healthy) { + const existingInstance = this.instances.get(port) + if (!existingInstance || !existingInstance.healthy) { + const instance = await this.getInstanceInfo(port, pid) + this.instances.set(port, instance) + newInstances.push(instance) + this.reconnectAttempts.delete(port) + logger.info(`Discovered OpenCode instance on port ${port} (dir: ${instance.directory || 'unknown'})`) + this.emit('instance.discovered', instance) + } + } + } catch (error) { + logger.debug(`Failed to connect to potential OpenCode on port ${port}:`, error) + } + } + + for (const [port, instance] of this.instances) { + if (!ports.find(p => 
p.port === port)) { + this.instances.delete(port) + logger.info(`OpenCode instance on port ${port} is no longer available`) + this.emit('instance.lost', instance) + } + } + + return Array.from(this.instances.values()) + } + + private async findOpencodePorts(): Promise> { + try { + const output = execSync( + `lsof -i -P -n | grep -E "opencode.*LISTEN" | awk '{print $2, $9}'`, + { encoding: 'utf8', timeout: 5000 } + ) + + const results: Array<{ port: number; pid: number }> = [] + const lines = output.trim().split('\n').filter(Boolean) + + for (const line of lines) { + const [pidStr, address] = line.split(' ') + if (!pidStr || !address) continue + const pid = parseInt(pidStr) + const portMatch = address.match(/:(\d+)$/) + if (portMatch && portMatch[1] && pid) { + const port = parseInt(portMatch[1]) + if (!results.find(r => r.port === port)) { + results.push({ port, pid }) + } + } + } + + return results + } catch (error) { + logger.debug('Failed to find OpenCode ports via lsof:', error) + return [] + } + } + + private async checkHealth(port: number): Promise { + try { + const response = await fetch(`http://127.0.0.1:${port}/doc`, { + signal: AbortSignal.timeout(3000) + }) + return response.ok + } catch { + return false + } + } + + private async getInstanceInfo(port: number, pid: number): Promise { + let version: string | null = null + let directory: string | null = null + let sessions: SessionInfo[] = [] + + try { + const healthResponse = await fetch(`http://127.0.0.1:${port}/global/health`, { + signal: AbortSignal.timeout(3000) + }) + if (healthResponse.ok) { + const health = await healthResponse.json() as { version?: string } + version = health.version || null + } + } catch (error) { + logger.debug(`Failed to get version from port ${port}:`, error) + } + + try { + const projectResponse = await fetch(`http://127.0.0.1:${port}/project/current`, { + signal: AbortSignal.timeout(3000) + }) + if (projectResponse.ok) { + const project = await projectResponse.json() as { 
path?: string } + directory = project.path || null + } + } catch (error) { + logger.debug(`Failed to get current project from port ${port}:`, error) + } + + try { + const sessionsResponse = await fetch(`http://127.0.0.1:${port}/session`, { + signal: AbortSignal.timeout(5000) + }) + if (sessionsResponse.ok) { + const sessionsData = await sessionsResponse.json() as Array<{ + id: string + title?: string + directory?: string + createdAt?: string + updatedAt?: string + }> + sessions = sessionsData.map(s => ({ + id: s.id, + title: s.title || 'Untitled', + directory: s.directory || directory || '', + createdAt: s.createdAt || new Date().toISOString(), + updatedAt: s.updatedAt || new Date().toISOString() + })) + + if (!directory && sessions.length > 0) { + const firstSession = sessions[0] + if (firstSession && firstSession.directory) { + directory = firstSession.directory + } + } + } + } catch (error) { + logger.debug(`Failed to get sessions from port ${port}:`, error) + } + + return { + port, + pid, + directory, + version, + healthy: true, + sessions + } + } + + async getAllProjects(): Promise { + const projects: Map = new Map() + + for (const instance of this.instances.values()) { + if (instance.directory) { + projects.set(instance.directory, { + path: instance.directory, + name: instance.directory.split('/').pop() || instance.directory + }) + } + + for (const session of instance.sessions) { + if (session.directory && !projects.has(session.directory)) { + projects.set(session.directory, { + path: session.directory, + name: session.directory.split('/').pop() || session.directory + }) + } + } + } + + return Array.from(projects.values()) + } + + getInstances(): OpenCodeInstance[] { + return Array.from(this.instances.values()) + } + + getInstanceByPort(port: number): OpenCodeInstance | undefined { + return this.instances.get(port) + } + + getInstanceByDirectory(directory: string): OpenCodeInstance | undefined { + for (const instance of this.instances.values()) { + if 
(instance.directory === directory) { + return instance + } + for (const session of instance.sessions) { + if (session.directory === directory) { + return instance + } + } + } + return undefined + } + + getPrimaryInstance(): OpenCodeInstance | undefined { + const instances = Array.from(this.instances.values()) + return instances.find(i => i.healthy) || instances[0] + } + + startHealthMonitor(): void { + if (this.healthCheckInterval) { + return + } + + logger.info('Starting OpenCode instance health monitor') + + this.discoverInstances().catch(err => { + logger.error('Initial instance discovery failed:', err) + }) + + this.healthCheckInterval = setInterval(async () => { + try { + await this.discoverInstances() + } catch (error) { + logger.error('Health check failed:', error) + } + }, this.HEALTH_CHECK_INTERVAL) + } + + stopHealthMonitor(): void { + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval) + this.healthCheckInterval = null + logger.info('Stopped OpenCode instance health monitor') + } + } + + async waitForAnyInstance(timeoutMs: number = 30000): Promise { + const start = Date.now() + + while (Date.now() - start < timeoutMs) { + await this.discoverInstances() + const instance = this.getPrimaryInstance() + if (instance) { + return instance + } + await new Promise(r => setTimeout(r, 1000)) + } + + return null + } + + async fetchProjectsFromOpenCode(port: number): Promise { + try { + const response = await fetch(`http://127.0.0.1:${port}/project`, { + signal: AbortSignal.timeout(5000) + }) + if (!response.ok) { + return [] + } + + const projects = await response.json() as Array<{ + id: string + worktree: string + vcs?: string + sandboxes?: string[] + }> + + const result: ProjectInfo[] = [] + for (const project of projects) { + if (project.id === 'global' || !project.worktree || project.worktree === '/') { + continue + } + + if (project.worktree.startsWith('/private/tmp/') || project.worktree.startsWith('/tmp/')) { + continue + } + + 
result.push({ + path: project.worktree, + name: project.worktree.split('/').pop() || project.worktree, + sandboxes: project.sandboxes + }) + + if (project.sandboxes && project.sandboxes.length > 0) { + for (const sandbox of project.sandboxes) { + if (!sandbox.startsWith('/private/tmp/') && !sandbox.startsWith('/tmp/')) { + result.push({ + path: sandbox, + name: sandbox.split('/').pop() || sandbox + }) + } + } + } + } + + return result + } catch (error) { + logger.debug(`Failed to fetch projects from port ${port}:`, error) + return [] + } + } + + async getAllProjectsFromOpenCode(): Promise { + await this.discoverInstances() + const instance = this.getPrimaryInstance() + if (!instance) { + return [] + } + return this.fetchProjectsFromOpenCode(instance.port) + } +} + +export const openCodeDiscoveryService = OpenCodeDiscoveryService.getInstance() diff --git a/backend/src/services/opencode-sdk-client.ts b/backend/src/services/opencode-sdk-client.ts new file mode 100644 index 00000000..327d7b00 --- /dev/null +++ b/backend/src/services/opencode-sdk-client.ts @@ -0,0 +1,171 @@ +import { createOpencodeClient, type OpencodeClient } from '@opencode-ai/sdk/client' +import { logger } from '../utils/logger' + +export interface ProjectInfo { + id: string + path: string + name: string + vcs?: string + createdAt?: number + updatedAt?: number +} + +export interface SessionInfo { + id: string + title?: string + directory?: string + createdAt?: string + updatedAt?: string +} + +class OpenCodeSDKClient { + private static instance: OpenCodeSDKClient + private client: OpencodeClient | null = null + private baseUrl: string = '' + + private constructor() {} + + static getInstance(): OpenCodeSDKClient { + if (!OpenCodeSDKClient.instance) { + OpenCodeSDKClient.instance = new OpenCodeSDKClient() + } + return OpenCodeSDKClient.instance + } + + configure(port: number, host: string = '127.0.0.1'): void { + this.baseUrl = `http://${host}:${port}` + this.client = createOpencodeClient({ + baseUrl: 
this.baseUrl, + }) + logger.info(`OpenCode SDK client configured for ${this.baseUrl}`) + } + + isConfigured(): boolean { + return this.client !== null + } + + getBaseUrl(): string { + return this.baseUrl + } + + async listProjects(): Promise { + if (!this.client) { + logger.warn('OpenCode SDK client not configured') + return [] + } + + try { + const response = await this.client.project.list() + if (!response.data) { + return [] + } + + const projects: ProjectInfo[] = [] + for (const project of response.data) { + if (project.id === 'global' || !project.worktree || project.worktree === '/') { + continue + } + + if (project.worktree.startsWith('/private/tmp/') || project.worktree.startsWith('/tmp/')) { + continue + } + + projects.push({ + id: project.id, + path: project.worktree, + name: project.worktree.split('/').pop() || project.worktree, + vcs: project.vcs || undefined, + createdAt: project.time?.created, + updatedAt: project.time?.initialized, + }) + } + + return projects + } catch (error) { + logger.error('Failed to list projects via SDK:', error) + return [] + } + } + + async listSessions(): Promise { + if (!this.client) { + logger.warn('OpenCode SDK client not configured') + return [] + } + + try { + const response = await this.client.session.list() + if (!response.data) { + return [] + } + + return response.data.map(session => ({ + id: session.id, + title: session.title || undefined, + directory: session.directory || undefined, + createdAt: session.time?.created ? new Date(session.time.created).toISOString() : undefined, + updatedAt: session.time?.updated ? 
new Date(session.time.updated).toISOString() : undefined, + })) + } catch (error) { + logger.error('Failed to list sessions via SDK:', error) + return [] + } + } + + async getVersion(): Promise { + if (!this.client) { + return null + } + + try { + const response = await fetch(`${this.baseUrl}/global/health`, { + signal: AbortSignal.timeout(3000) + }) + if (response.ok) { + const data = await response.json() as { version?: string } + return data.version || null + } + } catch (error) { + logger.debug('Failed to get version:', error) + } + return null + } + + async checkHealth(): Promise { + if (!this.client) { + return false + } + + try { + const response = await fetch(`${this.baseUrl}/doc`, { + signal: AbortSignal.timeout(3000) + }) + return response.ok + } catch { + return false + } + } + + async getCurrentProject(): Promise<{ path: string } | null> { + if (!this.client) { + return null + } + + try { + const response = await this.client.project.current() + if (response.data?.worktree) { + return { path: response.data.worktree } + } + } catch (error) { + logger.debug('Failed to get current project:', error) + } + return null + } + + async getAllProjectPaths(): Promise { + const projects = await this.listProjects() + return projects.map(p => p.path) + } +} + +export const opencodeSdkClient = OpenCodeSDKClient.getInstance() diff --git a/backend/src/services/opencode-single-server.ts b/backend/src/services/opencode-single-server.ts index e53eff81..16a95e1d 100644 --- a/backend/src/services/opencode-single-server.ts +++ b/backend/src/services/opencode-single-server.ts @@ -1,21 +1,56 @@ -import { spawn } from 'child_process' -import { logger } from '../utils/logger' -import { getWorkspacePath } from '../../../shared/src/constants' -import { execSync } from 'child_process' -import { ENV } from '../config' +import { spawn, execSync } from 'child_process' import path from 'path' +import { logger } from '../utils/logger' +import { createGitHubGitEnv, createNoPromptGitEnv } 
from '../utils/git-auth' +import { SettingsService } from './settings' +import { getWorkspacePath, getOpenCodeConfigFilePath, ENV } from '@opencode-manager/shared/config/env' +import type { Database } from 'bun:sqlite' +import { openCodeDiscoveryService, type OpenCodeInstance } from './opencode-discovery' +import { opencodeSdkClient } from './opencode-sdk-client' -const OPENCODE_SERVER_PORT = ENV.OPENCODE_SERVER_PORT +const OPENCODE_SERVER_PORT = ENV.OPENCODE.PORT const OPENCODE_SERVER_DIRECTORY = getWorkspacePath() +const OPENCODE_CONFIG_PATH = getOpenCodeConfigFilePath() +const MIN_OPENCODE_VERSION = '1.0.137' +const MAX_STDERR_SIZE = 10240 +const CLIENT_MODE = process.env.OPENCODE_CLIENT_MODE === 'true' +const HEALTH_CHECK_INTERVAL = 5000 +const MAX_RECONNECT_DELAY = 30000 +const BASE_RECONNECT_DELAY = 1000 + +function compareVersions(v1: string, v2: string): number { + const parts1 = v1.split('.').map(Number) + const parts2 = v2.split('.').map(Number) + + for (let i = 0; i < Math.max(parts1.length, parts2.length); i++) { + const p1 = parts1[i] || 0 + const p2 = parts2[i] || 0 + if (p1 > p2) return 1 + if (p1 < p2) return -1 + } + return 0 +} class OpenCodeServerManager { private static instance: OpenCodeServerManager - private serverProcess: any = null + private serverProcess: ReturnType | null = null private serverPid: number | null = null private isHealthy: boolean = false + private db: Database | null = null + private version: string | null = null + private lastStartupError: string | null = null + private connectedDirectory: string | null = null + private healthCheckInterval: NodeJS.Timeout | null = null + private reconnectAttempts: number = 0 + private activePort: number = OPENCODE_SERVER_PORT + private isReconnecting: boolean = false private constructor() {} + setDatabase(db: Database) { + this.db = db + } + static getInstance(): OpenCodeServerManager { if (!OpenCodeServerManager.instance) { OpenCodeServerManager.instance = new OpenCodeServerManager() @@ 
-29,7 +64,57 @@ class OpenCodeServerManager { return } - const isDevelopment = process.env.NODE_ENV !== 'production' + if (CLIENT_MODE) { + logger.info(`Client mode: discovering OpenCode instances...`) + + const instance = await this.discoverAndConnect() + if (instance) { + this.isHealthy = true + this.activePort = instance.port + this.version = instance.version + this.connectedDirectory = instance.directory + this.reconnectAttempts = 0 + opencodeSdkClient.configure(this.activePort) + logger.info(`Connected to OpenCode server v${this.version || 'unknown'} on port ${this.activePort}`) + if (this.connectedDirectory) { + logger.info(`OpenCode server directory: ${this.connectedDirectory}`) + } + this.startHealthMonitor() + return + } + + const configuredHealthy = await this.waitForHealth(10000) + if (configuredHealthy) { + this.isHealthy = true + this.activePort = OPENCODE_SERVER_PORT + opencodeSdkClient.configure(this.activePort) + await this.fetchVersion() + await this.fetchConnectedDirectory() + logger.info(`Connected to OpenCode server v${this.version || 'unknown'} on port ${this.activePort}`) + if (this.connectedDirectory) { + logger.info(`OpenCode server directory: ${this.connectedDirectory}`) + } + this.startHealthMonitor() + return + } + + logger.warn(`No OpenCode servers found. 
Will keep monitoring for instances...`) + this.startHealthMonitor() + return + } + + const isDevelopment = ENV.SERVER.NODE_ENV !== 'production' + + let gitToken = '' + if (this.db) { + try { + const settingsService = new SettingsService(this.db) + const settings = settingsService.getSettings('default') + gitToken = settings.preferences.gitToken || '' + } catch (error) { + logger.warn('Failed to get git token from settings:', error) + } + } const existingProcesses = await this.findProcessesByPort(OPENCODE_SERVER_PORT) if (existingProcesses.length > 0) { @@ -66,43 +151,85 @@ class OpenCodeServerManager { } } - logger.info(`Starting OpenCode server on port ${OPENCODE_SERVER_PORT} (${isDevelopment ? 'development' : 'production'} mode)`) logger.info(`OpenCode server working directory: ${OPENCODE_SERVER_DIRECTORY}`) + logger.info(`OpenCode XDG_CONFIG_HOME: ${path.join(OPENCODE_SERVER_DIRECTORY, '.config')}`) logger.info(`OpenCode will use ?directory= parameter for session isolation`) - - const hostname = isDevelopment ? '0.0.0.0' : '127.0.0.1' - + + const gitEnv = gitToken ? createGitHubGitEnv(gitToken) : createNoPromptGitEnv() + + let stderrOutput = '' + this.serverProcess = spawn( - 'opencode', - ['serve', '--port', OPENCODE_SERVER_PORT.toString(), '--hostname', hostname], + 'opencode', + ['serve', '--port', OPENCODE_SERVER_PORT.toString(), '--hostname', '127.0.0.1'], { cwd: OPENCODE_SERVER_DIRECTORY, detached: !isDevelopment, - stdio: isDevelopment ? 'inherit' : 'ignore', + stdio: isDevelopment ? 
'inherit' : ['ignore', 'pipe', 'pipe'], env: { ...process.env, - XDG_DATA_HOME: path.join(OPENCODE_SERVER_DIRECTORY, '.opencode/state') + ...gitEnv, + // Use system default XDG_DATA_HOME (~/.local/share) to share sessions with CLI + // Only override XDG_CONFIG_HOME for workspace-specific config + XDG_CONFIG_HOME: path.join(OPENCODE_SERVER_DIRECTORY, '.config'), + OPENCODE_CONFIG: OPENCODE_CONFIG_PATH, } } ) - if (!isDevelopment) { - this.serverProcess.unref() + if (!isDevelopment && this.serverProcess.stderr) { + this.serverProcess.stderr.on('data', (data) => { + stderrOutput += data.toString() + if (stderrOutput.length > MAX_STDERR_SIZE) { + stderrOutput = stderrOutput.slice(-MAX_STDERR_SIZE) + } + }) } - this.serverPid = this.serverProcess.pid + + this.serverProcess.on('exit', (code, signal) => { + if (code !== null && code !== 0) { + this.lastStartupError = `Server exited with code ${code}${stderrOutput ? `: ${stderrOutput.slice(-500)}` : ''}` + logger.error('OpenCode server process exited:', this.lastStartupError) + } else if (signal) { + this.lastStartupError = `Server terminated by signal ${signal}` + logger.error('OpenCode server process terminated:', this.lastStartupError) + } + }) + + this.serverPid = this.serverProcess.pid ?? null logger.info(`OpenCode server started with PID ${this.serverPid}`) const healthy = await this.waitForHealth(30000) if (!healthy) { + this.lastStartupError = `Server failed to become healthy after 30s${stderrOutput ? `. 
Last error: ${stderrOutput.slice(-500)}` : ''}` throw new Error('OpenCode server failed to become healthy') } - this.isHealthy = true - logger.info('OpenCode server is healthy') + this.isHealthy = true + this.activePort = OPENCODE_SERVER_PORT + opencodeSdkClient.configure(this.activePort) + logger.info('OpenCode server is healthy') + + await this.fetchVersion() + if (this.version) { + logger.info(`OpenCode version: ${this.version}`) + if (!this.isVersionSupported()) { + logger.warn(`OpenCode version ${this.version} is below minimum required version ${MIN_OPENCODE_VERSION}`) + logger.warn('Some features like MCP management may not work correctly') + } + } } async stop(): Promise { + this.stopHealthMonitor() + + if (CLIENT_MODE) { + logger.info('Client mode: not stopping external OpenCode server') + this.isHealthy = false + return + } + if (!this.serverPid) return logger.info('Stopping OpenCode server') @@ -125,13 +252,49 @@ class OpenCodeServerManager { this.isHealthy = false } + async restart(): Promise { + logger.info('Restarting OpenCode server') + await this.stop() + await new Promise(r => setTimeout(r, 1000)) + await this.start() + } + getPort(): number { return OPENCODE_SERVER_PORT } + getVersion(): string | null { + return this.version + } + + getMinVersion(): string { + return MIN_OPENCODE_VERSION + } + + isVersionSupported(): boolean { + if (!this.version) return false + return compareVersions(this.version, MIN_OPENCODE_VERSION) >= 0 + } + + getConnectedDirectory(): string | null { + return this.connectedDirectory + } + + isClientMode(): boolean { + return CLIENT_MODE + } + + getLastStartupError(): string | null { + return this.lastStartupError + } + + clearStartupError(): void { + this.lastStartupError = null + } + async checkHealth(): Promise { try { - const response = await fetch(`http://localhost:${OPENCODE_SERVER_PORT}/doc`, { + const response = await fetch(`http://127.0.0.1:${OPENCODE_SERVER_PORT}/doc`, { signal: AbortSignal.timeout(3000) }) return 
response.ok @@ -140,6 +303,51 @@ class OpenCodeServerManager { } } + async fetchVersion(): Promise { + try { + const result = execSync('opencode --version 2>&1', { encoding: 'utf8' }) + // Use a stricter regex to avoid matching IP addresses (e.g., 0.0.0.0) in debug output + // We look for a version number at the end of a line or standing alone + const lines = result.split('\n') + for (const line of lines) { + const match = line.match(/(?:^|\s|v)(\d+\.\d+\.\d+)(?:\s|$)/) + if (match && match[1]) { + // Verify it's not part of an IP address (heuristic: check if followed by another dot) + const fullMatch = match[0] + const index = line.indexOf(fullMatch) + const nextChar = line[index + fullMatch.length] + if (nextChar === '.') continue + + this.version = match[1] + return this.version + } + } + } catch (error) { + logger.warn('Failed to get OpenCode version:', error) + } + return null + } + + async fetchConnectedDirectory(): Promise { + if (!CLIENT_MODE) return null + + try { + const response = await fetch(`http://127.0.0.1:${OPENCODE_SERVER_PORT}/session`, { + signal: AbortSignal.timeout(5000) + }) + if (response.ok) { + const sessions = await response.json() as Array<{ directory?: string }> + if (sessions.length > 0 && sessions[0]?.directory) { + this.connectedDirectory = sessions[0].directory + return this.connectedDirectory + } + } + } catch (error) { + logger.warn('Failed to get OpenCode server directory:', error) + } + return null + } + private async waitForHealth(timeoutMs: number): Promise { const start = Date.now() while (Date.now() - start < timeoutMs) { @@ -159,6 +367,275 @@ class OpenCodeServerManager { return [] } } + + private async discoverAndConnect(): Promise { + const instances = await openCodeDiscoveryService.discoverInstances() + if (instances.length === 0) { + return null + } + + logger.info(`Found ${instances.length} OpenCode instance(s)`) + for (const instance of instances) { + logger.info(` - Port ${instance.port}: ${instance.directory || 
'unknown dir'} (${instance.sessions.length} sessions)`) + } + + return instances[0] || null + } + + private startHealthMonitor(): void { + if (this.healthCheckInterval) { + return + } + + logger.info('Starting OpenCode health monitor') + this.healthCheckInterval = setInterval(async () => { + await this.performHealthCheck() + }, HEALTH_CHECK_INTERVAL) + } + + private stopHealthMonitor(): void { + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval) + this.healthCheckInterval = null + } + } + + private async performHealthCheck(): Promise { + if (this.isReconnecting) { + return + } + + const healthy = await this.checkHealthOnPort(this.activePort) + + if (healthy && !this.isHealthy) { + logger.info(`OpenCode server on port ${this.activePort} is now healthy`) + this.isHealthy = true + this.reconnectAttempts = 0 + this.lastStartupError = null + await this.fetchVersionFromPort(this.activePort) + await this.fetchConnectedDirectoryFromPort(this.activePort) + } else if (!healthy && this.isHealthy) { + logger.warn(`OpenCode server on port ${this.activePort} became unhealthy`) + this.isHealthy = false + this.scheduleReconnect() + } else if (!healthy && !this.isHealthy) { + await this.tryDiscoverNewInstance() + } + } + + private async scheduleReconnect(): Promise { + if (this.isReconnecting) { + return + } + + this.isReconnecting = true + this.reconnectAttempts++ + + const delay = Math.min( + BASE_RECONNECT_DELAY * Math.pow(2, this.reconnectAttempts - 1), + MAX_RECONNECT_DELAY + ) + + logger.info(`Scheduling reconnect attempt ${this.reconnectAttempts} in ${delay}ms`) + + setTimeout(async () => { + await this.attemptReconnect() + this.isReconnecting = false + }, delay) + } + + private async attemptReconnect(): Promise { + const healthy = await this.checkHealthOnPort(this.activePort) + if (healthy) { + logger.info(`Reconnected to OpenCode server on port ${this.activePort}`) + this.isHealthy = true + this.reconnectAttempts = 0 + this.lastStartupError = null + 
return + } + + const instance = await this.discoverAndConnect() + if (instance) { + logger.info(`Found new OpenCode instance on port ${instance.port}`) + this.activePort = instance.port + this.version = instance.version + this.connectedDirectory = instance.directory + this.isHealthy = true + this.reconnectAttempts = 0 + this.lastStartupError = null + opencodeSdkClient.configure(this.activePort) + return + } + + this.lastStartupError = `Failed to reconnect after ${this.reconnectAttempts} attempts` + logger.warn(this.lastStartupError) + } + + private async tryDiscoverNewInstance(): Promise { + const instance = await this.discoverAndConnect() + if (instance) { + logger.info(`Discovered new OpenCode instance on port ${instance.port}`) + this.activePort = instance.port + this.version = instance.version + this.connectedDirectory = instance.directory + this.isHealthy = true + this.reconnectAttempts = 0 + this.lastStartupError = null + opencodeSdkClient.configure(this.activePort) + } + } + + private async checkHealthOnPort(port: number): Promise { + try { + const response = await fetch(`http://127.0.0.1:${port}/doc`, { + signal: AbortSignal.timeout(3000) + }) + return response.ok + } catch { + return false + } + } + + private async fetchVersionFromPort(port: number): Promise { + if (opencodeSdkClient.isConfigured() && port === this.activePort) { + try { + const version = await opencodeSdkClient.getVersion() + if (version) { + this.version = version + return this.version + } + } catch (error) { + logger.debug('SDK getVersion failed, falling back to direct API:', error) + } + } + + try { + const response = await fetch(`http://127.0.0.1:${port}/global/health`, { + signal: AbortSignal.timeout(3000) + }) + if (response.ok) { + const health = await response.json() as { version?: string } + if (health.version) { + this.version = health.version + return this.version + } + } + } catch (error) { + logger.debug(`Failed to get version from port ${port}:`, error) + } + return await 
this.fetchVersion() + } + + private async fetchConnectedDirectoryFromPort(port: number): Promise { + try { + const projectResponse = await fetch(`http://127.0.0.1:${port}/project/current`, { + signal: AbortSignal.timeout(3000) + }) + if (projectResponse.ok) { + const project = await projectResponse.json() as { path?: string } + if (project.path) { + this.connectedDirectory = project.path + return this.connectedDirectory + } + } + } catch { + } + + try { + const response = await fetch(`http://127.0.0.1:${port}/session`, { + signal: AbortSignal.timeout(5000) + }) + if (response.ok) { + const sessions = await response.json() as Array<{ directory?: string }> + if (sessions.length > 0 && sessions[0]?.directory) { + this.connectedDirectory = sessions[0].directory + return this.connectedDirectory + } + } + } catch (error) { + logger.warn('Failed to get OpenCode server directory:', error) + } + return null + } + + getDiscoveredInstances(): OpenCodeInstance[] { + return openCodeDiscoveryService.getInstances() + } + + async getAllProjects(): Promise> { + return openCodeDiscoveryService.getAllProjects() + } + + getActivePort(): number { + return this.activePort + } + + async fetchProjectsFromAPI(): Promise> { + if (!this.isHealthy) { + return [] + } + + if (opencodeSdkClient.isConfigured()) { + try { + const projects = await opencodeSdkClient.listProjects() + return projects.map(p => ({ + path: p.path, + name: p.name + })) + } catch (error) { + logger.warn('SDK client failed, falling back to direct API call:', error) + } + } + + try { + const response = await fetch(`http://127.0.0.1:${this.activePort}/project`, { + signal: AbortSignal.timeout(5000) + }) + if (!response.ok) { + return [] + } + + const projects = await response.json() as Array<{ + id: string + worktree: string + vcs?: string + sandboxes?: string[] + }> + + const result: Array<{ path: string; name: string; sandboxes?: string[] }> = [] + for (const project of projects) { + if (project.id === 'global' || 
!project.worktree || project.worktree === '/') { + continue + } + + if (project.worktree.startsWith('/private/tmp/') || project.worktree.startsWith('/tmp/')) { + continue + } + + result.push({ + path: project.worktree, + name: project.worktree.split('/').pop() || project.worktree, + sandboxes: project.sandboxes + }) + + if (project.sandboxes && project.sandboxes.length > 0) { + for (const sandbox of project.sandboxes) { + if (!sandbox.startsWith('/private/tmp/') && !sandbox.startsWith('/tmp/')) { + result.push({ + path: sandbox, + name: sandbox.split('/').pop() || sandbox + }) + } + } + } + } + + return result + } catch (error) { + logger.warn('Failed to fetch projects from OpenCode API:', error) + return [] + } + } } export const opencodeServerManager = OpenCodeServerManager.getInstance() diff --git a/backend/src/services/proxy.ts b/backend/src/services/proxy.ts index 3dc3b20d..645309c4 100644 --- a/backend/src/services/proxy.ts +++ b/backend/src/services/proxy.ts @@ -1,15 +1,43 @@ import { logger } from '../utils/logger' -import { ENV } from '../config' +import { ENV } from '@opencode-manager/shared/config/env' -const OPENCODE_SERVER_URL = `http://localhost:${ENV.OPENCODE_SERVER_PORT}` +function getOpenCodeServerUrl(): string { + return `http://127.0.0.1:${ENV.OPENCODE.PORT}` +} + +export async function patchOpenCodeConfig(config: Record): Promise { + const OPENCODE_SERVER_URL = getOpenCodeServerUrl() + try { + const response = await fetch(`${OPENCODE_SERVER_URL}/config`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(config), + }) + + if (response.ok) { + logger.info('Patched OpenCode config via API') + return true + } + + logger.error(`Failed to patch OpenCode config: ${response.status} ${response.statusText}`) + return false + } catch (error) { + logger.error('Failed to patch OpenCode config:', error) + return false + } +} export async function proxyRequest(request: Request) { + const OPENCODE_SERVER_URL = 
getOpenCodeServerUrl() const url = new URL(request.url) - const path = url.pathname + url.search - // Remove /api/opencode prefix before forwarding to OpenCode server - const cleanPath = path.replace(/^\/api\/opencode/, '') - const targetUrl = `${OPENCODE_SERVER_URL}${cleanPath}` + // Remove /api/opencode prefix from pathname before forwarding + const cleanPathname = url.pathname.replace(/^\/api\/opencode/, '') + const targetUrl = `${OPENCODE_SERVER_URL}${cleanPathname}${url.search}` + + if (url.pathname.includes('/permissions/')) { + logger.info(`Proxying permission request: ${url.pathname}${url.search} -> ${targetUrl}`) + } try { const headers: Record = {} @@ -38,10 +66,52 @@ export async function proxyRequest(request: Request) { headers: responseHeaders, }) } catch (error) { - logger.error(`Proxy request failed for ${path}:`, error) + logger.error(`Proxy request failed for ${url.pathname}${url.search}:`, error) + return new Response(JSON.stringify({ error: 'Proxy request failed' }), { + status: 502, + headers: { 'Content-Type': 'application/json' }, + }) + } +} + +export async function proxyToOpenCodeWithDirectory( + path: string, + method: string, + directory: string | undefined, + body?: string, + headers?: Record +): Promise { + const OPENCODE_SERVER_URL = getOpenCodeServerUrl() + const url = new URL(`${OPENCODE_SERVER_URL}${path}`) + + if (directory) { + url.searchParams.set('directory', directory) + } + + try { + const response = await fetch(url.toString(), { + method, + headers: headers || { 'Content-Type': 'application/json' }, + body, + }) + + const responseHeaders: Record = {} + response.headers.forEach((value, key) => { + if (!['connection', 'transfer-encoding'].includes(key.toLowerCase())) { + responseHeaders[key] = value + } + }) + + return new Response(response.body, { + status: response.status, + statusText: response.statusText, + headers: responseHeaders, + }) + } catch (error) { + logger.error(`Proxy to OpenCode failed for ${path}:`, error) 
return new Response(JSON.stringify({ error: 'Proxy request failed' }), { status: 502, headers: { 'Content-Type': 'application/json' }, }) } -} \ No newline at end of file +} diff --git a/backend/src/services/pty-worker.cjs b/backend/src/services/pty-worker.cjs new file mode 100755 index 00000000..540f7d7f --- /dev/null +++ b/backend/src/services/pty-worker.cjs @@ -0,0 +1,73 @@ +#!/usr/bin/env node +const pty = require('node-pty'); +const readline = require('readline'); + +const shell = process.env.PTY_SHELL || '/bin/bash'; +const cwd = process.env.PTY_CWD || process.env.HOME || '/tmp'; +const cols = parseInt(process.env.PTY_COLS || '80', 10); +const rows = parseInt(process.env.PTY_ROWS || '24', 10); + +let ptyProcess; + +try { + ptyProcess = pty.spawn(shell, [], { + name: 'xterm-256color', + cols, + rows, + cwd, + env: { + ...process.env, + TERM: 'xterm-256color', + COLORTERM: 'truecolor', + }, + }); + + console.log(JSON.stringify({ type: 'started', pid: ptyProcess.pid })); + + ptyProcess.onData((data) => { + // Escape any newlines in the data string itself to ensure one JSON object per line + console.log(JSON.stringify({ type: 'data', data })); + }); + + ptyProcess.onExit(({ exitCode, signal }) => { + process.stdout.write(JSON.stringify({ type: 'exit', exitCode, signal }) + '\n', () => { + process.exit(0); + }); + }); + + const rl = readline.createInterface({ + input: process.stdin, + terminal: false + }); + + rl.on('line', (line) => { + if (!line.trim()) return; + + try { + const msg = JSON.parse(line); + switch (msg.type) { + case 'input': + if (msg.data) ptyProcess.write(msg.data); + break; + case 'resize': + if (msg.cols && msg.rows) ptyProcess.resize(msg.cols, msg.rows); + break; + case 'kill': + ptyProcess.kill(); + process.exit(0); + break; + } + } catch (e) { + console.log(JSON.stringify({ type: 'error', error: e.message })); + } + }); + + rl.on('close', () => { + if (ptyProcess) ptyProcess.kill(); + process.exit(0); + }); + +} catch (error) { + 
console.log(JSON.stringify({ type: 'error', error: error.message })); + process.exit(1); +} diff --git a/backend/src/services/push.ts b/backend/src/services/push.ts new file mode 100644 index 00000000..37515f75 --- /dev/null +++ b/backend/src/services/push.ts @@ -0,0 +1,157 @@ +import webpush from 'web-push' +import { Database } from 'bun:sqlite' +import { logger } from '../utils/logger' + +const VAPID_PUBLIC_KEY = process.env.VAPID_PUBLIC_KEY || 'BEsTZT_8wnxMiqK2r8nwZc23zdrUJzoBsMMe51q2oM4y5S42_agpvOIGrCd7lTVh-UanS-D2SvzXLWW8-U6_IVE' +const VAPID_PRIVATE_KEY = process.env.VAPID_PRIVATE_KEY || 'rq6W9J-4vu4svUui3kBK6dzCF-dMzQXofjDUkDlXFaE' +const VAPID_EMAIL = process.env.VAPID_EMAIL || 'mailto:admin@opencode.ai' + +webpush.setVapidDetails(VAPID_EMAIL, VAPID_PUBLIC_KEY, VAPID_PRIVATE_KEY) + +export interface PushSubscription { + endpoint: string + keys: { + p256dh: string + auth: string + } +} + +export interface StoredSubscription { + id: number + endpoint: string + keys_p256dh: string + keys_auth: string + user_id: string + created_at: number +} + +export function initPushTable(db: Database): void { + db.run(` + CREATE TABLE IF NOT EXISTS push_subscriptions ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + endpoint TEXT NOT NULL UNIQUE, + keys_p256dh TEXT NOT NULL, + keys_auth TEXT NOT NULL, + user_id TEXT NOT NULL DEFAULT 'default', + created_at INTEGER NOT NULL + ); + CREATE INDEX IF NOT EXISTS idx_push_endpoint ON push_subscriptions(endpoint); + CREATE INDEX IF NOT EXISTS idx_push_user ON push_subscriptions(user_id); + `) + logger.info('Push subscriptions table initialized') +} + +export function getVapidPublicKey(): string { + return VAPID_PUBLIC_KEY +} + +export function saveSubscription(db: Database, subscription: PushSubscription, userId: string = 'default'): void { + const stmt = db.prepare(` + INSERT OR REPLACE INTO push_subscriptions (endpoint, keys_p256dh, keys_auth, user_id, created_at) + VALUES (?, ?, ?, ?, ?) 
+ `) + stmt.run(subscription.endpoint, subscription.keys.p256dh, subscription.keys.auth, userId, Date.now()) + logger.info('Push subscription saved', { endpoint: subscription.endpoint.slice(0, 50) }) +} + +export function removeSubscription(db: Database, endpoint: string): void { + const stmt = db.prepare('DELETE FROM push_subscriptions WHERE endpoint = ?') + stmt.run(endpoint) + logger.info('Push subscription removed', { endpoint: endpoint.slice(0, 50) }) +} + +export function getAllSubscriptions(db: Database, userId?: string): StoredSubscription[] { + if (userId) { + const stmt = db.prepare('SELECT * FROM push_subscriptions WHERE user_id = ?') + return stmt.all(userId) as StoredSubscription[] + } + const stmt = db.prepare('SELECT * FROM push_subscriptions') + return stmt.all() as StoredSubscription[] +} + +export interface PushPayload { + title: string + body: string + tag?: string + url?: string + sessionId?: string + repoId?: string + requireInteraction?: boolean +} + +export async function sendPushNotification( + db: Database, + payload: PushPayload, + userId?: string +): Promise<{ success: number; failed: number }> { + const subscriptions = getAllSubscriptions(db, userId) + let success = 0 + let failed = 0 + + const payloadStr = JSON.stringify(payload) + + for (const sub of subscriptions) { + const pushSubscription = { + endpoint: sub.endpoint, + keys: { + p256dh: sub.keys_p256dh, + auth: sub.keys_auth, + }, + } + + try { + await webpush.sendNotification(pushSubscription, payloadStr) + success++ + logger.debug('Push notification sent', { endpoint: sub.endpoint.slice(0, 50) }) + } catch (error: unknown) { + failed++ + const err = error as { statusCode?: number } + if (err.statusCode === 410 || err.statusCode === 404) { + removeSubscription(db, sub.endpoint) + logger.info('Removed expired subscription', { endpoint: sub.endpoint.slice(0, 50) }) + } else { + logger.error('Failed to send push notification', { error, endpoint: sub.endpoint.slice(0, 50) }) + } + } + 
} + + logger.info('Push notifications sent', { success, failed, total: subscriptions.length }) + return { success, failed } +} + +export async function sendSessionCompleteNotification( + db: Database, + sessionId: string, + repoId?: string, + sessionTitle?: string +): Promise { + const payload: PushPayload = { + title: 'Session Complete', + body: sessionTitle ? `"${sessionTitle}" has finished` : 'Your OpenCode session has finished', + tag: `session-complete-${sessionId}`, + sessionId, + repoId, + url: repoId ? `/repos/${repoId}/sessions/${sessionId}` : '/', + } + + await sendPushNotification(db, payload) +} + +export async function sendPermissionRequestNotification( + db: Database, + sessionId: string, + toolName: string, + repoId?: string +): Promise { + const payload: PushPayload = { + title: 'Permission Required', + body: `${toolName} requires your approval`, + tag: `permission-${sessionId}`, + sessionId, + repoId, + url: repoId ? `/repos/${repoId}/sessions/${sessionId}` : '/', + requireInteraction: true, + } + + await sendPushNotification(db, payload) +} diff --git a/backend/src/services/repo.ts b/backend/src/services/repo.ts index 3c301dc5..6b166bf0 100644 --- a/backend/src/services/repo.ts +++ b/backend/src/services/repo.ts @@ -5,24 +5,255 @@ import type { Database } from 'bun:sqlite' import type { Repo, CreateRepoInput } from '../types/repo' import { logger } from '../utils/logger' import { SettingsService } from './settings' -import { getReposPath } from '../../../shared/src/constants' +import { createGitEnvForRepoUrl, createNoPromptGitEnv, createGitHubGitEnv } from '../utils/git-auth' +import { getReposPath } from '@opencode-manager/shared/config/env' +import { opencodeServerManager } from './opencode-single-server' import path from 'path' +export class GitAuthenticationError extends Error { + constructor(message: string) { + super(message) + this.name = 'GitAuthenticationError' + } +} + +function isAuthenticationError(error: any): boolean { + const 
message = error?.message?.toLowerCase() || '' + return message.includes('authentication failed') || + message.includes('invalid username or token') || + message.includes('could not read username') +} + +interface GitCommandOptions { + cwd?: string + env?: Record + silent?: boolean +} + +async function executeGitWithFallback( + cmd: string[], + options: GitCommandOptions = {} +): Promise { + const { cwd, env = createNoPromptGitEnv(), silent } = options + + try { + return await executeCommand(cmd, { cwd, env, silent }) + } catch (error: any) { + if (!isAuthenticationError(error)) { + throw error + } + + logger.warn(`Git command failed with auth, trying gh auth fallback`) + try { + const ghToken = (await executeCommand(['gh', 'auth', 'token'])).trim() + const ghEnv = createGitHubGitEnv(ghToken) + return await executeCommand(cmd, { cwd, env: ghEnv, silent }) + } catch (ghError: any) { + if (!isAuthenticationError(ghError)) { + throw ghError + } + + logger.warn(`Git command failed with gh auth, trying without auth (public repo)`) + return await executeCommand(cmd, { cwd, env: createNoPromptGitEnv(), silent }) + } + } +} + +async function hasCommits(repoPath: string): Promise { + try { + await executeCommand(['git', '-C', repoPath, 'rev-parse', 'HEAD'], { silent: true }) + return true + } catch { + return false + } +} + + + +async function safeGetCurrentBranch(repoPath: string): Promise { + try { + const repoHasCommits = await hasCommits(repoPath) + if (!repoHasCommits) { + try { + const symbolicRef = await executeCommand(['git', '-C', repoPath, 'symbolic-ref', '--short', 'HEAD'], { silent: true }) + return symbolicRef.trim() + } catch { + return null + } + } + const currentBranch = await executeCommand(['git', '-C', repoPath, 'rev-parse', '--abbrev-ref', 'HEAD'], { silent: true }) + return currentBranch.trim() + } catch { + return null + } +} + +function getGitEnv(database: Database, repoUrl?: string | null): Record { + try { + const settingsService = new 
SettingsService(database) + const settings = settingsService.getSettings('default') + const gitToken = settings.preferences.gitToken + + if (!repoUrl) { + return createNoPromptGitEnv() + } + + return createGitEnvForRepoUrl(repoUrl, gitToken) + } catch { + return createNoPromptGitEnv() + } +} + +export async function initLocalRepo( + database: Database, + localPath: string, + branch?: string +): Promise { + const normalizedPath = localPath.trim().replace(/\/+$/, '') + const fullPath = path.resolve(getReposPath(), normalizedPath) + + const existing = db.getRepoByLocalPath(database, normalizedPath) + if (existing) { + logger.info(`Local repo already exists in database: ${normalizedPath}`) + return existing + } + + const createRepoInput: CreateRepoInput = { + localPath: normalizedPath, + branch: branch || undefined, + defaultBranch: branch || 'main', + cloneStatus: 'cloning', + clonedAt: Date.now(), + isLocal: true, + } + + let repo: Repo + let directoryCreated = false + + try { + repo = db.createRepo(database, createRepoInput) + logger.info(`Created database record for local repo: ${normalizedPath} (id: ${repo.id})`) + } catch (error: any) { + logger.error(`Failed to create database record for local repo: ${normalizedPath}`, error) + throw new Error(`Failed to register local repository '${normalizedPath}': ${error.message}`) + } + + try { + await ensureDirectoryExists(fullPath) + directoryCreated = true + logger.info(`Created directory for local repo: ${fullPath}`) + + logger.info(`Initializing git repository: ${fullPath}`) + + await executeCommand(['git', 'init'], { cwd: fullPath }) + + await executeCommand(['git', '-C', fullPath, 'commit', '--allow-empty', '-m', 'Initial commit']) + + if (branch && branch !== 'main') { + await executeCommand(['git', '-C', fullPath, 'checkout', '-b', branch]) + } + + const isGitRepo = await executeCommand(['git', '-C', fullPath, 'rev-parse', '--git-dir']) + .then(() => true) + .catch(() => false) + + if (!isGitRepo) { + throw new 
Error(`Git initialization failed - directory exists but is not a valid git repository`) + } + + db.updateRepoStatus(database, repo.id, 'ready') + logger.info(`Local git repo ready: ${normalizedPath}`) + return { ...repo, cloneStatus: 'ready' } + } catch (error: any) { + logger.error(`Failed to initialize local repo, rolling back: ${normalizedPath}`, error) + + try { + db.deleteRepo(database, repo.id) + logger.info(`Rolled back database record for repo id: ${repo.id}`) + } catch (dbError: any) { + logger.error(`Failed to rollback database record for repo id ${repo.id}:`, dbError) + } + + if (directoryCreated) { + try { + await executeCommand(['rm', '-rf', normalizedPath], getReposPath()) + logger.info(`Rolled back directory: ${normalizedPath}`) + } catch (fsError: any) { + logger.error(`Failed to rollback directory ${normalizedPath}:`, fsError) + } + } + + throw new Error(`Failed to initialize local repository '${normalizedPath}': ${error.message}`) + } +} + +export async function registerExternalDirectory( + database: Database, + absolutePath: string +): Promise { + const normalizedPath = absolutePath.trim().replace(/\/+$/, '') + + const existingRepos = db.listRepos(database) + for (const repo of existingRepos) { + if (repo.fullPath === normalizedPath) { + logger.info(`External directory already registered: ${normalizedPath}`) + return repo + } + } + + try { + const isGitRepo = await executeCommand(['git', '-C', normalizedPath, 'rev-parse', '--git-dir'], { silent: true }) + .then(() => true) + .catch(() => false) + + if (!isGitRepo) { + logger.warn(`External directory is not a git repository: ${normalizedPath}`) + return null + } + + const currentBranch = await safeGetCurrentBranch(normalizedPath) + + let remoteUrl: string | undefined + try { + remoteUrl = (await executeCommand(['git', '-C', normalizedPath, 'remote', 'get-url', 'origin'], { silent: true })).trim() + } catch { + // No remote - that's ok + } + + const createRepoInput: CreateRepoInput = { + repoUrl: 
remoteUrl, + localPath: normalizedPath, + branch: currentBranch || undefined, + defaultBranch: currentBranch || 'main', + cloneStatus: 'ready', + clonedAt: Date.now(), + isLocal: true, + } + + const repo = db.createRepo(database, createRepoInput) + logger.info(`Registered external directory as repo: ${normalizedPath} (id: ${repo.id})`) + return repo + } catch (error: any) { + logger.error(`Failed to register external directory: ${normalizedPath}`, error) + return null + } +} + export async function cloneRepo( database: Database, repoUrl: string, branch?: string, useWorktree: boolean = false ): Promise { - const repoName = extractRepoName(repoUrl) + const { url: normalizedRepoUrl, name: repoName } = normalizeRepoUrl(repoUrl) const baseRepoDirName = repoName const worktreeDirName = branch && useWorktree ? `${repoName}-${branch.replace(/[\\/]/g, '-')}` : repoName const localPath = worktreeDirName - const existing = db.getRepoByUrlAndBranch(database, repoUrl, branch) + const existing = db.getRepoByUrlAndBranch(database, normalizedRepoUrl, branch) if (existing) { - logger.info(`Repo branch already exists: ${repoUrl}${branch ? `#${branch}` : ''}`) + logger.info(`Repo branch already exists: ${normalizedRepoUrl}${branch ? `#${branch}` : ''}`) return existing } @@ -32,7 +263,7 @@ export async function cloneRepo( const shouldUseWorktree = useWorktree && branch && baseRepoExists.trim() === 'exists' const createRepoInput: CreateRepoInput = { - repoUrl, + repoUrl: normalizedRepoUrl, localPath, branch: branch || undefined, defaultBranch: branch || 'main', @@ -47,21 +278,16 @@ export async function cloneRepo( const repo = db.createRepo(database, createRepoInput) try { - const settingsService = new SettingsService(database) - const settings = settingsService.getSettings('default') - const gitToken = settings.preferences.gitToken - - let cloneUrl = gitToken && repoUrl.startsWith('https://github.com') - ? 
repoUrl.replace('https://', `https://${gitToken}@`) - : repoUrl - + const env = getGitEnv(database, normalizedRepoUrl) + if (shouldUseWorktree) { logger.info(`Creating worktree for branch: ${branch}`) const baseRepoPath = path.resolve(getReposPath(), baseRepoDirName) const worktreePath = path.resolve(getReposPath(), worktreeDirName) - await executeCommand(['git', '-C', baseRepoPath, 'fetch', '--all']) + await executeGitWithFallback(['git', '-C', baseRepoPath, 'fetch', '--all'], { cwd: getReposPath(), env }) + await createWorktreeSafely(baseRepoPath, worktreePath, branch) @@ -94,8 +320,7 @@ export async function cloneRepo( } try { - const cloneCmd = ['git', 'clone', '-b', branch, cloneUrl, worktreeDirName] - await executeCommand(cloneCmd, getReposPath()) + await executeGitWithFallback(['git', 'clone', '-b', branch, normalizedRepoUrl, worktreeDirName], { cwd: getReposPath(), env }) } catch (error: any) { if (error.message.includes('destination path') && error.message.includes('already exists')) { logger.error(`Clone failed: directory still exists after cleanup attempt`) @@ -103,8 +328,7 @@ export async function cloneRepo( } logger.info(`Branch '${branch}' not found during clone, cloning default branch and creating branch locally`) - const cloneCmd = ['git', 'clone', cloneUrl, worktreeDirName] - await executeCommand(cloneCmd, getReposPath()) + await executeGitWithFallback(['git', 'clone', normalizedRepoUrl, worktreeDirName], { cwd: getReposPath(), env }) let localBranchExists = 'missing' try { await executeCommand(['git', '-C', path.resolve(getReposPath(), worktreeDirName), 'rev-parse', '--verify', `refs/heads/${branch}`]) @@ -124,11 +348,12 @@ export async function cloneRepo( const isValidRepo = await executeCommand(['git', '-C', path.resolve(getReposPath(), baseRepoDirName), 'rev-parse', '--git-dir'], path.resolve(getReposPath())).then(() => 'valid').catch(() => 'invalid') if (isValidRepo.trim() === 'valid') { - logger.info(`Valid repository found: ${repoUrl}`) + 
logger.info(`Valid repository found: ${normalizedRepoUrl}`) if (branch) { logger.info(`Switching to branch: ${branch}`) - await executeCommand(['git', '-C', path.resolve(getReposPath(), baseRepoDirName), 'fetch', '--all']) + await executeGitWithFallback(['git', '-C', path.resolve(getReposPath(), baseRepoDirName), 'fetch', '--all'], { cwd: getReposPath(), env }) + let remoteBranchExists = false try { @@ -166,7 +391,7 @@ export async function cloneRepo( } } - logger.info(`Cloning repo: ${repoUrl}${branch ? ` to branch ${branch}` : ''}`) + logger.info(`Cloning repo: ${normalizedRepoUrl}${branch ? ` to branch ${branch}` : ''}`) const worktreeExists = await executeCommand(['bash', '-c', `test -d ${worktreeDirName} && echo exists || echo missing`], getReposPath()) if (worktreeExists.trim() === 'exists') { @@ -185,10 +410,10 @@ export async function cloneRepo( try { const cloneCmd = branch - ? ['git', 'clone', '-b', branch, cloneUrl, worktreeDirName] - : ['git', 'clone', cloneUrl, worktreeDirName] + ? 
['git', 'clone', '-b', branch, normalizedRepoUrl, worktreeDirName] + : ['git', 'clone', normalizedRepoUrl, worktreeDirName] - await executeCommand(cloneCmd, getReposPath()) + await executeGitWithFallback(cloneCmd, { cwd: getReposPath(), env }) } catch (error: any) { if (error.message.includes('destination path') && error.message.includes('already exists')) { logger.error(`Clone failed: directory still exists after cleanup attempt`) @@ -197,8 +422,7 @@ export async function cloneRepo( if (branch && (error.message.includes('Remote branch') || error.message.includes('not found'))) { logger.info(`Branch '${branch}' not found, cloning default branch and creating branch locally`) - const cloneCmd = ['git', 'clone', cloneUrl, worktreeDirName] - await executeCommand(cloneCmd, getReposPath()) + await executeGitWithFallback(['git', 'clone', normalizedRepoUrl, worktreeDirName], { cwd: getReposPath(), env }) let localBranchExists = 'missing' try { await executeCommand(['git', '-C', path.resolve(getReposPath(), worktreeDirName), 'rev-parse', '--verify', `refs/heads/${branch}`]) @@ -219,47 +443,57 @@ export async function cloneRepo( } db.updateRepoStatus(database, repo.id, 'ready') - logger.info(`Repo ready: ${repoUrl}${branch ? `#${branch}` : ''}${shouldUseWorktree ? ' (worktree)' : ''}`) + logger.info(`Repo ready: ${normalizedRepoUrl}${branch ? `#${branch}` : ''}${shouldUseWorktree ? ' (worktree)' : ''}`) return { ...repo, cloneStatus: 'ready' } } catch (error: any) { - logger.error(`Failed to create repo: ${repoUrl}${branch ? `#${branch}` : ''}`, error) + logger.error(`Failed to create repo: ${normalizedRepoUrl}${branch ? 
`#${branch}` : ''}`, error) db.deleteRepo(database, repo.id) throw error } } export async function getCurrentBranch(repo: Repo): Promise { - try { - const repoPath = path.resolve(getReposPath(), repo.localPath) - const currentBranch = await executeCommand(['git', '-C', repoPath, 'rev-parse', '--abbrev-ref', 'HEAD']) - return currentBranch.trim() - } catch (error: any) { - logger.warn(`Failed to get current branch for repo ${repo.id}:`, error.message) - return null - } + const repoPath = path.resolve(getReposPath(), repo.localPath) + const branch = await safeGetCurrentBranch(repoPath) + return branch || repo.branch || repo.defaultBranch || null } -export async function listBranches(repo: Repo): Promise<{ local: string[], remote: string[], current: string | null }> { +export async function listBranches(database: Database, repo: Repo): Promise<{ local: string[], all: string[], current: string | null }> { try { const repoPath = path.resolve(getReposPath(), repo.localPath) - - await executeCommand(['git', '-C', repoPath, 'fetch', '--all']) + const env = getGitEnv(database, repo.repoUrl) + + if (!repo.isLocal) { + try { + await executeGitWithFallback(['git', '-C', repoPath, 'fetch', '--all'], { env }) + } catch (error) { + logger.warn(`Failed to fetch remote for repo ${repo.id}, using cached branch info:`, error) + } + } const localBranchesOutput = await executeCommand(['git', '-C', repoPath, 'branch', '--format=%(refname:short)']) const localBranches = localBranchesOutput.trim().split('\n').filter(b => b.trim()) - const remoteBranchesOutput = await executeCommand(['git', '-C', repoPath, 'branch', '-r', '--format=%(refname:short)']) - const remoteBranches = remoteBranchesOutput.trim().split('\n') - .filter(b => b.trim() && !b.includes('HEAD')) - .map(b => b.replace('origin/', '')) + let remoteBranches: string[] = [] + try { + const remoteBranchesOutput = await executeCommand(['git', '-C', repoPath, 'branch', '-r', '--format=%(refname:short)']) + remoteBranches = 
remoteBranchesOutput.trim().split('\n') + .filter(b => b.trim() && !b.includes('HEAD') && b.includes('/')) + } catch (error) { + logger.warn(`Failed to get remote branches for repo ${repo.id}:`, error) + } const current = await getCurrentBranch(repo) - const allBranches = new Set([...localBranches, ...remoteBranches]) + const remoteOnlyBranches = remoteBranches + .map(b => b.replace(/^[^/]+\//, '')) + .filter(b => !localBranches.includes(b)) + + const allBranches = [...localBranches, ...remoteOnlyBranches] return { local: localBranches, - remote: Array.from(allBranches), + all: allBranches, current } } catch (error: any) { @@ -276,14 +510,20 @@ export async function switchBranch(database: Database, repoId: number, branch: s try { const repoPath = path.resolve(getReposPath(), repo.localPath) - - logger.info(`Switching to branch: ${branch} in ${repo.localPath}`) - - await executeCommand(['git', '-C', repoPath, 'fetch', '--all']) + const env = getGitEnv(database, repo.repoUrl) + + const sanitizedBranch = branch + .replace(/^refs\/heads\//, '') + .replace(/^refs\/remotes\//, '') + .replace(/^origin\//, '') + + logger.info(`Switching to branch: ${sanitizedBranch} in ${repo.localPath}`) + + await executeGitWithFallback(['git', '-C', repoPath, 'fetch', '--all'], { env }) let localBranchExists = false try { - await executeCommand(['git', '-C', repoPath, 'rev-parse', '--verify', `refs/heads/${branch}`]) + await executeCommand(['git', '-C', repoPath, 'rev-parse', '--verify', `refs/heads/${sanitizedBranch}`]) localBranchExists = true } catch { localBranchExists = false @@ -291,39 +531,69 @@ export async function switchBranch(database: Database, repoId: number, branch: s let remoteBranchExists = false try { - await executeCommand(['git', '-C', repoPath, 'rev-parse', '--verify', `refs/remotes/origin/${branch}`]) + await executeCommand(['git', '-C', repoPath, 'rev-parse', '--verify', `refs/remotes/origin/${sanitizedBranch}`]) remoteBranchExists = true } catch { 
remoteBranchExists = false } if (localBranchExists) { - logger.info(`Checking out existing local branch: ${branch}`) - await executeCommand(['git', '-C', repoPath, 'checkout', branch]) + logger.info(`Checking out existing local branch: ${sanitizedBranch}`) + await executeCommand(['git', '-C', repoPath, 'checkout', sanitizedBranch]) } else if (remoteBranchExists) { - logger.info(`Checking out remote branch: ${branch}`) - await executeCommand(['git', '-C', repoPath, 'checkout', '-b', branch, `origin/${branch}`]) + logger.info(`Checking out remote branch: ${sanitizedBranch}`) + await executeCommand(['git', '-C', repoPath, 'checkout', '-b', sanitizedBranch, `origin/${sanitizedBranch}`]) } else { - logger.info(`Creating new branch: ${branch}`) - await executeCommand(['git', '-C', repoPath, 'checkout', '-b', branch]) + logger.info(`Creating new branch: ${sanitizedBranch}`) + await executeCommand(['git', '-C', repoPath, 'checkout', '-b', sanitizedBranch]) } - logger.info(`Successfully switched to branch: ${branch}`) + logger.info(`Successfully switched to branch: ${sanitizedBranch}`) } catch (error: any) { logger.error(`Failed to switch branch for repo ${repoId}:`, error) throw error } } +export async function createBranch(database: Database, repoId: number, branch: string): Promise { + const repo = db.getRepoById(database, repoId) + if (!repo) { + throw new Error(`Repo not found: ${repoId}`) + } + + try { + const repoPath = path.resolve(getReposPath(), repo.localPath) + + const sanitizedBranch = branch + .replace(/^refs\/heads\//, '') + .replace(/^refs\/remotes\//, '') + .replace(/^origin\//, '') + + logger.info(`Creating new branch: ${sanitizedBranch} in ${repo.localPath}`) + await executeCommand(['git', '-C', repoPath, 'checkout', '-b', sanitizedBranch]) + logger.info(`Successfully created and switched to branch: ${sanitizedBranch}`) + } catch (error: any) { + logger.error(`Failed to create branch for repo ${repoId}:`, error) + throw error + } +} + export async 
function pullRepo(database: Database, repoId: number): Promise { const repo = db.getRepoById(database, repoId) if (!repo) { throw new Error(`Repo not found: ${repoId}`) } + if (repo.isLocal) { + logger.info(`Skipping pull for local repo: ${repo.localPath}`) + return + } + try { + const env = getGitEnv(database, repo.repoUrl) + logger.info(`Pulling repo: ${repo.repoUrl}`) - await executeCommand(['git', '-C', path.resolve(getReposPath(), repo.localPath), 'pull']) + await executeCommand(['git', '-C', path.resolve(getReposPath(), repo.localPath), 'pull'], { env }) db.updateLastPulled(database, repoId) logger.info(`Repo pulled successfully: ${repo.repoUrl}`) @@ -339,16 +609,18 @@ export async function deleteRepoFiles(database: Database, repoId: number): Promi throw new Error(`Repo not found: ${repoId}`) } + const repoIdentifier = repo.repoUrl || repo.localPath + try { - logger.info(`Deleting repo files: ${repo.repoUrl}`) + logger.info(`Deleting repo files: ${repoIdentifier}`) // Extract just the directory name from the localPath const dirName = repo.localPath.split('/').pop() || repo.localPath const fullPath = path.resolve(getReposPath(), dirName) // If this is a worktree, properly remove it from git first - if (repo.isWorktree && repo.branch) { - const repoName = extractRepoName(repo.repoUrl) + if (repo.isWorktree && repo.branch && repo.repoUrl) { + const { name: repoName } = normalizeRepoUrl(repo.repoUrl) const baseRepoPath = path.resolve(getReposPath(), repoName) logger.info(`Removing worktree: ${dirName} from base repo: ${baseRepoPath}`) @@ -391,8 +663,8 @@ export async function deleteRepoFiles(database: Database, repoId: number): Promi } // If this was a worktree, also prune the base repo to clean up any remaining references - if (repo.isWorktree && repo.branch) { - const repoName = extractRepoName(repo.repoUrl) + if (repo.isWorktree && repo.branch && repo.repoUrl) { + const { name: repoName } = normalizeRepoUrl(repo.repoUrl) const baseRepoPath = 
path.resolve(getReposPath(), repoName) try { @@ -404,16 +676,37 @@ export async function deleteRepoFiles(database: Database, repoId: number): Promi } db.deleteRepo(database, repoId) - logger.info(`Repo deleted successfully: ${repo.repoUrl}`) + logger.info(`Repo deleted successfully: ${repoIdentifier}`) } catch (error: any) { - logger.error(`Failed to delete repo: ${repo.repoUrl}`, error) + logger.error(`Failed to delete repo: ${repoIdentifier}`, error) throw error } } -function extractRepoName(url: string): string { - const match = url.match(/\/([^\/]+?)(\.git)?$/) - return match?.[1] ?? `repo-${Date.now()}` +function normalizeRepoUrl(url: string): { url: string; name: string } { + const shorthandMatch = url.match(/^([^\/]+)\/([^\/]+)$/) + if (shorthandMatch) { + const [, owner, repoName] = shorthandMatch as [string, string, string] + return { + url: `https://github.com/${owner}/${repoName}`, + name: repoName + } + } + + if (url.startsWith('http://') || url.startsWith('https://')) { + const httpsUrl = url.replace(/^http:/, 'https:') + const urlWithoutGit = httpsUrl.replace(/\.git$/, '') + const match = urlWithoutGit.match(/\/([^\/]+)$/) + return { + url: urlWithoutGit, + name: match?.[1] || `repo-${Date.now()}` + } + } + + return { + url, + name: `repo-${Date.now()}` + } } export async function cleanupOrphanedDirectories(database: Database): Promise { @@ -450,6 +743,42 @@ export async function cleanupOrphanedDirectories(database: Database): Promise { + let removed = 0 + const fs = await import('fs/promises') + + try { + const allRepos = db.listRepos(database) + logger.info(`Checking ${allRepos.length} repos for stale entries`) + + for (const repo of allRepos) { + const repoPath = repo.fullPath + + try { + const stat = await fs.stat(repoPath).catch(() => null) + const exists = stat?.isDirectory() ?? 
false + + if (!exists) { + logger.info(`Removing stale repo entry (path does not exist): ${repoPath}`) + db.deleteRepo(database, repo.id) + removed++ + } + } catch (error) { + logger.warn(`Error checking repo path ${repoPath}:`, error) + } + } + + if (removed > 0) { + logger.info(`Cleaned up ${removed} stale repo entries`) + } + + return { removed } + } catch (error) { + logger.warn('Failed to cleanup stale repo entries:', error) + return { removed } + } +} + async function checkWorktreeExists(baseRepoPath: string, worktreePath: string): Promise { try { const worktreeList = await executeCommand(['git', '-C', baseRepoPath, 'worktree', 'list', '--porcelain']) @@ -507,45 +836,39 @@ async function cleanupStaleWorktree(baseRepoPath: string, worktreePath: string): } async function isBranchCheckedOutInMainWorktree(baseRepoPath: string, branch: string): Promise { - try { - const currentBranch = await executeCommand(['git', '-C', baseRepoPath, 'rev-parse', '--abbrev-ref', 'HEAD']) - return currentBranch.trim() === branch - } catch { - return false - } + const currentBranch = await safeGetCurrentBranch(baseRepoPath) + return currentBranch === branch } async function getAvailableBranchForWorktree(baseRepoPath: string, targetBranch: string): Promise { - try { - const currentBranch = await executeCommand(['git', '-C', baseRepoPath, 'rev-parse', '--abbrev-ref', 'HEAD']) - const trimmedCurrent = currentBranch.trim() + const currentBranch = await safeGetCurrentBranch(baseRepoPath) + + if (!currentBranch) { + return targetBranch + } + + if (currentBranch === targetBranch) { + logger.info(`Branch '${targetBranch}' is currently checked out in main worktree`) - if (trimmedCurrent === targetBranch) { - logger.info(`Branch '${targetBranch}' is currently checked out in main worktree`) - - const defaultBranch = await executeCommand(['git', '-C', baseRepoPath, 'rev-parse', '--abbrev-ref', 'origin/HEAD']).then(ref => ref.trim()).catch(() => 'main') - const cleanDefaultBranch = 
defaultBranch.replace('origin/', '') - - if (cleanDefaultBranch !== trimmedCurrent) { - logger.info(`Switching to '${cleanDefaultBranch}' to free up '${targetBranch}' for worktree`) - await executeCommand(['git', '-C', baseRepoPath, 'checkout', cleanDefaultBranch]) - return targetBranch - } else { - logger.warn(`Cannot free up branch '${targetBranch}' - it's the default branch`) - return `${targetBranch}-worktree-${Date.now()}` - } - } + const defaultBranch = await executeCommand(['git', '-C', baseRepoPath, 'rev-parse', '--abbrev-ref', 'origin/HEAD']).then(ref => ref.trim()).catch(() => 'main') + const cleanDefaultBranch = defaultBranch.replace('origin/', '') - return targetBranch - } catch (error: any) { - logger.warn(`Failed to determine available branch: ${error.message}`) - return `${targetBranch}-worktree-${Date.now()}` + if (cleanDefaultBranch !== currentBranch) { + logger.info(`Switching to '${cleanDefaultBranch}' to free up '${targetBranch}' for worktree`) + await executeCommand(['git', '-C', baseRepoPath, 'checkout', cleanDefaultBranch]) + return targetBranch + } else { + logger.warn(`Cannot free up branch '${targetBranch}' - it's the default branch`) + return `${targetBranch}-worktree-${Date.now()}` + } } + + return targetBranch } async function createWorktreeSafely(baseRepoPath: string, worktreePath: string, branch: string): Promise { - const currentBranch = await executeCommand(['git', '-C', baseRepoPath, 'rev-parse', '--abbrev-ref', 'HEAD']) - if (currentBranch.trim() === branch) { + const currentBranch = await safeGetCurrentBranch(baseRepoPath) + if (currentBranch === branch) { logger.info(`Branch '${branch}' is checked out in main repo, switching away...`) const defaultBranch = await executeCommand(['git', '-C', baseRepoPath, 'rev-parse', '--abbrev-ref', 'origin/HEAD']) .then(ref => ref.trim().replace('origin/', '')) @@ -610,4 +933,41 @@ async function createWorktreeSafely(baseRepoPath: string, worktreePath: string, } } } +} + +export async function 
syncProjectsFromOpenCode(database: Database): Promise<{ added: number; skipped: number }> { + let added = 0 + let skipped = 0 + + try { + const projects = await opencodeServerManager.fetchProjectsFromAPI() + + if (projects.length === 0) { + logger.info('No projects found from OpenCode API') + return { added, skipped } + } + + logger.info(`Found ${projects.length} projects from OpenCode, syncing...`) + + for (const project of projects) { + try { + const repo = await registerExternalDirectory(database, project.path) + if (repo) { + added++ + logger.info(`Synced project: ${project.path}`) + } else { + skipped++ + } + } catch (error) { + logger.warn(`Failed to sync project ${project.path}:`, error) + skipped++ + } + } + + logger.info(`Project sync complete: ${added} added, ${skipped} skipped`) + return { added, skipped } + } catch (error) { + logger.error('Failed to sync projects from OpenCode:', error) + return { added, skipped } + } } \ No newline at end of file diff --git a/backend/src/services/scheduler.ts b/backend/src/services/scheduler.ts new file mode 100644 index 00000000..6bf85eec --- /dev/null +++ b/backend/src/services/scheduler.ts @@ -0,0 +1,423 @@ +import { Database } from 'bun:sqlite' +import cron, { ScheduledTask } from 'node-cron' +import { spawn } from 'child_process' +import { logger } from '../utils/logger' + +export interface ScheduledTaskRecord { + id: number + name: string + schedule_type: string + schedule_value: string + command_type: string + command_config: string + status: 'active' | 'paused' + last_run_at: number | null + next_run_at: number | null + created_at: number + updated_at: number +} + +export interface CommandConfig { + command?: string + args?: string[] + workdir?: string + skillName?: string + message?: string +} + +export interface CreateTaskInput { + name: string + schedule_type: 'cron' + schedule_value: string + command_type: 'skill' | 'opencode-run' | 'script' + command_config: CommandConfig +} + +export interface 
UpdateTaskInput { + name?: string + schedule_value?: string + command_type?: 'skill' | 'opencode-run' | 'script' + command_config?: CommandConfig +} + +export interface TaskRunResult { + success: boolean + output: string + error?: string + duration: number +} + +class SchedulerService { + private db: Database | null = null + private jobs: Map = new Map() + + setDatabase(db: Database): void { + this.db = db + } + + async initialize(): Promise { + if (!this.db) { + throw new Error('Database not set') + } + + const tasks = this.getAllTasks().filter(t => t.status === 'active') + logger.info(`Loading ${tasks.length} active scheduled tasks`) + + for (const task of tasks) { + this.scheduleTask(task) + } + } + + async shutdown(): Promise { + logger.info('Stopping all scheduled tasks') + for (const [taskId, job] of this.jobs) { + job.stop() + logger.debug(`Stopped task ${taskId}`) + } + this.jobs.clear() + } + + getAllTasks(): ScheduledTaskRecord[] { + if (!this.db) return [] + + const stmt = this.db.prepare(` + SELECT * FROM scheduled_tasks ORDER BY created_at DESC + `) + return stmt.all() as ScheduledTaskRecord[] + } + + getTask(id: number): ScheduledTaskRecord | null { + if (!this.db) return null + + const stmt = this.db.prepare(`SELECT * FROM scheduled_tasks WHERE id = ?`) + return stmt.get(id) as ScheduledTaskRecord | null + } + + createTask(input: CreateTaskInput): ScheduledTaskRecord { + if (!this.db) { + throw new Error('Database not set') + } + + if (!cron.validate(input.schedule_value)) { + throw new Error(`Invalid cron expression: ${input.schedule_value}`) + } + + const now = Date.now() + const nextRun = this.calculateNextRun(input.schedule_value) + + const stmt = this.db.prepare(` + INSERT INTO scheduled_tasks + (name, schedule_type, schedule_value, command_type, command_config, status, next_run_at, created_at, updated_at) + VALUES (?, ?, ?, ?, ?, 'active', ?, ?, ?) 
+ `) + + const result = stmt.run( + input.name, + input.schedule_type, + input.schedule_value, + input.command_type, + JSON.stringify(input.command_config), + nextRun, + now, + now + ) + + const task = this.getTask(Number(result.lastInsertRowid))! + this.scheduleTask(task) + logger.info(`Created scheduled task: ${task.name} (id=${task.id})`) + + return task + } + + updateTask(id: number, input: UpdateTaskInput): ScheduledTaskRecord | null { + if (!this.db) return null + + const existing = this.getTask(id) + if (!existing) return null + + if (input.schedule_value && !cron.validate(input.schedule_value)) { + throw new Error(`Invalid cron expression: ${input.schedule_value}`) + } + + const updates: string[] = [] + const values: (string | number)[] = [] + + if (input.name !== undefined) { + updates.push('name = ?') + values.push(input.name) + } + if (input.schedule_value !== undefined) { + updates.push('schedule_value = ?') + values.push(input.schedule_value) + updates.push('next_run_at = ?') + values.push(this.calculateNextRun(input.schedule_value)) + } + if (input.command_type !== undefined) { + updates.push('command_type = ?') + values.push(input.command_type) + } + if (input.command_config !== undefined) { + updates.push('command_config = ?') + values.push(JSON.stringify(input.command_config)) + } + + if (updates.length === 0) return existing + + updates.push('updated_at = ?') + values.push(Date.now()) + values.push(id) + + const stmt = this.db.prepare(` + UPDATE scheduled_tasks SET ${updates.join(', ')} WHERE id = ? + `) + stmt.run(...values) + + const updated = this.getTask(id)! 
+ + if (updated.status === 'active') { + this.unscheduleTask(id) + this.scheduleTask(updated) + } + + logger.info(`Updated scheduled task: ${updated.name} (id=${id})`) + return updated + } + + deleteTask(id: number): boolean { + if (!this.db) return false + + this.unscheduleTask(id) + + const stmt = this.db.prepare(`DELETE FROM scheduled_tasks WHERE id = ?`) + const result = stmt.run(id) + + logger.info(`Deleted scheduled task id=${id}`) + return result.changes > 0 + } + + toggleTask(id: number): ScheduledTaskRecord | null { + if (!this.db) return null + + const task = this.getTask(id) + if (!task) return null + + const newStatus = task.status === 'active' ? 'paused' : 'active' + const stmt = this.db.prepare(` + UPDATE scheduled_tasks SET status = ?, updated_at = ? WHERE id = ? + `) + stmt.run(newStatus, Date.now(), id) + + if (newStatus === 'paused') { + this.unscheduleTask(id) + } else { + const updated = this.getTask(id)! + this.scheduleTask(updated) + } + + logger.info(`Toggled task ${id} to ${newStatus}`) + return this.getTask(id) + } + + async runTaskNow(id: number): Promise { + const task = this.getTask(id) + if (!task) { + return { success: false, output: '', error: 'Task not found', duration: 0 } + } + + return this.executeTask(task) + } + + private scheduleTask(task: ScheduledTaskRecord): void { + if (this.jobs.has(task.id)) { + this.unscheduleTask(task.id) + } + + const job = cron.schedule(task.schedule_value, async () => { + logger.info(`Executing scheduled task: ${task.name} (id=${task.id})`) + const result = await this.executeTask(task) + + if (result.success) { + logger.info(`Task ${task.id} completed successfully in ${result.duration}ms`) + } else { + logger.error(`Task ${task.id} failed: ${result.error}`) + } + }, { + scheduled: true, + timezone: Intl.DateTimeFormat().resolvedOptions().timeZone + }) + + this.jobs.set(task.id, job) + logger.debug(`Scheduled task ${task.id}: ${task.schedule_value}`) + } + + private unscheduleTask(id: number): void { 
+ const job = this.jobs.get(id) + if (job) { + job.stop() + this.jobs.delete(id) + logger.debug(`Unscheduled task ${id}`) + } + } + + private async executeTask(task: ScheduledTaskRecord): Promise { + const startTime = Date.now() + const config: CommandConfig = JSON.parse(task.command_config) + + try { + let result: TaskRunResult + + switch (task.command_type) { + case 'skill': + result = await this.runSkill(config) + break + case 'opencode-run': + result = await this.runOpencodeCommand(config) + break + case 'script': + result = await this.runScript(config) + break + default: + throw new Error(`Unknown command type: ${task.command_type}`) + } + + this.updateLastRun(task.id, startTime) + return { ...result, duration: Date.now() - startTime } + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + this.updateLastRun(task.id, startTime) + return { + success: false, + output: '', + error: errorMessage, + duration: Date.now() - startTime + } + } + } + + private async runSkill(config: CommandConfig): Promise { + const { skillName, args = [], workdir } = config + if (!skillName) { + throw new Error('Skill name is required') + } + + const command = `/${skillName} ${args.join(' ')}`.trim() + return this.runOpencodeCommand({ command, workdir }) + } + + private async runOpencodeCommand(config: CommandConfig): Promise { + const { command, message, workdir } = config + + const args = ['run'] + if (command) { + args.push('--command', command) + } + if (message) { + args.push(message) + } + + return this.spawnProcess('opencode', args, workdir) + } + + private async runScript(config: CommandConfig): Promise { + const { command, args = [], workdir } = config + if (!command) { + throw new Error('Command is required for script type') + } + + return this.spawnProcess(command, args, workdir) + } + + private spawnProcess(cmd: string, args: string[], workdir?: string): Promise { + return new Promise((resolve) => { + const chunks: Buffer[] = [] 
+ const errorChunks: Buffer[] = [] + + const proc = spawn(cmd, args, { + cwd: workdir || process.cwd(), + env: process.env, + stdio: ['ignore', 'pipe', 'pipe'] + }) + + proc.stdout?.on('data', (data) => chunks.push(data)) + proc.stderr?.on('data', (data) => errorChunks.push(data)) + + const timeout = setTimeout(() => { + proc.kill('SIGTERM') + resolve({ + success: false, + output: Buffer.concat(chunks).toString(), + error: 'Task timed out after 5 minutes', + duration: 300000 + }) + }, 300000) + + proc.on('close', (code) => { + clearTimeout(timeout) + const output = Buffer.concat(chunks).toString() + const stderr = Buffer.concat(errorChunks).toString() + + resolve({ + success: code === 0, + output: output + (stderr ? `\nStderr: ${stderr}` : ''), + error: code !== 0 ? `Process exited with code ${code}` : undefined, + duration: 0 + }) + }) + + proc.on('error', (error) => { + clearTimeout(timeout) + resolve({ + success: false, + output: '', + error: error.message, + duration: 0 + }) + }) + }) + } + + private updateLastRun(id: number, timestamp: number): void { + if (!this.db) return + + const task = this.getTask(id) + if (!task) return + + const nextRun = this.calculateNextRun(task.schedule_value) + + const stmt = this.db.prepare(` + UPDATE scheduled_tasks + SET last_run_at = ?, next_run_at = ?, updated_at = ? + WHERE id = ? + `) + stmt.run(timestamp, nextRun, Date.now(), id) + } + + private calculateNextRun(cronExpression: string): number { + const interval = cron.schedule(cronExpression, () => {}, { scheduled: false }) + + const now = new Date() + const parts = cronExpression.split(' ') + + const minute = parts[0] === '*' ? now.getMinutes() : parseInt(parts[0]) || 0 + const hour = parts[1] === '*' ? now.getHours() : parseInt(parts[1]) || 0 + const dayOfMonth = parts[2] === '*' ? now.getDate() : parseInt(parts[2]) || 1 + const month = parts[3] === '*' ? 
now.getMonth() : (parseInt(parts[3]) || 1) - 1 + + let nextDate = new Date(now.getFullYear(), month, dayOfMonth, hour, minute, 0, 0) + + if (nextDate <= now) { + if (parts[0] !== '*') { + nextDate.setHours(nextDate.getHours() + 1) + } else if (parts[1] !== '*') { + nextDate.setDate(nextDate.getDate() + 1) + } else { + nextDate.setMinutes(nextDate.getMinutes() + 1) + } + } + + interval.stop() + return nextDate.getTime() + } +} + +export const schedulerService = new SchedulerService() diff --git a/backend/src/services/settings.ts b/backend/src/services/settings.ts index 47e98c84..14093e9b 100644 --- a/backend/src/services/settings.ts +++ b/backend/src/services/settings.ts @@ -1,23 +1,54 @@ import { Database } from 'bun:sqlite' +import { unlinkSync, existsSync } from 'fs' +import { getOpenCodeConfigFilePath } from '@opencode-manager/shared/config/env' import { logger } from '../utils/logger' +import stripJsonComments from 'strip-json-comments' import type { UserPreferences, SettingsResponse, OpenCodeConfig, CreateOpenCodeConfigRequest, - UpdateOpenCodeConfigRequest, - OpenCodeConfigResponse + UpdateOpenCodeConfigRequest } from '../types/settings' import { UserPreferencesSchema, OpenCodeConfigSchema, - OpenCodeConfigMetadataSchema, DEFAULT_USER_PREFERENCES, } from '../types/settings' +interface OpenCodeConfigWithRaw extends OpenCodeConfig { + rawContent: string +} + +interface OpenCodeConfigResponseWithRaw { + configs: OpenCodeConfigWithRaw[] + defaultConfig: OpenCodeConfigWithRaw | null +} + +function parseJsonc(content: string): unknown { + return JSON.parse(stripJsonComments(content)) +} + export class SettingsService { + private static lastKnownGoodConfigContent: string | null = null + constructor(private db: Database) {} + initializeLastKnownGoodConfig(userId: string = 'default'): void { + const settings = this.getSettings(userId) + if (settings.preferences.lastKnownGoodConfig) { + SettingsService.lastKnownGoodConfigContent = 
settings.preferences.lastKnownGoodConfig + logger.info('Initialized last known good config from database') + } + } + + persistLastKnownGoodConfig(userId: string = 'default'): void { + if (SettingsService.lastKnownGoodConfigContent) { + this.updateSettings({ lastKnownGoodConfig: SettingsService.lastKnownGoodConfigContent }, userId) + logger.info('Persisted last known good config to database') + } + } + getSettings(userId: string = 'default'): SettingsResponse { const row = this.db .query('SELECT preferences, updated_at FROM user_preferences WHERE user_id = ?') @@ -31,7 +62,7 @@ export class SettingsService { } try { - const parsed = JSON.parse(row.preferences) + const parsed = parseJsonc(row.preferences) as Record const validated = UserPreferencesSchema.parse({ ...DEFAULT_USER_PREFERENCES, ...parsed, @@ -92,7 +123,7 @@ export class SettingsService { } } - getOpenCodeConfigs(userId: string = 'default'): OpenCodeConfigResponse { + getOpenCodeConfigs(userId: string = 'default'): OpenCodeConfigResponseWithRaw { const rows = this.db .query('SELECT * FROM opencode_configs WHERE user_id = ? 
ORDER BY created_at DESC') .all(userId) as Array<{ @@ -105,18 +136,20 @@ export class SettingsService { updated_at: number }> - const configs: OpenCodeConfig[] = [] - let defaultConfig: OpenCodeConfig | null = null + const configs: OpenCodeConfigWithRaw[] = [] + let defaultConfig: OpenCodeConfigWithRaw | null = null for (const row of rows) { try { - const content = JSON.parse(row.config_content) + const rawContent = row.config_content + const content = parseJsonc(rawContent) const validated = OpenCodeConfigSchema.parse(content) - const config: OpenCodeConfig = { + const config: OpenCodeConfigWithRaw = { id: row.id, name: row.config_name, content: validated, + rawContent: rawContent, isDefault: Boolean(row.is_default), createdAt: row.created_at, updatedAt: row.updated_at, @@ -141,11 +174,31 @@ export class SettingsService { createOpenCodeConfig( request: CreateOpenCodeConfigRequest, userId: string = 'default' - ): OpenCodeConfig { - const contentValidated = OpenCodeConfigSchema.parse(request.content) + ): OpenCodeConfigWithRaw { + // Check for existing config with the same name + const existing = this.getOpenCodeConfigByName(request.name, userId) + if (existing) { + throw new Error(`Config with name '${request.name}' already exists`) + } + + const rawContent = typeof request.content === 'string' + ? request.content + : JSON.stringify(request.content, null, 2) + + const parsedContent = typeof request.content === 'string' + ? 
parseJsonc(request.content) + : request.content + + const contentValidated = OpenCodeConfigSchema.parse(parsedContent) const now = Date.now() - if (request.isDefault) { + const existingCount = this.db + .query('SELECT COUNT(*) as count FROM opencode_configs WHERE user_id = ?') + .get(userId) as { count: number } + + const shouldBeDefault = request.isDefault || existingCount.count === 0 + + if (shouldBeDefault) { this.db .query('UPDATE opencode_configs SET is_default = FALSE WHERE user_id = ?') .run(userId) @@ -159,17 +212,18 @@ export class SettingsService { .run( userId, request.name, - JSON.stringify(contentValidated), - request.isDefault || false, + rawContent, + shouldBeDefault, now, now ) - const config: OpenCodeConfig = { + const config: OpenCodeConfigWithRaw = { id: result.lastInsertRowid as number, name: request.name, content: contentValidated, - isDefault: request.isDefault || false, + rawContent: rawContent, + isDefault: shouldBeDefault, createdAt: now, updatedAt: now, } @@ -182,7 +236,7 @@ export class SettingsService { configName: string, request: UpdateOpenCodeConfigRequest, userId: string = 'default' - ): OpenCodeConfig | null { + ): OpenCodeConfigWithRaw | null { const existing = this.db .query('SELECT * FROM opencode_configs WHERE user_id = ? AND config_name = ?') .get(userId, configName) as { @@ -196,7 +250,15 @@ export class SettingsService { return null } - const contentValidated = OpenCodeConfigSchema.parse(request.content) + const rawContent = typeof request.content === 'string' + ? request.content + : JSON.stringify(request.content, null, 2) + + const parsedContent = typeof request.content === 'string' + ? parseJsonc(request.content) + : request.content + + const contentValidated = OpenCodeConfigSchema.parse(parsedContent) const now = Date.now() if (request.isDefault) { @@ -212,17 +274,18 @@ export class SettingsService { WHERE user_id = ? 
AND config_name = ?` ) .run( - JSON.stringify(contentValidated), + rawContent, request.isDefault !== undefined ? request.isDefault : existing.is_default, now, userId, configName ) - const config: OpenCodeConfig = { + const config: OpenCodeConfigWithRaw = { id: existing.id, name: configName, content: contentValidated, + rawContent: rawContent, isDefault: request.isDefault !== undefined ? request.isDefault : existing.is_default, createdAt: existing.created_at, updatedAt: now, @@ -240,12 +303,13 @@ export class SettingsService { const deleted = result.changes > 0 if (deleted) { logger.info(`Deleted OpenCode config '${configName}' for user: ${userId}`) + this.ensureSingleConfigIsDefault(userId) } return deleted } - setDefaultOpenCodeConfig(configName: string, userId: string = 'default'): OpenCodeConfig | null { + setDefaultOpenCodeConfig(configName: string, userId: string = 'default'): OpenCodeConfigWithRaw | null { const existing = this.db .query('SELECT * FROM opencode_configs WHERE user_id = ? AND config_name = ?') .get(userId, configName) as { @@ -272,13 +336,15 @@ export class SettingsService { .run(now, userId, configName) try { - const content = JSON.parse(existing.config_content) + const rawContent = existing.config_content + const content = parseJsonc(rawContent) const validated = OpenCodeConfigSchema.parse(content) - const config: OpenCodeConfig = { + const config: OpenCodeConfigWithRaw = { id: existing.id, name: configName, content: validated, + rawContent: rawContent, isDefault: true, createdAt: existing.created_at, updatedAt: now, @@ -292,7 +358,7 @@ export class SettingsService { } } - getDefaultOpenCodeConfig(userId: string = 'default'): OpenCodeConfig | null { + getDefaultOpenCodeConfig(userId: string = 'default'): OpenCodeConfigWithRaw | null { const row = this.db .query('SELECT * FROM opencode_configs WHERE user_id = ? 
AND is_default = TRUE') .get(userId) as { @@ -308,13 +374,15 @@ export class SettingsService { } try { - const content = JSON.parse(row.config_content) + const rawContent = row.config_content + const content = parseJsonc(rawContent) const validated = OpenCodeConfigSchema.parse(content) return { id: row.id, name: row.config_name, content: validated, + rawContent: rawContent, isDefault: true, createdAt: row.created_at, updatedAt: row.updated_at, @@ -325,7 +393,7 @@ export class SettingsService { } } - getOpenCodeConfigByName(configName: string, userId: string = 'default'): OpenCodeConfig | null { + getOpenCodeConfigByName(configName: string, userId: string = 'default'): OpenCodeConfigWithRaw | null { const row = this.db .query('SELECT * FROM opencode_configs WHERE user_id = ? AND config_name = ?') .get(userId, configName) as { @@ -342,13 +410,15 @@ export class SettingsService { } try { - const content = JSON.parse(row.config_content) + const rawContent = row.config_content + const content = parseJsonc(rawContent) const validated = OpenCodeConfigSchema.parse(content) return { id: row.id, name: row.config_name, content: validated, + rawContent: rawContent, isDefault: Boolean(row.is_default), createdAt: row.created_at, updatedAt: row.updated_at, @@ -360,18 +430,92 @@ export class SettingsService { } getOpenCodeConfigContent(configName: string, userId: string = 'default'): string | null { - const config = this.getOpenCodeConfigByName(configName, userId) + const row = this.db + .query('SELECT config_content FROM opencode_configs WHERE user_id = ? AND config_name = ?') + .get(userId, configName) as { config_content: string } | undefined - if (!config) { + if (!row) { logger.error(`Config '${configName}' not found for user ${userId}`) return null } + return row.config_content + } + + ensureSingleConfigIsDefault(userId: string = 'default'): void { + const hasDefault = this.db + .query('SELECT COUNT(*) as count FROM opencode_configs WHERE user_id = ? 
AND is_default = TRUE') + .get(userId) as { count: number } + + if (hasDefault.count === 0) { + const firstConfig = this.db + .query('SELECT config_name FROM opencode_configs WHERE user_id = ? ORDER BY created_at ASC LIMIT 1') + .get(userId) as { config_name: string } | undefined + + if (firstConfig) { + this.db + .query('UPDATE opencode_configs SET is_default = TRUE WHERE user_id = ? AND config_name = ?') + .run(userId, firstConfig.config_name) + logger.info(`Auto-set '${firstConfig.config_name}' as default (only config)`) + } + } + } + + saveLastKnownGoodConfig(userId: string = 'default'): void { + const config = this.getDefaultOpenCodeConfig(userId) + if (config) { + SettingsService.lastKnownGoodConfigContent = config.rawContent + this.persistLastKnownGoodConfig(userId) + logger.info(`Saved last known good config: ${config.name}`) + } + } + + restoreToLastKnownGoodConfig(userId: string = 'default'): { configName: string; content: string } | null { + if (!SettingsService.lastKnownGoodConfigContent) { + logger.warn('No last known good config available for rollback') + return null + } + + const configs = this.getOpenCodeConfigs(userId) + const defaultConfig = configs.defaultConfig + + if (!defaultConfig) { + logger.error('Cannot rollback: no default config found') + return null + } + + logger.info(`Restoring to last known good config for: ${defaultConfig.name}`) + return { + configName: defaultConfig.name, + content: SettingsService.lastKnownGoodConfigContent + } + } + + rollbackToLastKnownGoodHealth(userId: string = 'default'): string | null { + const lastGood = this.restoreToLastKnownGoodConfig(userId) + if (!lastGood) { + return null + } + + this.updateOpenCodeConfig(lastGood.configName, { content: lastGood.content }, userId) + return lastGood.configName + } + + deleteFilesystemConfig(): boolean { + const configPath = getOpenCodeConfigFilePath() + + if (!existsSync(configPath)) { + logger.warn('Config file does not exist:', configPath) + return false + } + try { 
- return JSON.stringify(config.content, null, 2) + unlinkSync(configPath) + logger.info('Deleted filesystem config to allow server startup:', configPath) + return true } catch (error) { - logger.error(`Failed to stringify config '${configName}':`, error) - return null + logger.error('Failed to delete config file:', error) + return false } } } diff --git a/backend/src/services/summarization.ts b/backend/src/services/summarization.ts new file mode 100644 index 00000000..7f9bb35a --- /dev/null +++ b/backend/src/services/summarization.ts @@ -0,0 +1,279 @@ +import { logger } from '../utils/logger' +import type { Database } from 'bun:sqlite' + +interface SessionMessage { + info: { + id: string + role: 'user' | 'assistant' + time: { created: number } + } + parts: Array<{ + type: string + text?: string + toolName?: string + }> +} + +interface SessionSummary { + sessionId: string + summary: string + generatedAt: number + messageCount: number +} + +const SUMMARY_CACHE = new Map() +const CACHE_TTL_MS = 5 * 60 * 1000 + +async function callGeminiFlash(prompt: string, apiKey: string): Promise { + try { + logger.info('Calling Gemini Flash API...') + const response = await fetch( + `https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent?key=${apiKey}`, + { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + contents: [{ parts: [{ text: prompt }] }], + generationConfig: { + maxOutputTokens: 100, + temperature: 0.3 + } + }) + } + ) + + if (!response.ok) { + const error = await response.text() + logger.warn(`Gemini API error: ${response.status} - ${error}`) + return null + } + + const data = await response.json() + const result = data.candidates?.[0]?.content?.parts?.[0]?.text || null + logger.info(`Gemini response: ${JSON.stringify(data).slice(0, 500)}`) + return result + } catch (error) { + logger.warn('Gemini API call failed:', error) + return null + } +} + +async function callOpenAIMini(prompt: string, 
apiKey: string): Promise { + try { + const response = await fetch('https://api.openai.com/v1/chat/completions', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${apiKey}` + }, + body: JSON.stringify({ + model: 'gpt-4o-mini', + messages: [{ role: 'user', content: prompt }], + max_tokens: 100, + temperature: 0.3 + }) + }) + + if (!response.ok) { + const error = await response.text() + logger.warn(`OpenAI API error: ${response.status} - ${error}`) + return null + } + + const data = await response.json() + return data.choices?.[0]?.message?.content || null + } catch (error) { + logger.warn('OpenAI API call failed:', error) + return null + } +} + +function extractConversationContext(messages: SessionMessage[]): string { + const relevantMessages = messages.slice(0, 10) + + const context: string[] = [] + + for (const msg of relevantMessages) { + const role = msg.info?.role || 'unknown' + + for (const part of msg.parts || []) { + if (part.type === 'text' && part.text) { + const text = part.text.slice(0, 500) + context.push(`${role}: ${text}`) + } else if (part.toolName) { + context.push(`${role}: [used tool: ${part.toolName}]`) + } + } + } + + return context.slice(0, 10).join('\n') +} + +function extractFirstUserMessage(messages: SessionMessage[]): string | null { + for (const msg of messages) { + if (msg.info?.role === 'user') { + for (const part of msg.parts || []) { + if (part.type === 'text' && part.text && part.text.length > 10) { + return part.text.slice(0, 200).trim() + } + } + } + } + return null +} + +function generateSimpleSummary(messages: SessionMessage[], sessionTitle: string): string { + const firstMessage = extractFirstUserMessage(messages) + if (firstMessage) { + let summary = firstMessage + .replace(/\n+/g, ' ') + .replace(/\s+/g, ' ') + .trim() + + if (summary.length > 80) { + summary = summary.slice(0, 77) + '...' 
+ } + return summary + } + + const toolsUsed = new Set() + for (const msg of messages.slice(0, 20)) { + for (const part of msg.parts || []) { + if (part.toolName) { + toolsUsed.add(part.toolName) + } + } + } + + if (toolsUsed.size > 0) { + const tools = Array.from(toolsUsed).slice(0, 3).join(', ') + return `Session using: ${tools}` + } + + return sessionTitle || `Session with ${messages.length} messages` +} + +function buildSummaryPrompt(conversationContext: string, sessionTitle: string): string { + return `Summarize what this coding session is working on in ONE short sentence (max 15 words). Focus on the task/goal, not the conversation. + +Session title: ${sessionTitle} + +Recent conversation: +${conversationContext} + +One-sentence summary:` +} + +export async function summarizeSession( + sessionId: string, + sessionTitle: string, + messages: SessionMessage[], + forceRefresh = false +): Promise { + logger.info(`Summarizing session ${sessionId}: ${messages.length} messages, title: ${sessionTitle}`) + + const cached = SUMMARY_CACHE.get(sessionId) + const now = Date.now() + + if (!forceRefresh && cached) { + if (now - cached.generatedAt < CACHE_TTL_MS && cached.messageCount === messages.length) { + logger.info(`Using cached summary for ${sessionId}`) + return cached.summary + } + } + + if (messages.length === 0) { + logger.info(`No messages for ${sessionId}, using title`) + return sessionTitle || 'Empty session' + } + + const conversationContext = extractConversationContext(messages) + logger.info(`Conversation context for ${sessionId}: ${conversationContext.slice(0, 200)}...`) + + const prompt = buildSummaryPrompt(conversationContext, sessionTitle) + + let summary: string | null = null + + const geminiKey = process.env.GEMINI_API_KEY + logger.info(`Gemini API key present: ${!!geminiKey}`) + if (geminiKey) { + summary = await callGeminiFlash(prompt, geminiKey) + if (summary) { + logger.info(`Summarized session ${sessionId} using Gemini Flash: ${summary}`) + } + } + 
+ if (!summary) { + const openaiKey = process.env.OPENAI_API_KEY + logger.info(`OpenAI API key present: ${!!openaiKey}`) + if (openaiKey) { + summary = await callOpenAIMini(prompt, openaiKey) + if (summary) { + logger.info(`Summarized session ${sessionId} using GPT-4o-mini: ${summary}`) + } + } + } + + if (!summary) { + logger.info(`LLM APIs unavailable, using simple summary extraction`) + summary = generateSimpleSummary(messages, sessionTitle) + logger.info(`Generated simple summary for ${sessionId}: ${summary}`) + } + + if (summary) { + summary = summary.trim().replace(/^["']|["']$/g, '') + + SUMMARY_CACHE.set(sessionId, { + sessionId, + summary, + generatedAt: now, + messageCount: messages.length + }) + } + + return summary +} + +export async function summarizeSessionFromOpenCode( + sessionId: string, + directory: string, + opencodePort: number +): Promise { + try { + const sessionUrl = `http://127.0.0.1:${opencodePort}/session/${sessionId}?directory=${encodeURIComponent(directory)}` + const sessionRes = await fetch(sessionUrl) + if (!sessionRes.ok) { + logger.warn(`Failed to fetch session ${sessionId}: ${sessionRes.status}`) + return null + } + const session = await sessionRes.json() + + const messagesUrl = `http://127.0.0.1:${opencodePort}/session/${sessionId}/message?directory=${encodeURIComponent(directory)}` + const messagesRes = await fetch(messagesUrl) + if (!messagesRes.ok) { + return session.title || null + } + const messages = await messagesRes.json() + + return await summarizeSession(sessionId, session.title || '', messages) + } catch (error) { + logger.warn(`Error summarizing session ${sessionId}:`, error) + return null + } +} + +export function clearSummaryCache(sessionId?: string): void { + if (sessionId) { + SUMMARY_CACHE.delete(sessionId) + } else { + SUMMARY_CACHE.clear() + } +} + +export function getCachedSummary(sessionId: string): string | null { + const cached = SUMMARY_CACHE.get(sessionId) + if (cached && Date.now() - cached.generatedAt < 
CACHE_TTL_MS) { + return cached.summary + } + return null +} diff --git a/backend/src/services/terminal.ts b/backend/src/services/terminal.ts new file mode 100644 index 00000000..1340733e --- /dev/null +++ b/backend/src/services/terminal.ts @@ -0,0 +1,260 @@ +import { spawn, type Subprocess } from 'bun' +import os from 'os' +import path from 'path' +import fs from 'fs' +import { fileURLToPath } from 'url' +import { logger } from '../utils/logger' + +const __dirname = path.dirname(fileURLToPath(import.meta.url)) + +interface TerminalSession { + id: string + process: Subprocess + cwd: string + createdAt: Date + onData: ((data: string) => void) | null + onExit: ((exitCode: number, signal?: number) => void) | null + pendingData: string[] +} + +class TerminalService { + private sessions: Map = new Map() + private ptyWorkerPath: string + + constructor() { + // Try to find pty-worker.cjs relative to this file first (works for source and some bundles) + const localWorkerPath = path.join(__dirname, 'pty-worker.cjs') + + // Try to find it relative to the entry point (works for flat bundles) + const bundleWorkerPath = path.join(path.dirname(process.argv[1]), 'pty-worker.cjs') + + if (fs.existsSync(localWorkerPath)) { + this.ptyWorkerPath = localWorkerPath + } else if (fs.existsSync(bundleWorkerPath)) { + this.ptyWorkerPath = bundleWorkerPath + } else { + // Default to local path and let it fail with a clear error later if not found + this.ptyWorkerPath = localWorkerPath + logger.warn(`Could not find pty-worker.cjs. Checked: \n - ${localWorkerPath}\n - ${bundleWorkerPath}`) + } + } + + createSession(id: string, cwd?: string): TerminalSession { + const existingSession = this.sessions.get(id) + if (existingSession) { + logger.info(`Reusing existing terminal session: ${id}`) + return existingSession + } + + const shell = os.platform() === 'win32' ? 
'powershell.exe' : process.env.SHELL || '/bin/bash' + const workingDir = cwd || process.env.HOME || os.homedir() + + logger.info(`Starting PTY worker: node ${this.ptyWorkerPath}`) + + // Ensure the worker path exists + if (!fs.existsSync(this.ptyWorkerPath)) { + logger.error(`PTY worker file not found at: ${this.ptyWorkerPath}`) + } + + const proc = spawn(['node', this.ptyWorkerPath], { + stdin: 'pipe', + stdout: 'pipe', + stderr: 'pipe', + env: { + ...process.env, + PTY_SHELL: shell, + PTY_CWD: workingDir, + PTY_COLS: '80', + PTY_ROWS: '24', + PATH: process.env.PATH, // Explicitly pass PATH to child + }, + }) + + const session: TerminalSession = { + id, + process: proc, + cwd: workingDir, + createdAt: new Date(), + onData: null, + onExit: null, + pendingData: [], + } + + this.sessions.set(id, session) + logger.info(`Created terminal session: ${id} in ${workingDir}, PID: ${proc.pid}`) + + this.startReadingOutput(session) + this.startReadingStderr(session) + + return session + } + + private async startReadingOutput(session: TerminalSession) { + const reader = session.process.stdout.getReader() + const decoder = new TextDecoder() + let buffer = '' + + logger.info(`Starting to read output for session ${session.id}`) + + try { + while (true) { + const { done, value } = await reader.read() + if (done) { + logger.info(`Output stream ended for session ${session.id}`) + break + } + + const chunk = decoder.decode(value, { stream: true }) + buffer += chunk + + const lines = buffer.split('\n') + buffer = lines.pop() || '' + + for (const line of lines) { + if (!line.trim()) continue + try { + const msg = JSON.parse(line) + if (msg.type === 'data') { + if (session.onData) { + session.onData(msg.data) + } else { + session.pendingData.push(msg.data) + } + } else if (msg.type === 'exit') { + if (session.onExit) { + session.onExit(msg.exitCode, msg.signal) + } + } else if (msg.type === 'error') { + logger.error(`PTY error for ${session.id}:`, msg.error) + } else if (msg.type === 
'started') { + logger.info(`PTY worker started for ${session.id}, child PID: ${msg.pid}`) + } + } catch (e) { + logger.warn(`Failed to parse PTY message for ${session.id}: ${line.substring(0, 100)}`) + } + } + } + } catch (error) { + logger.error(`Error reading PTY output for ${session.id}:`, error) + } + } + + private async startReadingStderr(session: TerminalSession) { + const reader = session.process.stderr.getReader() + const decoder = new TextDecoder() + + try { + while (true) { + const { done, value } = await reader.read() + if (done) break + const text = decoder.decode(value, { stream: true }) + if (text.trim()) { + logger.error(`PTY worker stderr for ${session.id}: ${text}`) + } + } + } catch (error) { + logger.error(`Error reading PTY stderr for ${session.id}:`, error) + } + } + + setOnData(id: string, callback: (data: string) => void): boolean { + const session = this.sessions.get(id) + if (!session) return false + session.onData = callback + + for (const data of session.pendingData) { + callback(data) + } + session.pendingData = [] + + return true + } + + setOnExit(id: string, callback: (exitCode: number, signal?: number) => void): boolean { + const session = this.sessions.get(id) + if (!session) return false + session.onExit = callback + return true + } + + getSession(id: string): TerminalSession | undefined { + return this.sessions.get(id) + } + + resizeSession(id: string, cols: number, rows: number): boolean { + const session = this.sessions.get(id) + if (!session) { + return false + } + + try { + session.process.stdin.write(JSON.stringify({ type: 'resize', cols, rows }) + '\n') + // @ts-ignore + if (typeof session.process.stdin.flush === 'function') { + // @ts-ignore + session.process.stdin.flush() + } + return true + } catch (error) { + logger.error(`Failed to resize terminal ${id}:`, error) + return false + } + } + + writeToSession(id: string, data: string): boolean { + const session = this.sessions.get(id) + if (!session) { + 
logger.warn(`writeToSession: session ${id} not found`) + return false + } + + try { + const msg = JSON.stringify({ type: 'input', data }) + '\n' + session.process.stdin.write(msg) + // @ts-ignore + if (typeof session.process.stdin.flush === 'function') { + // @ts-ignore + session.process.stdin.flush() + } + return true + } catch (error) { + logger.error(`Failed to write to terminal ${id}:`, error) + return false + } + } + + destroySession(id: string): boolean { + const session = this.sessions.get(id) + if (!session) { + return false + } + + try { + session.process.stdin.write(JSON.stringify({ type: 'kill' }) + '\n') + session.process.kill() + this.sessions.delete(id) + logger.info(`Destroyed terminal session: ${id}`) + return true + } catch (error) { + logger.error(`Failed to destroy terminal ${id}:`, error) + return false + } + } + + listSessions(): Array<{ id: string; cwd: string; createdAt: Date }> { + return Array.from(this.sessions.values()).map(session => ({ + id: session.id, + cwd: session.cwd, + createdAt: session.createdAt, + })) + } + + destroyAllSessions(): void { + for (const [id] of this.sessions) { + this.destroySession(id) + } + logger.info('Destroyed all terminal sessions') + } +} + +export const terminalService = new TerminalService() diff --git a/backend/src/services/whisper.ts b/backend/src/services/whisper.ts new file mode 100644 index 00000000..6fef59f7 --- /dev/null +++ b/backend/src/services/whisper.ts @@ -0,0 +1,323 @@ +import { spawn, ChildProcess } from 'child_process' +import fs from 'fs' +import { logger } from '../utils/logger' +import { getWorkspacePath } from '@opencode-manager/shared/config/env' +import path from 'path' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + +const WHISPER_PORT = parseInt(process.env.WHISPER_PORT || '5552') +const WHISPER_HOST = process.env.WHISPER_HOST || '127.0.0.1' +const WHISPER_DEFAULT_MODEL = 
process.env.WHISPER_DEFAULT_MODEL || 'base' + +interface WhisperServerStatus { + running: boolean + port: number + host: string + model: string | null + error: string | null +} + +class WhisperServerManager { + private process: ChildProcess | null = null + private status: WhisperServerStatus = { + running: false, + port: WHISPER_PORT, + host: WHISPER_HOST, + model: null, + error: null + } + private startPromise: Promise | null = null + private healthCheckInterval: ReturnType | null = null + + getPort(): number { + return WHISPER_PORT + } + + getHost(): string { + return WHISPER_HOST + } + + getBaseUrl(): string { + return `http://${WHISPER_HOST}:${WHISPER_PORT}` + } + + getStatus(): WhisperServerStatus { + return { ...this.status } + } + + async syncStatus(): Promise { + try { + const response = await fetch(`${this.getBaseUrl()}/health`, { + signal: AbortSignal.timeout(2000) + }) + + if (response.ok) { + const data = await response.json() as { current_model?: string } + this.status.running = true + this.status.model = data.current_model || null + this.status.error = null + } else { + this.status.running = false + this.status.error = 'Health check failed' + } + } catch (error) { + this.status.running = false + this.status.error = error instanceof Error ? 
error.message : 'Health check failed' + } + return { ...this.status } + } + + async start(): Promise { + if (this.startPromise) { + return this.startPromise + } + + if (this.status.running) { + logger.info('Whisper server already running') + return + } + + this.startPromise = this.doStart() + try { + await this.startPromise + } finally { + this.startPromise = null + } + } + + private async doStart(): Promise { + const possiblePaths = [ + path.resolve(__dirname, '..', '..', 'scripts', 'whisper-server.py'), + path.resolve(__dirname, '..', '..', '..', 'scripts', 'whisper-server.py'), + path.join(process.cwd(), 'scripts', 'whisper-server.py') + ] + + let scriptPath: string | null = null + for (const p of possiblePaths) { + if (fs.existsSync(p)) { + scriptPath = p + break + } + } + + if (!scriptPath) { + throw new Error(`Whisper server script not found. Searched: ${possiblePaths.join(', ')}`) + } + + const modelsDir = path.join(getWorkspacePath(), 'cache', 'whisper-models') + + logger.info(`Starting Whisper server on ${WHISPER_HOST}:${WHISPER_PORT}`) + logger.info(`Script path: ${scriptPath}`) + logger.info(`Models directory: ${modelsDir}`) + + const env = { + ...process.env, + WHISPER_PORT: WHISPER_PORT.toString(), + WHISPER_HOST: WHISPER_HOST, + WHISPER_MODELS_DIR: modelsDir, + WHISPER_DEFAULT_MODEL: WHISPER_DEFAULT_MODEL, + PYTHONUNBUFFERED: '1' + } + + const venvPath = process.env.WHISPER_VENV + const pythonBin = venvPath ? 
path.join(venvPath, 'bin', 'python') : 'python3' + + logger.info(`Using Python: ${pythonBin}`) + + this.process = spawn(pythonBin, [scriptPath], { + env, + stdio: ['ignore', 'pipe', 'pipe'] + }) + + this.process.stdout?.on('data', (data) => { + const message = data.toString().trim() + if (message) { + logger.info(`[Whisper] ${message}`) + } + }) + + this.process.stderr?.on('data', (data) => { + const message = data.toString().trim() + if (message) { + if (message.includes('INFO') || message.includes('Uvicorn')) { + logger.info(`[Whisper] ${message}`) + } else { + logger.error(`[Whisper] ${message}`) + } + } + }) + + this.process.on('error', (error) => { + logger.error('Failed to start Whisper server:', error) + this.status.running = false + this.status.error = error.message + }) + + this.process.on('exit', (code, signal) => { + logger.info(`Whisper server exited with code ${code}, signal ${signal}`) + this.status.running = false + this.process = null + + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval) + this.healthCheckInterval = null + } + }) + + await this.waitForReady() + this.startHealthCheck() + } + + private async waitForReady(maxAttempts = 30, delayMs = 1000): Promise { + for (let i = 0; i < maxAttempts; i++) { + try { + const response = await fetch(`${this.getBaseUrl()}/health`, { + signal: AbortSignal.timeout(2000) + }) + + if (response.ok) { + const data = await response.json() as { current_model?: string } + this.status.running = true + this.status.model = data.current_model || null + this.status.error = null + logger.info('Whisper server is ready') + return + } + } catch { + logger.debug(`Waiting for Whisper server... 
attempt ${i + 1}/${maxAttempts}`) + } + + await new Promise(resolve => setTimeout(resolve, delayMs)) + } + + throw new Error('Whisper server failed to start within timeout') + } + + private startHealthCheck(): void { + this.healthCheckInterval = setInterval(async () => { + try { + const response = await fetch(`${this.getBaseUrl()}/health`, { + signal: AbortSignal.timeout(5000) + }) + + if (response.ok) { + const data = await response.json() as { current_model?: string } + this.status.running = true + this.status.model = data.current_model || null + this.status.error = null + } else { + this.status.running = false + this.status.error = 'Health check failed' + } + } catch (error) { + this.status.running = false + this.status.error = error instanceof Error ? error.message : 'Health check failed' + } + }, 30000) + } + + async stop(): Promise { + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval) + this.healthCheckInterval = null + } + + if (!this.process) { + return + } + + logger.info('Stopping Whisper server...') + + return new Promise((resolve) => { + const timeout = setTimeout(() => { + logger.warn('Whisper server did not exit gracefully, killing...') + this.process?.kill('SIGKILL') + resolve() + }, 5000) + + this.process!.once('exit', () => { + clearTimeout(timeout) + this.process = null + this.status.running = false + logger.info('Whisper server stopped') + resolve() + }) + + this.process!.kill('SIGTERM') + }) + } + + async transcribe(audioData: Buffer, options: { + model?: string + language?: string + format?: string + } = {}): Promise<{ + text: string + language: string + language_probability: number + duration: number + }> { + await this.syncStatus() + if (!this.status.running) { + throw new Error('Whisper server is not running') + } + + const base64Audio = audioData.toString('base64') + + const TRANSCRIBE_TIMEOUT_MS = 120000 + const response = await fetch(`${this.getBaseUrl()}/transcribe-base64`, { + method: 'POST', + headers: { + 
'Content-Type': 'application/json' + }, + body: JSON.stringify({ + audio: base64Audio, + model: options.model || WHISPER_DEFAULT_MODEL, + language: options.language, + format: options.format || 'webm' + }), + signal: AbortSignal.timeout(TRANSCRIBE_TIMEOUT_MS) + }) + + if (!response.ok) { + const error = await response.text() + throw new Error(`Transcription failed: ${error}`) + } + + return response.json() + } + + async getModels(): Promise<{ + models: string[] + current: string | null + default: string + }> { + if (!this.status.running) { + return { + models: ['tiny', 'base', 'small', 'medium', 'large-v2', 'large-v3'], + current: null, + default: WHISPER_DEFAULT_MODEL + } + } + + try { + const response = await fetch(`${this.getBaseUrl()}/models`) + if (response.ok) { + return response.json() + } + } catch { + logger.warn('Failed to fetch models from Whisper server') + } + + return { + models: ['tiny', 'base', 'small', 'medium', 'large-v2', 'large-v3'], + current: this.status.model, + default: WHISPER_DEFAULT_MODEL + } + } +} + +export const whisperServerManager = new WhisperServerManager() diff --git a/backend/src/types/repo.ts b/backend/src/types/repo.ts index 2a29e993..af8fe2fe 100644 --- a/backend/src/types/repo.ts +++ b/backend/src/types/repo.ts @@ -7,11 +7,12 @@ export interface Repo extends BaseRepo { } export interface CreateRepoInput { - repoUrl: string + repoUrl?: string localPath: string branch?: string defaultBranch: string cloneStatus: 'cloning' | 'ready' | 'error' clonedAt: number isWorktree?: boolean + isLocal?: boolean } diff --git a/backend/src/utils/git-auth.ts b/backend/src/utils/git-auth.ts new file mode 100644 index 00000000..0ed1d797 --- /dev/null +++ b/backend/src/utils/git-auth.ts @@ -0,0 +1,38 @@ +export function isGitHubHttpsUrl(repoUrl: string): boolean { + try { + const parsed = new URL(repoUrl) + return parsed.protocol === 'https:' && parsed.hostname === 'github.com' + } catch { + return false + } +} + +export function 
createNoPromptGitEnv(): Record { + return { + GIT_TERMINAL_PROMPT: '0' + } +} + +export function createGitHubGitEnv(gitToken: string): Record { + const basicAuth = Buffer.from(`x-access-token:${gitToken}`, 'utf8').toString('base64') + + return { + ...createNoPromptGitEnv(), + GITHUB_TOKEN: gitToken, + GIT_CONFIG_COUNT: '1', + GIT_CONFIG_KEY_0: 'http.https://github.com/.extraheader', + GIT_CONFIG_VALUE_0: `AUTHORIZATION: basic ${basicAuth}` + } +} + +export function createGitEnvForRepoUrl(repoUrl: string, gitToken?: string): Record { + if (!gitToken) { + return createNoPromptGitEnv() + } + + if (isGitHubHttpsUrl(repoUrl)) { + return createGitHubGitEnv(gitToken) + } + + return createNoPromptGitEnv() +} diff --git a/backend/src/utils/logger.ts b/backend/src/utils/logger.ts index 288a84b6..baad99c7 100644 --- a/backend/src/utils/logger.ts +++ b/backend/src/utils/logger.ts @@ -1,3 +1,5 @@ +import { ENV } from '@opencode-manager/shared/config/env' + type LogLevel = 'info' | 'warn' | 'error' | 'debug' class Logger { @@ -7,7 +9,7 @@ class Logger { this.prefix = prefix } - private format(level: LogLevel, message: string, ...args: unknown[]): string { + private format(level: LogLevel, message: string): string { const timestamp = new Date().toISOString() const prefixStr = this.prefix ? 
`[${this.prefix}] ` : '' return `[${timestamp}] [${level.toUpperCase()}] ${prefixStr}${message}` @@ -26,7 +28,7 @@ class Logger { } debug(message: string, ...args: unknown[]): void { - if (process.env.DEBUG) { + if (ENV.LOGGING.DEBUG) { console.debug(this.format('debug', message), ...args) } } diff --git a/backend/src/utils/process.ts b/backend/src/utils/process.ts index 16717549..e10b56b3 100644 --- a/backend/src/utils/process.ts +++ b/backend/src/utils/process.ts @@ -1,16 +1,27 @@ import { spawn, type ChildProcess } from 'child_process' import { logger } from './logger' +interface ExecuteCommandOptions { + cwd?: string + silent?: boolean + env?: Record +} + export async function executeCommand( args: string[], - cwd?: string + cwdOrOptions?: string | ExecuteCommandOptions ): Promise { + const options: ExecuteCommandOptions = typeof cwdOrOptions === 'string' + ? { cwd: cwdOrOptions } + : cwdOrOptions || {} + return new Promise((resolve, reject) => { const [command, ...cmdArgs] = args const proc: ChildProcess = spawn(command || '', cmdArgs, { - cwd, - shell: false + cwd: options.cwd, + shell: false, + env: { ...process.env, ...options.env } }) let stdout = '' @@ -25,7 +36,9 @@ export async function executeCommand( }) proc.on('error', (error: Error) => { - logger.error(`Command failed: ${args.join(' ')}`, error) + if (!options.silent) { + logger.error(`Command failed: ${args.join(' ')}`, error) + } reject(error) }) @@ -34,7 +47,9 @@ export async function executeCommand( resolve(stdout) } else { const error = new Error(`Command failed with code ${code}: ${stderr || stdout}`) - logger.error(`Command failed: ${args.join(' ')}`, error) + if (!options.silent) { + logger.error(`Command failed: ${args.join(' ')}`, error) + } reject(error) } }) diff --git a/backend/test/db/queries.test.ts b/backend/test/db/queries.test.ts index 10926b1a..0d275194 100644 --- a/backend/test/db/queries.test.ts +++ b/backend/test/db/queries.test.ts @@ -30,6 +30,10 @@ describe('Database 
Queries', () => { isWorktree: false } + const existingCheckStmt = { + get: vi.fn().mockReturnValue(undefined) + } + const insertStmt = { run: vi.fn().mockReturnValue({ changes: 1, lastInsertRowid: 1 }) } @@ -48,6 +52,7 @@ describe('Database Queries', () => { } mockDb.prepare + .mockReturnValueOnce(existingCheckStmt) .mockReturnValueOnce(insertStmt) .mockReturnValueOnce(selectStmt) @@ -61,7 +66,8 @@ describe('Database Queries', () => { repo.defaultBranch, repo.cloneStatus, repo.clonedAt, - repo.isWorktree ? 1 : 0 + repo.isWorktree ? 1 : 0, + 0 ) expect(result.id).toBe(1) }) @@ -69,6 +75,7 @@ describe('Database Queries', () => { describe('getRepoById', () => { it('should retrieve repo by ID', () => { + const clonedAt = Date.now() const repoRow = { id: 1, repo_url: 'https://github.com/test/repo', @@ -76,10 +83,11 @@ describe('Database Queries', () => { branch: 'main', default_branch: 'main', clone_status: 'ready', - cloned_at: Date.now(), - last_pulled: undefined, - opencode_config_name: undefined, - is_worktree: 0 + cloned_at: clonedAt, + last_pulled: null, + opencode_config_name: null, + is_worktree: 0, + is_local: 0 } const stmt = { @@ -89,14 +97,19 @@ describe('Database Queries', () => { const result = db.getRepoById(mockDb, 1) - expect(result).toMatchObject({ + expect(result).toEqual({ id: 1, repoUrl: 'https://github.com/test/repo', localPath: 'repos/test-repo', + fullPath: expect.stringContaining('repos/test-repo'), branch: 'main', defaultBranch: 'main', cloneStatus: 'ready', - clonedAt: repoRow.cloned_at + clonedAt: clonedAt, + lastPulled: null, + openCodeConfigName: null, + isWorktree: undefined, + isLocal: undefined }) }) diff --git a/backend/test/integration/webfetch-large-output.test.ts b/backend/test/integration/webfetch-large-output.test.ts new file mode 100644 index 00000000..1b9b0137 --- /dev/null +++ b/backend/test/integration/webfetch-large-output.test.ts @@ -0,0 +1,345 @@ +/** + * Integration test for WebFetch large output handling + * + * This test 
verifies that the context overflow fix (PR #6234) is working correctly. + * It sends requests that trigger WebFetch with large outputs and verifies: + * 1. The session doesn't get stuck in a retry loop + * 2. No "prompt is too long" errors occur + * 3. Large outputs are properly handled (file persistence) + * + * Usage: + * OPENCODE_MANAGER_URL=https://your-deployment.com AUTH_USER=admin AUTH_PASS=secret pnpm run test:integration + * + * Or for local testing: + * docker run -d -p 5003:5003 ghcr.io/vibetechnologies/opencode-manager:latest + * pnpm run test:integration + * + * NOTE: This test is skipped by default. Run with RUN_INTEGRATION_TESTS=1 or use pnpm run test:integration + */ + +import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest' + +const SKIP_INTEGRATION = !process.env.RUN_INTEGRATION_TESTS && !process.env.OPENCODE_MANAGER_URL +import axios, { AxiosInstance } from 'axios' + +const OPENCODE_MANAGER_URL = process.env.OPENCODE_MANAGER_URL || 'http://localhost:5003' +const OPENCODE_API_URL = `${OPENCODE_MANAGER_URL}/api/opencode` +const AUTH_USER = process.env.AUTH_USER || '' +const AUTH_PASS = process.env.AUTH_PASS || '' +const TEST_TIMEOUT = 300000 + +interface SessionStatus { + type: 'idle' | 'busy' | 'retry' + attempt?: number + message?: string + next?: number +} + +interface ToolState { + status: string + output?: string + error?: { name: string; data: { message: string } } +} + +interface MessagePart { + type: string + text?: string + state?: ToolState + tool?: string +} + +interface MessageInfo { + id: string + role: string + finish?: string +} + +interface Message { + info: MessageInfo + parts: MessagePart[] +} + +describe.skipIf(SKIP_INTEGRATION)('WebFetch Large Output Integration Test', () => { + let client: AxiosInstance + let sessionID: string + let directory: string + + beforeAll(async () => { + console.log(`Testing against: ${OPENCODE_MANAGER_URL}`) + if (AUTH_USER) { + console.log(`Using Basic Auth: ${AUTH_USER}:****`) 
+ } + + const axiosConfig: any = { + baseURL: OPENCODE_API_URL, + timeout: 30000 + } + + if (AUTH_USER && AUTH_PASS) { + axiosConfig.auth = { + username: AUTH_USER, + password: AUTH_PASS + } + } + + client = axios.create(axiosConfig) + + try { + const healthConfig: any = { timeout: 10000 } + if (AUTH_USER && AUTH_PASS) { + healthConfig.auth = { username: AUTH_USER, password: AUTH_PASS } + } + const healthResponse = await axios.get(`${OPENCODE_MANAGER_URL}/api/health`, healthConfig) + expect(healthResponse.status).toBe(200) + console.log('Health check passed:', healthResponse.data) + } catch (error) { + console.error(`Failed to connect to ${OPENCODE_MANAGER_URL}`) + console.error('Make sure the OpenCode Manager is running.') + console.error('You can start it with: docker run -d -p 5003:5003 ghcr.io/vibetechnologies/opencode-manager:latest') + throw error + } + + try { + const reposConfig: any = { timeout: 10000 } + if (AUTH_USER && AUTH_PASS) { + reposConfig.auth = { username: AUTH_USER, password: AUTH_PASS } + } + const reposResponse = await axios.get(`${OPENCODE_MANAGER_URL}/api/repos`, reposConfig) + const repos = reposResponse.data + if (repos.length === 0) { + console.warn('No repositories available. 
Using default workspace...') + directory = '/workspace' + } else { + directory = repos[0].fullPath || repos[0].path || '/workspace' + } + console.log('Using directory:', directory) + } catch (error) { + console.warn('Could not get repos, using default workspace') + directory = '/workspace' + } + + client.interceptors.request.use((config) => { + config.params = { ...config.params, directory } + return config + }) + }) + + afterAll(async () => { + if (sessionID) { + try { + await client.delete(`/session/${sessionID}`) + console.log('Cleaned up test session:', sessionID) + } catch (e) { + console.warn('Failed to cleanup session:', e) + } + } + }) + + beforeEach(() => { + sessionID = '' + }) + + async function sendMessageAsync(sid: string, text: string): Promise { + const response = await client.post(`/session/${sid}/prompt_async`, { + parts: [{ type: 'text', text }] + }) + expect(response.status).toBe(204) + } + + async function waitForSessionIdle(sid: string, maxWaitMs: number = 120000): Promise { + const startTime = Date.now() + let lastLogTime = 0 + let lastStatus = '' + + while (Date.now() - startTime < maxWaitMs) { + try { + const statusResponse = await client.get>('/session/status') + const status = statusResponse.data[sid] + + if (!status || status.type === 'idle') { + if (lastStatus === 'busy') { + console.log('Session completed (idle)') + } + return + } + + const statusStr = `${status.type}${status.attempt ? 
` (attempt ${status.attempt})` : ''}` + if (Date.now() - lastLogTime > 5000 || statusStr !== lastStatus) { + console.log(`Session status: ${statusStr}`) + lastLogTime = Date.now() + lastStatus = statusStr + } + + if (status.type === 'retry') { + console.log(`Session in retry state: attempt ${status.attempt}, message: ${status.message}`) + if (status.attempt && status.attempt > 5) { + throw new Error(`Session stuck in retry loop after ${status.attempt} attempts: ${status.message}`) + } + } + } catch (error: any) { + if (error.message?.includes('stuck in retry')) { + throw error + } + console.warn('Error checking status:', error.message) + } + + await new Promise(resolve => setTimeout(resolve, 2000)) + } + throw new Error(`Timeout waiting for session to become idle after ${maxWaitMs}ms`) + } + + async function getMessages(sid: string): Promise { + const messagesResponse = await client.get(`/session/${sid}/message`) + return messagesResponse.data + } + + async function getLastAssistantMessage(sid: string): Promise { + const messages = await getMessages(sid) + return messages.filter(m => m.info.role === 'assistant').pop() + } + + async function createTestSession(title: string): Promise { + const createResponse = await client.post('/session', { title }) + return createResponse.data.id + } + + function findWebFetchPart(message: Message): MessagePart | undefined { + return message.parts.find(part => part.type === 'tool' && part.tool === 'webfetch') + } + + function hasContextOverflowError(message: Message): boolean { + return message.parts.some(part => { + const errorMsg = part.state?.error?.data?.message || '' + return errorMsg.includes('prompt is too long') || + errorMsg.includes('context length') || + errorMsg.includes('maximum context') || + errorMsg.includes('token limit') + }) + } + + function isFilePersisted(output: string): boolean { + return output.includes('Output') && + output.includes('exceeds maximum') && + output.includes('saved to') + } + + it('should 
handle WebFetch of a large file without context overflow', async () => { + sessionID = await createTestSession('WebFetch Large Output Test') + console.log('Created test session:', sessionID) + + const largeFileUrl = 'https://raw.githubusercontent.com/torvalds/linux/master/MAINTAINERS' + + console.log('Sending prompt to fetch large file (async)...') + await sendMessageAsync(sessionID, `Use the WebFetch tool to fetch this URL: ${largeFileUrl} +Then tell me the first 3 maintainers listed in the file. Just list their names.`) + + console.log('Waiting for response (this may take a while)...') + await waitForSessionIdle(sessionID, TEST_TIMEOUT) + + const messages = await getMessages(sessionID) + console.log(`Session has ${messages.length} messages`) + + const assistantMessages = messages.filter(m => m.info.role === 'assistant') + expect(assistantMessages.length).toBeGreaterThan(0) + + for (const msg of assistantMessages) { + expect(hasContextOverflowError(msg)).toBe(false) + + const webfetchPart = findWebFetchPart(msg) + if (webfetchPart && webfetchPart.state) { + console.log('WebFetch tool status:', webfetchPart.state.status) + const output = webfetchPart.state.output || '' + + if (isFilePersisted(output)) { + console.log('✓ Large output was saved to file (context overflow fix working)') + console.log('Output:', output.substring(0, 200)) + } + } + } + + const lastMessage = assistantMessages[assistantMessages.length - 1] + expect(lastMessage.info.finish).toBe('stop') + + const textParts = lastMessage.parts.filter(part => part.type === 'text') + const responseText = textParts.map(p => p.text || '').join(' ') + + console.log('Response preview:', responseText.substring(0, 300)) + expect(responseText.length).toBeGreaterThan(20) + + console.log('Test PASSED: WebFetch large output handled without context overflow') + }, TEST_TIMEOUT) + + it('should not get stuck in retry loop with large outputs', async () => { + sessionID = await createTestSession('Retry Loop Test') + 
console.log('Created test session:', sessionID) + + console.log('Sending request with potentially large output (async)...') + await sendMessageAsync(sessionID, `Fetch https://raw.githubusercontent.com/nodejs/node/main/AUTHORS and count how many contributors are listed.`) + + let retryCount = 0 + const maxRetries = 5 + const startTime = Date.now() + + while (Date.now() - startTime < TEST_TIMEOUT) { + const statusResponse = await client.get>('/session/status') + const status = statusResponse.data[sessionID] + + if (status?.type === 'retry') { + retryCount++ + console.log(`Retry detected: attempt ${status.attempt}, total retries seen: ${retryCount}`) + console.log(`Retry message: ${status.message}`) + + if (status.attempt && status.attempt > maxRetries) { + throw new Error(`Session stuck in retry loop after ${status.attempt} attempts: ${status.message}`) + } + } + + if (!status || status.type === 'idle') { + console.log('Session completed successfully') + break + } + + await new Promise(resolve => setTimeout(resolve, 2000)) + } + + const lastMessage = await getLastAssistantMessage(sessionID) + expect(lastMessage).toBeDefined() + expect(lastMessage!.info.finish).toBe('stop') + expect(hasContextOverflowError(lastMessage!)).toBe(false) + + console.log(`Test PASSED: No excessive retry loop (saw ${retryCount} retry status updates)`) + }, TEST_TIMEOUT) + + it('should recover gracefully after context-heavy operations', async () => { + sessionID = await createTestSession('Context Recovery Test') + console.log('Created test session:', sessionID) + + console.log('Step 1: Fetching content...') + await sendMessageAsync(sessionID, 'Fetch https://jsonplaceholder.typicode.com/posts and tell me how many posts there are.') + + await waitForSessionIdle(sessionID, 120000) + + let messages = await getMessages(sessionID) + let assistantMessages = messages.filter(m => m.info.role === 'assistant') + expect(assistantMessages.length).toBeGreaterThan(0) + + const firstResponse = 
assistantMessages[assistantMessages.length - 1] + expect(firstResponse.info.finish).toBe('stop') + expect(hasContextOverflowError(firstResponse)).toBe(false) + console.log('Step 1 completed successfully') + + console.log('Step 2: Sending follow-up question...') + await sendMessageAsync(sessionID, 'What was the title of post #1?') + + await waitForSessionIdle(sessionID, 120000) + + messages = await getMessages(sessionID) + assistantMessages = messages.filter(m => m.info.role === 'assistant') + + const lastMessage = assistantMessages[assistantMessages.length - 1] + expect(lastMessage.info.finish).toBe('stop') + expect(hasContextOverflowError(lastMessage)).toBe(false) + + console.log('Test PASSED: Session recovered gracefully after context-heavy operations') + }, TEST_TIMEOUT) +}) diff --git a/backend/test/routes/tasks.test.ts b/backend/test/routes/tasks.test.ts new file mode 100644 index 00000000..bd6cc517 --- /dev/null +++ b/backend/test/routes/tasks.test.ts @@ -0,0 +1,588 @@ +import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest' +import { Hono } from 'hono' +import type { Database } from 'bun:sqlite' + +vi.mock('bun:sqlite', () => ({ + Database: vi.fn() +})) + +vi.mock('node-cron', () => ({ + default: { + validate: vi.fn((expr: string) => { + if (expr === 'invalid-cron') return false + return true + }), + schedule: vi.fn(() => ({ + stop: vi.fn(), + start: vi.fn() + })) + } +})) + +vi.mock('child_process', () => ({ + spawn: vi.fn(() => { + const mockProcess = { + stdout: { on: vi.fn((event, cb) => { if (event === 'data') cb(Buffer.from('output')) }) }, + stderr: { on: vi.fn() }, + on: vi.fn((event, cb) => { if (event === 'close') setTimeout(() => cb(0), 10) }), + kill: vi.fn() + } + return mockProcess + }) +})) + +vi.mock('../../src/utils/logger', () => ({ + logger: { + info: vi.fn(), + error: vi.fn(), + warn: vi.fn(), + debug: vi.fn() + } +})) + +import { createTaskRoutes } from '../../src/routes/tasks' +import { schedulerService, type 
ScheduledTaskRecord } from '../../src/services/scheduler' + +describe('Task Routes', () => { + let app: Hono + let mockDb: any + let tasks: ScheduledTaskRecord[] + let taskIdCounter: number + + beforeEach(() => { + vi.clearAllMocks() + tasks = [] + taskIdCounter = 1 + + mockDb = { + prepare: vi.fn((sql: string) => { + if (sql.includes('INSERT INTO scheduled_tasks')) { + return { + run: vi.fn((...args) => { + const task: ScheduledTaskRecord = { + id: taskIdCounter++, + name: args[0], + schedule_type: args[1], + schedule_value: args[2], + command_type: args[3], + command_config: args[4], + status: 'active', + last_run_at: null, + next_run_at: args[5], + created_at: args[6], + updated_at: args[7] + } + tasks.push(task) + return { lastInsertRowid: task.id, changes: 1 } + }) + } + } + if (sql.includes('SELECT * FROM scheduled_tasks WHERE id = ?')) { + return { + get: vi.fn((id: number) => tasks.find(t => t.id === id)) + } + } + if (sql.includes('SELECT * FROM scheduled_tasks ORDER BY')) { + return { + all: vi.fn(() => tasks) + } + } + if (sql.includes('UPDATE scheduled_tasks SET status')) { + return { + run: vi.fn((status, updatedAt, id) => { + const task = tasks.find(t => t.id === id) + if (task) { + task.status = status + task.updated_at = updatedAt + } + return { changes: task ? 
1 : 0 } + }) + } + } + if (sql.includes('UPDATE scheduled_tasks SET')) { + return { + run: vi.fn((...args) => { + const id = args[args.length - 1] + const task = tasks.find(t => t.id === id) + if (task) { + return { changes: 1 } + } + return { changes: 0 } + }) + } + } + if (sql.includes('DELETE FROM scheduled_tasks')) { + return { + run: vi.fn((id: number) => { + const index = tasks.findIndex(t => t.id === id) + if (index !== -1) { + tasks.splice(index, 1) + return { changes: 1 } + } + return { changes: 0 } + }) + } + } + return { + all: vi.fn(() => []), + get: vi.fn(() => null), + run: vi.fn(() => ({ changes: 0 })) + } + }) + } as unknown as Database + + app = new Hono() + app.route('/api/tasks', createTaskRoutes(mockDb)) + }) + + afterEach(async () => { + await schedulerService.shutdown() + }) + + describe('GET /api/tasks', () => { + it('should return empty array when no tasks exist', async () => { + const res = await app.request('/api/tasks') + + expect(res.status).toBe(200) + const body = await res.json() + expect(body).toEqual([]) + }) + + it('should return all tasks', async () => { + tasks.push({ + id: 1, + name: 'Task 1', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: JSON.stringify({ skillName: 'test' }), + status: 'active', + last_run_at: null, + next_run_at: Date.now() + 60000, + created_at: Date.now(), + updated_at: Date.now() + }) + + const res = await app.request('/api/tasks') + + expect(res.status).toBe(200) + const body = await res.json() + expect(body).toHaveLength(1) + expect(body[0].name).toBe('Task 1') + }) + }) + + describe('GET /api/tasks/:id', () => { + it('should return a task by ID', async () => { + tasks.push({ + id: 1, + name: 'Find Me', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: JSON.stringify({ skillName: 'test' }), + status: 'active', + last_run_at: null, + next_run_at: Date.now() + 60000, + created_at: Date.now(), + updated_at: 
Date.now() + }) + + const res = await app.request('/api/tasks/1') + + expect(res.status).toBe(200) + const body = await res.json() + expect(body.name).toBe('Find Me') + }) + + it('should return 404 for non-existent task', async () => { + const res = await app.request('/api/tasks/999') + + expect(res.status).toBe(404) + const body = await res.json() + expect(body.error).toBe('Task not found') + }) + + it('should return 400 for invalid ID', async () => { + const res = await app.request('/api/tasks/invalid') + + expect(res.status).toBe(400) + const body = await res.json() + expect(body.error).toBe('Invalid task ID') + }) + }) + + describe('POST /api/tasks', () => { + it('should create a new task', async () => { + const res = await app.request('/api/tasks', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: 'New Task', + schedule_type: 'cron', + schedule_value: '0 9 * * *', + command_type: 'skill', + command_config: { skillName: 'recruiter-response' } + }) + }) + + expect(res.status).toBe(201) + const body = await res.json() + expect(body.name).toBe('New Task') + expect(body.schedule_value).toBe('0 9 * * *') + expect(body.command_type).toBe('skill') + }) + + it('should validate required fields', async () => { + const res = await app.request('/api/tasks', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: 'Missing Fields' + }) + }) + + expect(res.status).toBe(400) + const body = await res.json() + expect(body.error).toBe('Invalid task data') + expect(body.details).toBeDefined() + }) + + it('should validate command_type enum', async () => { + const res = await app.request('/api/tasks', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: 'Invalid Type', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'invalid-type', + command_config: {} + }) + }) + + expect(res.status).toBe(400) + const body = 
await res.json() + expect(body.error).toBe('Invalid task data') + }) + + it('should reject invalid cron expression', async () => { + const res = await app.request('/api/tasks', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: 'Invalid Cron', + schedule_type: 'cron', + schedule_value: 'invalid-cron', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + }) + + expect(res.status).toBe(400) + const body = await res.json() + expect(body.error).toContain('Invalid cron') + }) + + it('should validate name length', async () => { + const res = await app.request('/api/tasks', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: '', // empty name + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + }) + + expect(res.status).toBe(400) + }) + + it('should accept all valid command types', async () => { + for (const cmdType of ['skill', 'opencode-run', 'script']) { + const res = await app.request('/api/tasks', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: `${cmdType} Task`, + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: cmdType, + command_config: cmdType === 'script' + ? 
{ command: 'echo' } + : { skillName: 'test' } + }) + }) + + expect(res.status).toBe(201) + } + }) + }) + + describe('PUT /api/tasks/:id', () => { + beforeEach(() => { + tasks.push({ + id: 1, + name: 'Original Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: JSON.stringify({ skillName: 'original' }), + status: 'active', + last_run_at: null, + next_run_at: Date.now() + 60000, + created_at: Date.now(), + updated_at: Date.now() + }) + }) + + it('should update task name', async () => { + const res = await app.request('/api/tasks/1', { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: 'Updated Name' + }) + }) + + expect(res.status).toBe(200) + }) + + it('should return 404 for non-existent task', async () => { + const res = await app.request('/api/tasks/999', { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: 'Updated' + }) + }) + + expect(res.status).toBe(404) + }) + + it('should reject invalid cron on update', async () => { + const res = await app.request('/api/tasks/1', { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + schedule_value: 'invalid-cron' + }) + }) + + expect(res.status).toBe(400) + const body = await res.json() + expect(body.error).toContain('Invalid cron') + }) + + it('should return 400 for invalid ID', async () => { + const res = await app.request('/api/tasks/invalid', { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: 'Updated' + }) + }) + + expect(res.status).toBe(400) + }) + }) + + describe('DELETE /api/tasks/:id', () => { + beforeEach(() => { + tasks.push({ + id: 1, + name: 'Delete Me', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: JSON.stringify({ skillName: 'test' }), + status: 'active', + last_run_at: null, + next_run_at: Date.now() + 60000, 
+ created_at: Date.now(), + updated_at: Date.now() + }) + }) + + it('should delete a task', async () => { + const res = await app.request('/api/tasks/1', { + method: 'DELETE' + }) + + expect(res.status).toBe(200) + const body = await res.json() + expect(body.success).toBe(true) + }) + + it('should return 404 for non-existent task', async () => { + const res = await app.request('/api/tasks/999', { + method: 'DELETE' + }) + + expect(res.status).toBe(404) + }) + + it('should return 400 for invalid ID', async () => { + const res = await app.request('/api/tasks/invalid', { + method: 'DELETE' + }) + + expect(res.status).toBe(400) + }) + }) + + describe('POST /api/tasks/:id/toggle', () => { + beforeEach(() => { + tasks.push({ + id: 1, + name: 'Toggle Me', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: JSON.stringify({ skillName: 'test' }), + status: 'active', + last_run_at: null, + next_run_at: Date.now() + 60000, + created_at: Date.now(), + updated_at: Date.now() + }) + }) + + it('should toggle task status from active to paused', async () => { + const res = await app.request('/api/tasks/1/toggle', { + method: 'POST' + }) + + expect(res.status).toBe(200) + const body = await res.json() + expect(body.status).toBe('paused') + }) + + it('should return 404 for non-existent task', async () => { + const res = await app.request('/api/tasks/999/toggle', { + method: 'POST' + }) + + expect(res.status).toBe(404) + }) + + it('should return 400 for invalid ID', async () => { + const res = await app.request('/api/tasks/invalid/toggle', { + method: 'POST' + }) + + expect(res.status).toBe(400) + }) + }) + + describe('POST /api/tasks/:id/run', () => { + beforeEach(() => { + tasks.push({ + id: 1, + name: 'Run Me', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'script', + command_config: JSON.stringify({ command: 'echo', args: ['hello'] }), + status: 'active', + last_run_at: null, + next_run_at: Date.now() + 
60000, + created_at: Date.now(), + updated_at: Date.now() + }) + }) + + it('should run task immediately', async () => { + const res = await app.request('/api/tasks/1/run', { + method: 'POST' + }) + + expect(res.status).toBe(200) + const body = await res.json() + expect(body.success).toBe(true) + expect(body.output).toBeDefined() + expect(body.duration).toBeDefined() + }) + + it('should return result with error for non-existent task', async () => { + const res = await app.request('/api/tasks/999/run', { + method: 'POST' + }) + + expect(res.status).toBe(200) + const body = await res.json() + expect(body.success).toBe(false) + expect(body.error).toBe('Task not found') + }) + + it('should return 400 for invalid ID', async () => { + const res = await app.request('/api/tasks/invalid/run', { + method: 'POST' + }) + + expect(res.status).toBe(400) + }) + }) + + describe('Command Config Validation', () => { + it('should accept skill command config', async () => { + const res = await app.request('/api/tasks', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: 'Skill Task', + schedule_type: 'cron', + schedule_value: '0 9 * * *', + command_type: 'skill', + command_config: { + skillName: 'recruiter-response', + args: ['--verbose'] + } + }) + }) + + expect(res.status).toBe(201) + }) + + it('should accept opencode-run command config', async () => { + const res = await app.request('/api/tasks', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: 'OpenCode Task', + schedule_type: 'cron', + schedule_value: '0 9 * * *', + command_type: 'opencode-run', + command_config: { + message: 'Check for updates', + workdir: '/path/to/repo' + } + }) + }) + + expect(res.status).toBe(201) + }) + + it('should accept script command config', async () => { + const res = await app.request('/api/tasks', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + 
name: 'Script Task', + schedule_type: 'cron', + schedule_value: '0 9 * * *', + command_type: 'script', + command_config: { + command: '/usr/bin/python3', + args: ['script.py', '--arg1'], + workdir: '/path/to/scripts' + } + }) + }) + + expect(res.status).toBe(201) + }) + }) +}) diff --git a/backend/test/routes/tts.test.ts b/backend/test/routes/tts.test.ts new file mode 100644 index 00000000..ca75d17d --- /dev/null +++ b/backend/test/routes/tts.test.ts @@ -0,0 +1,244 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest' + +vi.mock('fs/promises', async () => { + return { + mkdir: vi.fn(), + readFile: vi.fn(), + writeFile: vi.fn(), + readdir: vi.fn(), + stat: vi.fn(), + unlink: vi.fn(), + } +}) + +vi.mock('bun:sqlite', () => ({ + Database: vi.fn(), +})) +vi.mock('../../src/services/settings', () => ({ + SettingsService: vi.fn(), +})) +vi.mock('../../src/utils/logger', async () => { + return { + logger: { + info: vi.fn(), + error: vi.fn(), + warn: vi.fn(), + debug: vi.fn(), + }, + } +}) + +vi.mock('../../src/services/chatterbox', () => ({ + chatterboxServerManager: { + getStatus: vi.fn().mockReturnValue({ + running: false, + port: 5553, + host: '127.0.0.1', + device: null, + cudaAvailable: false, + error: null + }), + start: vi.fn(), + stop: vi.fn(), + synthesize: vi.fn(), + getVoices: vi.fn().mockResolvedValue({ voices: ['default'], voiceDetails: [] }), + uploadVoice: vi.fn(), + deleteVoice: vi.fn(), + } +})) + +import * as fsPromises from 'fs/promises' + +import { createTTSRoutes, cleanupExpiredCache, getCacheStats, generateCacheKey, ensureCacheDir, getCachedAudio, getCacheSize, cleanupOldestFiles } from '../../src/routes/tts' + +const mockMkdir = fsPromises.mkdir as ReturnType +const mockReadFile = fsPromises.readFile as ReturnType +const mockReaddir = fsPromises.readdir as ReturnType +const mockStat = fsPromises.stat as ReturnType +const mockUnlink = fsPromises.unlink as ReturnType + +describe('TTS Routes', () => { + let mockDb: any + let ttsApp: any + 
let mockSettingsService: any + + beforeEach(() => { + vi.clearAllMocks() + + mockDb = {} as any + mockSettingsService = { + getSettings: vi.fn().mockReturnValue({ + preferences: { + tts: { + enabled: true, + apiKey: 'test-key', + endpoint: 'https://api.openai.com/v1/audio/speech', + voice: 'alloy', + model: 'tts-1', + speed: 1.0, + }, + }, + }), + } + + ttsApp = createTTSRoutes(mockDb) + }) + + describe('generateCacheKey', () => { + it('should generate consistent cache keys for identical inputs', () => { + const text = 'Hello world' + const voice = 'alloy' + const model = 'tts-1' + const speed = 1.0 + + const key1 = generateCacheKey(text, voice, model, speed) + const key2 = generateCacheKey(text, voice, model, speed) + + expect(key1).toBe(key2) + expect(key1).toMatch(/^[a-f0-9]{64}$/) + }) + + it('should generate different cache keys for different inputs', () => { + const key1 = generateCacheKey('Hello', 'alloy', 'tts-1', 1.0) + const key2 = generateCacheKey('World', 'alloy', 'tts-1', 1.0) + + expect(key1).not.toBe(key2) + }) + }) + + describe('ensureCacheDir', () => { + it('should create cache directory when it does not exist', async () => { + mockMkdir.mockResolvedValue(undefined) + + await ensureCacheDir() + + expect(mockMkdir).toHaveBeenCalledWith( + expect.stringContaining('cache/tts'), + { recursive: true } + ) + }) + }) + + describe('getCachedAudio', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + +beforeEach(() => { + vi.useFakeTimers() + }) + + it('should return cached audio when file exists and is not expired', async () => { + const cacheKey = 'test-key' + const audioBuffer = Buffer.from('audio data') + + mockStat.mockResolvedValue({ + mtimeMs: Date.now() - 1000, // 1 second ago (not expired) + size: 1024, + }) + mockReadFile.mockResolvedValue(audioBuffer) + + const result = await getCachedAudio(cacheKey) + + expect(result).toBe(audioBuffer) + expect(mockReadFile).toHaveBeenCalledWith( + expect.stringContaining(`${cacheKey}.mp3`) + ) + }) + + 
it('should return null when cached file has expired', async () => { + const cacheKey = 'test-key' + + mockStat.mockResolvedValue({ + mtimeMs: Date.now() - 25 * 60 * 60 * 1000, // 25 hours ago (expired) + size: 1024, + }) + mockUnlink.mockResolvedValue(undefined) + + const result = await getCachedAudio(cacheKey) + + expect(result).toBeNull() + expect(mockUnlink).toHaveBeenCalledWith( + expect.stringContaining(`${cacheKey}.mp3`) + ) + }) + + it('should return null when cached file does not exist', async () => { + const cacheKey = 'nonexistent-key' + + mockStat.mockRejectedValue(new Error('File not found')) + + const result = await getCachedAudio(cacheKey) + + expect(result).toBeNull() + }) + }) + + describe('getCacheSize', () => { + it('should calculate correct cache size', async () => { + mockReaddir.mockResolvedValue(['file1.mp3', 'file2.mp3', 'readme.txt']) + mockStat + .mockResolvedValueOnce({ size: 1024, mtimeMs: Date.now() }) + .mockResolvedValueOnce({ size: 2048, mtimeMs: Date.now() }) + + const size = await getCacheSize() + + expect(size).toBe(3072) // 1024 + 2048 + }) + + it('should handle cache directory errors gracefully', async () => { + mockReaddir.mockRejectedValue(new Error('Permission denied')) + + const size = await getCacheSize() + + expect(size).toBe(0) + }) + }) + + describe('cleanupMethods', () => { + it('should remove oldest files when cache size limit exceeded', async () => { + mockReaddir.mockResolvedValue(['file1.mp3', 'file2.mp3', 'file3.mp3']) + mockStat + .mockResolvedValueOnce({ size: 1024, mtimeMs: 1000 }) + .mockResolvedValueOnce({ size: 2048, mtimeMs: 2000 }) + .mockResolvedValueOnce({ size: 1536, mtimeMs: 3000 }) + mockUnlink.mockResolvedValue(undefined) + + await cleanupOldestFiles(1500) // Need 1500 bytes freed + + expect(mockUnlink).toHaveBeenCalledWith( + expect.stringContaining('file1.mp3') + ) + }) + + it('should return cache statistics for files', async () => { + const currentTime = Date.now() + 
mockReaddir.mockResolvedValue(['file1.mp3', 'file2.mp3']) + mockStat + .mockResolvedValueOnce({ size: 1024, mtimeMs: currentTime }) + .mockResolvedValueOnce({ size: 2048, mtimeMs: currentTime }) + + const stats = await getCacheStats() + + expect(stats.count).toBe(2) + expect(stats.sizeBytes).toBe(3072) + expect(stats.sizeMB).toBeCloseTo(0, 1) + }) + + it('should cleanup expired cache files', async () => { + mockReaddir.mockResolvedValue(['file1.mp3', 'file2.mp3', 'expired.mp3']) + mockStat + .mockResolvedValueOnce({ size: 1024, mtimeMs: Date.now() }) + .mockResolvedValueOnce({ size: 2048, mtimeMs: Date.now() }) + .mockResolvedValueOnce({ size: 1536, mtimeMs: Date.now() - 25 * 60 * 60 * 1000 }) + mockUnlink.mockResolvedValue(undefined) + + const cleaned = await cleanupExpiredCache() + + expect(cleaned).toBe(1) + expect(mockUnlink).toHaveBeenCalledWith( + expect.stringContaining('expired.mp3') + ) + }) + }) +}) \ No newline at end of file diff --git a/backend/test/services/repo-auth-env.test.ts b/backend/test/services/repo-auth-env.test.ts new file mode 100644 index 00000000..ae91a622 --- /dev/null +++ b/backend/test/services/repo-auth-env.test.ts @@ -0,0 +1,79 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest' +import { getReposPath } from '@opencode-manager/shared/config/env' +import { createGitHubGitEnv } from '../../src/utils/git-auth' + +const executeCommand = vi.fn() +const ensureDirectoryExists = vi.fn() + +const getRepoByUrlAndBranch = vi.fn() +const createRepo = vi.fn() +const updateRepoStatus = vi.fn() +const deleteRepo = vi.fn() + +vi.mock('../../src/utils/process', () => ({ + executeCommand, +})) + +vi.mock('../../src/services/file-operations', () => ({ + ensureDirectoryExists, +})) + +vi.mock('../../src/db/queries', () => ({ + getRepoByUrlAndBranch, + createRepo, + updateRepoStatus, + deleteRepo, +})) + +vi.mock('../../src/services/settings', () => ({ + SettingsService: vi.fn().mockImplementation(() => ({ + getSettings: () => ({ + 
preferences: { + gitToken: 'ghp_test_token', + }, + updatedAt: Date.now(), + }), + })), +})) + +describe('repoService.cloneRepo auth env', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('passes github extraheader env to git clone', async () => { + const { cloneRepo } = await import('../../src/services/repo') + + const database = {} as any + const repoUrl = 'https://github.com/acme/forge.git' + + getRepoByUrlAndBranch.mockReturnValue(null) + createRepo.mockReturnValue({ + id: 1, + repoUrl, + localPath: 'forge', + defaultBranch: 'main', + cloneStatus: 'cloning', + clonedAt: Date.now(), + }) + + executeCommand + .mockResolvedValueOnce('missing') + .mockResolvedValueOnce('missing') + .mockResolvedValueOnce('') + + await cloneRepo(database, repoUrl) + + const expectedEnv = createGitHubGitEnv('ghp_test_token') + + expect(executeCommand).toHaveBeenNthCalledWith( + 3, + ['git', 'clone', 'https://github.com/acme/forge', 'forge'], + { cwd: getReposPath(), env: expectedEnv, silent: undefined } + ) + + expect(ensureDirectoryExists).toHaveBeenCalledWith(getReposPath()) + expect(updateRepoStatus).toHaveBeenCalledWith(database, 1, 'ready') + expect(deleteRepo).not.toHaveBeenCalled() + }) +}) diff --git a/backend/test/services/scheduler.test.ts b/backend/test/services/scheduler.test.ts new file mode 100644 index 00000000..b4d1e904 --- /dev/null +++ b/backend/test/services/scheduler.test.ts @@ -0,0 +1,774 @@ +import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest' +import type { Database } from 'bun:sqlite' + +vi.mock('bun:sqlite', () => ({ + Database: vi.fn() +})) + +vi.mock('node-cron', () => ({ + default: { + validate: vi.fn((expr: string) => { + if (expr === 'invalid') return false + if (expr.includes('* * * * *')) return true + if (expr.includes('0 9 * * *')) return true + if (expr.includes('*/5 * * * *')) return true + return true + }), + schedule: vi.fn((_, callback, options) => { + const mockJob = { + stop: vi.fn(), + start: vi.fn(), + 
callback + } + return mockJob + }) + } +})) + +vi.mock('child_process', () => ({ + spawn: vi.fn((cmd, args, options) => { + const mockProcess = { + stdout: { + on: vi.fn((event, callback) => { + if (event === 'data') { + callback(Buffer.from('test output')) + } + }) + }, + stderr: { + on: vi.fn((event, callback) => {}) + }, + on: vi.fn((event, callback) => { + if (event === 'close') { + setTimeout(() => callback(0), 10) + } + }), + kill: vi.fn() + } + return mockProcess + }) +})) + +vi.mock('../../src/utils/logger', () => ({ + logger: { + info: vi.fn(), + error: vi.fn(), + warn: vi.fn(), + debug: vi.fn() + } +})) + +import { schedulerService, type CreateTaskInput, type ScheduledTaskRecord } from '../../src/services/scheduler' +import cron from 'node-cron' +import { spawn } from 'child_process' + +describe('SchedulerService', () => { + let mockDb: any + let insertedTasks: ScheduledTaskRecord[] + let taskIdCounter: number + + beforeEach(() => { + vi.clearAllMocks() + insertedTasks = [] + taskIdCounter = 1 + + mockDb = { + prepare: vi.fn((sql: string) => { + if (sql.includes('INSERT INTO scheduled_tasks')) { + return { + run: vi.fn((...args) => { + const task: ScheduledTaskRecord = { + id: taskIdCounter++, + name: args[0], + schedule_type: args[1], + schedule_value: args[2], + command_type: args[3], + command_config: args[4], + status: 'active', + last_run_at: null, + next_run_at: args[5], + created_at: args[6], + updated_at: args[7] + } + insertedTasks.push(task) + return { lastInsertRowid: task.id, changes: 1 } + }) + } + } + if (sql.includes('SELECT * FROM scheduled_tasks WHERE id = ?')) { + return { + get: vi.fn((id: number) => insertedTasks.find(t => t.id === id)) + } + } + if (sql.includes('SELECT * FROM scheduled_tasks ORDER BY')) { + return { + all: vi.fn(() => insertedTasks) + } + } + if (sql.includes('UPDATE scheduled_tasks SET status')) { + return { + run: vi.fn((status, updatedAt, id) => { + const task = insertedTasks.find(t => t.id === id) + if (task) { + 
task.status = status + task.updated_at = updatedAt + } + return { changes: task ? 1 : 0 } + }) + } + } + if (sql.includes('UPDATE scheduled_tasks SET')) { + return { + run: vi.fn((...args) => { + const id = args[args.length - 1] + const task = insertedTasks.find(t => t.id === id) + if (task) { + return { changes: 1 } + } + return { changes: 0 } + }) + } + } + if (sql.includes('DELETE FROM scheduled_tasks')) { + return { + run: vi.fn((id: number) => { + const index = insertedTasks.findIndex(t => t.id === id) + if (index !== -1) { + insertedTasks.splice(index, 1) + return { changes: 1 } + } + return { changes: 0 } + }) + } + } + return { + all: vi.fn(() => []), + get: vi.fn(() => null), + run: vi.fn(() => ({ changes: 0 })) + } + }) + } as unknown as Database + + schedulerService.setDatabase(mockDb) + }) + + afterEach(async () => { + await schedulerService.shutdown() + }) + + describe('setDatabase', () => { + it('should set the database instance', () => { + expect(() => schedulerService.setDatabase(mockDb)).not.toThrow() + }) + }) + + describe('createTask', () => { + it('should create a new task with valid cron expression', () => { + const input: CreateTaskInput = { + name: 'Test Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'recruiter-response' } + } + + const task = schedulerService.createTask(input) + + expect(task).toBeDefined() + expect(task.name).toBe('Test Task') + expect(task.schedule_type).toBe('cron') + expect(task.schedule_value).toBe('* * * * *') + expect(task.command_type).toBe('skill') + expect(task.status).toBe('active') + expect(JSON.parse(task.command_config)).toEqual({ skillName: 'recruiter-response' }) + }) + + it('should reject invalid cron expression', () => { + const input: CreateTaskInput = { + name: 'Invalid Task', + schedule_type: 'cron', + schedule_value: 'invalid', + command_type: 'skill', + command_config: { skillName: 'test' } + } + + expect(() => 
schedulerService.createTask(input)).toThrow('Invalid cron expression') + }) + + it('should schedule the cron job after creation', () => { + const input: CreateTaskInput = { + name: 'Scheduled Task', + schedule_type: 'cron', + schedule_value: '*/5 * * * *', + command_type: 'script', + command_config: { command: 'echo hello' } + } + + schedulerService.createTask(input) + + expect(cron.schedule).toHaveBeenCalledWith( + '*/5 * * * *', + expect.any(Function), + expect.objectContaining({ scheduled: true }) + ) + }) + + it('should store command_config as JSON string', () => { + const input: CreateTaskInput = { + name: 'Config Task', + schedule_type: 'cron', + schedule_value: '0 9 * * *', + command_type: 'opencode-run', + command_config: { + message: 'Check for new PRs', + workdir: '/path/to/repo' + } + } + + const task = schedulerService.createTask(input) + const config = JSON.parse(task.command_config) + + expect(config.message).toBe('Check for new PRs') + expect(config.workdir).toBe('/path/to/repo') + }) + + it('should set next_run_at timestamp', () => { + const input: CreateTaskInput = { + name: 'Future Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + } + + const task = schedulerService.createTask(input) + + expect(task.next_run_at).toBeDefined() + expect(task.next_run_at).toBeGreaterThan(Date.now() - 60000) + }) + }) + + describe('getAllTasks', () => { + it('should return all tasks from database', () => { + schedulerService.createTask({ + name: 'Task 1', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'skill1' } + }) + + schedulerService.createTask({ + name: 'Task 2', + schedule_type: 'cron', + schedule_value: '0 9 * * *', + command_type: 'script', + command_config: { command: 'test' } + }) + + const tasks = schedulerService.getAllTasks() + + expect(tasks).toHaveLength(2) + expect(tasks[0].name).toBe('Task 1') + 
expect(tasks[1].name).toBe('Task 2') + }) + + it('should return empty array when no database is set', () => { + const freshService = new (schedulerService.constructor as any)() + const tasks = freshService.getAllTasks() + expect(tasks).toEqual([]) + }) + }) + + describe('getTask', () => { + it('should retrieve a task by ID', () => { + const created = schedulerService.createTask({ + name: 'Find Me', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + + const found = schedulerService.getTask(created.id) + + expect(found).toBeDefined() + expect(found?.name).toBe('Find Me') + }) + + it('should return null for non-existent task', () => { + const found = schedulerService.getTask(999) + expect(found).toBeFalsy() + }) + }) + + describe('updateTask', () => { + it('should update task name', () => { + const created = schedulerService.createTask({ + name: 'Original Name', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + + const updated = schedulerService.updateTask(created.id, { name: 'New Name' }) + + expect(updated).toBeDefined() + }) + + it('should reject invalid cron expression on update', () => { + const created = schedulerService.createTask({ + name: 'Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + + expect(() => schedulerService.updateTask(created.id, { schedule_value: 'invalid' })) + .toThrow('Invalid cron expression') + }) + + it('should return null for non-existent task', () => { + const result = schedulerService.updateTask(999, { name: 'Updated' }) + expect(result).toBeNull() + }) + + it('should reschedule cron job when schedule changes', () => { + const created = schedulerService.createTask({ + name: 'Reschedule Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { 
skillName: 'test' } + }) + + vi.clearAllMocks() + + schedulerService.updateTask(created.id, { schedule_value: '0 9 * * *' }) + + expect(cron.schedule).toHaveBeenCalled() + }) + }) + + describe('deleteTask', () => { + it('should delete an existing task', () => { + const created = schedulerService.createTask({ + name: 'Delete Me', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + + const deleted = schedulerService.deleteTask(created.id) + + expect(deleted).toBe(true) + expect(schedulerService.getTask(created.id)).toBeFalsy() + }) + + it('should return false for non-existent task', () => { + const deleted = schedulerService.deleteTask(999) + expect(deleted).toBe(false) + }) + + it('should stop the cron job when deleting', () => { + const created = schedulerService.createTask({ + name: 'Stop Job Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + + schedulerService.deleteTask(created.id) + }) + }) + + describe('toggleTask', () => { + it('should pause an active task', () => { + const created = schedulerService.createTask({ + name: 'Toggle Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + + expect(created.status).toBe('active') + + const toggled = schedulerService.toggleTask(created.id) + + expect(toggled).toBeDefined() + expect(toggled?.status).toBe('paused') + }) + + it('should resume a paused task', () => { + const created = schedulerService.createTask({ + name: 'Resume Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + + schedulerService.toggleTask(created.id) + const resumed = schedulerService.toggleTask(created.id) + + expect(resumed?.status).toBe('active') + }) + + it('should return null for non-existent task', () => { + const result = 
schedulerService.toggleTask(999) + expect(result).toBeNull() + }) + }) + + describe('runTaskNow', () => { + it('should execute a skill command', async () => { + const created = schedulerService.createTask({ + name: 'Run Skill', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'recruiter-response' } + }) + + const result = await schedulerService.runTaskNow(created.id) + + expect(result.success).toBe(true) + expect(spawn).toHaveBeenCalledWith( + 'opencode', + ['run', '--command', '/recruiter-response'], + expect.any(Object) + ) + }) + + it('should execute an opencode-run command', async () => { + const created = schedulerService.createTask({ + name: 'Run OpenCode', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'opencode-run', + command_config: { message: 'Check for updates' } + }) + + const result = await schedulerService.runTaskNow(created.id) + + expect(result.success).toBe(true) + expect(spawn).toHaveBeenCalledWith( + 'opencode', + ['run', 'Check for updates'], + expect.any(Object) + ) + }) + + it('should execute a script command', async () => { + const created = schedulerService.createTask({ + name: 'Run Script', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'script', + command_config: { command: 'echo', args: ['hello', 'world'] } + }) + + const result = await schedulerService.runTaskNow(created.id) + + expect(result.success).toBe(true) + expect(spawn).toHaveBeenCalledWith( + 'echo', + ['hello', 'world'], + expect.any(Object) + ) + }) + + it('should return error for non-existent task', async () => { + const result = await schedulerService.runTaskNow(999) + + expect(result.success).toBe(false) + expect(result.error).toBe('Task not found') + }) + + it('should include duration in result', async () => { + const created = schedulerService.createTask({ + name: 'Duration Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 
'script', + command_config: { command: 'echo', args: ['test'] } + }) + + const result = await schedulerService.runTaskNow(created.id) + + expect(result.duration).toBeDefined() + expect(result.duration).toBeGreaterThanOrEqual(0) + }) + }) + + describe('initialize', () => { + it('should load and schedule all active tasks', async () => { + schedulerService.createTask({ + name: 'Active Task 1', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test1' } + }) + + schedulerService.createTask({ + name: 'Active Task 2', + schedule_type: 'cron', + schedule_value: '0 9 * * *', + command_type: 'skill', + command_config: { skillName: 'test2' } + }) + + vi.clearAllMocks() + + await schedulerService.initialize() + + expect(cron.schedule).toHaveBeenCalledTimes(2) + }) + + it('should throw if database not set', async () => { + const freshService = new (schedulerService.constructor as any)() + + await expect(freshService.initialize()).rejects.toThrow('Database not set') + }) + }) + + describe('shutdown', () => { + it('should stop all scheduled jobs', async () => { + schedulerService.createTask({ + name: 'Shutdown Task 1', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test1' } + }) + + schedulerService.createTask({ + name: 'Shutdown Task 2', + schedule_type: 'cron', + schedule_value: '0 9 * * *', + command_type: 'skill', + command_config: { skillName: 'test2' } + }) + + await schedulerService.shutdown() + }) + }) + + describe('Command Types', () => { + describe('skill command', () => { + it('should format skill command correctly', async () => { + const created = schedulerService.createTask({ + name: 'Skill With Args', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { + skillName: 'recruiter-response', + args: ['--verbose', '--dry-run'] + } + }) + + await schedulerService.runTaskNow(created.id) + + 
expect(spawn).toHaveBeenCalledWith( + 'opencode', + ['run', '--command', '/recruiter-response --verbose --dry-run'], + expect.any(Object) + ) + }) + }) + + describe('opencode-run command', () => { + it('should handle command with workdir', async () => { + const created = schedulerService.createTask({ + name: 'OpenCode With Workdir', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'opencode-run', + command_config: { + command: '/check-health', + workdir: '/path/to/repo' + } + }) + + await schedulerService.runTaskNow(created.id) + + expect(spawn).toHaveBeenCalledWith( + 'opencode', + ['run', '--command', '/check-health'], + expect.objectContaining({ cwd: '/path/to/repo' }) + ) + }) + }) + + describe('script command', () => { + it('should require command for script type', async () => { + const created = schedulerService.createTask({ + name: 'Script No Command', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'script', + command_config: { args: ['test'] } + }) + + const result = await schedulerService.runTaskNow(created.id) + + expect(result.success).toBe(false) + expect(result.error).toBe('Command is required for script type') + }) + }) + }) + + describe('Database Persistence', () => { + it('should store tasks with correct timestamps', () => { + const before = Date.now() + + const task = schedulerService.createTask({ + name: 'Timestamp Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + + const after = Date.now() + + expect(task.created_at).toBeGreaterThanOrEqual(before) + expect(task.created_at).toBeLessThanOrEqual(after) + expect(task.updated_at).toBeGreaterThanOrEqual(before) + expect(task.updated_at).toBeLessThanOrEqual(after) + }) + + it('should update updated_at on toggle', () => { + const task = schedulerService.createTask({ + name: 'Update Timestamp Task', + schedule_type: 'cron', + schedule_value: '* * * * *', + command_type: 
'skill', + command_config: { skillName: 'test' } + }) + + const originalUpdatedAt = task.updated_at + + schedulerService.toggleTask(task.id) + + const toggled = schedulerService.getTask(task.id) + expect(toggled?.updated_at).toBeGreaterThanOrEqual(originalUpdatedAt) + }) + }) +}) + +describe('SchedulerService - Cron Trigger Simulation', () => { + let mockDb: any + let cronCallbacks: Map<string, () => Promise<void>> + let insertedTasks: ScheduledTaskRecord[] + let taskIdCounter: number + + beforeEach(() => { + vi.clearAllMocks() + cronCallbacks = new Map() + insertedTasks = [] + taskIdCounter = 1 + + vi.mocked(cron.schedule).mockImplementation((expression: string, callback: any, options: any) => { + cronCallbacks.set(expression, callback) + return { + stop: vi.fn(), + start: vi.fn() + } as any + }) + + mockDb = { + prepare: vi.fn((sql: string) => { + if (sql.includes('INSERT INTO scheduled_tasks')) { + return { + run: vi.fn((...args) => { + const task: ScheduledTaskRecord = { + id: taskIdCounter++, + name: args[0], + schedule_type: args[1], + schedule_value: args[2], + command_type: args[3], + command_config: args[4], + status: 'active', + last_run_at: null, + next_run_at: args[5], + created_at: args[6], + updated_at: args[7] + } + insertedTasks.push(task) + return { lastInsertRowid: task.id, changes: 1 } + }) + } + } + if (sql.includes('SELECT * FROM scheduled_tasks WHERE id = ?')) { + return { + get: vi.fn((id: number) => insertedTasks.find(t => t.id === id)) + } + } + if (sql.includes('SELECT * FROM scheduled_tasks ORDER BY')) { + return { + all: vi.fn(() => insertedTasks) + } + } + if (sql.includes('UPDATE scheduled_tasks')) { + return { + run: vi.fn((...args) => { + return { changes: 1 } + }) + } + } + return { + all: vi.fn(() => []), + get: vi.fn(() => null), + run: vi.fn(() => ({ changes: 0 })) + } + }) + } as unknown as Database + + schedulerService.setDatabase(mockDb) + }) + + afterEach(async () => { + await schedulerService.shutdown() + }) + + it('should register callback when task is 
created', () => { + schedulerService.createTask({ + name: 'Callback Test', + schedule_type: 'cron', + schedule_value: '*/5 * * * *', + command_type: 'skill', + command_config: { skillName: 'test' } + }) + + expect(cronCallbacks.has('*/5 * * * *')).toBe(true) + }) + + it('should execute task when cron triggers', async () => { + schedulerService.createTask({ + name: 'Trigger Test', + schedule_type: 'cron', + schedule_value: '*/10 * * * *', + command_type: 'script', + command_config: { command: 'echo', args: ['triggered'] } + }) + + const callback = cronCallbacks.get('*/10 * * * *') + expect(callback).toBeDefined() + + await callback!() + + expect(spawn).toHaveBeenCalledWith( + 'echo', + ['triggered'], + expect.any(Object) + ) + }) +}) diff --git a/backend/test/services/terminal.test.ts b/backend/test/services/terminal.test.ts new file mode 100644 index 00000000..2a56e55c --- /dev/null +++ b/backend/test/services/terminal.test.ts @@ -0,0 +1,74 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest' +import { terminalService } from '../../src/services/terminal' + +// Mock node-pty +vi.mock('node-pty', () => ({ + spawn: vi.fn(() => ({ + onData: vi.fn(), + onExit: vi.fn(), + write: vi.fn(), + resize: vi.fn(), + kill: vi.fn(), + pid: 12345 + })) +})) + +describe('Terminal Service', () => { + const sessionId = 'test-session-id' + + afterEach(() => { + terminalService.destroyAllSessions() + vi.clearAllMocks() + }) + + it('should create a new session', () => { + const session = terminalService.createSession(sessionId) + expect(session).toBeDefined() + expect(session.id).toBe(sessionId) + expect(terminalService.getSession(sessionId)).toBeDefined() + }) + + it('should reuse existing session', () => { + const session1 = terminalService.createSession(sessionId) + const session2 = terminalService.createSession(sessionId) + expect(session1).toBe(session2) + }) + + it('should list sessions', () => { + terminalService.createSession('session-1') + 
terminalService.createSession('session-2') + + const sessions = terminalService.listSessions() + expect(sessions).toHaveLength(2) + expect(sessions.map(s => s.id)).toContain('session-1') + expect(sessions.map(s => s.id)).toContain('session-2') + }) + + it('should destroy session', () => { + terminalService.createSession(sessionId) + const result = terminalService.destroySession(sessionId) + + expect(result).toBe(true) + expect(terminalService.getSession(sessionId)).toBeUndefined() + }) + + it('should handle resizing session', () => { + const session = terminalService.createSession(sessionId) + const writeSpy = vi.spyOn(session.process.stdin, 'write') + + const result = terminalService.resizeSession(sessionId, 100, 40) + + expect(result).toBe(true) + expect(writeSpy).toHaveBeenCalledWith(JSON.stringify({ type: 'resize', cols: 100, rows: 40 }) + '\n') + }) + + it('should handle writing to session', () => { + const session = terminalService.createSession(sessionId) + const writeSpy = vi.spyOn(session.process.stdin, 'write') + + const result = terminalService.writeToSession(sessionId, 'ls -la') + + expect(result).toBe(true) + expect(writeSpy).toHaveBeenCalledWith(JSON.stringify({ type: 'input', data: 'ls -la' }) + '\n') + }) +}) diff --git a/backend/test/setup.ts b/backend/test/setup.ts index d352a68f..ad59756f 100644 --- a/backend/test/setup.ts +++ b/backend/test/setup.ts @@ -1,5 +1,30 @@ import { beforeAll, afterAll, vi } from 'vitest' +vi.mock('bun', () => { + const createMockReadableStream = () => ({ + getReader: () => ({ + read: vi.fn().mockResolvedValue({ done: true, value: undefined }), + }), + }) + + const createMockWritableStream = () => ({ + write: vi.fn(), + flush: vi.fn(), + }) + + return { + spawn: vi.fn(() => ({ + stdin: createMockWritableStream(), + stdout: createMockReadableStream(), + stderr: createMockReadableStream(), + pid: 12345, + kill: vi.fn(), + exited: Promise.resolve(0), + })), + Subprocess: class {}, + } +}) + beforeAll(() => { 
vi.stubEnv('NODE_ENV', 'test') vi.stubEnv('PORT', '3001') diff --git a/backend/vitest.config.ts b/backend/vitest.config.ts index 3364bdfd..f10b6601 100644 --- a/backend/vitest.config.ts +++ b/backend/vitest.config.ts @@ -1,27 +1,36 @@ -import { defineConfig } from 'vitest/config' +import { defineConfig } from "vitest/config"; export default defineConfig({ test: { globals: true, - environment: 'node', + environment: "node", coverage: { - provider: 'v8', - reporter: ['text', 'json', 'html'], + provider: "v8", + reporter: ["text", "json", "html"], exclude: [ - 'node_modules/', - 'test/', - '**/*.test.ts', - '**/*.spec.ts', - '**/types/**', - 'vitest.config.ts' + "node_modules/", + "test/", + "**/*.test.ts", + "**/*.spec.ts", + "**/types/**", + "vitest.config.ts", ], thresholds: { lines: 80, functions: 80, branches: 80, - statements: 80 - } + statements: 80, + }, }, - setupFiles: ['./test/setup.ts'] - } -}) + setupFiles: ["./test/setup.ts"], + include: ["test/**/*.test.ts"], + exclude: ["test/integration/**"], + testTimeout: 10000, + hookTimeout: 10000, + server: { + deps: { + external: ["bun"], + }, + }, + }, +}); diff --git a/backend/vitest.integration.config.ts b/backend/vitest.integration.config.ts new file mode 100644 index 00000000..15a3b8eb --- /dev/null +++ b/backend/vitest.integration.config.ts @@ -0,0 +1,11 @@ +import { defineConfig } from 'vitest/config' + +export default defineConfig({ + test: { + globals: true, + environment: 'node', + include: ['test/integration/**/*.test.ts'], + testTimeout: 120000, + hookTimeout: 60000 + } +}) diff --git a/bin/cli.ts b/bin/cli.ts new file mode 100755 index 00000000..33860563 --- /dev/null +++ b/bin/cli.ts @@ -0,0 +1,792 @@ +#!/usr/bin/env bun +import { spawn, execSync, spawnSync } from 'child_process' +import { createInterface } from 'readline' +import * as path from 'path' +import * as fs from 'fs' +import * as os from 'os' +import * as crypto from 'crypto' + +const VERSION = '0.5.4' +const DEFAULT_PORT = 5001 +const 
DEFAULT_OPENCODE_PORT = 5551 +const MANAGED_PORTS = [5001, 5002, 5003, 5173, 5174, 5175, 5176, 5552, 5553, 5554] + +const CONFIG_DIR = path.join(os.homedir(), '.local', 'run', 'opencode-manager') +const ENDPOINTS_FILE = path.join(CONFIG_DIR, 'endpoints.json') +const AUTH_FILE = path.join(CONFIG_DIR, 'auth.json') + +interface AuthConfig { + username: string + password: string +} + +interface Endpoint { + type: 'local' | 'tunnel' + url: string + timestamp: string +} + +interface EndpointsConfig { + endpoints: Endpoint[] +} + +function ensureConfigDir(): void { + if (!fs.existsSync(CONFIG_DIR)) { + fs.mkdirSync(CONFIG_DIR, { recursive: true, mode: 0o700 }) + } +} + +function getOrCreateAuth(): AuthConfig { + ensureConfigDir() + + if (fs.existsSync(AUTH_FILE)) { + try { + const content = fs.readFileSync(AUTH_FILE, 'utf8') + const auth = JSON.parse(content) as AuthConfig + if (auth.username && auth.password) { + return auth + } + } catch {} + } + + const auth: AuthConfig = { + username: 'admin', + password: crypto.randomBytes(16).toString('base64url'), + } + + fs.writeFileSync(AUTH_FILE, JSON.stringify(auth, null, 2), { mode: 0o600 }) + console.log(`\n🔐 Generated new credentials:`) + console.log(` Username: ${auth.username}`) + console.log(` Password: ${auth.password}`) + console.log(` Saved to: ${AUTH_FILE}\n`) + + return auth +} + +function updateEndpoints(localUrl: string, tunnelUrl?: string): void { + ensureConfigDir() + + let config: EndpointsConfig = { endpoints: [] } + + if (fs.existsSync(ENDPOINTS_FILE)) { + try { + config = JSON.parse(fs.readFileSync(ENDPOINTS_FILE, 'utf8')) + } catch {} + } + + const timestamp = new Date().toISOString() + + config.endpoints = config.endpoints.filter(e => e.url !== localUrl) + config.endpoints.push({ type: 'local', url: localUrl, timestamp }) + + if (tunnelUrl) { + config.endpoints = config.endpoints.filter(e => e.type !== 'tunnel' || e.url === tunnelUrl) + config.endpoints.push({ type: 'tunnel', url: tunnelUrl, timestamp }) + 
} + + fs.writeFileSync(ENDPOINTS_FILE, JSON.stringify(config, null, 2), { mode: 0o600 }) +} + +function getPackageDir(): string { + return path.resolve(import.meta.dir, '..') +} + +function printHelp(): void { + console.log(` +opencode-manager v${VERSION} + +Usage: opencode-manager [options] + +Commands: + start Start the OpenCode Manager server + install-service Install as a user service (macOS/Linux) + uninstall-service Remove the user service + status Show service status + logs Show service logs + help Show this help message + +Start Options: + --client, -c Connect to existing opencode server + --tunnel, -t Start a Cloudflare tunnel for public access + --port, -p Backend API port (default: 5001) + --no-auth Disable basic authentication + +Service Options: + --no-tunnel Disable Cloudflare tunnel (tunnel enabled by default) + +Note: Service runs in client mode by default, connecting to existing +opencode CLI sessions. If no opencode server is found on port 5551, +one will be started automatically. 
+ +Examples: + opencode-manager start + opencode-manager start --tunnel + opencode-manager install-service + opencode-manager install-service --no-tunnel + opencode-manager status +`) +} + +async function checkServerHealth(port: number): Promise<boolean> { + try { + const response = await fetch(`http://127.0.0.1:${port}/doc`, { + signal: AbortSignal.timeout(2000) + }) + return response.status > 0 + } catch { + return false + } +} + +function isPortInUse(port: number): boolean { + try { + const output = execSync(`lsof -ti:${port}`, { encoding: 'utf8' }).trim() + return output.length > 0 + } catch { + return false + } +} + +async function waitForBackendHealth(port: number, auth: AuthConfig, maxSeconds: number): Promise<boolean> { + const headers: Record<string, string> = { + 'Authorization': `Basic ${Buffer.from(`${auth.username}:${auth.password}`).toString('base64')}` + } + + for (let i = 0; i < maxSeconds; i++) { + try { + const response = await fetch(`http://127.0.0.1:${port}/api/health`, { + signal: AbortSignal.timeout(2000), + headers + }) + if (response.ok) { + const data = await response.json() as { status?: string } + if (data.status === 'healthy') { + return true + } + } + } catch {} + if (i > 0 && i % 10 === 0) { + console.log(` Still waiting... 
(${i}s)`) + } + await new Promise(r => setTimeout(r, 1000)) + } + return false +} + +function killProcessOnPort(port: number): boolean { + try { + const output = execSync(`lsof -ti:${port}`, { encoding: 'utf8' }).trim() + if (!output) return false + + const pids = output.split('\n').filter(Boolean).map(p => parseInt(p)) + for (const pid of pids) { + try { + process.kill(pid, 'SIGTERM') + console.log(` Killed orphaned process on port ${port} (PID ${pid})`) + } catch { + try { + process.kill(pid, 'SIGKILL') + } catch {} + } + } + return pids.length > 0 + } catch { + return false + } +} + +function cleanupManagedPorts(): void { + let cleaned = false + for (const port of MANAGED_PORTS) { + if (killProcessOnPort(port)) { + cleaned = true + } + } + if (cleaned) { + execSync('sleep 1') + } +} + +async function startOpenCodeServer(port: number): Promise<boolean> { + if (isPortInUse(port)) { + console.log(`\n⚠️ Port ${port} is already in use`) + for (let i = 0; i < 10; i++) { + if (await checkServerHealth(port)) { + console.log(`✓ Existing server on port ${port} is responding`) + return true + } + await new Promise(r => setTimeout(r, 500)) + } + console.log(` Server on port ${port} not responding, killing and restarting...`) + killProcessOnPort(port) + await new Promise(r => setTimeout(r, 1000)) + } + + console.log(`\n🚀 Starting opencode server on port ${port}...`) + + const serverProcess = spawn('opencode', ['serve', '--port', port.toString(), '--hostname', '127.0.0.1'], { + stdio: ['ignore', 'pipe', 'pipe'], + detached: true, + }) + + serverProcess.unref() + + for (let i = 0; i < 30; i++) { + if (await checkServerHealth(port)) { + console.log(`✓ OpenCode server started on port ${port}`) + return true + } + await new Promise(r => setTimeout(r, 500)) + } + + console.error('❌ Failed to start opencode server') + return false +} + +async function startCloudflaredTunnel(localPort: number, auth: AuthConfig): Promise<{ process: ReturnType<typeof spawn>, url: string | null, urlWithAuth: string | null }> 
{ + console.log('\n🌐 Starting Cloudflare tunnel...') + + const tunnelProcess = spawn('cloudflared', ['tunnel', '--no-autoupdate', '--protocol', 'http2', '--url', `http://localhost:${localPort}`], { + stdio: ['ignore', 'pipe', 'pipe'], + }) + + let tunnelUrl: string | null = null + + const urlPromise = new Promise<string | null>((resolve) => { + const timeout = setTimeout(() => resolve(null), 30000) + + const handleOutput = (data: Buffer) => { + const output = data.toString() + const urlMatch = output.match(/https:\/\/[a-z0-9-]+\.trycloudflare\.com/) + if (urlMatch && !tunnelUrl) { + tunnelUrl = urlMatch[0] + clearTimeout(timeout) + resolve(tunnelUrl) + } + } + + tunnelProcess.stdout?.on('data', handleOutput) + tunnelProcess.stderr?.on('data', handleOutput) + }) + + tunnelProcess.on('error', (err) => { + console.error('\n❌ Failed to start cloudflared:', err.message) + console.log('Install cloudflared: brew install cloudflared') + }) + + const url = await urlPromise + + let urlWithAuth: string | null = null + if (url && auth.username && auth.password) { + try { + const parsedUrl = new URL(url) + parsedUrl.username = auth.username + parsedUrl.password = auth.password + urlWithAuth = parsedUrl.toString().replace(/\/$/, '') + } catch {} + } + + if (url) { + console.log(`✓ Tunnel URL: ${url}`) + if (urlWithAuth) { + console.log(`✓ With auth: ${urlWithAuth}`) + } + console.log() + } + + return { process: tunnelProcess, url, urlWithAuth } +} + +async function startBackend(port: number, auth: AuthConfig, opencodePort?: number): Promise<ReturnType<typeof spawn>> { + const packageDir = getPackageDir() + + const env: Record<string, string> = { + ...process.env as Record<string, string>, + PORT: port.toString(), + NODE_ENV: 'production', + AUTH_USERNAME: auth.username, + AUTH_PASSWORD: auth.password, + } + + if (opencodePort) { + env.OPENCODE_SERVER_PORT = opencodePort.toString() + env.OPENCODE_CLIENT_MODE = 'true' + } + + console.log(`\n🚀 Starting backend on port ${port}...`) + if (opencodePort) { + console.log(` Connecting to opencode server on 
port ${opencodePort}`) + } + + const backendProcess = spawn('bun', [path.join(packageDir, 'backend', 'dist', 'index.js')], { + cwd: packageDir, + stdio: 'inherit', + env, + }) + + return backendProcess +} + +async function commandStart(args: string[]): Promise<void> { + const hasClient = args.includes('--client') || args.includes('-c') + const hasTunnel = args.includes('--tunnel') || args.includes('-t') + const noAuth = args.includes('--no-auth') + const portIdx = args.findIndex(a => a === '--port' || a === '-p') + const port = portIdx >= 0 ? parseInt(args[portIdx + 1]) || DEFAULT_PORT : DEFAULT_PORT + + console.log('\n╔═══════════════════════════════════════╗') + console.log('║ OpenCode Manager - Start ║') + console.log('╚═══════════════════════════════════════╝') + + const auth = noAuth ? { username: '', password: '' } : getOrCreateAuth() + + let opencodePort: number | undefined + + if (hasClient) { + console.log('\n🔍 Checking for opencode server on port', DEFAULT_OPENCODE_PORT, '...') + + if (await checkServerHealth(DEFAULT_OPENCODE_PORT)) { + console.log(`✓ Found existing server`) + opencodePort = DEFAULT_OPENCODE_PORT + } else { + console.log(' No server found, starting one...') + if (!await startOpenCodeServer(DEFAULT_OPENCODE_PORT)) { + process.exit(1) + } + opencodePort = DEFAULT_OPENCODE_PORT + } + } + + console.log('\n🧹 Cleaning up orphaned processes...') + cleanupManagedPorts() + + const processes: ReturnType<typeof spawn>[] = [] + const backendProcess = await startBackend(port, auth, opencodePort) + processes.push(backendProcess) + + console.log('\n⏳ Waiting for backend to be ready...') + const backendReady = await waitForBackendHealth(port, auth, 120) + if (!backendReady) { + console.error('❌ Backend failed to start within timeout') + process.exit(1) + } + console.log('✓ Backend is ready!') + + const localUrl = `http://localhost:${port}` + let tunnelUrl: string | undefined + let tunnelUrlWithAuth: string | undefined + + if (hasTunnel) { + const tunnel = await 
startCloudflaredTunnel(port, auth) + processes.push(tunnel.process) + tunnelUrl = tunnel.url || undefined + tunnelUrlWithAuth = tunnel.urlWithAuth || undefined + + if (tunnel.url) { + console.log('\n═══════════════════════════════════════') + console.log(`🌍 Public URL: ${tunnel.url}`) + if (tunnel.urlWithAuth) { + console.log(`🔐 With auth: ${tunnel.urlWithAuth}`) + } + console.log('═══════════════════════════════════════\n') + } + } + + updateEndpoints(localUrl, tunnelUrlWithAuth || tunnelUrl) + + console.log('\n📍 Endpoints:') + console.log(` Local: ${localUrl}`) + if (tunnelUrlWithAuth) { + console.log(` Tunnel: ${tunnelUrlWithAuth}`) + } else if (tunnelUrl) { + console.log(` Tunnel: ${tunnelUrl}`) + } + if (!noAuth) { + console.log(`\n🔐 Auth: ${auth.username}:${auth.password}`) + } + console.log('\nPress Ctrl+C to stop\n') + + const cleanup = () => { + console.log('\n\n🛑 Shutting down...') + processes.forEach(p => { + try { p.kill('SIGTERM') } catch {} + }) + process.exit(0) + } + + process.on('SIGINT', cleanup) + process.on('SIGTERM', cleanup) + + await Promise.race(processes.map(p => new Promise((_, reject) => { + p.on('exit', (code) => { + if (code !== 0 && code !== null) { + reject(new Error(`Process exited with code ${code}`)) + } + }) + }))) +} + +function getServiceName(): string { + return 'opencode-manager' +} + +function getMacOSPlistPath(): string { + return path.join(os.homedir(), 'Library', 'LaunchAgents', 'com.opencode-manager.plist') +} + +function getLinuxServicePath(): string { + return path.join(os.homedir(), '.config', 'systemd', 'user', 'opencode-manager.service') +} + +function getFullPath(): string { + const basePaths = [ + '/usr/local/bin', + '/usr/bin', + '/bin', + '/usr/sbin', + '/sbin', + '/opt/homebrew/bin', + '/opt/homebrew/sbin', + ] + + try { + const bunPath = execSync('which bun', { encoding: 'utf8' }).trim() + basePaths.push(path.dirname(bunPath)) + } catch {} + + try { + const opencodePath = execSync('which opencode', { encoding: 
'utf8' }).trim() + basePaths.push(path.dirname(opencodePath)) + } catch {} + + try { + const cloudflaredPath = execSync('which cloudflared', { encoding: 'utf8' }).trim() + basePaths.push(path.dirname(cloudflaredPath)) + } catch {} + + try { + const pythonPath = execSync('which python3', { encoding: 'utf8' }).trim() + basePaths.push(path.dirname(pythonPath)) + } catch {} + + const nvmDir = path.join(os.homedir(), '.nvm', 'versions', 'node') + if (fs.existsSync(nvmDir)) { + try { + const versions = fs.readdirSync(nvmDir) + for (const v of versions) { + basePaths.push(path.join(nvmDir, v, 'bin')) + } + } catch {} + } + + const userLocalBin = path.join(os.homedir(), '.local', 'bin') + if (fs.existsSync(userLocalBin)) { + basePaths.push(userLocalBin) + } + + const uniquePaths = [...new Set(basePaths)] + return uniquePaths.join(':') +} + +function commandInstallService(args: string[]): void { + const noTunnel = args.includes('--no-tunnel') + const hasTunnel = !noTunnel + const platform = os.platform() + + console.log('\n🔧 Installing OpenCode Manager as a user service...\n') + + const auth = getOrCreateAuth() + + const packageDir = getPackageDir() + const cliPath = path.join(packageDir, 'bin', 'cli.ts') + const bunPath = execSync('which bun', { encoding: 'utf8' }).trim() + const fullPath = getFullPath() + + const startArgs = ['start', '--client'] + if (hasTunnel) startArgs.push('--tunnel') + + if (platform === 'darwin') { + const plistPath = getMacOSPlistPath() + const plistDir = path.dirname(plistPath) + + if (!fs.existsSync(plistDir)) { + fs.mkdirSync(plistDir, { recursive: true }) + } + + const plistContent = ` + + + + Label + com.opencode-manager + ProgramArguments + + ${bunPath} + ${cliPath} +${startArgs.map(a => ` ${a}`).join('\n')} + + RunAtLoad + + KeepAlive + + WorkingDirectory + ${packageDir} + StandardOutPath + ${path.join(CONFIG_DIR, 'stdout.log')} + StandardErrorPath + ${path.join(CONFIG_DIR, 'stderr.log')} + EnvironmentVariables + + PATH + ${fullPath} + HOME 
+ ${os.homedir()} + AUTH_USERNAME + ${auth.username} + AUTH_PASSWORD + ${auth.password}${process.env.GEMINI_API_KEY ? ` + GEMINI_API_KEY + ${process.env.GEMINI_API_KEY}` : ''}${process.env.OPENAI_API_KEY ? ` + OPENAI_API_KEY + ${process.env.OPENAI_API_KEY}` : ''}${process.env.ANTHROPIC_API_KEY ? ` + ANTHROPIC_API_KEY + ${process.env.ANTHROPIC_API_KEY}` : ''}${process.env.XAI_API_KEY ? ` + XAI_API_KEY + ${process.env.XAI_API_KEY}` : ''} + + +` + + fs.writeFileSync(plistPath, plistContent) + console.log(`✓ Created plist: ${plistPath}`) + + try { + execSync(`launchctl unload "${plistPath}" 2>/dev/null`, { encoding: 'utf8' }) + } catch {} + + execSync(`launchctl load "${plistPath}"`, { encoding: 'utf8' }) + console.log('✓ Service loaded and started') + + } else if (platform === 'linux') { + const servicePath = getLinuxServicePath() + const serviceDir = path.dirname(servicePath) + + if (!fs.existsSync(serviceDir)) { + fs.mkdirSync(serviceDir, { recursive: true }) + } + + const serviceContent = `[Unit] +Description=OpenCode Manager +After=network.target + +[Service] +Type=simple +ExecStart=${bunPath} ${cliPath} ${startArgs.join(' ')} +WorkingDirectory=${packageDir} +Restart=always +RestartSec=10 +Environment="PATH=${fullPath}" +Environment="HOME=${os.homedir()}" +Environment="AUTH_USERNAME=${auth.username}" +Environment="AUTH_PASSWORD=${auth.password}" + +[Install] +WantedBy=default.target +` + + fs.writeFileSync(servicePath, serviceContent) + console.log(`✓ Created service file: ${servicePath}`) + + execSync('systemctl --user daemon-reload', { encoding: 'utf8' }) + execSync('systemctl --user enable opencode-manager', { encoding: 'utf8' }) + execSync('systemctl --user start opencode-manager', { encoding: 'utf8' }) + console.log('✓ Service enabled and started') + + } else { + console.error(`❌ Unsupported platform: ${platform}`) + console.log(' Supported: macOS (darwin), Linux') + process.exit(1) + } + + console.log('\n✅ Installation complete!') + console.log(`\n🔐 
Credentials saved to: ${AUTH_FILE}`) + console.log(` Username: ${auth.username}`) + console.log(` Password: ${auth.password}`) + console.log(`\n📍 Endpoints will be written to: ${ENDPOINTS_FILE}`) + console.log('\nCommands:') + console.log(' opencode-manager status - Check service status') + console.log(' opencode-manager logs - View logs') +} + +function commandUninstallService(): void { + const platform = os.platform() + + console.log('\n🔧 Uninstalling OpenCode Manager service...\n') + + if (platform === 'darwin') { + const plistPath = getMacOSPlistPath() + + try { + execSync(`launchctl unload "${plistPath}"`, { encoding: 'utf8' }) + console.log('✓ Service stopped') + } catch {} + + if (fs.existsSync(plistPath)) { + fs.unlinkSync(plistPath) + console.log(`✓ Removed plist: ${plistPath}`) + } + + } else if (platform === 'linux') { + try { + execSync('systemctl --user stop opencode-manager', { encoding: 'utf8' }) + console.log('✓ Service stopped') + } catch {} + + try { + execSync('systemctl --user disable opencode-manager', { encoding: 'utf8' }) + console.log('✓ Service disabled') + } catch {} + + const servicePath = getLinuxServicePath() + if (fs.existsSync(servicePath)) { + fs.unlinkSync(servicePath) + console.log(`✓ Removed service file: ${servicePath}`) + } + + execSync('systemctl --user daemon-reload', { encoding: 'utf8' }) + + } else { + console.error(`❌ Unsupported platform: ${platform}`) + process.exit(1) + } + + console.log('\n✅ Uninstallation complete!') +} + +function commandStatus(): void { + const platform = os.platform() + + console.log('\n📊 OpenCode Manager Service Status\n') + + if (platform === 'darwin') { + const plistPath = getMacOSPlistPath() + + if (!fs.existsSync(plistPath)) { + console.log('❌ Service not installed') + return + } + + try { + const result = execSync('launchctl list | grep com.opencode-manager', { encoding: 'utf8' }) + const parts = result.trim().split(/\s+/) + const pid = parts[0] + const exitCode = parts[1] + + if (pid !== '-') 
{ + console.log(`✅ Running (PID: ${pid})`) + } else if (exitCode === '0') { + console.log('⏸️ Stopped (last exit: success)') + } else { + console.log(`❌ Stopped (last exit code: ${exitCode})`) + } + } catch { + console.log('⏸️ Not running') + } + + } else if (platform === 'linux') { + try { + const result = execSync('systemctl --user status opencode-manager --no-pager', { encoding: 'utf8' }) + console.log(result) + } catch (err: unknown) { + const error = err as { stdout?: string } + if (error.stdout) { + console.log(error.stdout) + } else { + console.log('❌ Service not installed or not running') + } + } + + } else { + console.log(`❌ Unsupported platform: ${platform}`) + } + + if (fs.existsSync(ENDPOINTS_FILE)) { + try { + const config = JSON.parse(fs.readFileSync(ENDPOINTS_FILE, 'utf8')) as EndpointsConfig + console.log('\n📍 Last known endpoints:') + for (const ep of config.endpoints) { + console.log(` ${ep.type}: ${ep.url}`) + } + } catch {} + } +} + +function commandLogs(): void { + const platform = os.platform() + + if (platform === 'darwin') { + const stdoutLog = path.join(CONFIG_DIR, 'stdout.log') + const stderrLog = path.join(CONFIG_DIR, 'stderr.log') + + console.log('\n📜 OpenCode Manager Logs\n') + + if (fs.existsSync(stdoutLog)) { + console.log('=== stdout ===') + const result = spawnSync('tail', ['-50', stdoutLog], { stdio: 'inherit' }) + } + + if (fs.existsSync(stderrLog)) { + console.log('\n=== stderr ===') + const result = spawnSync('tail', ['-50', stderrLog], { stdio: 'inherit' }) + } + + } else if (platform === 'linux') { + spawnSync('journalctl', ['--user', '-u', 'opencode-manager', '-f', '--no-pager', '-n', '100'], { stdio: 'inherit' }) + + } else { + console.log(`❌ Unsupported platform: ${platform}`) + } +} + +async function main(): Promise { + const args = process.argv.slice(2) + const command = args[0] || 'help' + const commandArgs = args.slice(1) + + switch (command) { + case 'start': + await commandStart(commandArgs) + break + case 
'install-service': + commandInstallService(commandArgs) + break + case 'uninstall-service': + commandUninstallService() + break + case 'status': + commandStatus() + break + case 'logs': + commandLogs() + break + case 'help': + case '--help': + case '-h': + printHelp() + break + case 'version': + case '--version': + case '-v': + console.log(`opencode-manager v${VERSION}`) + break + default: + console.error(`Unknown command: ${command}`) + printHelp() + process.exit(1) + } +} + +main().catch(err => { + console.error('Fatal error:', err) + process.exit(1) +}) diff --git a/bun.lock b/bun.lock deleted file mode 100644 index 93bc095e..00000000 --- a/bun.lock +++ /dev/null @@ -1,1268 +0,0 @@ -{ - "lockfileVersion": 1, - "workspaces": { - "": { - "name": "opencode-webui", - "dependencies": { - "class-variance-authority": "^0.7.1", - "dotenv": "^17.2.3", - }, - "devDependencies": { - "concurrently": "^9.1.0", - }, - }, - "backend": { - "name": "backend", - "dependencies": { - "@hono/node-server": "^1.19.5", - "@opencode-webui/shared": "file:../shared", - "dotenv": "^17.2.3", - "hono": "^4.10.1", - "zod": "^4.1.12", - }, - "devDependencies": { - "@types/better-sqlite3": "^7.6.13", - "@types/bun": "latest", - "@vitest/ui": "^3.2.4", - "vitest": "^3.2.4", - }, - "peerDependencies": { - "typescript": "^5", - }, - }, - "frontend": { - "name": "frontend", - "dependencies": { - "@hookform/resolvers": "^5.2.2", - "@monaco-editor/react": "^4.7.0", - "@radix-ui/react-dialog": "^1.1.15", - "@radix-ui/react-dropdown-menu": "^2.1.16", - "@radix-ui/react-label": "^2.1.7", - "@radix-ui/react-select": "^2.2.6", - "@radix-ui/react-slot": "^1.2.3", - "@radix-ui/react-switch": "^1.2.6", - "@radix-ui/react-tabs": "^1.1.13", - "@tailwindcss/vite": "^4.1.14", - "@tanstack/react-query": "^5.90.5", - "axios": "^1.12.2", - "class-variance-authority": "^0.7.1", - "clsx": "^2.1.1", - "cmdk": "^1.1.1", - "date-fns": "^4.1.0", - "diff": "^8.0.2", - "highlight.js": "^11.11.1", - "lucide-react": 
"^0.546.0", - "react": "^19.1.1", - "react-dom": "^19.1.1", - "react-hook-form": "^7.65.0", - "react-markdown": "^10.1.0", - "react-router-dom": "^7.9.4", - "rehype-highlight": "^7.0.2", - "rehype-raw": "^7.0.0", - "remark-gfm": "^4.0.1", - "sonner": "^2.0.7", - "tailwind-merge": "^3.3.1", - "zod": "^4.1.12", - "zustand": "^5.0.8", - }, - "devDependencies": { - "@eslint/js": "^9.36.0", - "@types/node": "^24.8.1", - "@types/react": "^19.2.2", - "@types/react-dom": "^19.2.2", - "@vitejs/plugin-react": "^5.0.4", - "autoprefixer": "^10.4.21", - "eslint": "^9.36.0", - "eslint-plugin-react-hooks": "^5.2.0", - "eslint-plugin-react-refresh": "^0.4.22", - "globals": "^16.4.0", - "openapi-typescript": "^7.10.1", - "postcss": "^8.5.6", - "tailwindcss": "^4.1.14", - "typescript": "~5.9.3", - "typescript-eslint": "^8.45.0", - "vite": "^7.1.7", - }, - }, - "shared": { - "name": "@opencode-webui/shared", - "version": "1.0.0", - "dependencies": { - "dotenv": "^17.2.3", - "zod": "^4.1.12", - }, - "devDependencies": { - "typescript": "^5", - }, - }, - }, - "packages": { - "@babel/code-frame": ["@babel/code-frame@7.27.1", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg=="], - - "@babel/compat-data": ["@babel/compat-data@7.28.5", "", {}, "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA=="], - - "@babel/core": ["@babel/core@7.28.5", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.5", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.28.3", "@babel/helpers": "^7.28.4", "@babel/parser": "^7.28.5", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.5", "@babel/types": "^7.28.5", "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": 
"^2.2.3", "semver": "^6.3.1" } }, "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw=="], - - "@babel/generator": ["@babel/generator@7.28.5", "", { "dependencies": { "@babel/parser": "^7.28.5", "@babel/types": "^7.28.5", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" } }, "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ=="], - - "@babel/helper-compilation-targets": ["@babel/helper-compilation-targets@7.27.2", "", { "dependencies": { "@babel/compat-data": "^7.27.2", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" } }, "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ=="], - - "@babel/helper-globals": ["@babel/helper-globals@7.28.0", "", {}, "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw=="], - - "@babel/helper-module-imports": ["@babel/helper-module-imports@7.27.1", "", { "dependencies": { "@babel/traverse": "^7.27.1", "@babel/types": "^7.27.1" } }, "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w=="], - - "@babel/helper-module-transforms": ["@babel/helper-module-transforms@7.28.3", "", { "dependencies": { "@babel/helper-module-imports": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1", "@babel/traverse": "^7.28.3" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw=="], - - "@babel/helper-plugin-utils": ["@babel/helper-plugin-utils@7.27.1", "", {}, "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw=="], - - "@babel/helper-string-parser": ["@babel/helper-string-parser@7.27.1", "", {}, 
"sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA=="], - - "@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.28.5", "", {}, "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q=="], - - "@babel/helper-validator-option": ["@babel/helper-validator-option@7.27.1", "", {}, "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg=="], - - "@babel/helpers": ["@babel/helpers@7.28.4", "", { "dependencies": { "@babel/template": "^7.27.2", "@babel/types": "^7.28.4" } }, "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w=="], - - "@babel/parser": ["@babel/parser@7.28.5", "", { "dependencies": { "@babel/types": "^7.28.5" }, "bin": "./bin/babel-parser.js" }, "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ=="], - - "@babel/plugin-transform-react-jsx-self": ["@babel/plugin-transform-react-jsx-self@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw=="], - - "@babel/plugin-transform-react-jsx-source": ["@babel/plugin-transform-react-jsx-source@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw=="], - - "@babel/template": ["@babel/template@7.27.2", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/parser": "^7.27.2", "@babel/types": "^7.27.1" } }, "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw=="], - - "@babel/traverse": ["@babel/traverse@7.28.5", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.5", 
"@babel/helper-globals": "^7.28.0", "@babel/parser": "^7.28.5", "@babel/template": "^7.27.2", "@babel/types": "^7.28.5", "debug": "^4.3.1" } }, "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ=="], - - "@babel/types": ["@babel/types@7.28.5", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.28.5" } }, "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA=="], - - "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA=="], - - "@esbuild/android-arm": ["@esbuild/android-arm@0.25.12", "", { "os": "android", "cpu": "arm" }, "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg=="], - - "@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.12", "", { "os": "android", "cpu": "arm64" }, "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg=="], - - "@esbuild/android-x64": ["@esbuild/android-x64@0.25.12", "", { "os": "android", "cpu": "x64" }, "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg=="], - - "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg=="], - - "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA=="], - - "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.12", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg=="], - - "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.12", "", { "os": "freebsd", 
"cpu": "x64" }, "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ=="], - - "@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.12", "", { "os": "linux", "cpu": "arm" }, "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw=="], - - "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ=="], - - "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.12", "", { "os": "linux", "cpu": "ia32" }, "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA=="], - - "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng=="], - - "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw=="], - - "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.12", "", { "os": "linux", "cpu": "ppc64" }, "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA=="], - - "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w=="], - - "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.12", "", { "os": "linux", "cpu": "s390x" }, "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg=="], - - "@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.12", "", { "os": "linux", "cpu": "x64" }, "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw=="], - - "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.12", "", { "os": "none", 
"cpu": "arm64" }, "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg=="], - - "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.12", "", { "os": "none", "cpu": "x64" }, "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ=="], - - "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.12", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A=="], - - "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.12", "", { "os": "openbsd", "cpu": "x64" }, "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw=="], - - "@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.25.12", "", { "os": "none", "cpu": "arm64" }, "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg=="], - - "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.12", "", { "os": "sunos", "cpu": "x64" }, "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w=="], - - "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg=="], - - "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.12", "", { "os": "win32", "cpu": "ia32" }, "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ=="], - - "@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.12", "", { "os": "win32", "cpu": "x64" }, "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA=="], - - "@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.9.0", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, 
"sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g=="], - - "@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.2", "", {}, "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew=="], - - "@eslint/config-array": ["@eslint/config-array@0.21.1", "", { "dependencies": { "@eslint/object-schema": "^2.1.7", "debug": "^4.3.1", "minimatch": "^3.1.2" } }, "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA=="], - - "@eslint/config-helpers": ["@eslint/config-helpers@0.4.2", "", { "dependencies": { "@eslint/core": "^0.17.0" } }, "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw=="], - - "@eslint/core": ["@eslint/core@0.17.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ=="], - - "@eslint/eslintrc": ["@eslint/eslintrc@3.3.1", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ=="], - - "@eslint/js": ["@eslint/js@9.39.1", "", {}, "sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw=="], - - "@eslint/object-schema": ["@eslint/object-schema@2.1.7", "", {}, "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA=="], - - "@eslint/plugin-kit": ["@eslint/plugin-kit@0.4.1", "", { "dependencies": { "@eslint/core": "^0.17.0", "levn": "^0.4.1" } }, "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA=="], - - "@floating-ui/core": ["@floating-ui/core@1.7.3", "", { "dependencies": { "@floating-ui/utils": 
"^0.2.10" } }, "sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w=="], - - "@floating-ui/dom": ["@floating-ui/dom@1.7.4", "", { "dependencies": { "@floating-ui/core": "^1.7.3", "@floating-ui/utils": "^0.2.10" } }, "sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA=="], - - "@floating-ui/react-dom": ["@floating-ui/react-dom@2.1.6", "", { "dependencies": { "@floating-ui/dom": "^1.7.4" }, "peerDependencies": { "react": ">=16.8.0", "react-dom": ">=16.8.0" } }, "sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw=="], - - "@floating-ui/utils": ["@floating-ui/utils@0.2.10", "", {}, "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ=="], - - "@hono/node-server": ["@hono/node-server@1.19.6", "", { "peerDependencies": { "hono": "^4" } }, "sha512-Shz/KjlIeAhfiuE93NDKVdZ7HdBVLQAfdbaXEaoAVO3ic9ibRSLGIQGkcBbFyuLr+7/1D5ZCINM8B+6IvXeMtw=="], - - "@hookform/resolvers": ["@hookform/resolvers@5.2.2", "", { "dependencies": { "@standard-schema/utils": "^0.3.0" }, "peerDependencies": { "react-hook-form": "^7.55.0" } }, "sha512-A/IxlMLShx3KjV/HeTcTfaMxdwy690+L/ZADoeaTltLx+CVuzkeVIPuybK3jrRfw7YZnmdKsVVHAlEPIAEUNlA=="], - - "@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="], - - "@humanfs/node": ["@humanfs/node@0.16.7", "", { "dependencies": { "@humanfs/core": "^0.19.1", "@humanwhocodes/retry": "^0.4.0" } }, "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ=="], - - "@humanwhocodes/module-importer": ["@humanwhocodes/module-importer@1.0.1", "", {}, "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="], - - "@humanwhocodes/retry": ["@humanwhocodes/retry@0.4.3", "", {}, 
"sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ=="], - - "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="], - - "@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="], - - "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="], - - "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="], - - "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="], - - "@monaco-editor/loader": ["@monaco-editor/loader@1.7.0", "", { "dependencies": { "state-local": "^1.0.6" } }, "sha512-gIwR1HrJrrx+vfyOhYmCZ0/JcWqG5kbfG7+d3f/C1LXk2EvzAbHSg3MQ5lO2sMlo9izoAZ04shohfKLVT6crVA=="], - - "@monaco-editor/react": ["@monaco-editor/react@4.7.0", "", { "dependencies": { "@monaco-editor/loader": "^1.5.0" }, "peerDependencies": { "monaco-editor": ">= 0.25.0 < 1", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-cyzXQCtO47ydzxpQtCGSQGOC8Gk3ZUeBXFAxD+CWXYFo5OqZyZUonFl0DwUlTyAfRHntBfw2p3w4s9R6oe1eCA=="], - - "@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", 
"run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="], - - "@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="], - - "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="], - - "@opencode-webui/shared": ["@opencode-webui/shared@workspace:shared"], - - "@polka/url": ["@polka/url@1.0.0-next.29", "", {}, "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww=="], - - "@radix-ui/number": ["@radix-ui/number@1.1.1", "", {}, "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g=="], - - "@radix-ui/primitive": ["@radix-ui/primitive@1.1.3", "", {}, "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg=="], - - "@radix-ui/react-arrow": ["@radix-ui/react-arrow@1.1.7", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w=="], - - "@radix-ui/react-collection": ["@radix-ui/react-collection@1.1.7", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, 
"optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw=="], - - "@radix-ui/react-compose-refs": ["@radix-ui/react-compose-refs@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg=="], - - "@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="], - - "@radix-ui/react-dialog": ["@radix-ui/react-dialog@1.1.15", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-controllable-state": "1.2.2", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw=="], - - "@radix-ui/react-direction": ["@radix-ui/react-direction@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, 
"sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw=="], - - "@radix-ui/react-dismissable-layer": ["@radix-ui/react-dismissable-layer@1.1.11", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-escape-keydown": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg=="], - - "@radix-ui/react-dropdown-menu": ["@radix-ui/react-dropdown-menu@2.1.16", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-menu": "2.1.16", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw=="], - - "@radix-ui/react-focus-guards": ["@radix-ui/react-focus-guards@1.1.3", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw=="], - - "@radix-ui/react-focus-scope": ["@radix-ui/react-focus-scope@1.1.7", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-primitive": "2.1.3", 
"@radix-ui/react-use-callback-ref": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw=="], - - "@radix-ui/react-id": ["@radix-ui/react-id@1.1.1", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg=="], - - "@radix-ui/react-label": ["@radix-ui/react-label@2.1.8", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.4" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-FmXs37I6hSBVDlO4y764TNz1rLgKwjJMQ0EGte6F3Cb3f4bIuHB/iLa/8I9VKkmOy+gNHq8rql3j686ACVV21A=="], - - "@radix-ui/react-menu": ["@radix-ui/react-menu@2.1.16", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-callback-ref": "1.1.1", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, 
"peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg=="], - - "@radix-ui/react-popper": ["@radix-ui/react-popper@1.2.8", "", { "dependencies": { "@floating-ui/react-dom": "^2.0.0", "@radix-ui/react-arrow": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-rect": "1.1.1", "@radix-ui/react-use-size": "1.1.1", "@radix-ui/rect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw=="], - - "@radix-ui/react-portal": ["@radix-ui/react-portal@1.1.9", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ=="], - - "@radix-ui/react-presence": ["@radix-ui/react-presence@1.1.5", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", 
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ=="], - - "@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="], - - "@radix-ui/react-roving-focus": ["@radix-ui/react-roving-focus@1.1.11", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA=="], - - "@radix-ui/react-select": ["@radix-ui/react-select@2.2.6", "", { "dependencies": { "@radix-ui/number": "1.1.1", "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", 
"@radix-ui/react-portal": "1.1.9", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-visually-hidden": "1.2.3", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ=="], - - "@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.4", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA=="], - - "@radix-ui/react-switch": ["@radix-ui/react-switch@1.2.6", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-use-size": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ=="], - - "@radix-ui/react-tabs": ["@radix-ui/react-tabs@1.1.13", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-context": "1.1.2", 
"@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-7xdcatg7/U+7+Udyoj2zodtI9H/IIopqo+YOIcZOq1nJwXWBZ9p8xiu5llXlekDbZkca79a/fozEYQXIA4sW6A=="], - - "@radix-ui/react-use-callback-ref": ["@radix-ui/react-use-callback-ref@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg=="], - - "@radix-ui/react-use-controllable-state": ["@radix-ui/react-use-controllable-state@1.2.2", "", { "dependencies": { "@radix-ui/react-use-effect-event": "0.0.2", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg=="], - - "@radix-ui/react-use-effect-event": ["@radix-ui/react-use-effect-event@0.0.2", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA=="], - - "@radix-ui/react-use-escape-keydown": ["@radix-ui/react-use-escape-keydown@1.1.1", "", { "dependencies": { "@radix-ui/react-use-callback-ref": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || 
^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g=="], - - "@radix-ui/react-use-layout-effect": ["@radix-ui/react-use-layout-effect@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ=="], - - "@radix-ui/react-use-previous": ["@radix-ui/react-use-previous@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ=="], - - "@radix-ui/react-use-rect": ["@radix-ui/react-use-rect@1.1.1", "", { "dependencies": { "@radix-ui/rect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w=="], - - "@radix-ui/react-use-size": ["@radix-ui/react-use-size@1.1.1", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ=="], - - "@radix-ui/react-visually-hidden": ["@radix-ui/react-visually-hidden@1.2.3", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, 
"sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug=="], - - "@radix-ui/rect": ["@radix-ui/rect@1.1.1", "", {}, "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw=="], - - "@redocly/ajv": ["@redocly/ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-EDtsGZS964mf9zAUXAl9Ew16eYbeyAFWhsPr0fX6oaJxgd8rApYlPBf0joyhnUHz88WxrigyFtTaqqzXNzPgqw=="], - - "@redocly/config": ["@redocly/config@0.22.2", "", {}, "sha512-roRDai8/zr2S9YfmzUfNhKjOF0NdcOIqF7bhf4MVC5UxpjIysDjyudvlAiVbpPHp3eDRWbdzUgtkK1a7YiDNyQ=="], - - "@redocly/openapi-core": ["@redocly/openapi-core@1.34.5", "", { "dependencies": { "@redocly/ajv": "^8.11.2", "@redocly/config": "^0.22.0", "colorette": "^1.2.0", "https-proxy-agent": "^7.0.5", "js-levenshtein": "^1.1.6", "js-yaml": "^4.1.0", "minimatch": "^5.0.1", "pluralize": "^8.0.0", "yaml-ast-parser": "0.0.43" } }, "sha512-0EbE8LRbkogtcCXU7liAyC00n9uNG9hJ+eMyHFdUsy9lB/WGqnEBgwjA9q2cyzAVcdTkQqTBBU1XePNnN3OijA=="], - - "@rolldown/pluginutils": ["@rolldown/pluginutils@1.0.0-beta.47", "", {}, "sha512-8QagwMH3kNCuzD8EWL8R2YPW5e4OrHNSAHRFDdmFqEwEaD/KcNKjVoumo+gP2vW5eKB2UPbM6vTYiGZX0ixLnw=="], - - "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.53.3", "", { "os": "android", "cpu": "arm" }, "sha512-mRSi+4cBjrRLoaal2PnqH82Wqyb+d3HsPUN/W+WslCXsZsyHa9ZeQQX/pQsZaVIWDkPcpV6jJ+3KLbTbgnwv8w=="], - - "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.53.3", "", { "os": "android", "cpu": "arm64" }, "sha512-CbDGaMpdE9sh7sCmTrTUyllhrg65t6SwhjlMJsLr+J8YjFuPmCEjbBSx4Z/e4SmDyH3aB5hGaJUP2ltV/vcs4w=="], - - "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.53.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Nr7SlQeqIBpOV6BHHGZgYBuSdanCXuw09hon14MGOLGmXAFYjx1wNvquVPmpZnl0tLjg25dEdr4IQ6GgyToCUA=="], - - "@rollup/rollup-darwin-x64": 
["@rollup/rollup-darwin-x64@4.53.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-DZ8N4CSNfl965CmPktJ8oBnfYr3F8dTTNBQkRlffnUarJ2ohudQD17sZBa097J8xhQ26AwhHJ5mvUyQW8ddTsQ=="], - - "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.53.3", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-yMTrCrK92aGyi7GuDNtGn2sNW+Gdb4vErx4t3Gv/Tr+1zRb8ax4z8GWVRfr3Jw8zJWvpGHNpss3vVlbF58DZ4w=="], - - "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.53.3", "", { "os": "freebsd", "cpu": "x64" }, "sha512-lMfF8X7QhdQzseM6XaX0vbno2m3hlyZFhwcndRMw8fbAGUGL3WFMBdK0hbUBIUYcEcMhVLr1SIamDeuLBnXS+Q=="], - - "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.53.3", "", { "os": "linux", "cpu": "arm" }, "sha512-k9oD15soC/Ln6d2Wv/JOFPzZXIAIFLp6B+i14KhxAfnq76ajt0EhYc5YPeX6W1xJkAdItcVT+JhKl1QZh44/qw=="], - - "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.53.3", "", { "os": "linux", "cpu": "arm" }, "sha512-vTNlKq+N6CK/8UktsrFuc+/7NlEYVxgaEgRXVUVK258Z5ymho29skzW1sutgYjqNnquGwVUObAaxae8rZ6YMhg=="], - - "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.53.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-RGrFLWgMhSxRs/EWJMIFM1O5Mzuz3Xy3/mnxJp/5cVhZ2XoCAxJnmNsEyeMJtpK+wu0FJFWz+QF4mjCA7AUQ3w=="], - - "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.53.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-kASyvfBEWYPEwe0Qv4nfu6pNkITLTb32p4yTgzFCocHnJLAHs+9LjUu9ONIhvfT/5lv4YS5muBHyuV84epBo/A=="], - - "@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.53.3", "", { "os": "linux", "cpu": "none" }, "sha512-JiuKcp2teLJwQ7vkJ95EwESWkNRFJD7TQgYmCnrPtlu50b4XvT5MOmurWNrCj3IFdyjBQ5p9vnrX4JM6I8OE7g=="], - - "@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.53.3", "", { "os": "linux", "cpu": "ppc64" }, "sha512-EoGSa8nd6d3T7zLuqdojxC20oBfNT8nexBbB/rkxgKj5T5vhpAQKKnD+h3UkoMuTyXkP5jTjK/ccNRmQrPNDuw=="], - - "@rollup/rollup-linux-riscv64-gnu": 
["@rollup/rollup-linux-riscv64-gnu@4.53.3", "", { "os": "linux", "cpu": "none" }, "sha512-4s+Wped2IHXHPnAEbIB0YWBv7SDohqxobiiPA1FIWZpX+w9o2i4LezzH/NkFUl8LRci/8udci6cLq+jJQlh+0g=="], - - "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.53.3", "", { "os": "linux", "cpu": "none" }, "sha512-68k2g7+0vs2u9CxDt5ktXTngsxOQkSEV/xBbwlqYcUrAVh6P9EgMZvFsnHy4SEiUl46Xf0IObWVbMvPrr2gw8A=="], - - "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.53.3", "", { "os": "linux", "cpu": "s390x" }, "sha512-VYsFMpULAz87ZW6BVYw3I6sWesGpsP9OPcyKe8ofdg9LHxSbRMd7zrVrr5xi/3kMZtpWL/wC+UIJWJYVX5uTKg=="], - - "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.53.3", "", { "os": "linux", "cpu": "x64" }, "sha512-3EhFi1FU6YL8HTUJZ51imGJWEX//ajQPfqWLI3BQq4TlvHy4X0MOr5q3D2Zof/ka0d5FNdPwZXm3Yyib/UEd+w=="], - - "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.53.3", "", { "os": "linux", "cpu": "x64" }, "sha512-eoROhjcc6HbZCJr+tvVT8X4fW3/5g/WkGvvmwz/88sDtSJzO7r/blvoBDgISDiCjDRZmHpwud7h+6Q9JxFwq1Q=="], - - "@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.53.3", "", { "os": "none", "cpu": "arm64" }, "sha512-OueLAWgrNSPGAdUdIjSWXw+u/02BRTcnfw9PN41D2vq/JSEPnJnVuBgw18VkN8wcd4fjUs+jFHVM4t9+kBSNLw=="], - - "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.53.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-GOFuKpsxR/whszbF/bzydebLiXIHSgsEUp6M0JI8dWvi+fFa1TD6YQa4aSZHtpmh2/uAlj/Dy+nmby3TJ3pkTw=="], - - "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.53.3", "", { "os": "win32", "cpu": "ia32" }, "sha512-iah+THLcBJdpfZ1TstDFbKNznlzoxa8fmnFYK4V67HvmuNYkVdAywJSoteUszvBQ9/HqN2+9AZghbajMsFT+oA=="], - - "@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.53.3", "", { "os": "win32", "cpu": "x64" }, "sha512-J9QDiOIZlZLdcot5NXEepDkstocktoVjkaKUtqzgzpt2yWjGlbYiKyp05rWwk4nypbYUNoFAztEgixoLaSETkg=="], - - "@rollup/rollup-win32-x64-msvc": 
["@rollup/rollup-win32-x64-msvc@4.53.3", "", { "os": "win32", "cpu": "x64" }, "sha512-UhTd8u31dXadv0MopwGgNOBpUVROFKWVQgAg5N1ESyCz8AuBcMqm4AuTjrwgQKGDfoFuz02EuMRHQIw/frmYKQ=="], - - "@standard-schema/utils": ["@standard-schema/utils@0.3.0", "", {}, "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g=="], - - "@tailwindcss/node": ["@tailwindcss/node@4.1.17", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "enhanced-resolve": "^5.18.3", "jiti": "^2.6.1", "lightningcss": "1.30.2", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", "tailwindcss": "4.1.17" } }, "sha512-csIkHIgLb3JisEFQ0vxr2Y57GUNYh447C8xzwj89U/8fdW8LhProdxvnVH6U8M2Y73QKiTIH+LWbK3V2BBZsAg=="], - - "@tailwindcss/oxide": ["@tailwindcss/oxide@4.1.17", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.17", "@tailwindcss/oxide-darwin-arm64": "4.1.17", "@tailwindcss/oxide-darwin-x64": "4.1.17", "@tailwindcss/oxide-freebsd-x64": "4.1.17", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.17", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.17", "@tailwindcss/oxide-linux-arm64-musl": "4.1.17", "@tailwindcss/oxide-linux-x64-gnu": "4.1.17", "@tailwindcss/oxide-linux-x64-musl": "4.1.17", "@tailwindcss/oxide-wasm32-wasi": "4.1.17", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.17", "@tailwindcss/oxide-win32-x64-msvc": "4.1.17" } }, "sha512-F0F7d01fmkQhsTjXezGBLdrl1KresJTcI3DB8EkScCldyKp3Msz4hub4uyYaVnk88BAS1g5DQjjF6F5qczheLA=="], - - "@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.17", "", { "os": "android", "cpu": "arm64" }, "sha512-BMqpkJHgOZ5z78qqiGE6ZIRExyaHyuxjgrJ6eBO5+hfrfGkuya0lYfw8fRHG77gdTjWkNWEEm+qeG2cDMxArLQ=="], - - "@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.17", "", { "os": "darwin", "cpu": "arm64" }, "sha512-EquyumkQweUBNk1zGEU/wfZo2qkp/nQKRZM8bUYO0J+Lums5+wl2CcG1f9BgAjn/u9pJzdYddHWBiFXJTcxmOg=="], - - "@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.1.17", 
"", { "os": "darwin", "cpu": "x64" }, "sha512-gdhEPLzke2Pog8s12oADwYu0IAw04Y2tlmgVzIN0+046ytcgx8uZmCzEg4VcQh+AHKiS7xaL8kGo/QTiNEGRog=="], - - "@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.17", "", { "os": "freebsd", "cpu": "x64" }, "sha512-hxGS81KskMxML9DXsaXT1H0DyA+ZBIbyG/sSAjWNe2EDl7TkPOBI42GBV3u38itzGUOmFfCzk1iAjDXds8Oh0g=="], - - "@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.17", "", { "os": "linux", "cpu": "arm" }, "sha512-k7jWk5E3ldAdw0cNglhjSgv501u7yrMf8oeZ0cElhxU6Y2o7f8yqelOp3fhf7evjIS6ujTI3U8pKUXV2I4iXHQ=="], - - "@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.17", "", { "os": "linux", "cpu": "arm64" }, "sha512-HVDOm/mxK6+TbARwdW17WrgDYEGzmoYayrCgmLEw7FxTPLcp/glBisuyWkFz/jb7ZfiAXAXUACfyItn+nTgsdQ=="], - - "@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.17", "", { "os": "linux", "cpu": "arm64" }, "sha512-HvZLfGr42i5anKtIeQzxdkw/wPqIbpeZqe7vd3V9vI3RQxe3xU1fLjss0TjyhxWcBaipk7NYwSrwTwK1hJARMg=="], - - "@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.17", "", { "os": "linux", "cpu": "x64" }, "sha512-M3XZuORCGB7VPOEDH+nzpJ21XPvK5PyjlkSFkFziNHGLc5d6g3di2McAAblmaSUNl8IOmzYwLx9NsE7bplNkwQ=="], - - "@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.17", "", { "os": "linux", "cpu": "x64" }, "sha512-k7f+pf9eXLEey4pBlw+8dgfJHY4PZ5qOUFDyNf7SI6lHjQ9Zt7+NcscjpwdCEbYi6FI5c2KDTDWyf2iHcCSyyQ=="], - - "@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.17", "", { "dependencies": { "@emnapi/core": "^1.6.0", "@emnapi/runtime": "^1.6.0", "@emnapi/wasi-threads": "^1.1.0", "@napi-rs/wasm-runtime": "^1.0.7", "@tybys/wasm-util": "^0.10.1", "tslib": "^2.4.0" }, "cpu": "none" }, "sha512-cEytGqSSoy7zK4JRWiTCx43FsKP/zGr0CsuMawhH67ONlH+T79VteQeJQRO/X7L0juEUA8ZyuYikcRBf0vsxhg=="], - - "@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.17", "", { "os": "win32", 
"cpu": "arm64" }, "sha512-JU5AHr7gKbZlOGvMdb4722/0aYbU+tN6lv1kONx0JK2cGsh7g148zVWLM0IKR3NeKLv+L90chBVYcJ8uJWbC9A=="], - - "@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.1.17", "", { "os": "win32", "cpu": "x64" }, "sha512-SKWM4waLuqx0IH+FMDUw6R66Hu4OuTALFgnleKbqhgGU30DY20NORZMZUKgLRjQXNN2TLzKvh48QXTig4h4bGw=="], - - "@tailwindcss/vite": ["@tailwindcss/vite@4.1.17", "", { "dependencies": { "@tailwindcss/node": "4.1.17", "@tailwindcss/oxide": "4.1.17", "tailwindcss": "4.1.17" }, "peerDependencies": { "vite": "^5.2.0 || ^6 || ^7" } }, "sha512-4+9w8ZHOiGnpcGI6z1TVVfWaX/koK7fKeSYF3qlYg2xpBtbteP2ddBxiarL+HVgfSJGeK5RIxRQmKm4rTJJAwA=="], - - "@tanstack/query-core": ["@tanstack/query-core@5.90.10", "", {}, "sha512-EhZVFu9rl7GfRNuJLJ3Y7wtbTnENsvzp+YpcAV7kCYiXni1v8qZh++lpw4ch4rrwC0u/EZRnBHIehzCGzwXDSQ=="], - - "@tanstack/react-query": ["@tanstack/react-query@5.90.10", "", { "dependencies": { "@tanstack/query-core": "5.90.10" }, "peerDependencies": { "react": "^18 || ^19" } }, "sha512-BKLss9Y8PQ9IUjPYQiv3/Zmlx92uxffUOX8ZZNoQlCIZBJPT5M+GOMQj7xislvVQ6l1BstBjcX0XB/aHfFYVNw=="], - - "@types/babel__core": ["@types/babel__core@7.20.5", "", { "dependencies": { "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7", "@types/babel__generator": "*", "@types/babel__template": "*", "@types/babel__traverse": "*" } }, "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA=="], - - "@types/babel__generator": ["@types/babel__generator@7.27.0", "", { "dependencies": { "@babel/types": "^7.0.0" } }, "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg=="], - - "@types/babel__template": ["@types/babel__template@7.4.4", "", { "dependencies": { "@babel/parser": "^7.1.0", "@babel/types": "^7.0.0" } }, "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A=="], - - "@types/babel__traverse": ["@types/babel__traverse@7.28.0", "", { "dependencies": { 
"@babel/types": "^7.28.2" } }, "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q=="], - - "@types/better-sqlite3": ["@types/better-sqlite3@7.6.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-NMv9ASNARoKksWtsq/SHakpYAYnhBrQgGD8zkLYk/jaK8jUGn08CfEdTRgYhMypUQAfzSP8W6gNLe0q19/t4VA=="], - - "@types/bun": ["@types/bun@1.3.3", "", { "dependencies": { "bun-types": "1.3.3" } }, "sha512-ogrKbJ2X5N0kWLLFKeytG0eHDleBYtngtlbu9cyBKFtNL3cnpDZkNdQj8flVf6WTZUX5ulI9AY1oa7ljhSrp+g=="], - - "@types/chai": ["@types/chai@5.2.3", "", { "dependencies": { "@types/deep-eql": "*", "assertion-error": "^2.0.1" } }, "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA=="], - - "@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ=="], - - "@types/deep-eql": ["@types/deep-eql@4.0.2", "", {}, "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw=="], - - "@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="], - - "@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg=="], - - "@types/hast": ["@types/hast@3.0.4", "", { "dependencies": { "@types/unist": "*" } }, "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ=="], - - "@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="], - - "@types/mdast": ["@types/mdast@4.0.4", "", { "dependencies": { "@types/unist": "*" } }, 
"sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA=="], - - "@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="], - - "@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], - - "@types/react": ["@types/react@19.2.6", "", { "dependencies": { "csstype": "^3.2.2" } }, "sha512-p/jUvulfgU7oKtj6Xpk8cA2Y1xKTtICGpJYeJXz2YVO2UcvjQgeRMLDGfDeqeRW2Ta+0QNFwcc8X3GH8SxZz6w=="], - - "@types/react-dom": ["@types/react-dom@19.2.3", "", { "peerDependencies": { "@types/react": "^19.2.0" } }, "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ=="], - - "@types/trusted-types": ["@types/trusted-types@2.0.7", "", {}, "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw=="], - - "@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="], - - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.47.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.47.0", "@typescript-eslint/type-utils": "8.47.0", "@typescript-eslint/utils": "8.47.0", "@typescript-eslint/visitor-keys": "8.47.0", "graphemer": "^1.4.0", "ignore": "^7.0.0", "natural-compare": "^1.4.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "@typescript-eslint/parser": "^8.47.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-fe0rz9WJQ5t2iaLfdbDc9T80GJy0AeO453q8C3YCilnGozvOyCG5t+EZtg7j7D88+c3FipfP/x+wzGnh1xp8ZA=="], - - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.47.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.47.0", "@typescript-eslint/types": "8.47.0", 
"@typescript-eslint/typescript-estree": "8.47.0", "@typescript-eslint/visitor-keys": "8.47.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-lJi3PfxVmo0AkEY93ecfN+r8SofEqZNGByvHAI3GBLrvt1Cw6H5k1IM02nSzu0RfUafr2EvFSw0wAsZgubNplQ=="], - - "@typescript-eslint/project-service": ["@typescript-eslint/project-service@8.47.0", "", { "dependencies": { "@typescript-eslint/tsconfig-utils": "^8.47.0", "@typescript-eslint/types": "^8.47.0", "debug": "^4.3.4" }, "peerDependencies": { "typescript": ">=4.8.4 <6.0.0" } }, "sha512-2X4BX8hUeB5JcA1TQJ7GjcgulXQ+5UkNb0DL8gHsHUHdFoiCTJoYLTpib3LtSDPZsRET5ygN4qqIWrHyYIKERA=="], - - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.47.0", "", { "dependencies": { "@typescript-eslint/types": "8.47.0", "@typescript-eslint/visitor-keys": "8.47.0" } }, "sha512-a0TTJk4HXMkfpFkL9/WaGTNuv7JWfFTQFJd6zS9dVAjKsojmv9HT55xzbEpnZoY+VUb+YXLMp+ihMLz/UlZfDg=="], - - "@typescript-eslint/tsconfig-utils": ["@typescript-eslint/tsconfig-utils@8.47.0", "", { "peerDependencies": { "typescript": ">=4.8.4 <6.0.0" } }, "sha512-ybUAvjy4ZCL11uryalkKxuT3w3sXJAuWhOoGS3T/Wu+iUu1tGJmk5ytSY8gbdACNARmcYEB0COksD2j6hfGK2g=="], - - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.47.0", "", { "dependencies": { "@typescript-eslint/types": "8.47.0", "@typescript-eslint/typescript-estree": "8.47.0", "@typescript-eslint/utils": "8.47.0", "debug": "^4.3.4", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-QC9RiCmZ2HmIdCEvhd1aJELBlD93ErziOXXlHEZyuBo3tBiAZieya0HLIxp+DoDWlsQqDawyKuNEhORyku+P8A=="], - - "@typescript-eslint/types": ["@typescript-eslint/types@8.47.0", "", {}, "sha512-nHAE6bMKsizhA2uuYZbEbmp5z2UpffNrPEqiKIeN7VsV6UY/roxanWfoRrf6x/k9+Obf+GQdkm0nPU+vnMXo9A=="], - - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.47.0", "", { "dependencies": { 
"@typescript-eslint/project-service": "8.47.0", "@typescript-eslint/tsconfig-utils": "8.47.0", "@typescript-eslint/types": "8.47.0", "@typescript-eslint/visitor-keys": "8.47.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <6.0.0" } }, "sha512-k6ti9UepJf5NpzCjH31hQNLHQWupTRPhZ+KFF8WtTuTpy7uHPfeg2NM7cP27aCGajoEplxJDFVCEm9TGPYyiVg=="], - - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.47.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", "@typescript-eslint/scope-manager": "8.47.0", "@typescript-eslint/types": "8.47.0", "@typescript-eslint/typescript-estree": "8.47.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-g7XrNf25iL4TJOiPqatNuaChyqt49a/onq5YsJ9+hXeugK+41LVg7AxikMfM02PC6jbNtZLCJj6AUcQXJS/jGQ=="], - - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.47.0", "", { "dependencies": { "@typescript-eslint/types": "8.47.0", "eslint-visitor-keys": "^4.2.1" } }, "sha512-SIV3/6eftCy1bNzCQoPmbWsRLujS8t5iDIZ4spZOBHqrM+yfX2ogg8Tt3PDTAVKw3sSCiUgg30uOAvK2r9zGjQ=="], - - "@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="], - - "@vitejs/plugin-react": ["@vitejs/plugin-react@5.1.1", "", { "dependencies": { "@babel/core": "^7.28.5", "@babel/plugin-transform-react-jsx-self": "^7.27.1", "@babel/plugin-transform-react-jsx-source": "^7.27.1", "@rolldown/pluginutils": "1.0.0-beta.47", "@types/babel__core": "^7.20.5", "react-refresh": "^0.18.0" }, "peerDependencies": { "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, "sha512-WQfkSw0QbQ5aJ2CHYw23ZGkqnRwqKHD/KYsMeTkZzPT4Jcf0DcBxBtwMJxnu6E7oxw5+JC6ZAiePgh28uJ1HBA=="], - - "@vitest/expect": ["@vitest/expect@3.2.4", "", { "dependencies": { "@types/chai": "^5.2.2", "@vitest/spy": 
"3.2.4", "@vitest/utils": "3.2.4", "chai": "^5.2.0", "tinyrainbow": "^2.0.0" } }, "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig=="], - - "@vitest/mocker": ["@vitest/mocker@3.2.4", "", { "dependencies": { "@vitest/spy": "3.2.4", "estree-walker": "^3.0.3", "magic-string": "^0.30.17" }, "peerDependencies": { "msw": "^2.4.9", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "optionalPeers": ["msw", "vite"] }, "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ=="], - - "@vitest/pretty-format": ["@vitest/pretty-format@3.2.4", "", { "dependencies": { "tinyrainbow": "^2.0.0" } }, "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA=="], - - "@vitest/runner": ["@vitest/runner@3.2.4", "", { "dependencies": { "@vitest/utils": "3.2.4", "pathe": "^2.0.3", "strip-literal": "^3.0.0" } }, "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ=="], - - "@vitest/snapshot": ["@vitest/snapshot@3.2.4", "", { "dependencies": { "@vitest/pretty-format": "3.2.4", "magic-string": "^0.30.17", "pathe": "^2.0.3" } }, "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ=="], - - "@vitest/spy": ["@vitest/spy@3.2.4", "", { "dependencies": { "tinyspy": "^4.0.3" } }, "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw=="], - - "@vitest/ui": ["@vitest/ui@3.2.4", "", { "dependencies": { "@vitest/utils": "3.2.4", "fflate": "^0.8.2", "flatted": "^3.3.3", "pathe": "^2.0.3", "sirv": "^3.0.1", "tinyglobby": "^0.2.14", "tinyrainbow": "^2.0.0" }, "peerDependencies": { "vitest": "3.2.4" } }, "sha512-hGISOaP18plkzbWEcP/QvtRW1xDXF2+96HbEX6byqQhAUbiS5oH6/9JwW+QsQCIYON2bI6QZBF+2PvOmrRZ9wA=="], - - "@vitest/utils": ["@vitest/utils@3.2.4", "", { "dependencies": { "@vitest/pretty-format": "3.2.4", "loupe": "^3.1.4", "tinyrainbow": "^2.0.0" } }, 
"sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA=="], - - "acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="], - - "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="], - - "agent-base": ["agent-base@7.1.4", "", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="], - - "ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="], - - "ansi-colors": ["ansi-colors@4.1.3", "", {}, "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw=="], - - "ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - - "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], - - "argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], - - "aria-hidden": ["aria-hidden@1.2.6", "", { "dependencies": { "tslib": "^2.0.0" } }, "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA=="], - - "assertion-error": ["assertion-error@2.0.1", "", {}, "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA=="], - - "asynckit": ["asynckit@0.4.0", "", {}, 
"sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="], - - "autoprefixer": ["autoprefixer@10.4.22", "", { "dependencies": { "browserslist": "^4.27.0", "caniuse-lite": "^1.0.30001754", "fraction.js": "^5.3.4", "normalize-range": "^0.1.2", "picocolors": "^1.1.1", "postcss-value-parser": "^4.2.0" }, "peerDependencies": { "postcss": "^8.1.0" }, "bin": { "autoprefixer": "bin/autoprefixer" } }, "sha512-ARe0v/t9gO28Bznv6GgqARmVqcWOV3mfgUPn9becPHMiD3o9BwlRgaeccZnwTpZ7Zwqrm+c1sUSsMxIzQzc8Xg=="], - - "axios": ["axios@1.13.2", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, "sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA=="], - - "backend": ["backend@workspace:backend"], - - "bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="], - - "balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], - - "baseline-browser-mapping": ["baseline-browser-mapping@2.8.30", "", { "bin": { "baseline-browser-mapping": "dist/cli.js" } }, "sha512-aTUKW4ptQhS64+v2d6IkPzymEzzhw+G0bA1g3uBRV3+ntkH+svttKseW5IOR4Ed6NUVKqnY7qT3dKvzQ7io4AA=="], - - "brace-expansion": ["brace-expansion@1.1.12", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="], - - "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], - - "browserslist": ["browserslist@4.28.0", "", { "dependencies": { "baseline-browser-mapping": "^2.8.25", "caniuse-lite": "^1.0.30001754", "electron-to-chromium": "^1.5.249", "node-releases": "^2.0.27", "update-browserslist-db": "^1.1.4" }, 
"bin": { "browserslist": "cli.js" } }, "sha512-tbydkR/CxfMwelN0vwdP/pLkDwyAASZ+VfWm4EOwlB6SWhx1sYnWLqo8N5j0rAzPfzfRaxt0mM/4wPU/Su84RQ=="], - - "bun-types": ["bun-types@1.3.3", "", { "dependencies": { "@types/node": "*" } }, "sha512-z3Xwlg7j2l9JY27x5Qn3Wlyos8YAp0kKRlrePAOjgjMGS5IG6E7Jnlx736vH9UVI4wUICwwhC9anYL++XeOgTQ=="], - - "cac": ["cac@6.7.14", "", {}, "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ=="], - - "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="], - - "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], - - "caniuse-lite": ["caniuse-lite@1.0.30001756", "", {}, "sha512-4HnCNKbMLkLdhJz3TToeVWHSnfJvPaq6vu/eRP0Ahub/07n484XHhBF5AJoSGHdVrS8tKFauUQz8Bp9P7LVx7A=="], - - "ccount": ["ccount@2.0.1", "", {}, "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg=="], - - "chai": ["chai@5.3.3", "", { "dependencies": { "assertion-error": "^2.0.1", "check-error": "^2.1.1", "deep-eql": "^5.0.1", "loupe": "^3.1.0", "pathval": "^2.0.0" } }, "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw=="], - - "chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - - "change-case": ["change-case@5.4.4", "", {}, "sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w=="], - - "character-entities": ["character-entities@2.0.2", "", {}, "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ=="], - - "character-entities-html4": 
["character-entities-html4@2.1.0", "", {}, "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA=="], - - "character-entities-legacy": ["character-entities-legacy@3.0.0", "", {}, "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ=="], - - "character-reference-invalid": ["character-reference-invalid@2.0.1", "", {}, "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw=="], - - "check-error": ["check-error@2.1.1", "", {}, "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw=="], - - "class-variance-authority": ["class-variance-authority@0.7.1", "", { "dependencies": { "clsx": "^2.1.1" } }, "sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg=="], - - "cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="], - - "clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="], - - "cmdk": ["cmdk@1.1.1", "", { "dependencies": { "@radix-ui/react-compose-refs": "^1.1.1", "@radix-ui/react-dialog": "^1.1.6", "@radix-ui/react-id": "^1.1.0", "@radix-ui/react-primitive": "^2.0.2" }, "peerDependencies": { "react": "^18 || ^19 || ^19.0.0-rc", "react-dom": "^18 || ^19 || ^19.0.0-rc" } }, "sha512-Vsv7kFaXm+ptHDMZ7izaRsP70GgrW9NBNGswt9OZaVBLlE0SNpDq8eu/VGXyF9r7M0azK3Wy7OlYXsuyYLFzHg=="], - - "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], - - "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="], - - "colorette": 
["colorette@1.4.0", "", {}, "sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g=="], - - "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="], - - "comma-separated-tokens": ["comma-separated-tokens@2.0.3", "", {}, "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg=="], - - "concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="], - - "concurrently": ["concurrently@9.2.1", "", { "dependencies": { "chalk": "4.1.2", "rxjs": "7.8.2", "shell-quote": "1.8.3", "supports-color": "8.1.1", "tree-kill": "1.2.2", "yargs": "17.7.2" }, "bin": { "conc": "dist/bin/concurrently.js", "concurrently": "dist/bin/concurrently.js" } }, "sha512-fsfrO0MxV64Znoy8/l1vVIjjHa29SZyyqPgQBwhiDcaW8wJc2W3XWVOGx4M3oJBnv/zdUZIIp1gDeS98GzP8Ng=="], - - "convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="], - - "cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="], - - "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], - - "csstype": ["csstype@3.2.3", "", {}, "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ=="], - - "date-fns": ["date-fns@4.1.0", "", {}, "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg=="], - - "debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, 
"sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="], - - "decode-named-character-reference": ["decode-named-character-reference@1.2.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q=="], - - "deep-eql": ["deep-eql@5.0.2", "", {}, "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q=="], - - "deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="], - - "delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="], - - "dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="], - - "detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="], - - "detect-node-es": ["detect-node-es@1.1.0", "", {}, "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ=="], - - "devlop": ["devlop@1.1.0", "", { "dependencies": { "dequal": "^2.0.0" } }, "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA=="], - - "diff": ["diff@8.0.2", "", {}, "sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg=="], - - "dompurify": ["dompurify@3.2.7", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw=="], - - "dotenv": ["dotenv@17.2.3", "", {}, "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w=="], - - "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", 
"es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], - - "electron-to-chromium": ["electron-to-chromium@1.5.259", "", {}, "sha512-I+oLXgpEJzD6Cwuwt1gYjxsDmu/S/Kd41mmLA3O+/uH2pFRO/DvOjUyGozL8j3KeLV6WyZ7ssPwELMsXCcsJAQ=="], - - "emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], - - "enhanced-resolve": ["enhanced-resolve@5.18.3", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww=="], - - "entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="], - - "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], - - "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], - - "es-module-lexer": ["es-module-lexer@1.7.0", "", {}, "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA=="], - - "es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="], - - "es-set-tostringtag": ["es-set-tostringtag@2.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA=="], - - "esbuild": ["esbuild@0.25.12", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.12", "@esbuild/android-arm": "0.25.12", "@esbuild/android-arm64": "0.25.12", "@esbuild/android-x64": "0.25.12", 
"@esbuild/darwin-arm64": "0.25.12", "@esbuild/darwin-x64": "0.25.12", "@esbuild/freebsd-arm64": "0.25.12", "@esbuild/freebsd-x64": "0.25.12", "@esbuild/linux-arm": "0.25.12", "@esbuild/linux-arm64": "0.25.12", "@esbuild/linux-ia32": "0.25.12", "@esbuild/linux-loong64": "0.25.12", "@esbuild/linux-mips64el": "0.25.12", "@esbuild/linux-ppc64": "0.25.12", "@esbuild/linux-riscv64": "0.25.12", "@esbuild/linux-s390x": "0.25.12", "@esbuild/linux-x64": "0.25.12", "@esbuild/netbsd-arm64": "0.25.12", "@esbuild/netbsd-x64": "0.25.12", "@esbuild/openbsd-arm64": "0.25.12", "@esbuild/openbsd-x64": "0.25.12", "@esbuild/openharmony-arm64": "0.25.12", "@esbuild/sunos-x64": "0.25.12", "@esbuild/win32-arm64": "0.25.12", "@esbuild/win32-ia32": "0.25.12", "@esbuild/win32-x64": "0.25.12" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg=="], - - "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="], - - "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], - - "eslint": ["eslint@9.39.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.21.1", "@eslint/config-helpers": "^0.4.2", "@eslint/core": "^0.17.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.39.1", "@eslint/plugin-kit": "^0.4.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.4.0", "eslint-visitor-keys": "^4.2.1", "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": 
"^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g=="], - - "eslint-plugin-react-hooks": ["eslint-plugin-react-hooks@5.2.0", "", { "peerDependencies": { "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0" } }, "sha512-+f15FfK64YQwZdJNELETdn5ibXEUQmW1DZL6KXhNnc2heoy/sg9VJJeT7n8TlMWouzWqSWavFkIhHyIbIAEapg=="], - - "eslint-plugin-react-refresh": ["eslint-plugin-react-refresh@0.4.24", "", { "peerDependencies": { "eslint": ">=8.40" } }, "sha512-nLHIW7TEq3aLrEYWpVaJ1dRgFR+wLDPN8e8FpYAql/bMV2oBEfC37K0gLEGgv9fy66juNShSMV8OkTqzltcG/w=="], - - "eslint-scope": ["eslint-scope@8.4.0", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg=="], - - "eslint-visitor-keys": ["eslint-visitor-keys@4.2.1", "", {}, "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ=="], - - "espree": ["espree@10.4.0", "", { "dependencies": { "acorn": "^8.15.0", "acorn-jsx": "^5.3.2", "eslint-visitor-keys": "^4.2.1" } }, "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ=="], - - "esquery": ["esquery@1.6.0", "", { "dependencies": { "estraverse": "^5.1.0" } }, "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg=="], - - "esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="], - - "estraverse": 
["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="], - - "estree-util-is-identifier-name": ["estree-util-is-identifier-name@3.0.0", "", {}, "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg=="], - - "estree-walker": ["estree-walker@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.0" } }, "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g=="], - - "esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="], - - "expect-type": ["expect-type@1.2.2", "", {}, "sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA=="], - - "extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="], - - "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="], - - "fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="], - - "fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="], - - "fast-levenshtein": ["fast-levenshtein@2.0.6", "", {}, "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="], - - "fast-uri": ["fast-uri@3.1.0", "", {}, "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA=="], - - "fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, 
"sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="], - - "fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="], - - "fflate": ["fflate@0.8.2", "", {}, "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A=="], - - "file-entry-cache": ["file-entry-cache@8.0.0", "", { "dependencies": { "flat-cache": "^4.0.0" } }, "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ=="], - - "fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="], - - "find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="], - - "flat-cache": ["flat-cache@4.0.1", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.4" } }, "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw=="], - - "flatted": ["flatted@3.3.3", "", {}, "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg=="], - - "follow-redirects": ["follow-redirects@1.15.11", "", {}, "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ=="], - - "form-data": ["form-data@4.0.5", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", "hasown": "^2.0.2", "mime-types": "^2.1.12" } }, "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w=="], - - "fraction.js": ["fraction.js@5.3.4", "", {}, 
"sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ=="], - - "frontend": ["frontend@workspace:frontend"], - - "fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], - - "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], - - "gensync": ["gensync@1.0.0-beta.2", "", {}, "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg=="], - - "get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="], - - "get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="], - - "get-nonce": ["get-nonce@1.0.1", "", {}, "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q=="], - - "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], - - "glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="], - - "globals": ["globals@16.5.0", "", {}, "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ=="], - - "gopd": ["gopd@1.2.0", "", {}, 
"sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], - - "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="], - - "graphemer": ["graphemer@1.4.0", "", {}, "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="], - - "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], - - "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], - - "has-tostringtag": ["has-tostringtag@1.0.2", "", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="], - - "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], - - "hast-util-from-parse5": ["hast-util-from-parse5@8.0.3", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "devlop": "^1.0.0", "hastscript": "^9.0.0", "property-information": "^7.0.0", "vfile": "^6.0.0", "vfile-location": "^5.0.0", "web-namespaces": "^2.0.0" } }, "sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg=="], - - "hast-util-is-element": ["hast-util-is-element@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, "sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g=="], - - "hast-util-parse-selector": ["hast-util-parse-selector@4.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A=="], - - "hast-util-raw": ["hast-util-raw@9.1.0", "", { "dependencies": { "@types/hast": "^3.0.0", 
"@types/unist": "^3.0.0", "@ungap/structured-clone": "^1.0.0", "hast-util-from-parse5": "^8.0.0", "hast-util-to-parse5": "^8.0.0", "html-void-elements": "^3.0.0", "mdast-util-to-hast": "^13.0.0", "parse5": "^7.0.0", "unist-util-position": "^5.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0", "web-namespaces": "^2.0.0", "zwitch": "^2.0.0" } }, "sha512-Y8/SBAHkZGoNkpzqqfCldijcuUKh7/su31kEBp67cFY09Wy0mTRgtsLYsiIxMJxlu0f6AA5SUTbDR8K0rxnbUw=="], - - "hast-util-to-jsx-runtime": ["hast-util-to-jsx-runtime@2.3.6", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "comma-separated-tokens": "^2.0.0", "devlop": "^1.0.0", "estree-util-is-identifier-name": "^3.0.0", "hast-util-whitespace": "^3.0.0", "mdast-util-mdx-expression": "^2.0.0", "mdast-util-mdx-jsx": "^3.0.0", "mdast-util-mdxjs-esm": "^2.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "style-to-js": "^1.0.0", "unist-util-position": "^5.0.0", "vfile-message": "^4.0.0" } }, "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg=="], - - "hast-util-to-parse5": ["hast-util-to-parse5@8.0.0", "", { "dependencies": { "@types/hast": "^3.0.0", "comma-separated-tokens": "^2.0.0", "devlop": "^1.0.0", "property-information": "^6.0.0", "space-separated-tokens": "^2.0.0", "web-namespaces": "^2.0.0", "zwitch": "^2.0.0" } }, "sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw=="], - - "hast-util-to-text": ["hast-util-to-text@4.0.2", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "hast-util-is-element": "^3.0.0", "unist-util-find-after": "^5.0.0" } }, "sha512-KK6y/BN8lbaq654j7JgBydev7wuNMcID54lkRav1P0CaE1e47P72AWWPiGKXTJU271ooYzcvTAn/Zt0REnvc7A=="], - - "hast-util-whitespace": ["hast-util-whitespace@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, 
"sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw=="], - - "hastscript": ["hastscript@9.0.1", "", { "dependencies": { "@types/hast": "^3.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-parse-selector": "^4.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0" } }, "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w=="], - - "highlight.js": ["highlight.js@11.11.1", "", {}, "sha512-Xwwo44whKBVCYoliBQwaPvtd/2tYFkRQtXDWj1nackaV2JPXx3L0+Jvd8/qCJ2p+ML0/XVkJ2q+Mr+UVdpJK5w=="], - - "hono": ["hono@4.10.6", "", {}, "sha512-BIdolzGpDO9MQ4nu3AUuDwHZZ+KViNm+EZ75Ae55eMXMqLVhDFqEMXxtUe9Qh8hjL+pIna/frs2j6Y2yD5Ua/g=="], - - "html-url-attributes": ["html-url-attributes@3.0.1", "", {}, "sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ=="], - - "html-void-elements": ["html-void-elements@3.0.0", "", {}, "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg=="], - - "https-proxy-agent": ["https-proxy-agent@7.0.6", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "4" } }, "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="], - - "ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="], - - "import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="], - - "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], - - "index-to-position": ["index-to-position@1.2.0", "", {}, "sha512-Yg7+ztRkqslMAS2iFaU+Oa4KTSidr63OsFGlOrJoW981kIYO3CGCS3wA95P1mUi/IVSJkn0D479KTJpVpvFNuw=="], - - "inline-style-parser": 
["inline-style-parser@0.2.7", "", {}, "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA=="], - - "is-alphabetical": ["is-alphabetical@2.0.1", "", {}, "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ=="], - - "is-alphanumerical": ["is-alphanumerical@2.0.1", "", { "dependencies": { "is-alphabetical": "^2.0.0", "is-decimal": "^2.0.0" } }, "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw=="], - - "is-decimal": ["is-decimal@2.0.1", "", {}, "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A=="], - - "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="], - - "is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="], - - "is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="], - - "is-hexadecimal": ["is-hexadecimal@2.0.1", "", {}, "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg=="], - - "is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="], - - "is-plain-obj": ["is-plain-obj@4.1.0", "", {}, "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg=="], - - "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], - - "jiti": ["jiti@2.6.1", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ=="], - - "js-levenshtein": ["js-levenshtein@1.1.6", "", {}, 
"sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g=="], - - "js-tokens": ["js-tokens@9.0.1", "", {}, "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ=="], - - "js-yaml": ["js-yaml@4.1.1", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA=="], - - "jsesc": ["jsesc@3.1.0", "", { "bin": { "jsesc": "bin/jsesc" } }, "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA=="], - - "json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="], - - "json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="], - - "json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="], - - "json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="], - - "keyv": ["keyv@4.5.4", "", { "dependencies": { "json-buffer": "3.0.1" } }, "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="], - - "levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="], - - "lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", 
"lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="], - - "lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="], - - "lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA=="], - - "lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ=="], - - "lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA=="], - - "lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.2", "", { "os": "linux", "cpu": "arm" }, "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA=="], - - "lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A=="], - - "lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA=="], - - "lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.2", "", { "os": "linux", "cpu": "x64" }, 
"sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w=="], - - "lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA=="], - - "lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ=="], - - "lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="], - - "locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="], - - "lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="], - - "longest-streak": ["longest-streak@3.1.0", "", {}, "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g=="], - - "loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="], - - "lowlight": ["lowlight@3.3.0", "", { "dependencies": { "@types/hast": "^3.0.0", "devlop": "^1.0.0", "highlight.js": "~11.11.0" } }, "sha512-0JNhgFoPvP6U6lE/UdVsSq99tn6DhjjpAj5MxG49ewd2mOBVtwWYIT8ClyABhq198aXXODMU6Ox8DrGy/CpTZQ=="], - - "lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="], - - "lucide-react": ["lucide-react@0.546.0", "", { "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, 
"sha512-Z94u6fKT43lKeYHiVyvyR8fT7pwCzDu7RyMPpTvh054+xahSgj4HFQ+NmflvzdXsoAjYGdCguGaFKYuvq0ThCQ=="], - - "magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="], - - "markdown-table": ["markdown-table@3.0.4", "", {}, "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw=="], - - "marked": ["marked@14.0.0", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-uIj4+faQ+MgHgwUW1l2PsPglZLOLOT1uErt06dAPtx2kjteLAkbsd/0FiYg/MGS+i7ZKLb7w2WClxHkzOOuryQ=="], - - "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], - - "mdast-util-find-and-replace": ["mdast-util-find-and-replace@3.0.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "escape-string-regexp": "^5.0.0", "unist-util-is": "^6.0.0", "unist-util-visit-parents": "^6.0.0" } }, "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg=="], - - "mdast-util-from-markdown": ["mdast-util-from-markdown@2.0.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "mdast-util-to-string": "^4.0.0", "micromark": "^4.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA=="], - - "mdast-util-gfm": ["mdast-util-gfm@3.1.0", "", { "dependencies": { "mdast-util-from-markdown": "^2.0.0", "mdast-util-gfm-autolink-literal": "^2.0.0", "mdast-util-gfm-footnote": "^2.0.0", "mdast-util-gfm-strikethrough": "^2.0.0", 
"mdast-util-gfm-table": "^2.0.0", "mdast-util-gfm-task-list-item": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ=="], - - "mdast-util-gfm-autolink-literal": ["mdast-util-gfm-autolink-literal@2.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "ccount": "^2.0.0", "devlop": "^1.0.0", "mdast-util-find-and-replace": "^3.0.0", "micromark-util-character": "^2.0.0" } }, "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ=="], - - "mdast-util-gfm-footnote": ["mdast-util-gfm-footnote@2.1.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "devlop": "^1.1.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0" } }, "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ=="], - - "mdast-util-gfm-strikethrough": ["mdast-util-gfm-strikethrough@2.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg=="], - - "mdast-util-gfm-table": ["mdast-util-gfm-table@2.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "markdown-table": "^3.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg=="], - - "mdast-util-gfm-task-list-item": ["mdast-util-gfm-task-list-item@2.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ=="], - - "mdast-util-mdx-expression": ["mdast-util-mdx-expression@2.0.1", "", { "dependencies": { 
"@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ=="], - - "mdast-util-mdx-jsx": ["mdast-util-mdx-jsx@3.2.0", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "devlop": "^1.1.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "parse-entities": "^4.0.0", "stringify-entities": "^4.0.0", "unist-util-stringify-position": "^4.0.0", "vfile-message": "^4.0.0" } }, "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q=="], - - "mdast-util-mdxjs-esm": ["mdast-util-mdxjs-esm@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg=="], - - "mdast-util-phrasing": ["mdast-util-phrasing@4.1.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "unist-util-is": "^6.0.0" } }, "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w=="], - - "mdast-util-to-hast": ["mdast-util-to-hast@13.2.0", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@ungap/structured-clone": "^1.0.0", "devlop": "^1.0.0", "micromark-util-sanitize-uri": "^2.0.0", "trim-lines": "^3.0.0", "unist-util-position": "^5.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" } }, "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA=="], - - "mdast-util-to-markdown": ["mdast-util-to-markdown@2.1.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", 
"longest-streak": "^3.0.0", "mdast-util-phrasing": "^4.0.0", "mdast-util-to-string": "^4.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "unist-util-visit": "^5.0.0", "zwitch": "^2.0.0" } }, "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA=="], - - "mdast-util-to-string": ["mdast-util-to-string@4.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0" } }, "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg=="], - - "merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="], - - "micromark": ["micromark@4.0.2", "", { "dependencies": { "@types/debug": "^4.0.0", "debug": "^4.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-core-commonmark": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-combine-extensions": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-sanitize-uri": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA=="], - - "micromark-core-commonmark": ["micromark-core-commonmark@2.0.3", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-factory-destination": "^2.0.0", "micromark-factory-label": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-factory-title": "^2.0.0", "micromark-factory-whitespace": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-classify-character": "^2.0.0", 
"micromark-util-html-tag-name": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg=="], - - "micromark-extension-gfm": ["micromark-extension-gfm@3.0.0", "", { "dependencies": { "micromark-extension-gfm-autolink-literal": "^2.0.0", "micromark-extension-gfm-footnote": "^2.0.0", "micromark-extension-gfm-strikethrough": "^2.0.0", "micromark-extension-gfm-table": "^2.0.0", "micromark-extension-gfm-tagfilter": "^2.0.0", "micromark-extension-gfm-task-list-item": "^2.0.0", "micromark-util-combine-extensions": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w=="], - - "micromark-extension-gfm-autolink-literal": ["micromark-extension-gfm-autolink-literal@2.1.0", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-sanitize-uri": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw=="], - - "micromark-extension-gfm-footnote": ["micromark-extension-gfm-footnote@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-core-commonmark": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-sanitize-uri": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw=="], - - "micromark-extension-gfm-strikethrough": ["micromark-extension-gfm-strikethrough@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-classify-character": 
"^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw=="], - - "micromark-extension-gfm-table": ["micromark-extension-gfm-table@2.1.1", "", { "dependencies": { "devlop": "^1.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg=="], - - "micromark-extension-gfm-tagfilter": ["micromark-extension-gfm-tagfilter@2.0.0", "", { "dependencies": { "micromark-util-types": "^2.0.0" } }, "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg=="], - - "micromark-extension-gfm-task-list-item": ["micromark-extension-gfm-task-list-item@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw=="], - - "micromark-factory-destination": ["micromark-factory-destination@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA=="], - - "micromark-factory-label": ["micromark-factory-label@2.0.1", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg=="], - - "micromark-factory-space": ["micromark-factory-space@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", 
"micromark-util-types": "^2.0.0" } }, "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg=="], - - "micromark-factory-title": ["micromark-factory-title@2.0.1", "", { "dependencies": { "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw=="], - - "micromark-factory-whitespace": ["micromark-factory-whitespace@2.0.1", "", { "dependencies": { "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ=="], - - "micromark-util-character": ["micromark-util-character@2.1.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q=="], - - "micromark-util-chunked": ["micromark-util-chunked@2.0.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA=="], - - "micromark-util-classify-character": ["micromark-util-classify-character@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q=="], - - "micromark-util-combine-extensions": ["micromark-util-combine-extensions@2.0.1", "", { "dependencies": { "micromark-util-chunked": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg=="], - - "micromark-util-decode-numeric-character-reference": 
["micromark-util-decode-numeric-character-reference@2.0.2", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw=="], - - "micromark-util-decode-string": ["micromark-util-decode-string@2.0.1", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "micromark-util-character": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-symbol": "^2.0.0" } }, "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ=="], - - "micromark-util-encode": ["micromark-util-encode@2.0.1", "", {}, "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw=="], - - "micromark-util-html-tag-name": ["micromark-util-html-tag-name@2.0.1", "", {}, "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA=="], - - "micromark-util-normalize-identifier": ["micromark-util-normalize-identifier@2.0.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q=="], - - "micromark-util-resolve-all": ["micromark-util-resolve-all@2.0.1", "", { "dependencies": { "micromark-util-types": "^2.0.0" } }, "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg=="], - - "micromark-util-sanitize-uri": ["micromark-util-sanitize-uri@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-symbol": "^2.0.0" } }, "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ=="], - - "micromark-util-subtokenize": ["micromark-util-subtokenize@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, 
"sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA=="], - - "micromark-util-symbol": ["micromark-util-symbol@2.0.1", "", {}, "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q=="], - - "micromark-util-types": ["micromark-util-types@2.0.2", "", {}, "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA=="], - - "micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="], - - "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], - - "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], - - "minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="], - - "monaco-editor": ["monaco-editor@0.55.1", "", { "dependencies": { "dompurify": "3.2.7", "marked": "14.0.0" } }, "sha512-jz4x+TJNFHwHtwuV9vA9rMujcZRb0CEilTEwG2rRSpe/A7Jdkuj8xPKttCgOh+v/lkHy7HsZ64oj+q3xoAFl9A=="], - - "mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="], - - "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], - - "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="], - - "natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="], - - 
"node-releases": ["node-releases@2.0.27", "", {}, "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA=="], - - "normalize-range": ["normalize-range@0.1.2", "", {}, "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA=="], - - "openapi-typescript": ["openapi-typescript@7.10.1", "", { "dependencies": { "@redocly/openapi-core": "^1.34.5", "ansi-colors": "^4.1.3", "change-case": "^5.4.4", "parse-json": "^8.3.0", "supports-color": "^10.2.2", "yargs-parser": "^21.1.1" }, "peerDependencies": { "typescript": "^5.x" }, "bin": { "openapi-typescript": "bin/cli.js" } }, "sha512-rBcU8bjKGGZQT4K2ekSTY2Q5veOQbVG/lTKZ49DeCyT9z62hM2Vj/LLHjDHC9W7LJG8YMHcdXpRZDqC1ojB/lw=="], - - "optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="], - - "p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="], - - "p-locate": ["p-locate@5.0.0", "", { "dependencies": { "p-limit": "^3.0.2" } }, "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw=="], - - "parent-module": ["parent-module@1.0.1", "", { "dependencies": { "callsites": "^3.0.0" } }, "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="], - - "parse-entities": ["parse-entities@4.0.2", "", { "dependencies": { "@types/unist": "^2.0.0", "character-entities-legacy": "^3.0.0", "character-reference-invalid": "^2.0.0", "decode-named-character-reference": "^1.0.0", "is-alphanumerical": "^2.0.0", "is-decimal": "^2.0.0", "is-hexadecimal": "^2.0.0" } }, 
"sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw=="], - - "parse-json": ["parse-json@8.3.0", "", { "dependencies": { "@babel/code-frame": "^7.26.2", "index-to-position": "^1.1.0", "type-fest": "^4.39.1" } }, "sha512-ybiGyvspI+fAoRQbIPRddCcSTV9/LsJbf0e/S85VLowVGzRmokfneg2kwVW/KU5rOXrPSbF1qAKPMgNTqqROQQ=="], - - "parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="], - - "path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="], - - "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], - - "pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], - - "pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="], - - "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], - - "picomatch": ["picomatch@4.0.3", "", {}, "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="], - - "pluralize": ["pluralize@8.0.0", "", {}, "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA=="], - - "postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="], - - "postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="], - - "prelude-ls": ["prelude-ls@1.2.1", "", {}, 
"sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="], - - "property-information": ["property-information@7.1.0", "", {}, "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ=="], - - "proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="], - - "punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], - - "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], - - "react": ["react@19.2.0", "", {}, "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ=="], - - "react-dom": ["react-dom@19.2.0", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.0" } }, "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ=="], - - "react-hook-form": ["react-hook-form@7.66.1", "", { "peerDependencies": { "react": "^16.8.0 || ^17 || ^18 || ^19" } }, "sha512-2KnjpgG2Rhbi+CIiIBQQ9Df6sMGH5ExNyFl4Hw9qO7pIqMBR8Bvu9RQyjl3JM4vehzCh9soiNUM/xYMswb2EiA=="], - - "react-markdown": ["react-markdown@10.1.0", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "hast-util-to-jsx-runtime": "^2.0.0", "html-url-attributes": "^3.0.0", "mdast-util-to-hast": "^13.0.0", "remark-parse": "^11.0.0", "remark-rehype": "^11.0.0", "unified": "^11.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" }, "peerDependencies": { "@types/react": ">=18", "react": ">=18" } }, "sha512-qKxVopLT/TyA6BX3Ue5NwabOsAzm0Q7kAPwq6L+wWDwisYs7R8vZ0nRXqq6rkueboxpkjvLGU9fWifiX/ZZFxQ=="], - - "react-refresh": ["react-refresh@0.18.0", "", {}, 
"sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw=="], - - "react-remove-scroll": ["react-remove-scroll@2.7.1", "", { "dependencies": { "react-remove-scroll-bar": "^2.3.7", "react-style-singleton": "^2.2.3", "tslib": "^2.1.0", "use-callback-ref": "^1.3.3", "use-sidecar": "^1.1.3" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-HpMh8+oahmIdOuS5aFKKY6Pyog+FNaZV/XyJOq7b4YFwsFHe5yYfdbIalI4k3vU2nSDql7YskmUseHsRrJqIPA=="], - - "react-remove-scroll-bar": ["react-remove-scroll-bar@2.3.8", "", { "dependencies": { "react-style-singleton": "^2.2.2", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" }, "optionalPeers": ["@types/react"] }, "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q=="], - - "react-router": ["react-router@7.9.6", "", { "dependencies": { "cookie": "^1.0.1", "set-cookie-parser": "^2.6.0" }, "peerDependencies": { "react": ">=18", "react-dom": ">=18" }, "optionalPeers": ["react-dom"] }, "sha512-Y1tUp8clYRXpfPITyuifmSoE2vncSME18uVLgaqyxh9H35JWpIfzHo+9y3Fzh5odk/jxPW29IgLgzcdwxGqyNA=="], - - "react-router-dom": ["react-router-dom@7.9.6", "", { "dependencies": { "react-router": "7.9.6" }, "peerDependencies": { "react": ">=18", "react-dom": ">=18" } }, "sha512-2MkC2XSXq6HjGcihnx1s0DBWQETI4mlis4Ux7YTLvP67xnGxCvq+BcCQSO81qQHVUTM1V53tl4iVVaY5sReCOA=="], - - "react-style-singleton": ["react-style-singleton@2.2.3", "", { "dependencies": { "get-nonce": "^1.0.0", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ=="], - - "rehype-highlight": ["rehype-highlight@7.0.2", "", { "dependencies": { 
"@types/hast": "^3.0.0", "hast-util-to-text": "^4.0.0", "lowlight": "^3.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" } }, "sha512-k158pK7wdC2qL3M5NcZROZ2tR/l7zOzjxXd5VGdcfIyoijjQqpHd3JKtYSBDpDZ38UI2WJWuFAtkMDxmx5kstA=="], - - "rehype-raw": ["rehype-raw@7.0.0", "", { "dependencies": { "@types/hast": "^3.0.0", "hast-util-raw": "^9.0.0", "vfile": "^6.0.0" } }, "sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww=="], - - "remark-gfm": ["remark-gfm@4.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-gfm": "^3.0.0", "micromark-extension-gfm": "^3.0.0", "remark-parse": "^11.0.0", "remark-stringify": "^11.0.0", "unified": "^11.0.0" } }, "sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg=="], - - "remark-parse": ["remark-parse@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-from-markdown": "^2.0.0", "micromark-util-types": "^2.0.0", "unified": "^11.0.0" } }, "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA=="], - - "remark-rehype": ["remark-rehype@11.1.2", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "mdast-util-to-hast": "^13.0.0", "unified": "^11.0.0", "vfile": "^6.0.0" } }, "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw=="], - - "remark-stringify": ["remark-stringify@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-to-markdown": "^2.0.0", "unified": "^11.0.0" } }, "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw=="], - - "require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="], - - "require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="], - - 
"resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="], - - "reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="], - - "rollup": ["rollup@4.53.3", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.53.3", "@rollup/rollup-android-arm64": "4.53.3", "@rollup/rollup-darwin-arm64": "4.53.3", "@rollup/rollup-darwin-x64": "4.53.3", "@rollup/rollup-freebsd-arm64": "4.53.3", "@rollup/rollup-freebsd-x64": "4.53.3", "@rollup/rollup-linux-arm-gnueabihf": "4.53.3", "@rollup/rollup-linux-arm-musleabihf": "4.53.3", "@rollup/rollup-linux-arm64-gnu": "4.53.3", "@rollup/rollup-linux-arm64-musl": "4.53.3", "@rollup/rollup-linux-loong64-gnu": "4.53.3", "@rollup/rollup-linux-ppc64-gnu": "4.53.3", "@rollup/rollup-linux-riscv64-gnu": "4.53.3", "@rollup/rollup-linux-riscv64-musl": "4.53.3", "@rollup/rollup-linux-s390x-gnu": "4.53.3", "@rollup/rollup-linux-x64-gnu": "4.53.3", "@rollup/rollup-linux-x64-musl": "4.53.3", "@rollup/rollup-openharmony-arm64": "4.53.3", "@rollup/rollup-win32-arm64-msvc": "4.53.3", "@rollup/rollup-win32-ia32-msvc": "4.53.3", "@rollup/rollup-win32-x64-gnu": "4.53.3", "@rollup/rollup-win32-x64-msvc": "4.53.3", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-w8GmOxZfBmKknvdXU1sdM9NHcoQejwF/4mNgj2JuEEdRaHwwF12K7e9eXn1nLZ07ad+du76mkVsyeb2rKGllsA=="], - - "run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="], - - "rxjs": ["rxjs@7.8.2", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA=="], - - "scheduler": ["scheduler@0.27.0", "", {}, 
"sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="], - - "semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - - "set-cookie-parser": ["set-cookie-parser@2.7.2", "", {}, "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw=="], - - "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="], - - "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], - - "shell-quote": ["shell-quote@1.8.3", "", {}, "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw=="], - - "siginfo": ["siginfo@2.0.0", "", {}, "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g=="], - - "sirv": ["sirv@3.0.2", "", { "dependencies": { "@polka/url": "^1.0.0-next.24", "mrmime": "^2.0.0", "totalist": "^3.0.0" } }, "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g=="], - - "sonner": ["sonner@2.0.7", "", { "peerDependencies": { "react": "^18.0.0 || ^19.0.0 || ^19.0.0-rc", "react-dom": "^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, "sha512-W6ZN4p58k8aDKA4XPcx2hpIQXBRAgyiWVkYhT7CvK6D3iAu7xjvVyhQHg2/iaKJZ1XVJ4r7XuwGL+WGEK37i9w=="], - - "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="], - - "space-separated-tokens": ["space-separated-tokens@2.0.2", "", {}, "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q=="], - - "stackback": ["stackback@0.0.2", "", {}, 
"sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw=="], - - "state-local": ["state-local@1.0.7", "", {}, "sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w=="], - - "std-env": ["std-env@3.10.0", "", {}, "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg=="], - - "string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], - - "stringify-entities": ["stringify-entities@4.0.4", "", { "dependencies": { "character-entities-html4": "^2.0.0", "character-entities-legacy": "^3.0.0" } }, "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg=="], - - "strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], - - "strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="], - - "strip-literal": ["strip-literal@3.1.0", "", { "dependencies": { "js-tokens": "^9.0.1" } }, "sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg=="], - - "style-to-js": ["style-to-js@1.1.21", "", { "dependencies": { "style-to-object": "1.0.14" } }, "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ=="], - - "style-to-object": ["style-to-object@1.0.14", "", { "dependencies": { "inline-style-parser": "0.2.7" } }, "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw=="], - - "supports-color": ["supports-color@8.1.1", "", { "dependencies": { "has-flag": "^4.0.0" } }, 
"sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q=="], - - "tailwind-merge": ["tailwind-merge@3.4.0", "", {}, "sha512-uSaO4gnW+b3Y2aWoWfFpX62vn2sR3skfhbjsEnaBI81WD1wBLlHZe5sWf0AqjksNdYTbGBEd0UasQMT3SNV15g=="], - - "tailwindcss": ["tailwindcss@4.1.17", "", {}, "sha512-j9Ee2YjuQqYT9bbRTfTZht9W/ytp5H+jJpZKiYdP/bpnXARAuELt9ofP0lPnmHjbga7SNQIxdTAXCmtKVYjN+Q=="], - - "tapable": ["tapable@2.3.0", "", {}, "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="], - - "tinybench": ["tinybench@2.9.0", "", {}, "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg=="], - - "tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="], - - "tinyglobby": ["tinyglobby@0.2.15", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ=="], - - "tinypool": ["tinypool@1.1.1", "", {}, "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg=="], - - "tinyrainbow": ["tinyrainbow@2.0.0", "", {}, "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw=="], - - "tinyspy": ["tinyspy@4.0.4", "", {}, "sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q=="], - - "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], - - "totalist": ["totalist@3.0.1", "", {}, "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ=="], - - "tree-kill": ["tree-kill@1.2.2", "", { "bin": "cli.js" }, "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A=="], - - "trim-lines": 
["trim-lines@3.0.1", "", {}, "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg=="], - - "trough": ["trough@2.2.0", "", {}, "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw=="], - - "ts-api-utils": ["ts-api-utils@2.1.0", "", { "peerDependencies": { "typescript": ">=4.8.4" } }, "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ=="], - - "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], - - "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], - - "type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], - - "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], - - "typescript-eslint": ["typescript-eslint@8.47.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.47.0", "@typescript-eslint/parser": "8.47.0", "@typescript-eslint/typescript-estree": "8.47.0", "@typescript-eslint/utils": "8.47.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-Lwe8i2XQ3WoMjua/r1PHrCTpkubPYJCAfOurtn+mtTzqB6jNd+14n9UN1bJ4s3F49x9ixAm0FLflB/JzQ57M8Q=="], - - "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - - "unified": ["unified@11.0.5", "", { "dependencies": { "@types/unist": "^3.0.0", "bail": "^2.0.0", "devlop": "^1.0.0", "extend": "^3.0.0", "is-plain-obj": "^4.0.0", "trough": "^2.0.0", "vfile": "^6.0.0" } }, 
"sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA=="], - - "unist-util-find-after": ["unist-util-find-after@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-amQa0Ep2m6hE2g72AugUItjbuM8X8cGQnFoHk0pGfrFeT9GZhzN5SW8nRsiGKK7Aif4CrACPENkA6P/Lw6fHGQ=="], - - "unist-util-is": ["unist-util-is@6.0.1", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g=="], - - "unist-util-position": ["unist-util-position@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA=="], - - "unist-util-stringify-position": ["unist-util-stringify-position@4.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ=="], - - "unist-util-visit": ["unist-util-visit@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0", "unist-util-visit-parents": "^6.0.0" } }, "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg=="], - - "unist-util-visit-parents": ["unist-util-visit-parents@6.0.2", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ=="], - - "update-browserslist-db": ["update-browserslist-db@1.1.4", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-q0SPT4xyU84saUX+tomz1WLkxUbuaJnR1xWt17M7fJtEJigJeWUNGUqrauFXsHnqev9y9JTRGwk13tFBuKby4A=="], - - "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, 
"sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], - - "use-callback-ref": ["use-callback-ref@1.3.3", "", { "dependencies": { "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg=="], - - "use-sidecar": ["use-sidecar@1.1.3", "", { "dependencies": { "detect-node-es": "^1.1.0", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ=="], - - "vfile": ["vfile@6.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile-message": "^4.0.0" } }, "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q=="], - - "vfile-location": ["vfile-location@5.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile": "^6.0.0" } }, "sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg=="], - - "vfile-message": ["vfile-message@4.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw=="], - - "vite": ["vite@7.2.4", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, 
"optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-NL8jTlbo0Tn4dUEXEsUg8KeyG/Lkmc4Fnzb8JXN/Ykm9G4HNImjtABMJgkQoVjOBN/j2WAwDTRytdqJbZsah7w=="], - - "vite-node": ["vite-node@3.2.4", "", { "dependencies": { "cac": "^6.7.14", "debug": "^4.4.1", "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" } }, "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg=="], - - "vitest": ["vitest@3.2.4", "", { "dependencies": { "@types/chai": "^5.2.2", "@vitest/expect": "3.2.4", "@vitest/mocker": "3.2.4", "@vitest/pretty-format": "^3.2.4", "@vitest/runner": "3.2.4", "@vitest/snapshot": "3.2.4", "@vitest/spy": "3.2.4", "@vitest/utils": "3.2.4", "chai": "^5.2.0", "debug": "^4.4.1", "expect-type": "^1.2.1", "magic-string": "^0.30.17", "pathe": "^2.0.3", "picomatch": "^4.0.2", "std-env": "^3.9.0", "tinybench": "^2.9.0", "tinyexec": "^0.3.2", "tinyglobby": "^0.2.14", "tinypool": "^1.1.1", "tinyrainbow": "^2.0.0", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", "vite-node": "3.2.4", "why-is-node-running": "^2.3.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@types/debug": "^4.1.12", "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "@vitest/browser": "3.2.4", "@vitest/ui": "3.2.4", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@edge-runtime/vm", "@types/debug", "@types/node", "@vitest/browser", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A=="], - - "web-namespaces": ["web-namespaces@2.0.1", "", {}, "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ=="], - - "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], - - "why-is-node-running": ["why-is-node-running@2.3.0", "", { "dependencies": { "siginfo": "^2.0.0", "stackback": "0.0.2" }, "bin": { "why-is-node-running": "cli.js" } }, "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w=="], - - "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], - - "wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], - - "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="], - - "yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="], - - "yaml-ast-parser": ["yaml-ast-parser@0.0.43", "", {}, "sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A=="], - - "yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="], - - "yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="], - - "yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], - - "zod": ["zod@4.1.12", "", {}, "sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ=="], - - "zustand": ["zustand@5.0.8", "", { 
"peerDependencies": { "@types/react": ">=18.0.0", "immer": ">=9.0.6", "react": ">=18.0.0", "use-sync-external-store": ">=1.2.0" }, "optionalPeers": ["@types/react", "immer", "react", "use-sync-external-store"] }, "sha512-gyPKpIaxY9XcO2vSMrLbiER7QMAMGOQZVRdJ6Zi782jkbzZygq5GI9nG8g+sMgitRtndwaBSl7uiqC49o1SSiw=="], - - "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], - - "@babel/code-frame/js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="], - - "@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], - - "@eslint/eslintrc/globals": ["globals@14.0.0", "", {}, "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="], - - "@radix-ui/react-collection/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], - - "@radix-ui/react-dialog/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], - - "@radix-ui/react-label/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.4", "", { "dependencies": { "@radix-ui/react-slot": "1.2.4" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || 
^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg=="], - - "@radix-ui/react-menu/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], - - "@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], - - "@radix-ui/react-select/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], - - "@redocly/ajv/json-schema-traverse": ["json-schema-traverse@1.0.0", "", {}, "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="], - - "@redocly/openapi-core/minimatch": ["minimatch@5.1.6", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g=="], - - "@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.7.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, 
"sha512-o1uhUASyo921r2XtHYOHy7gdkGLge8ghBEQHMWmyJFoXlpU58kIrhhN3w26lpQb6dspetweapMn2CSNwQ8I4wg=="], - - "@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.7.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA=="], - - "@tailwindcss/oxide-wasm32-wasi/@emnapi/wasi-threads": ["@emnapi/wasi-threads@1.1.0", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ=="], - - "@tailwindcss/oxide-wasm32-wasi/@napi-rs/wasm-runtime": ["@napi-rs/wasm-runtime@1.0.7", "", { "dependencies": { "@emnapi/core": "^1.5.0", "@emnapi/runtime": "^1.5.0", "@tybys/wasm-util": "^0.10.1" }, "bundled": true }, "sha512-SeDnOO0Tk7Okiq6DbXmmBODgOAb9dp9gjlphokTUxmt8U3liIP1ZsozBahH69j/RJv+Rfs6IwUKHTgQYJ/HBAw=="], - - "@tailwindcss/oxide-wasm32-wasi/@tybys/wasm-util": ["@tybys/wasm-util@0.10.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg=="], - - "@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], - - "@typescript-eslint/eslint-plugin/ignore": ["ignore@7.0.5", "", {}, "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg=="], - - "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "@typescript-eslint/typescript-estree/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="], - - 
"backend/@opencode-webui/shared": ["@opencode-webui/shared@file:shared", { "dependencies": { "dotenv": "^17.2.3", "zod": "^4.1.12" }, "devDependencies": { "typescript": "^5" } }], - - "chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - - "cmdk/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.4", "", { "dependencies": { "@radix-ui/react-slot": "1.2.4" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg=="], - - "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], - - "hast-util-to-parse5/property-information": ["property-information@6.5.0", "", {}, "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig=="], - - "mdast-util-find-and-replace/escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="], - - "micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], - - "openapi-typescript/supports-color": ["supports-color@10.2.2", "", {}, "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g=="], - - "parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="], - - "@redocly/openapi-core/minimatch/brace-expansion": 
["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], - - "@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], - } -} diff --git a/bun.lockb b/bun.lockb new file mode 100755 index 00000000..88b719ea Binary files /dev/null and b/bun.lockb differ diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml deleted file mode 100644 index 6dd1e719..00000000 --- a/docker-compose.dev.yml +++ /dev/null @@ -1,48 +0,0 @@ -services: - backend: - build: - context: . - dockerfile: Dockerfile - target: base - container_name: opencode-backend-dev - command: sh -c "cd /app && bun --watch backend/src/index.ts" - ports: - - "5001:5001" - environment: - - NODE_ENV=development - - HOST=0.0.0.0 - - PORT=5001 - - OPENCODE_SERVER_PORT=5551 - - DATABASE_PATH=/app/backend/data/opencode.db - - WORKSPACE_PATH=/workspace - - DEBUG=true - volumes: - - ./backend:/app/backend - - ./shared:/app/shared - - opencode-workspace-dev:/workspace - - opencode-data-dev:/app/backend/data - restart: unless-stopped - - frontend: - build: - context: . - dockerfile: Dockerfile - target: base - container_name: opencode-frontend-dev - command: sh -c "cd /app/frontend && npm run dev -- --host 0.0.0.0" - ports: - - "5173:5173" - environment: - - VITE_API_URL=http://localhost:5001 - volumes: - - ./frontend:/app/frontend - - ./shared:/app/shared - depends_on: - - backend - restart: unless-stopped - -volumes: - opencode-workspace-dev: - driver: local - opencode-data-dev: - driver: local diff --git a/docker-compose.yml b/docker-compose.yml index 7a318007..999648c8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,17 +1,22 @@ services: app: - build: - context: . 
- dockerfile: Dockerfile - container_name: opencode-web + image: ghcr.io/dzianisv/opencode-manager:latest + # build: + # context: . + # dockerfile: Dockerfile + container_name: opencode-manager ports: - - "5001:5001" + - "5003:5003" + - "5100:5100" + - "5101:5101" + - "5102:5102" + - "5103:5103" environment: - NODE_ENV=production - HOST=0.0.0.0 - - PORT=5001 + - PORT=5003 - OPENCODE_SERVER_PORT=5551 - - DATABASE_PATH=/app/backend/data/opencode.db + - DATABASE_PATH=/app/data/opencode.db - WORKSPACE_PATH=/workspace - PROCESS_START_WAIT_MS=2000 - PROCESS_VERIFY_WAIT_MS=1000 @@ -22,12 +27,15 @@ services: - SANDBOX_TTL_HOURS=24 - CLEANUP_INTERVAL_MINUTES=60 - DEBUG=false + - OPENCODE_FORK_REPO=${OPENCODE_FORK_REPO:-} + - OPENCODE_FORK_BRANCH=${OPENCODE_FORK_BRANCH:-main} volumes: - opencode-workspace:/workspace - - opencode-data:/app/backend/data + - opencode-data:/app/data + - ~/.kube/config:/home/node/.kube/config:ro restart: unless-stopped healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:5001/api/health"] + test: ["CMD", "curl", "-f", "http://localhost:5003/api/health"] interval: 30s timeout: 3s retries: 3 diff --git a/docs/alternatives.md b/docs/alternatives.md new file mode 100644 index 00000000..a89daedf --- /dev/null +++ b/docs/alternatives.md @@ -0,0 +1,77 @@ +# OpenCode Manager Alternatives Research + +**Date:** January 2026 +**Conclusion:** Keep opencode-manager - no alternatives have voice capabilities + +## Summary + +We evaluated open-source alternatives to opencode-manager to determine if migration would be beneficial. After reviewing the top projects, we concluded that opencode-manager should be maintained because its voice features (STT, TTS, Talk Mode) are unique and not available in any alternative. + +## Alternatives Evaluated + +### 1. 
claude-code-webui (867 stars) +**Repository:** https://github.com/anthropics/claude-code-webui + +**Features:** +- Basic web UI for Claude Code +- Session management +- File browsing + +**Missing:** +- No voice/STT support +- No TTS support +- No Talk Mode +- No push notifications +- No Cloudflare tunnel integration + +### 2. Portal (314 stars) +**Repository:** https://github.com/anthropics/portal + +**Features:** +- Mobile-first design +- Uses official @opencode-ai/sdk +- Clean UI + +**Missing:** +- No voice/STT support +- No TTS support +- No Talk Mode +- No push notifications +- No tunnel integration + +## opencode-manager Unique Features + +| Feature | opencode-manager | claude-code-webui | Portal | +|---------|------------------|-------------------|--------| +| Voice STT (Whisper) | ✅ | ❌ | ❌ | +| TTS (Chatterbox/Coqui) | ✅ | ❌ | ❌ | +| Talk Mode | ✅ | ❌ | ❌ | +| Push Notifications | ✅ | ❌ | ❌ | +| Notification Sound | ✅ | ❌ | ❌ | +| Cloudflare Tunnel | ✅ | ❌ | ❌ | +| Multi-repo Management | ✅ | ❌ | ❌ | +| OpenCode Config UI | ✅ | ❌ | ❌ | + +## Decision + +**Keep opencode-manager** and continue development because: + +1. **Voice is the killer feature** - No other project offers voice-to-code capabilities +2. **Mobile coding use case** - Talk Mode enables hands-free coding from mobile devices +3. **Notification system** - Important for long-running tasks when away from screen +4. **Tunnel integration** - Easy remote access without manual setup + +## Improvements Made + +Based on this research, we identified and implemented improvements: + +1. **Project sync from OpenCode API** - Auto-register projects (including sandboxes like vibe.2, vibe.3) +2. **Removed lsof dependency** - Use OpenCode HTTP API for discovery instead +3. 
**Health monitoring** - Auto-reconnect when OpenCode server restarts + +## Future Considerations + +If migrating in the future, consider: +- Porting voice features to Portal (smaller codebase, uses official SDK) +- Contributing STT/TTS as plugins to existing projects +- Creating a standalone voice bridge that works with any Claude Code UI diff --git a/docs/assets/talk-mode-demo.gif b/docs/assets/talk-mode-demo.gif new file mode 100644 index 00000000..8fa28ac1 Binary files /dev/null and b/docs/assets/talk-mode-demo.gif differ diff --git a/docs/azureDeploy.md b/docs/azureDeploy.md new file mode 100644 index 00000000..8a1488a5 --- /dev/null +++ b/docs/azureDeploy.md @@ -0,0 +1,13 @@ +To Complete Deployment +The Docker build on Azure is running in the background. Once it completes: +# 1. Start the containers +ssh azureuser@4.246.110.199 "cd ~/opencode-manager && sudo docker compose up -d" +# 2. Enable YOLO mode +ssh azureuser@4.246.110.199 "sudo docker exec opencode-manager sed -i 's/yolo = false/yolo = true/' /app/.opencode.json 2>/dev/null || true" +# 3. Get tunnel URL +ssh azureuser@4.246.110.199 "sudo docker logs cloudflared-tunnel 2>&1 | grep -o 'https://[a-z0-9-]*\.trycloudflare\.com' | tail -1" +# 4. Verify Coqui is installed +ssh azureuser@4.246.110.199 "sudo docker exec opencode-manager ls -la /opt/ | grep coqui" +# 5. 
Test health +curl -u admin:PASSWORD "https://TUNNEL-URL/api/health" +curl -u admin:PASSWORD "https://TUNNEL-URL/api/tts/coqui/status" \ No newline at end of file diff --git a/docs/cloudVibeCoding.md b/docs/cloudVibeCoding.md new file mode 100644 index 00000000..ab229ae7 --- /dev/null +++ b/docs/cloudVibeCoding.md @@ -0,0 +1,285 @@ +# Building "Vibe Coding" Cloud: Self-Hosting OpenCode Manager with Voice, Terminal, and Secure Cloudflare Tunnels + +![Vibe Coding Header](https://images.unsplash.com/photo-1555099962-4199c345e5dd?q=80&w=2940&auto=format&fit=crop) + +"Vibe Coding" isn't just about AI code generation—it's about an environment where you can talk to your code, execute it instantly, and manage your infrastructure from anywhere. It's the shift from "typing syntax" to "commanding intent." + +We built the ultimate self-hosted vibe coding stack using **OpenCode Manager**. This isn't just a wrapper; it's a full-stack enhancement that turns the powerful [OpenCode](https://opencode.ai) CLI into a web-native, voice-controlled, multi-LLM development platform. + +Here is the deep dive into how we engineered a solution that lets you vibe code from an iPad on a walk or a laptop in a cafe, powered by a secure cloud VM. + +--- + +## The Core: What Makes This Special? + +Most AI coding assistants are just chat interfaces or IDE plugins. Our solution is different because it focuses on **infrastructure, control, and interaction**: + +1. **OpenCode Manager (The Enhanced Core):** We didn't just host OpenCode; we wrapped it in a robust Node.js backend that adds: + * **Universal LLM Support:** Bring your own key. Whether it's OpenAI, Anthropic, Gemini, or a local model, our `AuthService` manages secure credential storage (`auth.json` with strict permissions), letting you swap brains on the fly. + * **Full Web Terminal:** Not a simulated console. We integrated `node-pty` to spawn real shell sessions on the server, piped directly to your browser via WebSockets. 
You can run `docker build`, `git push`, or `cargo run` from your phone. + * **Voice-First Interface:** Integrated Text-to-Speech (TTS) and Voice-to-Text so you can literally talk to your agent while walking. + +2. **Cloudflare Tunneling (The Secure Gateway):** + * Zero open ports. No VPNs. No public IPs exposed. + * We use Cloudflare Zero Trust to tunnel traffic from our private Docker container directly to a public HTTPS domain. This gives us enterprise-grade security and global caching for free. + +3. **Dockerized Consistency:** + * The entire stack (Frontend, Backend, Database) is packaged into a single multi-stage Docker image. This ensures that the complex dependency chain (Node.js, Python, system libraries for PTY) works perfectly on any VM. + +--- + +## System Architecture + +``` +┌─────────────────────────────────────────────────────────────────────────────┐ +│ CLOUD VM (Azure/AWS) │ +│ ┌───────────────────────────────────────────────────────────────────────┐ │ +│ │ Docker Environment │ │ +│ │ │ │ +│ │ ┌─────────────────────────────────────────────────────────────────┐ │ │ +│ │ │ OpenCode Manager Container │ │ │ +│ │ │ │ │ │ +│ │ │ ┌─────────────────┐ ┌─────────────────────────────────┐ │ │ │ +│ │ │ │ Frontend │ │ Backend │ │ │ │ +│ │ │ │ (React/Vite) │ │ (Bun/Hono) │ │ │ │ +│ │ │ │ │ │ │ │ │ │ +│ │ │ │ ┌───────────┐ │ │ ┌───────────┐ ┌───────────┐ │ │ │ │ +│ │ │ │ │ Chat UI │ │ HTTP │ │ Routes │ │ Services │ │ │ │ │ +│ │ │ │ ├───────────┤ │◄────►│ ├───────────┤ ├───────────┤ │ │ │ │ +│ │ │ │ │ Terminal │ │ │ │ /api/* │ │ AuthSvc │ │ │ │ │ +│ │ │ │ ├───────────┤ │ WS │ │ /ws/* │ │ RepoSvc │ │ │ │ │ +│ │ │ │ │ Voice │ │◄────►│ │ │ │ FileSvc │ │ │ │ │ +│ │ │ │ │ Controls │ │ │ └───────────┘ └───────────┘ │ │ │ │ +│ │ │ │ └───────────┘ │ │ │ │ │ │ │ │ +│ │ │ └─────────────────┘ │ ▼ ▼ │ │ │ │ +│ │ │ │ ┌───────────┐ ┌───────────┐ │ │ │ │ +│ │ │ │ │ SQLite │ │ auth.json │ │ │ │ │ +│ │ │ │ │ DB │ │ (0o600) │ │ │ │ │ +│ │ │ │ └───────────┘ └───────────┘ │ │ │ │ +│ 
│ │ │ │ │ │ │ │ +│ │ │ │ ▼ │ │ │ │ +│ │ │ │ ┌─────────────────────────┐ │ │ │ │ +│ │ │ │ │ OpenCode Server │ │ │ │ │ +│ │ │ │ │ (Port 5551) │ │ │ │ │ +│ │ │ │ │ │ │ │ │ │ +│ │ │ │ │ ┌───────┐ ┌─────────┐ │ │ │ │ │ +│ │ │ │ │ │node- │ │ LLM │ │ │ │ │ │ +│ │ │ │ │ │pty │ │ Proxy │ │ │ │ │ │ +│ │ │ │ │ └───────┘ └─────────┘ │ │ │ │ │ +│ │ │ │ └─────────────────────────┘ │ │ │ │ +│ │ │ └─────────────────────────────────┘ │ │ │ +│ │ └─────────────────────────────────────────────────────────────────┘ │ │ +│ │ │ │ │ +│ │ │ HTTP (internal) │ │ +│ │ ▼ │ │ +│ │ ┌─────────────────────────────────────────────────────────────────┐ │ │ +│ │ │ Cloudflare Tunnel Container │ │ │ +│ │ │ (cloudflared) │ │ │ +│ │ └─────────────────────────────────────────────────────────────────┘ │ │ +│ │ │ │ │ +│ └────────────────────────────────────│──────────────────────────────────┘ │ +│ │ Encrypted Tunnel │ +└───────────────────────────────────────│─────────────────────────────────────┘ + │ + ▼ +┌───────────────────────────────────────────────────────────────────────────────┐ +│ Cloudflare Edge Network │ +│ ┌─────────────────────────────────────────────────────────────────────────┐ │ +│ │ Zero Trust Access ──► TLS Termination ──► Global CDN ──► DDoS Shield │ │ +│ └─────────────────────────────────────────────────────────────────────────┘ │ +└───────────────────────────────────────────────────────────────────────────────┘ + │ + │ HTTPS (vibe.your-domain.com) + ▼ +┌───────────────────────────────────────────────────────────────────────────────┐ +│ Clients │ +│ ┌─────────┐ ┌─────────┐ ┌─────────┐ ┌─────────┐ │ +│ │ iPad │ │ Laptop │ │ Desktop │ │ Mobile │ │ +│ │ Walk │ │ Cafe │ │ Home │ │ Browser │ │ +│ └─────────┘ └─────────┘ └─────────┘ └─────────┘ │ +└───────────────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## Request Flow Diagram + +``` +┌──────────────────────────────────────────────────────────────────────────────┐ +│ Voice Command Flow │ 
+└──────────────────────────────────────────────────────────────────────────────┘ + + User Browser Backend OpenCode LLM API + │ │ │ │ │ + │ "Clone repo and │ │ │ │ + │ run tests" │ │ │ │ + │ ──────────────────►│ │ │ │ + │ (Voice Input) │ │ │ │ + │ │ │ │ │ + │ │ Voice-to-Text │ │ │ + │ │ (Web Speech API) │ │ │ + │ │ │ │ │ + │ │ POST /api/chat │ │ │ + │ │───────────────────►│ │ │ + │ │ │ │ │ + │ │ │ Proxy Request │ │ + │ │ │──────────────────►│ │ + │ │ │ │ │ + │ │ │ │ API Call │ + │ │ │ │──────────────►│ + │ │ │ │ │ + │ │ │ │◄──────────────│ + │ │ │ │ Response │ + │ │ │ │ │ + │ │ │ Tool Execution │ │ + │ │ │ (git clone, npm) │ │ + │ │ │◄──────────────────│ │ + │ │ │ │ │ + │ │ WebSocket Stream │ │ │ + │ │◄───────────────────│ │ │ + │ │ (Terminal Output) │ │ │ + │ │ │ │ │ + │ │ SSE: Chat Response│ │ │ + │ │◄───────────────────│ │ │ + │ │ │ │ │ + │ │ Text-to-Speech │ │ │ + │ ◄─────────────────│ (Voice Output) │ │ │ + │ Spoken Response │ │ │ │ + │ │ │ │ │ + + +┌──────────────────────────────────────────────────────────────────────────────┐ +│ Terminal Session Flow │ +└──────────────────────────────────────────────────────────────────────────────┘ + + Browser Backend node-pty Shell + │ │ │ │ + │ WS: Connect │ │ │ + │ /ws/terminal │ │ │ + │───────────────────────►│ │ │ + │ │ │ │ + │ │ spawn('bash') │ │ + │ │─────────────────────────►│ │ + │ │ │ │ + │ │ │ Fork PTY │ + │ │ │───────────────────►│ + │ │ │ │ + │ WS: Send Command │ │ │ + │ "ls -la" │ │ │ + │───────────────────────►│ │ │ + │ │ │ │ + │ │ write(data) │ │ + │ │─────────────────────────►│ │ + │ │ │ │ + │ │ │ stdin │ + │ │ │───────────────────►│ + │ │ │ │ + │ │ │◄───────────────────│ + │ │ │ stdout │ + │ │ │ │ + │ │ on('data') │ │ + │ │◄─────────────────────────│ │ + │ │ │ │ + │ WS: Receive Output │ │ │ + │◄───────────────────────│ │ │ + │ (rendered in xterm.js)│ │ │ + │ │ │ │ + + +┌──────────────────────────────────────────────────────────────────────────────┐ +│ LLM Provider Authentication │ 
+└──────────────────────────────────────────────────────────────────────────────┘ + + User Frontend Backend auth.json + │ │ │ │ + │ Set API Key │ │ │ + │─────────────────►│ │ │ + │ │ │ │ + │ │ POST /api/providers│ │ + │ │ /:id/credentials │ │ + │ │────────────────────►│ │ + │ │ │ │ + │ │ │ Encrypt & Store │ + │ │ │ (chmod 0o600) │ + │ │ │────────────────────►│ + │ │ │ │ + │ │ 200 OK │ │ + │ │ (key never echoed) │ │ + │ │◄────────────────────│ │ + │ │ │ │ + │ ◄───────────────│ │ │ + │ "Key Saved" │ │ │ + │ │ │ │ +``` + +--- + +## Engineering Deep Dive: Fixes & Architecture + +Building a seamless web experience for a CLI tool required solving several complex engineering challenges. + +### 1. The "Ghost Terminal" Bug (Docker & PTY) +**The Problem:** The web terminal worked locally but crashed instantly when deployed to Azure. +**The Investigation:** The logs showed "File not found" when trying to spawn a shell. +**The Discovery:** `node-pty` relies on a native C++ compiled binary (`pty-worker.cjs`) to interact with the OS pseudo-terminals. Our Docker multi-stage build was aggressively pruning files, and due to `.dockerignore` rules or build caching, this critical worker file was being left behind in the final image. +**The Fix:** We rewrote the Dockerfile to explicitly copy the `backend` assets in the final `runner` stage and implemented a `fix-terminal-worker.sh` script to verify file presence at runtime. We also forced a cache-busting rebuild on the production VM to ensure the binary matched the OS architecture. + +### 2. Intelligent Version Detection +**The Problem:** The system kept reporting the OpenCode version as `0.0.0`, triggering "Update Required" warnings. +**The Cause:** The `opencode --version` command output included logs like `Listening on 0.0.0.0`. Our initial regex `(\d+\.\d+\.\d+)` was too eager and matched the IP address before the actual version number. 
+**The Fix:** We implemented a robust parsing strategy in `opencode-single-server.ts`: +* **Line-by-Line Parsing:** We process output one line at a time. +* **Strict Regex:** We updated the pattern to `/(?:^|\s|v)(\d+\.\d+\.\d+)(?:\s|$)/` to enforce boundaries. +* **Heuristic Guard:** We added a check to ensure the match isn't followed by a dot (excluding IP addresses). + +### 3. Secure Credential Management +**The Problem:** We needed a way to store API keys for different providers (OpenAI, Anthropic) securely without hardcoding them in env vars. +**The Solution:** We built a dedicated `AuthService` and API routes (`/api/providers/:id/credentials`). +* Credentials are stored in a secured JSON file (`auth.json`) with `0o600` permissions. +* The frontend acts as a management interface, allowing users to securely set, check status, and delete keys without ever exposing the actual key values back to the client. + +--- + +## Quick Start Deployment + +### Prerequisites +* A Cloud VM (Azure B2s or AWS t3.medium recommended) or an existing Ubuntu server +* A Domain Name (managed by Cloudflare) +* Local machine with `bun` installed + +### Step 1: Deploy to VM + +For an existing server: +```bash +export TARGET_HOST="your-server-ip" +bun run scripts/deploy.ts +``` + +For a new Azure VM (requires Azure CLI): +```bash +bun run scripts/deploy.ts +``` + +The script will: +- Create an Azure Resource Group and VM (if deploying new) +- Install Docker and dependencies +- Build and start all containers with `docker compose up -d --build` + +### Step 2: Configure Cloudflare Tunnel + +In your Cloudflare Dashboard (Zero Trust > Access > Tunnels): +1. Create a new tunnel +2. Route your domain (e.g., `vibe.your-domain.com`) to `http://opencode-manager:5003` +3. Copy the tunnel token and add to your server's `.env` file + +Done! Your instance is now live at `https://vibe.your-domain.com` + +### The Experience +Once deployed, you simply navigate to your URL. 
You're greeted by a chat interface. You can: +* **Say:** "Clone the repo from GitHub and run the tests." +* **Watch:** The terminal opens, `git clone` runs, and tests execute. +* **Read:** The agent reads back the results using TTS. + +This is the power of self-hosting: absolute control, privacy, and the ability to fix and extend the platform yourself—just like we did with the terminal and version detection logic. + +**Ready to vibe?** Fork the repo and start building. diff --git a/docs/design.md b/docs/design.md new file mode 100644 index 00000000..0bd1ff49 --- /dev/null +++ b/docs/design.md @@ -0,0 +1,494 @@ +# OpenCode Manager Voice Architecture + +Live voice chat for AI-assisted coding using OpenCode, Whisper STT, Chatterbox TTS, and streaming VAD. + +## System Architecture + +``` +┌─────────────────────────────────────────────────────────────────────────────────┐ +│ BROWSER │ +│ ┌──────────────────────────────────────────────────────────────────────────┐ │ +│ │ TalkModeContext │ │ +│ │ ┌────────────────┐ ┌─────────────────┐ ┌────────────────────┐ │ │ +│ │ │ useStreaming │───▶│ MediaRecorder │───▶│ Blob (webm/opus) │ │ │ +│ │ │ VAD │ │ (100ms chunks) │ │ 2.5s batches │ │ │ +│ │ └────────────────┘ └─────────────────┘ └─────────┬──────────┘ │ │ +│ │ │ │ │ │ +│ │ │ silenceTimeoutMs=1500 │ base64 │ │ +│ │ ▼ ▼ │ │ +│ │ ┌────────────────┐ ┌─────────────────────┐ │ │ +│ │ │ Silence Detect │◀─── no new words ───────│ STT API Client │ │ │ +│ │ │ (1.5s timer) │ │ POST /api/stt/ │ │ │ +│ │ └───────┬────────┘ │ transcribe │ │ │ +│ │ │ └─────────────────────┘ │ │ +│ │ │ fullTranscript │ │ +│ │ ▼ │ │ +│ │ ┌────────────────────────────────────────────────────────────────┐ │ │ +│ │ │ sendToOpenCode() │ │ │ +│ │ │ POST /api/opencode/session/{id}/message │ │ │ +│ │ │ body: { parts: [{ type: 'text', text: transcript }] } │ │ │ +│ │ └────────────────────────────────┬───────────────────────────────┘ │ │ +│ │ │ │ │ +│ │ ┌────────────────────────┼────────────────────────┐ │ │ +│ │ │ 
Poll every 500ms │ GET /session/{id}/message │ │ +│ │ │ ▼ │ │ │ +│ │ │ ┌─────────────────┐ │ │ │ +│ │ │ │ Wait for │ │ │ │ +│ │ │ │ assistant msg │ │ │ │ +│ │ │ │ time.completed │ │ │ │ +│ │ │ └────────┬────────┘ │ │ │ +│ │ └───────────────────────┼─────────────────────────┘ │ │ +│ │ │ │ │ +│ │ ▼ │ │ +│ │ ┌─────────────────┐ │ │ +│ │ │ useTTS hook │ │ │ +│ │ │ speak(text) │───▶ Audio Element │ │ +│ │ └─────────────────┘ │ │ +│ └──────────────────────────────────────────────────────────────────────┘ │ +└─────────────────────────────────────────────────────────────────────────────────┘ + │ + │ HTTP + ▼ +┌─────────────────────────────────────────────────────────────────────────────────┐ +│ BACKEND (Bun + Hono) │ +│ Port 5003 │ +│ ┌──────────────────────────────────────────────────────────────────────────┐ │ +│ │ Routes │ │ +│ │ ┌────────────┐ ┌────────────┐ ┌────────────┐ ┌─────────────────┐ │ │ +│ │ │ /api/stt/* │ │ /api/tts/* │ │ /api/ │ │ /api/opencode/* │ │ │ +│ │ │ │ │ │ │ settings │ │ │ │ │ +│ │ └─────┬──────┘ └─────┬──────┘ └────────────┘ └────────┬────────┘ │ │ +│ │ │ │ │ │ │ +│ │ ▼ ▼ ▼ │ │ +│ │ ┌───────────────────────────────────────────────────────────────────┐ │ │ +│ │ │ Service Layer │ │ │ +│ │ │ ┌──────────────────┐ ┌──────────────────┐ ┌─────────────────┐ │ │ │ +│ │ │ │ WhisperServer │ │ ChatterboxServer │ │ OpenCodeServer │ │ │ │ +│ │ │ │ Manager │ │ Manager │ │ Manager │ │ │ │ +│ │ │ │ │ │ │ │ │ │ │ │ +│ │ │ │ spawn(python3) │ │ spawn(python3) │ │ spawn(opencode) │ │ │ │ +│ │ │ │ healthCheck() │ │ healthCheck() │ │ proxy requests │ │ │ │ +│ │ │ │ transcribe() │ │ synthesize() │ │ │ │ │ │ +│ │ │ └────────┬─────────┘ └────────┬─────────┘ └────────┬────────┘ │ │ │ +│ │ └───────────┼─────────────────────┼─────────────────────┼───────────┘ │ │ +│ └──────────────┼─────────────────────┼─────────────────────┼───────────────┘ │ +│ │ │ │ │ +│ ▼ ▼ ▼ │ +│ ┌──────────────────┐ ┌──────────────────┐ ┌──────────────────────────────┐ │ +│ │ Whisper Server │ │ 
Chatterbox Server│ │ OpenCode Server │ │ +│ │ (FastAPI/Python)│ │ (FastAPI/Python) │ │ (Go binary via npx) │ │ +│ │ │ │ │ │ │ │ +│ │ Port 5552 │ │ Port 5553 │ │ Port 5551 │ │ +│ │ │ │ │ │ │ │ +│ │ faster-whisper │ │ chatterbox-tts │ │ ┌────────────────────────┐ │ │ +│ │ (ctranslate2) │ │ (PyTorch) │ │ │ Anthropic/OpenAI/etc │ │ │ +│ │ │ │ │ │ │ via LLM provider APIs │ │ │ +│ │ Models: │ │ Voice cloning │ │ └────────────────────────┘ │ │ +│ │ - tiny (75MB) │ │ Custom voices │ │ │ │ +│ │ - base (145MB) │ │ from audio │ │ Tools: bash, read, write, │ │ +│ │ - small (488MB) │ │ samples │ │ glob, grep, etc. │ │ +│ │ - medium (1.5GB)│ │ │ │ │ │ +│ │ - large-v3 (3GB)│ │ │ │ │ │ +│ └──────────────────┘ └──────────────────┘ └──────────────────────────────┘ │ +└─────────────────────────────────────────────────────────────────────────────────┘ +``` + +## Data Flow: Voice-to-Response + +``` +┌─────────────┐ ┌─────────────┐ ┌──────────────┐ ┌──────────────┐ +│ Microphone │────▶│ MediaRecord │────▶│ STT API │────▶│ OpenCode │ +│ Input │ │ webm/opus │ │ Whisper │ │ Session │ +│ │ │ 2.5s chunk │ │ transcribe │ │ message │ +└─────────────┘ └─────────────┘ └──────────────┘ └──────┬───────┘ + │ + ┌───────────────────────────────────────────────────────────────┘ + │ + ▼ +┌──────────────┐ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ +│ OpenCode │────▶│ LLM API │────▶│ Tool Exec │────▶│ Response │ +│ Process │ │ Claude/GPT │ │ (if needed) │ │ Text │ +│ Request │ │ │ │ │ │ │ +└──────────────┘ └──────────────┘ └──────────────┘ └──────┬───────┘ + │ + ┌────────────────────────────────────────────────────────────────┘ + │ + ▼ +┌──────────────┐ ┌──────────────┐ ┌──────────────┐ +│ TTS API │────▶│ Chatterbox │────▶│ Audio │ +│ synthesize │ │ generate │ │ Playback │ +│ │ │ WAV output │ │