From 486d190cc40c23fa98585fdb19f0b511753d1f80 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Wed, 4 Mar 2026 14:17:40 +0800 Subject: [PATCH 01/32] =?UTF-8?q?Add=20Supabase=E2=80=91Aidap=20MCP=20serv?= =?UTF-8?q?er=20with=20uvx=20entry?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - New supabase‑aidap package (config, models, platform, tools, utils) - pyproject.toml, uv.lock, README, .gitignore added - uvx script entry `supabase-aidap` for easy execution Co-Authored-By: Claude Opus 4.6 (1M context) --- server/mcp_server_supabase/README.md | 180 +++ server/mcp_server_supabase/README_zh.md | 275 ++++ server/mcp_server_supabase/pyproject.toml | 48 + .../src/mcp_server_supabase/__init__.py | 3 + .../src/mcp_server_supabase/config.py | 60 + .../mcp_server_supabase/models/__init__.py | 17 + .../mcp_server_supabase/models/database.py | 24 + .../models/edge_function.py | 29 + .../src/mcp_server_supabase/models/storage.py | 16 + .../mcp_server_supabase/models/workspace.py | 27 + .../mcp_server_supabase/platform/__init__.py | 4 + .../platform/aidap_client.py | 146 +++ .../platform/supabase_client.py | 79 ++ .../src/mcp_server_supabase/server.py | 167 +++ .../src/mcp_server_supabase/tools/__init__.py | 7 + .../src/mcp_server_supabase/tools/base.py | 37 + .../tools/database_tools.py | 67 + .../tools/edge_function_tools.py | 254 ++++ .../tools/storage_tools.py | 67 + .../tools/workspace_tools.py | 110 ++ .../src/mcp_server_supabase/utils/__init__.py | 3 + .../mcp_server_supabase/utils/decorators.py | 41 + server/mcp_server_supabase/uv.lock | 1139 +++++++++++++++++ 23 files changed, 2800 insertions(+) create mode 100644 server/mcp_server_supabase/README.md create mode 100644 server/mcp_server_supabase/README_zh.md create mode 100644 server/mcp_server_supabase/pyproject.toml create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/__init__.py create mode 100644 
server/mcp_server_supabase/src/mcp_server_supabase/config.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/models/__init__.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/models/database.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/models/edge_function.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/models/storage.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/models/workspace.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/platform/__init__.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/server.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/tools/__init__.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/utils/decorators.py create mode 100644 server/mcp_server_supabase/uv.lock diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md new file mode 100644 index 00000000..7106fd73 --- /dev/null +++ b/server/mcp_server_supabase/README.md @@ -0,0 +1,180 @@ +# Supabase MCP Server + +**Supabase MCP Server** 是一款基于模型上下文协议(Model Context Protocol, MCP)的服务器,实现了对 AIDAP Supabase 
服务的全链路智能化管理。通过自然语言指令,用户可以对工作空间、数据库、Edge Functions、存储等资源进行创建、查询、修改、删除等操作,从而大幅提升 Supabase 开发与运维的效率。 + +--- + +## 项目概览 +| 项目 | 详情 | +| ---- | ---- | +| **版本** | v1.0.0 | +| **描述** | 基于 MCP 管理 AIDAP Supabase 资源,支持智能化数据库与应用开发 | +| **分类** | 数据库与应用开发 | +| **标签** | Supabase, PostgreSQL, Edge Functions, BaaS | + +--- + +## 关键特性 +- **自动默认分支解析**:`branch_id` 参数可选,系统会自动使用工作空间的默认分支。 +- **完整工具集合**:提供 53 个高阶工具,覆盖数据库、Edge Functions、存储、工作空间等全方位能力。 +- **安全与审计**:只读模式、凭证管理、细粒度日志查询与安全建议。 +- **跨语言支持**:兼容 Python、Node.js、Go 等多语言客户端。 + +--- + +## 快速开始 +### 系统依赖 +- Python 3.10+ +- 推荐使用 `uv` 包管理器 + +### 安装 `uv` +```bash +curl -LsSf https://astral.sh/uv/install.sh | sh +``` + +### 本地开发(推荐) +在项目根目录执行: +```bash +uv sync +source .venv/bin/activate +mv .env_example .env # 填写环境变量 +``` + +### 运行方式 +#### 方式一:使用 `uvx`(推荐) +在 MCP 客户端配置文件中添加: +```json +{ + "mcpServers": { + "supabase": { + "command": "uvx", + "args": [ + "--from", + "git+https://github.com/volcengine/mcp-server#subdirectory=server/mcp_server_supabase", + "mcp-server-supabase" + ], + "env": { + "VOLCENGINE_ACCESS_KEY": "your-access-key", + "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_REGION": "cn-beijing" + } + } + } +} +``` +#### 方式二:本地直接运行 +```json +{ + "mcpServers": { + "supabase-dev": { + "command": "uv", + "args": [ + "--directory", + "/ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase", + "run", + "mcp-server-supabase" + ], + "env": { + "VOLCENGINE_ACCESS_KEY": "your-access-key", + "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_REGION": "cn-beijing", + "READ_ONLY": "true" + } + } + } +} +``` +#### 方式三:Python 直接执行 +```json +{ + "mcpServers": { + "supabase": { + "command": "python", + "args": ["-m", "mcp_server_supabase.server"], + "env": { + "VOLCENGINE_ACCESS_KEY": "your-access-key", + "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_REGION": "cn-beijing" + } + } + } +} +``` + +--- + +## 配置说明 +主要配置文件位于 `server/mcp_server_supabase/src/mcp_server_supabase/config/config.yaml`,常用字段: +- 
`transport`:`sse`、`StreamableHTTP`、`stdio`(默认 `sse`) +- `auth`:`oauth`、`none` +- `credential`:`env`(从环境变量读取 AK/SK)或 `token` +- `credential.env`:`VOLCENGINE_ACCESS_KEY`、`VOLCENGINE_SECRET_KEY`、`VOLCENGINE_REGION` + +--- + +## 核心工具一览 +> **注**:以下为常用工具示例,完整列表请参见文档章节 "Tools"。 + +### 数据库操作(8) +- `list_tables` +- `execute_sql` +- `list_extensions` +- `list_migrations` +- `apply_migration` +- `list_databases` +- `create_database` +- `drop_database` + +### Edge Functions(6) +- `list_edge_functions` +- `get_edge_function` +- `deploy_edge_function` +- `delete_edge_function` +- `invoke_edge_function` +- `get_edge_function_logs` + +### 存储管理(8) +- `list_storage_buckets` +- `create_storage_bucket` +- `delete_storage_bucket` +- `list_storage_objects` +- `delete_storage_object` +- `get_storage_object_info` +- `get_storage_config` +- `update_storage_config` + +### 工作空间管理(12) +- `list_workspaces` +- `get_workspace` +- `create_workspace` +- `delete_workspace` +- `start_workspace` +- `stop_workspace` +- `get_workspace_endpoints` +- `get_workspace_api_keys` +- `modify_workspace_name` +- `modify_workspace_settings` +- `modify_workspace_deletion_protection` +- `reset_workspace_password` + +--- + +## 常用 Prompt 示例 +- **数据库**:`"列出我的数据库表"`、`"查询 users 表的所有数据"` +- **Edge Functions**:`"列出所有 Edge Functions"`、`"部署一个新的 Edge Function"` +- **存储**:`"列出所有存储桶"`、`"创建一个公开存储桶"` +- **工作空间**:`"列出我的所有工作空间"`、`"创建一个新的工作空间"` + +--- + +## 文档与资源 +- [火山引擎 AIDAP Supabase 官方文档](https://www.volcengine.com/docs/87275/2105900) +- [Model Context Protocol 介绍](https://modelcontextprotocol.io/introduction) +- [Supabase 官方文档](https://supabase.com/docs) + +--- + +## License + +本项目遵循 MIT 许可证: +[MIT License](https://github.com/volcengine/mcp-server/blob/main/LICENSE) diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md new file mode 100644 index 00000000..daba9ca6 --- /dev/null +++ b/server/mcp_server_supabase/README_zh.md @@ -0,0 +1,275 @@ +# Supabase MCP Server (Python 版本) + +> 将 
Supabase/AIDAP 项目连接到 Cursor、Claude、Windsurf 等 AI 助手 + +[English](./README.md) | 简体中文 + +## 功能特性 + +通过 [Model Context Protocol](https://modelcontextprotocol.io/introduction) (MCP),AI 助手可以直接与你的 Supabase/AIDAP 项目交互。 + +### 支持的功能 + +- ✅ **工作空间管理** - 列出、创建、启动/停止工作空间,管理设置 +- ✅ **数据库管理** - 列出表、执行 SQL、应用迁移、管理数据库和账户 ✨ 增强 +- ✅ **Edge Functions** - 部署、获取代码、调用和管理 Edge Functions ✨ 增强 +- ✅ **调试工具** - 获取多服务日志和性能/安全建议 ✨ 新增 +- ✅ **开发工具** - 生成 TypeScript 类型、获取 API URL 和密钥 ✨ 新增 +- ✅ **存储管理** - 管理存储桶和对象 +- ✅ **分支管理** - 创建、删除、重置、恢复分支 +- ✅ **AIDAP 集成** - 火山引擎工作空间管理(workspace = project) + +## 快速开始 + +### 前置要求 + +⚠️ **重要**: 此 MCP server 依赖内部的 volcengine-python-sdk(包含 AIDAP 模块),需要先安装: + +```bash +# 克隆 SDK 仓库 +git clone https://code.byted.org/iaasng/volcengine-python-sdk.git -b aidap-Python-2025-10-01-online-2306-2026_02_27_11_45_12 + +# 安装 SDK +cd volcengine-python-sdk +pip install -e . +``` + +### 本地安装运行 + +安装完 volcengine-python-sdk 后: + +```bash +# 安装 mcp-server-supabase +cd /path/to/mcp-server/server/mcp_server_supabase +pip install -e . + +# 运行服务器 +python -m mcp_server_supabase.server +``` + +### MCP 客户端配置 + +在 MCP 客户端(如 Claude Desktop、Cursor、Windsurf)中配置: + +```json +{ + "mcpServers": { + "supabase": { + "command": "python", + "args": [ + "-m", + "mcp_server_supabase.server" + ], + "env": { + "VOLCENGINE_ACCESS_KEY": "your-access-key", + "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_REGION": "cn-beijing" + } + } + } +} +``` + + +## 环境变量配置 + +| 变量名 | 必需 | 说明 | +|--------|------|------| +| `VOLCENGINE_ACCESS_KEY` | ✅ | 火山引擎访问密钥 | +| `VOLCENGINE_SECRET_KEY` | ✅ | 火山引擎私密密钥 | +| `VOLCENGINE_REGION` | ⭕ | 区域(默认:cn-beijing) | +| `READ_ONLY` | ⭕ | 只读模式(设置为 "true" 启用) | + + +## 🎯 自动默认分支解析 + +**新功能!** 现在大部分工具的 `branch_id` 参数都是可选的。如果不提供 `branch_id`,系统会自动使用工作空间的默认分支。 + +### 工作原理 + +1. **自动获取**:首次调用时,系统自动查询工作空间的默认分支 +2. **智能缓存**:默认分支 ID 会被缓存,避免重复 API 调用 +3. 
**自动刷新**:当设置新的默认分支时,缓存会自动清除 + +### 使用示例 + +```python +# 之前:必须提供 branch_id +execute_sql(workspace_id="xxx", branch_id="br-xxx", query="SELECT * FROM users") + +# 现在:branch_id 可选,自动使用默认分支 +execute_sql(workspace_id="xxx", query="SELECT * FROM users") +``` + +### 缓存管理 + +如果需要手动清除缓存(例如更改了默认分支): + +```python +# 清除特定工作空间的缓存 +clear_default_branch_cache(workspace_id="xxx") + +# 清除所有缓存 +clear_default_branch_cache() +``` + + +## 可用工具(54 个) + +### 数据库操作(8 个) +- `list_tables` - 列出数据库表 +- `execute_sql` - 执行 SQL 查询 +- `list_extensions` - 列出数据库扩展 +- `list_migrations` - 列出迁移历史 ✨ 新增 +- `apply_migration` - 应用数据库迁移并记录到 schema_migrations ✨ 新增 +- `list_databases` - 列出所有数据库 +- `create_database` - 创建新数据库 +- `drop_database` - 删除数据库 + +### Edge Functions(6 个) +- `list_edge_functions` - 列出 Edge Functions +- `get_edge_function` - 获取 Edge Function 源代码 ✨ 新增 +- `deploy_edge_function` - 部署或更新 Edge Function ✨ 新增 +- `delete_edge_function` - 删除 Edge Function ✨ 新增 +- `invoke_edge_function` - 调用 Edge Function +- `get_edge_function_logs` - 获取函数日志 + +### 调试工具(2 个) +- `get_logs` - 获取服务日志 (postgres/api/auth/storage/realtime/functions) ✨ 新增 +- `get_advisors` - 获取性能和安全建议 (检查缺失索引、未使用索引、缺失主键等) ✨ 新增 + +### 开发工具(3 个) +- `generate_typescript_types` - 根据数据库 schema 生成 TypeScript 类型定义 ✨ 新增 +- `get_project_url` - 获取项目 API URL(别名:get_workspace_endpoints) +- `get_publishable_keys` - 获取可发布的 API 密钥(别名:get_workspace_api_keys) + +### 存储管理(8 个) +- `list_storage_buckets` - 列出存储桶 +- `create_storage_bucket` - 创建存储桶 +- `delete_storage_bucket` - 删除存储桶 +- `list_storage_objects` - 列出存储对象 +- `delete_storage_object` - 删除存储对象 +- `get_storage_object_info` - 获取对象元数据 +- `get_storage_config` - 获取存储配置 ✨ 新增 +- `update_storage_config` - 更新存储配置(需要付费计划) ✨ 新增 + +### 工作空间管理(12 个) +- `list_workspaces` - 列出所有工作空间 +- `get_workspace` - 获取工作空间详情 +- `create_workspace` - 创建新工作空间 +- `delete_workspace` - 删除工作空间 +- `start_workspace` - 启动工作空间 +- `stop_workspace` - 停止工作空间 +- `get_workspace_endpoints` - 获取工作空间端点 +- `get_workspace_api_keys` - 获取 API 密钥 +- 
`modify_workspace_name` - 修改工作空间名称 +- `modify_workspace_settings` - 修改工作空间设置 +- `modify_workspace_deletion_protection` - 修改删除保护策略 +- `reset_workspace_password` - 重置管理员密码 +- `get_workspace_usage_stats` - 获取使用统计 + +### 数据库账户管理(4 个) +- `list_db_accounts` - 列出数据库账户 +- `create_db_account` - 创建数据库账户 +- `delete_db_account` - 删除数据库账户 +- `reset_db_account_password` - 重置账户密码 + +### 分支管理(10 个) +- `list_branches` - 列出所有分支 +- `get_branch_detail` - 获取分支详情 +- `create_branch` - 创建新分支 +- `delete_branch` - 删除分支 +- `reset_branch` - 重置分支 +- `restart_branch` - 重启分支 +- `restore_branch` - 恢复分支 +- `set_default_branch` - 设置默认分支 +- `list_restorable_branches` - 列出可恢复的分支 +- `clear_default_branch_cache` - 清除默认分支缓存 ✨ 新增 + +## 使用示例 + +配置完成后,在 AI 助手中可以这样使用: + +``` +"帮我查看数据库中的所有表" +"执行 SQL: SELECT * FROM users LIMIT 10" +"生成数据库的 TypeScript 类型定义" +"部署一个新的 Edge Function" +"查看最近的 API 日志" +"列出所有组织和项目" +"列出所有 AIDAP 工作空间" +"获取工作空间的 API 密钥" +``` + +## 安全建议 + +⚠️ 连接 LLM 到数据源存在固有风险,请遵循以下最佳实践: + +1. **不要连接生产环境** - 使用开发项目,避免暴露真实数据 +2. **启用只读模式** - 设置 `READ_ONLY=true` 限制写操作 +3. **项目范围限制** - 设置 `SUPABASE_PROJECT_REF` 限制访问范围 +4. **审查工具调用** - 始终在 MCP 客户端中审查并批准工具调用 + +## 本地开发 + +```bash +# 克隆仓库 +git clone https://github.com/volcengine/mcp-server.git +cd mcp-server/server/mcp_server_supabase + +# 安装依赖 +uv pip install -e ".[dev]" + +# 运行测试 +pytest + +# 代码格式化 +black src/ +ruff check src/ +``` + +## 项目结构 + +``` +mcp_server_supabase/ +├── pyproject.toml # 项目配置 +├── README.md # 英文文档 +├── README_zh.md # 中文文档 +└── src/ + └── mcp_server_supabase/ + ├── __init__.py + └── server.py # 主入口(FastMCP 实现) +``` + +## 常见问题 + +### Q: 如何获取 Supabase Access Token? + +A: 访问 [Supabase Dashboard](https://supabase.com/dashboard/account/tokens) 生成个人访问令牌。 + +### Q: 如何获取 AIDAP 密钥? + +A: 登录火山引擎控制台,在 [访问控制](https://console.volcengine.com/iam/keymanage/) 页面创建 Access Key。 + +### Q: AIDAP 中的 workspace 和 Supabase 的 project 有什么区别? + +A: 在 AIDAP 中,workspace 就是 Supabase 的 project。两者是等价的概念,只是名称不同。 + +### Q: 只读模式有什么限制? 
+ +A: 只读模式下,只能执行 SELECT、WITH、EXPLAIN 查询,无法执行 INSERT、UPDATE、DELETE、CREATE 等写操作。 + +### Q: 如何更新到最新版本? + +A: 使用 `uvx` 会自动使用最新版本,无需手动更新。 + +## 相关资源 + +- [Model Context Protocol 文档](https://modelcontextprotocol.io/introduction) +- [Supabase 文档](https://supabase.com/docs) +- [AIDAP 文档](https://www.volcengine.com/docs/6431/1181698) +- [火山引擎 MCP Server 仓库](https://github.com/volcengine/mcp-server) + +## 许可证 + +Apache 2.0 - 详见 [LICENSE](../../LICENSE) 文件 diff --git a/server/mcp_server_supabase/pyproject.toml b/server/mcp_server_supabase/pyproject.toml new file mode 100644 index 00000000..a5419db9 --- /dev/null +++ b/server/mcp_server_supabase/pyproject.toml @@ -0,0 +1,48 @@ +[project] +name = "mcp-server-supabase" +version = "0.1.0" +description = "MCP server for Supabase/AIDAP" +readme = "README.md" +requires-python = ">=3.10" +license = { text = "Apache-2.0" } +authors = [ + { name = "Volcengine", email = "support@volcengine.com" } +] +dependencies = [ + "mcp>=1.1.2", + "httpx>=0.27.0", + "pydantic>=2.0.0", + "volcengine-python-sdk @ git+https://code.byted.org/iaasng/volcengine-python-sdk.git@aidap-Python-2025-10-01-online-2306-2026_02_27_11_45_12", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.0.0", + "pytest-asyncio>=0.23.0", + "black>=24.0.0", + "ruff>=0.3.0", +] +legacy = [ + "psycopg2-binary>=2.9.0", +] + +[project.scripts] +mcp-server-supabase = "mcp_server_supabase.server:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["src/mcp_server_supabase"] + +[tool.hatch.metadata] +allow-direct-references = true + +[tool.black] +line-length = 100 +target-version = ["py310"] + +[tool.ruff] +line-length = 100 +target-version = "py310" diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/__init__.py b/server/mcp_server_supabase/src/mcp_server_supabase/__init__.py new file mode 100644 index 00000000..409d62ba --- /dev/null +++ 
b/server/mcp_server_supabase/src/mcp_server_supabase/__init__.py @@ -0,0 +1,3 @@ +"""Supabase MCP Server package.""" + +__version__ = "0.1.0" diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/config.py b/server/mcp_server_supabase/src/mcp_server_supabase/config.py new file mode 100644 index 00000000..ca79c075 --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/config.py @@ -0,0 +1,60 @@ +import os +import logging + +logger = logging.getLogger(__name__) + +READ_ONLY = os.getenv("READ_ONLY", "false").lower() == "true" + +VOLCENGINE_ACCESS_KEY = os.getenv("VOLCENGINE_ACCESS_KEY") +VOLCENGINE_SECRET_KEY = os.getenv("VOLCENGINE_SECRET_KEY") +VOLCENGINE_REGION = os.getenv("VOLCENGINE_REGION", "cn-beijing") + +# 验证必需的环境变量 +if not VOLCENGINE_ACCESS_KEY: + logger.warning("VOLCENGINE_ACCESS_KEY not set") +if not VOLCENGINE_SECRET_KEY: + logger.warning("VOLCENGINE_SECRET_KEY not set") + +_default_branch_cache = {} +_endpoint_cache = {} +_api_key_cache = {} + + +def get_branch_cache(): + return _default_branch_cache + + +def get_endpoint_cache(): + return _endpoint_cache + + +def get_api_key_cache(): + return _api_key_cache + + +def clear_branch_cache(workspace_id: str = None): + if workspace_id: + _default_branch_cache.pop(workspace_id, None) + else: + _default_branch_cache.clear() + + +def clear_endpoint_cache(workspace_id: str = None): + if workspace_id: + _endpoint_cache.pop(workspace_id, None) + else: + _endpoint_cache.clear() + + +def clear_api_key_cache(workspace_id: str = None): + if workspace_id: + _api_key_cache.pop(workspace_id, None) + else: + _api_key_cache.clear() + + +def clear_all_caches(workspace_id: str = None): + """Clear all caches for a workspace or all workspaces""" + clear_branch_cache(workspace_id) + clear_endpoint_cache(workspace_id) + clear_api_key_cache(workspace_id) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/models/__init__.py 
b/server/mcp_server_supabase/src/mcp_server_supabase/models/__init__.py new file mode 100644 index 00000000..ee654474 --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/models/__init__.py @@ -0,0 +1,17 @@ +from .workspace import Workspace, Branch, ApiKey +from .edge_function import EdgeFunction, EdgeFunctionDeployment +from .storage import StorageBucket, StorageConfig +from .database import Table, Column, Migration + +__all__ = [ + 'Workspace', + 'Branch', + 'ApiKey', + 'EdgeFunction', + 'EdgeFunctionDeployment', + 'StorageBucket', + 'StorageConfig', + 'Table', + 'Column', + 'Migration', +] diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/models/database.py b/server/mcp_server_supabase/src/mcp_server_supabase/models/database.py new file mode 100644 index 00000000..f8345f1a --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/models/database.py @@ -0,0 +1,24 @@ +from typing import Optional, List +from pydantic import BaseModel, Field + + +class Column(BaseModel): + name: str + format: str + is_nullable: Optional[bool] = None + is_unique: Optional[bool] = None + default_value: Optional[str] = None + + +class Table(BaseModel): + schema_name: str = Field(alias="schema") + name: str + columns: List[Column] = [] + + class Config: + populate_by_name = True + + +class Migration(BaseModel): + version: str + name: str diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/models/edge_function.py b/server/mcp_server_supabase/src/mcp_server_supabase/models/edge_function.py new file mode 100644 index 00000000..77b2edc3 --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/models/edge_function.py @@ -0,0 +1,29 @@ +from typing import Optional, List, Dict, Any +from pydantic import BaseModel + + +class EdgeFunction(BaseModel): + id: str + slug: str + name: str + status: str + version: int + created_at: str + updated_at: str + verify_jwt: bool + entrypoint_path: str + runtime_config: Optional[str] = None 
+ runtime: Optional[str] = None + + +class EdgeFunctionFile(BaseModel): + name: str + content: str + + +class EdgeFunctionDeployment(BaseModel): + name: str + entrypoint_path: str = "index.ts" + verify_jwt: bool = True + import_map_path: Optional[str] = None + files: List[EdgeFunctionFile] diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/models/storage.py b/server/mcp_server_supabase/src/mcp_server_supabase/models/storage.py new file mode 100644 index 00000000..590e34d7 --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/models/storage.py @@ -0,0 +1,16 @@ +from typing import Optional +from pydantic import BaseModel + + +class StorageBucket(BaseModel): + id: str + name: str + owner: Optional[str] = None + public: bool = False + created_at: Optional[str] = None + updated_at: Optional[str] = None + + +class StorageConfig(BaseModel): + fileSizeLimit: int + totalFileSizeLimit: Optional[int] = None diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/models/workspace.py b/server/mcp_server_supabase/src/mcp_server_supabase/models/workspace.py new file mode 100644 index 00000000..d30a0aae --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/models/workspace.py @@ -0,0 +1,27 @@ +from datetime import datetime +from typing import Optional +from pydantic import BaseModel, Field + + +class Workspace(BaseModel): + workspace_id: str + workspace_name: str + status: str + region: str + create_time: str + engine_type: str + engine_version: str + + +class Branch(BaseModel): + branch_id: str + branch_name: Optional[str] = None + default: bool = False + workspace_id: Optional[str] = None + + +class ApiKey(BaseModel): + key: str + name: str + type: str + create_time: Optional[str] = None diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/__init__.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/__init__.py new file mode 100644 index 00000000..689ed0a7 --- /dev/null +++ 
b/server/mcp_server_supabase/src/mcp_server_supabase/platform/__init__.py @@ -0,0 +1,4 @@ +from .aidap_client import AidapClient +from .supabase_client import SupabaseClient + +__all__ = ['AidapClient', 'SupabaseClient'] diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py new file mode 100644 index 00000000..9914cc53 --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py @@ -0,0 +1,146 @@ +import logging +from typing import Optional, Dict, Any +from ..config import ( + VOLCENGINE_ACCESS_KEY, + VOLCENGINE_SECRET_KEY, + VOLCENGINE_REGION, + get_branch_cache, + get_endpoint_cache, + get_api_key_cache +) + +logger = logging.getLogger(__name__) + +try: + import volcenginesdkcore + from volcenginesdkaidap import AIDAPApi + from volcenginesdkaidap.models import ( + DescribeBranchesRequest, + DescribeWorkspaceEndpointRequest, + DescribeAPIKeysRequest, + DescribeComputesRequest, + ) +except ImportError: + logger.error("volcengine-python-sdk not installed") + raise + + +class AidapClient: + def __init__(self) -> None: + configuration = volcenginesdkcore.Configuration() + configuration.ak = VOLCENGINE_ACCESS_KEY + configuration.sk = VOLCENGINE_SECRET_KEY + configuration.region = VOLCENGINE_REGION + + api_client = volcenginesdkcore.ApiClient(configuration) + self.client = AIDAPApi(api_client) + + async def get_default_branch_id(self, workspace_id: str, use_cache: bool = True) -> Optional[str]: + cache = get_branch_cache() + if use_cache and workspace_id in cache: + return cache[workspace_id] + + try: + request = DescribeBranchesRequest(workspace_id=workspace_id) + response = self.client.describe_branches(request) + + if hasattr(response, 'branches') and response.branches: + for branch in response.branches: + if getattr(branch, 'default', False): + branch_id = branch.branch_id + cache[workspace_id] = branch_id + return 
branch_id + + first_branch = response.branches[0] + branch_id = first_branch.branch_id + cache[workspace_id] = branch_id + return branch_id + + return None + except Exception as e: + logger.error(f"Error getting default branch: {e}") + return None + + async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, use_cache: bool = True) -> Optional[str]: + # 检查缓存 + cache_key = f"{workspace_id}:{branch_id}" if branch_id else workspace_id + endpoint_cache = get_endpoint_cache() + + if use_cache and cache_key in endpoint_cache: + return endpoint_cache[cache_key] + + if not branch_id: + branch_id = await self.get_default_branch_id(workspace_id) + if not branch_id: + return None + + try: + request = DescribeWorkspaceEndpointRequest( + workspace_id=workspace_id, + branch_id=branch_id + ) + response = self.client.describe_workspace_endpoint(request) + + if hasattr(response, 'endpoints') and response.endpoints: + domains = [] + for endpoint in response.endpoints: + if hasattr(endpoint, 'addresses') and endpoint.addresses: + for addr in endpoint.addresses: + if hasattr(addr, 'address_domain'): + domains.append(addr.address_domain) + + for domain in domains: + if 'volces.com' in domain and 'ivolces.com' not in domain: + result = f"http://{domain}:80" + endpoint_cache[cache_key] = result + return result + + if domains: + result = f"http://{domains[0]}:80" + endpoint_cache[cache_key] = result + return result + + return None + except Exception as e: + logger.error(f"Error getting endpoint: {e}") + return None + + async def get_api_key(self, workspace_id: str, key_type: str = "service_role", + branch_id: Optional[str] = None, use_cache: bool = True) -> Optional[str]: + # 检查缓存 + cache_key = f"{workspace_id}:{key_type}:{branch_id}" if branch_id else f"{workspace_id}:{key_type}" + api_key_cache = get_api_key_cache() + + if use_cache and cache_key in api_key_cache: + return api_key_cache[cache_key] + + if not branch_id: + branch_id = await 
self.get_default_branch_id(workspace_id) + if not branch_id: + return None + + try: + request = DescribeAPIKeysRequest( + workspace_id=workspace_id, + branch_id=branch_id + ) + response = self.client.describe_api_keys(request) + + if hasattr(response, 'api_keys') and response.api_keys: + type_mapping = { + "service_role": "Service", + "anon": "Public" + } + target_type = type_mapping.get(key_type, "Service") + + for key in response.api_keys: + if hasattr(key, 'type') and key.type == target_type: + result = key.key if hasattr(key, 'key') else None + if result: + api_key_cache[cache_key] = result + return result + + return None + except Exception as e: + logger.error(f"Error getting API key: {e}") + return None diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py new file mode 100644 index 00000000..ae5e1f72 --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py @@ -0,0 +1,79 @@ +import httpx +import logging +from typing import Optional, Dict, Any + +logger = logging.getLogger(__name__) + + +class SupabaseClient: + def __init__(self, endpoint: str, api_key: str): + self.endpoint = endpoint + self.api_key = api_key + self._client: Optional[httpx.AsyncClient] = None + + async def _get_client(self) -> httpx.AsyncClient: + """Get or create HTTP client with connection pooling""" + if self._client is None or self._client.is_closed: + self._client = httpx.AsyncClient( + timeout=30.0, + limits=httpx.Limits(max_keepalive_connections=5, max_connections=10) + ) + return self._client + + async def close(self): + """Close HTTP client""" + if self._client and not self._client.is_closed: + await self._client.aclose() + + async def call_api( + self, + path: str, + method: str = "GET", + json_data: Optional[Dict] = None, + headers: Optional[Dict] = None, + params: Optional[Dict] = None, + content: Optional[bytes] = None, + 
timeout: float = 30.0 + ) -> Any: + url = f"{self.endpoint}{path}" + + logger.info(f"[DEBUG] Calling API: method={method}, url={url}, path={path}") + + default_headers = { + "apikey": self.api_key, + "Authorization": f"Bearer {self.api_key}", + } + if headers: + default_headers.update(headers) + + client = await self._get_client() + try: + if content: + response = await client.request( + method, url, content=content, headers=default_headers, + params=params, timeout=timeout + ) + else: + response = await client.request( + method, url, json=json_data, headers=default_headers, + params=params, timeout=timeout + ) + response.raise_for_status() + + if response.status_code == 204 or not response.content: + return {"success": True} + + return response.json() + except httpx.HTTPStatusError as e: + # 对于 HTTP 错误,尝试返回响应体 + try: + error_body = e.response.json() + return error_body + except: + error_details = f"{str(e)}" + raise Exception(f"{error_details} [endpoint: {self.endpoint}, path: {path}]") from e + except Exception as e: + error_details = f"{str(e)}" + if hasattr(e, '__cause__') and e.__cause__: + error_details += f" | Cause: {str(e.__cause__)}" + raise Exception(f"{error_details} [endpoint: {self.endpoint}, path: {path}]") from e diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py new file mode 100644 index 00000000..de4a477d --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -0,0 +1,167 @@ +"""Supabase MCP Server - Refactored Version""" + +import argparse +import logging +import os +from mcp.server.fastmcp import FastMCP + +from .config import READ_ONLY +from .platform import AidapClient +from .tools import EdgeFunctionTools, StorageTools, DatabaseTools, WorkspaceTools + +logger = logging.getLogger(__name__) +logging.basicConfig( + level=logging.INFO, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) + +mcp = FastMCP("Supabase MCP 
# NOTE(review): this chunk begins mid-call; the FastMCP() construction is
# reconstructed from the visible tail — confirm against the full file.
mcp = FastMCP("Supabase MCP Server (AIDAP)", port=int(os.getenv("PORT", "8000")))

aidap_client = AidapClient()
default_workspace_id = os.getenv("DEFAULT_WORKSPACE_ID")

# One tool-class instance per domain; all share the AIDAP client and the
# optional default workspace.
edge_tools = EdgeFunctionTools(aidap_client, default_workspace_id)
storage_tools = StorageTools(aidap_client, default_workspace_id)
database_tools = DatabaseTools(aidap_client, default_workspace_id)
workspace_tools = WorkspaceTools(aidap_client, default_workspace_id)


# FIX: `str = None` defaults are invalid typing (PEP 484 implicit Optional);
# spelled as `str | None` (requires-python >= 3.10 per uv.lock) so FastMCP's
# schema generation sees the correct optionality.
@mcp.tool()
async def list_edge_functions(workspace_id: str | None = None) -> str:
    """Lists all Edge Functions in a workspace."""
    return await edge_tools.list_edge_functions(workspace_id)


@mcp.tool()
async def get_edge_function(function_name: str, workspace_id: str | None = None) -> str:
    """Retrieves the source code and configuration for an Edge Function."""
    return await edge_tools.get_edge_function(function_name, workspace_id)


@mcp.tool()
async def deploy_edge_function(
    function_name: str,
    source_code: str,
    verify_jwt: bool = True,
    runtime: str = "native-node20/v1",
    import_map: str | None = None,
    workspace_id: str | None = None
) -> str:
    """Deploys a new Edge Function or updates an existing one.

    Args:
        function_name: Name of the function to deploy
        source_code: Source code for the function
        verify_jwt: Whether to verify JWT tokens (default: True)
        runtime: Runtime environment (default: native-node20/v1)
            Options: native-node20/v1, native-python3.9/v1,
            native-python3.10/v1, native-python3.12/v1
        import_map: Optional import map JSON for dependencies
        workspace_id: The workspace ID (optional)
    """
    return await edge_tools.deploy_edge_function(
        function_name, source_code, verify_jwt, runtime, import_map, workspace_id
    )


@mcp.tool()
async def delete_edge_function(function_name: str, workspace_id: str | None = None) -> str:
    """Deletes an Edge Function."""
    return await edge_tools.delete_edge_function(function_name, workspace_id)


@mcp.tool()
async def invoke_edge_function(
    function_name: str,
    payload: str | None = None,
    method: str = "POST",
    workspace_id: str | None = None
) -> str:
    """Invokes an Edge Function."""
    return await edge_tools.invoke_edge_function(function_name, payload, method, workspace_id)


@mcp.tool()
async def list_storage_buckets(workspace_id: str | None = None) -> str:
    """Lists all storage buckets in a workspace."""
    return await storage_tools.list_storage_buckets(workspace_id)


@mcp.tool()
async def create_storage_bucket(
    bucket_name: str,
    public: bool = False,
    file_size_limit: int | None = None,
    allowed_mime_types: str | None = None,
    workspace_id: str | None = None
) -> str:
    """Creates a new storage bucket."""
    return await storage_tools.create_storage_bucket(
        bucket_name, public, file_size_limit, allowed_mime_types, workspace_id
    )


@mcp.tool()
async def delete_storage_bucket(bucket_name: str, workspace_id: str | None = None) -> str:
    """Deletes a storage bucket."""
    return await storage_tools.delete_storage_bucket(bucket_name, workspace_id)


@mcp.tool()
async def get_storage_config(workspace_id: str | None = None) -> str:
    """Gets the storage configuration for a workspace."""
    return await storage_tools.get_storage_config(workspace_id)


@mcp.tool()
async def execute_sql(query: str, workspace_id: str | None = None) -> str:
    """Executes raw SQL in the Postgres database."""
    return await database_tools.execute_sql(query, workspace_id)


@mcp.tool()
async def list_tables(schemas: str = "public", workspace_id: str | None = None) -> str:
    """Lists all tables in one or more schemas."""
    # The MCP layer takes a comma-separated string; split it for the tool class.
    schema_list = [s.strip() for s in schemas.split(",")]
    return await database_tools.list_tables(schema_list, workspace_id)


@mcp.tool()
async def list_migrations(workspace_id: str | None = None) -> str:
    """Lists all migrations in the database."""
    return await database_tools.list_migrations(workspace_id)


@mcp.tool()
async def apply_migration(name: str, query: str, workspace_id: str | None = None) -> str:
    """Applies a migration to the database."""
    return await database_tools.apply_migration(name, query, workspace_id)


@mcp.tool()
async def list_workspaces() -> str:
    """Lists all available workspaces."""
    return await workspace_tools.list_workspaces()


@mcp.tool()
async def get_workspace(workspace_id: str) -> str:
    """Gets details for a specific workspace."""
    return await workspace_tools.get_workspace(workspace_id)


def main():
    """CLI entry point: parse flags, log startup configuration, run the server."""
    parser = argparse.ArgumentParser(description="Supabase MCP Server")
    # FastMCP's port was fixed from the PORT env var when `mcp` was created
    # above, so the flag defaults to that value to stay truthful.
    parser.add_argument(
        "--port",
        type=int,
        default=int(os.getenv("PORT", "8000")),
        help="Port to run the server on (mirrors the PORT env var)",
    )
    args = parser.parse_args()

    # BUG in original: --port was parsed but never applied; surface the
    # mismatch instead of silently ignoring the flag.
    if args.port != int(os.getenv("PORT", "8000")):
        logger.warning(
            "--port=%s has no effect: set the PORT env var before startup instead",
            args.port,
        )

    logger.info(f"Starting Supabase MCP Server on port {args.port}")
    logger.info(f"Read-only mode: {READ_ONLY}")
    if default_workspace_id:
        logger.info(f"Default workspace ID: {default_workspace_id}")

    mcp.run()


if __name__ == "__main__":
    main()
# --- tools/__init__.py ---
from .base import BaseTools
from .edge_function_tools import EdgeFunctionTools
from .storage_tools import StorageTools
from .database_tools import DatabaseTools
from .workspace_tools import WorkspaceTools

__all__ = ['BaseTools', 'EdgeFunctionTools', 'StorageTools', 'DatabaseTools', 'WorkspaceTools']

# --- tools/base.py ---
import logging
from typing import Optional

from ..platform import AidapClient, SupabaseClient

logger = logging.getLogger(__name__)


class BaseTools:
    """Base class for all tool classes.

    Provides workspace-id resolution (explicit argument falling back to the
    configured default) and construction of a per-workspace Supabase client.
    """

    def __init__(self, aidap_client: AidapClient, workspace_id: Optional[str] = None):
        self.aidap = aidap_client
        self.default_workspace_id = workspace_id

    def _get_workspace_id(self, workspace_id: Optional[str]) -> str:
        """Return the explicit workspace id, or the configured default.

        Raises:
            ValueError: if neither is available.
        """
        result = workspace_id or self.default_workspace_id
        if not result:
            raise ValueError(
                "workspace_id is required: not provided as parameter and no default workspace_id configured. "
                "Please provide workspace_id or set DEFAULT_WORKSPACE_ID environment variable."
            )
        return result

    async def _get_client(self, workspace_id: str) -> SupabaseClient:
        """Resolve the workspace endpoint + service_role key and build a client.

        Raises:
            ValueError: if the endpoint or API key cannot be resolved.
        """
        endpoint = await self.aidap.get_endpoint(workspace_id)
        logger.debug("Resolved endpoint for workspace %s: %s", workspace_id, endpoint)
        if not endpoint:
            raise ValueError(f"Could not get endpoint for workspace {workspace_id}")

        api_key = await self.aidap.get_api_key(workspace_id, "service_role")
        # SECURITY FIX: the original logged a 20-character prefix of the
        # service_role key at INFO level; never log key material, even partially.
        logger.debug(
            "Resolved service_role API key for workspace %s: %s",
            workspace_id,
            "present" if api_key else "missing",
        )
        if not api_key:
            raise ValueError(f"Could not get API key for workspace {workspace_id}")

        return SupabaseClient(endpoint, api_key)
from typing import Optional, List
import logging
from .base import BaseTools
from ..utils import handle_errors, read_only_check

logger = logging.getLogger(__name__)


class DatabaseTools(BaseTools):
    """Run SQL against the workspace Postgres via the REST /pg/query endpoint."""

    @handle_errors
    async def execute_sql(self, query: str, workspace_id: Optional[str] = None) -> List[dict]:
        """Execute raw SQL and return the result rows.

        Raises:
            ValueError: if the query is empty/blank.
        """
        if not query or not query.strip():
            raise ValueError("SQL query cannot be empty")

        ws_id = self._get_workspace_id(workspace_id)
        logger.info(
            "Executing SQL query",
            extra={"workspace_id": ws_id, "query_length": len(query)}
        )

        client = await self._get_client(ws_id)
        result = await client.call_api("/pg/query", method="POST", json_data={"query": query})

        logger.debug(
            "SQL query returned %s rows",
            len(result) if isinstance(result, list) else "N/A",
        )
        return result

    @handle_errors
    async def list_tables(
        self,
        schemas: Optional[List[str]] = None,  # FIX: explicit Optional (was `List[str] = None`)
        workspace_id: Optional[str] = None,
    ) -> List[dict]:
        """List tables in the given schemas (defaults to ["public"])."""
        if schemas is None:
            schemas = ["public"]
        # FIX: an explicitly empty list used to produce `IN ('')` and silently
        # return nothing; fail loudly instead.
        if not schemas:
            raise ValueError("At least one schema name is required")

        # Whitelist-validate schema names before interpolating them into the
        # quoted string literal below (guards against quote-based injection).
        for schema in schemas:
            if not schema or not schema.replace('_', '').isalnum():
                raise ValueError(f"Invalid schema name: {schema}")

        schema_list = "', '".join(schemas)
        query = f"""
            SELECT
                schemaname as schema,
                tablename as name
            FROM pg_tables
            WHERE schemaname IN ('{schema_list}')
            ORDER BY schemaname, tablename
        """
        return await self.execute_sql(query, workspace_id)

    @handle_errors
    async def list_migrations(self, workspace_id: Optional[str] = None) -> List[dict]:
        """List applied migrations; returns [] when the table doesn't exist."""
        query = """
            SELECT version, name
            FROM supabase_migrations.schema_migrations
            ORDER BY version DESC
        """
        try:
            return await self.execute_sql(query, workspace_id)
        except Exception as e:
            # Best-effort: a fresh database has no supabase_migrations schema.
            logger.warning(f"Failed to list migrations: {e}")
            return []

    @handle_errors
    @read_only_check
    async def apply_migration(self, name: str, query: str, workspace_id: Optional[str] = None) -> dict:
        """Run a migration script against the database.

        NOTE(review): the migration is executed but not inserted into
        supabase_migrations.schema_migrations — confirm whether the backend
        records it server-side; otherwise list_migrations will never see it.
        """
        # FIX: reject a blank migration name up front.
        if not name or not name.strip():
            raise ValueError("Migration name cannot be empty")
        await self.execute_sql(query, workspace_id)
        return {"success": True, "message": f"Migration {name} applied successfully"}
from typing import Optional, List
import logging
import json
import html
from urllib.parse import quote

from .base import BaseTools
from ..utils import handle_errors, read_only_check
from ..models import EdgeFunction

logger = logging.getLogger(__name__)

# Supported runtimes and their conventional entrypoint filenames.
RUNTIME_CONFIG = {
    "native-node20/v1": {
        "entrypoint": "index.ts",
        "extensions": [".ts", ".js"],
        "description": "Node.js 20 runtime"
    },
    "native-python3.9/v1": {
        "entrypoint": "app.py",
        "extensions": [".py"],
        "description": "Python 3.9 runtime"
    },
    "native-python3.10/v1": {
        "entrypoint": "app.py",
        "extensions": [".py"],
        "description": "Python 3.10 runtime"
    },
    "native-python3.12/v1": {
        "entrypoint": "app.py",
        "extensions": [".py"],
        "description": "Python 3.12 runtime"
    }
}

# Slugs the platform reserves for its own endpoints.
RESERVED_SLUGS = {"deploy", "body", "health", "metrics"}
MAX_SLUG_LENGTH = 127
MAX_CODE_SIZE = 10 * 1024 * 1024  # 10MB


class EdgeFunctionTools(BaseTools):
    """List, fetch, deploy, delete and invoke AIDAP Edge Functions."""

    def _validate_function_name(self, function_name: str) -> None:
        """Reject empty, over-long, or platform-reserved function names."""
        if not function_name:
            raise ValueError("Function name cannot be empty")
        if len(function_name) > MAX_SLUG_LENGTH:
            raise ValueError(f"Function name too long (max {MAX_SLUG_LENGTH} characters)")
        if function_name in RESERVED_SLUGS:
            raise ValueError(f"Function name '{function_name}' is reserved")

    def _validate_runtime(self, runtime: str) -> None:
        """Reject runtimes not listed in RUNTIME_CONFIG."""
        if runtime not in RUNTIME_CONFIG:
            available = ", ".join(RUNTIME_CONFIG.keys())
            raise ValueError(f"Unsupported runtime '{runtime}'. Available: {available}")

    def _get_entrypoint(self, runtime: str) -> str:
        """Conventional entrypoint filename for a runtime."""
        return RUNTIME_CONFIG[runtime]["entrypoint"]

    def _validate_code_size(self, source_code: str) -> None:
        """Enforce the deployment size cap (measured in UTF-8 bytes)."""
        code_size = len(source_code.encode('utf-8'))
        if code_size > MAX_CODE_SIZE:
            raise ValueError(f"Source code too large: {code_size} bytes (max {MAX_CODE_SIZE} bytes)")

    def _validate_runtime_compatibility(self, runtime: str, source_code: str) -> None:
        """Heuristic check that the source matches the selected runtime."""
        if runtime.startswith("native-node"):
            # Deno globals won't exist on a Node runtime.
            if "Deno." in source_code:
                raise ValueError(
                    f"Code contains Deno-specific APIs (Deno.*) but runtime is {runtime}. "
                    "Please use Node.js compatible code or switch to a Deno runtime."
                )
        elif runtime.startswith("native-python"):
            # Very rough sanity check only — warns, never rejects.
            if not any(keyword in source_code for keyword in ["def ", "import ", "from "]):
                logger.warning("Python code may be invalid - no function definitions or imports found")

    @handle_errors
    async def list_edge_functions(self, workspace_id: Optional[str] = None) -> List[EdgeFunction]:
        """List all Edge Functions deployed in the workspace."""
        ws_id = self._get_workspace_id(workspace_id)
        logger.info(f"Listing edge functions for workspace {ws_id}")

        client = await self._get_client(ws_id)
        # AIDAP exposes the functions API under a fixed "default" project.
        result = await client.call_api("/v1/projects/default/functions")

        functions = [EdgeFunction(**func) for func in result]
        logger.info(f"Found {len(functions)} edge functions")
        return functions

    @handle_errors
    async def get_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> EdgeFunction:
        """Fetch one Edge Function's source code and configuration."""
        ws_id = self._get_workspace_id(workspace_id)
        logger.info(f"Getting edge function '{function_name}' from workspace {ws_id}")

        client = await self._get_client(ws_id)
        # CONSISTENCY FIX: URL-encode the slug (deploy already did; get didn't).
        result = await client.call_api(f"/v1/projects/default/functions/{quote(function_name)}")
        return EdgeFunction(**result)

    @handle_errors
    @read_only_check
    async def deploy_edge_function(
        self,
        function_name: str,
        source_code: str,
        verify_jwt: bool = True,
        runtime: str = "native-node20/v1",
        import_map: Optional[str] = None,
        workspace_id: Optional[str] = None
    ) -> dict:
        """Deploy (create or update) an Edge Function.

        Args:
            function_name: slug to deploy under (validated and URL-encoded)
            source_code: function source; HTML-unescaped before upload
            verify_jwt: whether the platform verifies JWTs on invoke
            runtime: one of RUNTIME_CONFIG's keys
            import_map: optional import-map JSON for dependencies
            workspace_id: workspace to deploy into (falls back to default)

        Returns:
            The platform's deployment result dict.

        Raises:
            ValueError: on any input validation failure.
        """
        self._validate_function_name(function_name)
        # FIX: _validate_runtime was defined but never invoked, so runtime
        # typos silently passed through.
        self._validate_runtime(runtime)

        if not source_code or not source_code.strip():
            raise ValueError("Source code cannot be empty")

        # Undo HTML-entity escaping some MCP transports apply to code payloads.
        source_code = html.unescape(source_code)
        self._validate_code_size(source_code)
        # NOTE(review): _validate_runtime_compatibility is deliberately not
        # called here — AIDAP deploys with a fixed index.ts entrypoint
        # regardless of `runtime` (see below), so the heuristic could reject
        # working code. Confirm platform semantics before enabling it.

        ws_id = self._get_workspace_id(workspace_id)
        # AIDAP currently fixes the entrypoint to index.ts for all deploys.
        entrypoint = "index.ts"

        logger.info(
            "Deploying edge function",
            extra={
                "function_name": function_name,
                "workspace_id": ws_id,
                "runtime": runtime,
                "verify_jwt": verify_jwt,
                "entrypoint": entrypoint,
                "code_size": len(source_code)
            }
        )

        client = await self._get_client(ws_id)

        data = {
            "metadata": {
                "name": function_name,
                "slug": function_name,
                "entrypoint_path": entrypoint,
                "verify_jwt": verify_jwt
            },
            "files": [
                {
                    "name": entrypoint,
                    "content": source_code
                }
            ]
        }

        if import_map:
            try:
                import_map_data = json.loads(import_map)
            except json.JSONDecodeError as e:
                raise ValueError(f"Invalid import map JSON: {e}")
            data["metadata"]["import_map_path"] = "import_map.json"
            data["files"].append({
                "name": "import_map.json",
                "content": json.dumps(import_map_data)
            })
            logger.debug("Added import map to deployment")

        # URL-encode the slug so special characters survive the query string.
        result = await client.call_api(
            f"/v1/projects/default/functions/deploy?slug={quote(function_name)}",
            method="POST",
            json_data=data
        )

        logger.info(
            f"Successfully deployed edge function '{function_name}'",
            extra={"function_id": result.get("id"), "version": result.get("version")}
        )
        return result

    @handle_errors
    @read_only_check
    async def delete_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> dict:
        """Delete an Edge Function by slug."""
        ws_id = self._get_workspace_id(workspace_id)
        logger.info(f"Deleting edge function '{function_name}' from workspace {ws_id}")

        client = await self._get_client(ws_id)
        # CONSISTENCY FIX: URL-encode the slug in the path.
        await client.call_api(f"/v1/projects/default/functions/{quote(function_name)}", method="DELETE")

        logger.info(f"Successfully deleted edge function '{function_name}'")
        return {"success": True, "message": "Edge function deleted successfully"}

    @handle_errors
    async def invoke_edge_function(
        self,
        function_name: str,
        payload: Optional[str] = None,
        method: str = "POST",
        workspace_id: Optional[str] = None
    ) -> dict:
        """Invoke a deployed Edge Function with an optional JSON payload."""
        ws_id = self._get_workspace_id(workspace_id)
        logger.info(
            f"Invoking edge function '{function_name}'",
            extra={"method": method, "has_payload": payload is not None}
        )

        client = await self._get_client(ws_id)

        json_data = None
        if payload:
            try:
                json_data = json.loads(payload)
            except json.JSONDecodeError as e:
                raise ValueError(f"Invalid payload JSON: {e}")

        # Invocations go through the public /functions/v1/{slug} route; the
        # longer timeout accommodates cold starts and slow functions.
        result = await client.call_api(
            f"/functions/v1/{quote(function_name)}",
            method=method,
            json_data=json_data,
            timeout=60.0
        )

        logger.debug(f"Edge function '{function_name}' invoked successfully")
        return result
from typing import Optional, List
import logging
from urllib.parse import quote
from .base import BaseTools
from ..utils import handle_errors, read_only_check
from ..models import StorageBucket, StorageConfig

logger = logging.getLogger(__name__)


class StorageTools(BaseTools):
    """Bucket and storage-config operations via the /storage/v1 API."""

    @handle_errors
    async def list_storage_buckets(self, workspace_id: Optional[str] = None) -> List[dict]:
        """List all storage buckets in the workspace."""
        ws_id = self._get_workspace_id(workspace_id)
        logger.info(f"Listing storage buckets for workspace {ws_id}")

        client = await self._get_client(ws_id)
        result = await client.call_api("/storage/v1/bucket")

        logger.info(f"Found {len(result)} storage buckets")
        return result

    @handle_errors
    @read_only_check
    async def create_storage_bucket(
        self,
        bucket_name: str,
        public: bool = False,
        file_size_limit: Optional[int] = None,
        allowed_mime_types: Optional[str] = None,
        workspace_id: Optional[str] = None
    ) -> dict:
        """Create a storage bucket.

        Args:
            bucket_name: name of the bucket to create
            public: whether objects are publicly readable
            file_size_limit: per-object byte limit (None = platform default)
            allowed_mime_types: comma-separated whitelist of MIME types
            workspace_id: target workspace (falls back to default)
        """
        if not bucket_name or not bucket_name.strip():
            raise ValueError("Bucket name cannot be empty")

        ws_id = self._get_workspace_id(workspace_id)
        logger.info(
            f"Creating storage bucket '{bucket_name}'",
            extra={"workspace_id": ws_id, "public": public}
        )

        client = await self._get_client(ws_id)

        data = {
            "name": bucket_name,
            "public": public
        }
        # FIX: `if file_size_limit:` silently dropped a falsy (0) limit;
        # test against None so any explicitly provided value is forwarded.
        if file_size_limit is not None:
            data["file_size_limit"] = file_size_limit
        if allowed_mime_types:
            # FIX: strip whitespace and drop empty entries from the
            # comma-separated list ("a, b," previously produced " b" and "").
            data["allowed_mime_types"] = [
                m.strip() for m in allowed_mime_types.split(",") if m.strip()
            ]

        return await client.call_api("/storage/v1/bucket", method="POST", json_data=data)

    @handle_errors
    @read_only_check
    async def delete_storage_bucket(self, bucket_name: str, workspace_id: Optional[str] = None) -> dict:
        """Delete a storage bucket by name."""
        # FIX: validate the name and URL-encode it in the path, matching
        # create_storage_bucket's validation.
        if not bucket_name or not bucket_name.strip():
            raise ValueError("Bucket name cannot be empty")
        ws_id = self._get_workspace_id(workspace_id)
        client = await self._get_client(ws_id)
        await client.call_api(f"/storage/v1/bucket/{quote(bucket_name)}", method="DELETE")
        return {"success": True, "message": "Bucket deleted successfully"}

    @handle_errors
    async def get_storage_config(self, workspace_id: Optional[str] = None) -> StorageConfig:
        """Fetch the workspace's storage configuration."""
        ws_id = self._get_workspace_id(workspace_id)
        client = await self._get_client(ws_id)
        result = await client.call_api("/storage/v1/config")
        return StorageConfig(**result)
"""Workspace management tools for Supabase MCP Server"""

import json
import logging
from typing import Optional

logger = logging.getLogger(__name__)


class WorkspaceTools:
    """Tools for listing and inspecting AIDAP workspaces."""

    # Attributes copied from SDK workspace objects into JSON summaries.
    _BASE_FIELDS = ("workspace_id", "workspace_name", "status", "region")

    def __init__(self, aidap_client, default_workspace_id: Optional[str] = None):
        self.aidap_client = aidap_client
        self.default_workspace_id = default_workspace_id

    @staticmethod
    def _summarize(ws, fields) -> dict:
        """Pick the given attributes off an SDK workspace object (None if absent)."""
        return {f: getattr(ws, f, None) for f in fields}

    async def list_workspaces(self) -> str:
        """Lists all available workspaces.

        Returns:
            JSON string containing list of workspaces
        """
        try:
            # SDK imports are deferred so the module loads without the SDK.
            from volcenginesdkaidap.models import DescribeWorkspacesRequest, FilterForDescribeWorkspacesInput

            # Only query Supabase-engine workspaces.
            filters = [
                FilterForDescribeWorkspacesInput(
                    name="DBEngineVersion",
                    value="Supabase_1_24",
                    mode="Exact"
                )
            ]

            request = DescribeWorkspacesRequest(filters=filters)
            response = self.aidap_client.client.describe_workspaces(request)

            workspaces = []
            if getattr(response, 'workspaces', None):
                workspaces = [
                    self._summarize(ws, self._BASE_FIELDS)
                    for ws in response.workspaces
                ]

            return json.dumps({
                "success": True,
                "workspaces": workspaces,
                "count": len(workspaces)
            }, indent=2)

        except Exception as e:
            logger.error(f"Error listing workspaces: {e}")
            return json.dumps({
                "success": False,
                "error": str(e)
            }, indent=2)

    async def get_workspace(self, workspace_id: Optional[str] = None) -> str:
        """Gets details for a specific workspace.

        Args:
            workspace_id: The workspace ID; falls back to the configured
                default when omitted (backward-compatible generalization —
                the original required an explicit id).

        Returns:
            JSON string containing workspace details
        """
        try:
            # FIX: the stored default_workspace_id was never used anywhere.
            ws_id = workspace_id or self.default_workspace_id
            if not ws_id:
                raise ValueError(
                    "workspace_id is required: not provided and no default configured"
                )

            from volcenginesdkaidap.models import DescribeWorkspaceDetailRequest

            request = DescribeWorkspaceDetailRequest(workspace_id=ws_id)
            response = self.aidap_client.client.describe_workspace_detail(request)

            if hasattr(response, 'workspace'):
                workspace_info = self._summarize(
                    response.workspace,
                    self._BASE_FIELDS + ("created_at", "updated_at"),
                )
                return json.dumps({
                    "success": True,
                    "workspace": workspace_info
                }, indent=2)

            return json.dumps({
                "success": False,
                "error": "Workspace not found"
            }, indent=2)

        except Exception as e:
            logger.error(f"Error getting workspace: {e}")
            return json.dumps({
                "success": False,
                "error": str(e)
            }, indent=2)
import json
import logging
from functools import wraps
from typing import Any, Callable

logger = logging.getLogger(__name__)


def format_error(e: Exception) -> str:
    """Human-readable message for an exception (falls back to the type name)."""
    return str(e) if str(e) else type(e).__name__


def handle_errors(func: Callable) -> Callable:
    """Wrap an async tool method: serialize its result to JSON, trap errors.

    Pydantic models (and lists containing them) are converted via
    ``model_dump()`` before serialization; strings pass through untouched.
    Any exception becomes an ``{"error": ...}`` JSON payload so MCP tool
    calls never raise.
    """
    @wraps(func)
    async def wrapper(*args, **kwargs) -> str:
        try:
            result = await func(*args, **kwargs)
            if isinstance(result, str):
                return result
            if isinstance(result, list):
                # FIX: the original only converted when the FIRST element had
                # model_dump, mis-serializing mixed lists; convert per item.
                result = [
                    item.model_dump() if hasattr(item, 'model_dump') else item
                    for item in result
                ]
            elif hasattr(result, 'model_dump'):
                result = result.model_dump()
            return json.dumps(result, indent=2, ensure_ascii=False)
        except Exception as e:
            error_msg = format_error(e)
            # exception() keeps the traceback in the log for debugging.
            logger.exception(f"Error in {func.__name__}: {error_msg}")
            return json.dumps({"error": error_msg}, ensure_ascii=False)
    return wrapper


def read_only_check(func: Callable) -> Callable:
    """Short-circuit mutating tools with a JSON error in read-only mode."""
    @wraps(func)
    async def wrapper(*args, **kwargs) -> Any:
        # Imported lazily so module import doesn't depend on config load order.
        from ..config import READ_ONLY
        if READ_ONLY:
            return json.dumps({"error": f"Cannot execute {func.__name__} in read-only mode"})
        return await func(*args, **kwargs)
    return wrapper
+[[package]] +name = "anyio" +version = "4.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, +] + +[[package]] +name = "attrs" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "black" +version = "26.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "pytokens" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/13/88/560b11e521c522440af991d46848a2bde64b5f7202ec14e1f46f9509d328/black-26.1.0.tar.gz", hash = "sha256:d294ac3340eef9c9eb5d29288e96dc719ff269a88e27b396340459dd85da4c58", size = 658785, upload-time = "2026-01-18T04:50:11.993Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/1b/523329e713f965ad0ea2b7a047eeb003007792a0353622ac7a8cb2ee6fef/black-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ca699710dece84e3ebf6e92ee15f5b8f72870ef984bf944a57a777a48357c168", size = 1849661, upload-time = "2026-01-18T04:59:12.425Z" }, + { url = "https://files.pythonhosted.org/packages/14/82/94c0640f7285fa71c2f32879f23e609dd2aa39ba2641f395487f24a578e7/black-26.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e8e75dabb6eb83d064b0db46392b25cabb6e784ea624219736e8985a6b3675d", size = 1689065, upload-time = "2026-01-18T04:59:13.993Z" }, + { url = "https://files.pythonhosted.org/packages/f0/78/474373cbd798f9291ed8f7107056e343fd39fef42de4a51c7fd0d360840c/black-26.1.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb07665d9a907a1a645ee41a0df8a25ffac8ad9c26cdb557b7b88eeeeec934e0", size = 1751502, upload-time = 
"2026-01-18T04:59:15.971Z" }, + { url = "https://files.pythonhosted.org/packages/29/89/59d0e350123f97bc32c27c4d79563432d7f3530dca2bff64d855c178af8b/black-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:7ed300200918147c963c87700ccf9966dceaefbbb7277450a8d646fc5646bf24", size = 1400102, upload-time = "2026-01-18T04:59:17.8Z" }, + { url = "https://files.pythonhosted.org/packages/e1/bc/5d866c7ae1c9d67d308f83af5462ca7046760158bbf142502bad8f22b3a1/black-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:c5b7713daea9bf943f79f8c3b46f361cc5229e0e604dcef6a8bb6d1c37d9df89", size = 1207038, upload-time = "2026-01-18T04:59:19.543Z" }, + { url = "https://files.pythonhosted.org/packages/30/83/f05f22ff13756e1a8ce7891db517dbc06200796a16326258268f4658a745/black-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3cee1487a9e4c640dc7467aaa543d6c0097c391dc8ac74eb313f2fbf9d7a7cb5", size = 1831956, upload-time = "2026-01-18T04:59:21.38Z" }, + { url = "https://files.pythonhosted.org/packages/7d/f2/b2c570550e39bedc157715e43927360312d6dd677eed2cc149a802577491/black-26.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d62d14ca31c92adf561ebb2e5f2741bf8dea28aef6deb400d49cca011d186c68", size = 1672499, upload-time = "2026-01-18T04:59:23.257Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d7/990d6a94dc9e169f61374b1c3d4f4dd3037e93c2cc12b6f3b12bc663aa7b/black-26.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb1dafbbaa3b1ee8b4550a84425aac8874e5f390200f5502cf3aee4a2acb2f14", size = 1735431, upload-time = "2026-01-18T04:59:24.729Z" }, + { url = "https://files.pythonhosted.org/packages/36/1c/cbd7bae7dd3cb315dfe6eeca802bb56662cc92b89af272e014d98c1f2286/black-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:101540cb2a77c680f4f80e628ae98bd2bd8812fb9d72ade4f8995c5ff019e82c", size = 1400468, upload-time = "2026-01-18T04:59:27.381Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/b1/9fe6132bb2d0d1f7094613320b56297a108ae19ecf3041d9678aec381b37/black-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:6f3977a16e347f1b115662be07daa93137259c711e526402aa444d7a88fdc9d4", size = 1207332, upload-time = "2026-01-18T04:59:28.711Z" }, + { url = "https://files.pythonhosted.org/packages/f5/13/710298938a61f0f54cdb4d1c0baeb672c01ff0358712eddaf29f76d32a0b/black-26.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6eeca41e70b5f5c84f2f913af857cf2ce17410847e1d54642e658e078da6544f", size = 1878189, upload-time = "2026-01-18T04:59:30.682Z" }, + { url = "https://files.pythonhosted.org/packages/79/a6/5179beaa57e5dbd2ec9f1c64016214057b4265647c62125aa6aeffb05392/black-26.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dd39eef053e58e60204f2cdf059e2442e2eb08f15989eefe259870f89614c8b6", size = 1700178, upload-time = "2026-01-18T04:59:32.387Z" }, + { url = "https://files.pythonhosted.org/packages/8c/04/c96f79d7b93e8f09d9298b333ca0d31cd9b2ee6c46c274fd0f531de9dc61/black-26.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9459ad0d6cd483eacad4c6566b0f8e42af5e8b583cee917d90ffaa3778420a0a", size = 1777029, upload-time = "2026-01-18T04:59:33.767Z" }, + { url = "https://files.pythonhosted.org/packages/49/f9/71c161c4c7aa18bdda3776b66ac2dc07aed62053c7c0ff8bbda8c2624fe2/black-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a19915ec61f3a8746e8b10adbac4a577c6ba9851fa4a9e9fbfbcf319887a5791", size = 1406466, upload-time = "2026-01-18T04:59:35.177Z" }, + { url = "https://files.pythonhosted.org/packages/4a/8b/a7b0f974e473b159d0ac1b6bcefffeb6bec465898a516ee5cc989503cbc7/black-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:643d27fb5facc167c0b1b59d0315f2674a6e950341aed0fc05cf307d22bf4954", size = 1216393, upload-time = "2026-01-18T04:59:37.18Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/04/fa2f4784f7237279332aa735cdfd5ae2e7730db0072fb2041dadda9ae551/black-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ba1d768fbfb6930fc93b0ecc32a43d8861ded16f47a40f14afa9bb04ab93d304", size = 1877781, upload-time = "2026-01-18T04:59:39.054Z" }, + { url = "https://files.pythonhosted.org/packages/cf/ad/5a131b01acc0e5336740a039628c0ab69d60cf09a2c87a4ec49f5826acda/black-26.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b807c240b64609cb0e80d2200a35b23c7df82259f80bef1b2c96eb422b4aac9", size = 1699670, upload-time = "2026-01-18T04:59:41.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/7c/b05f22964316a52ab6b4265bcd52c0ad2c30d7ca6bd3d0637e438fc32d6e/black-26.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1de0f7d01cc894066a1153b738145b194414cc6eeaad8ef4397ac9abacf40f6b", size = 1775212, upload-time = "2026-01-18T04:59:42.545Z" }, + { url = "https://files.pythonhosted.org/packages/a6/a3/e8d1526bea0446e040193185353920a9506eab60a7d8beb062029129c7d2/black-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:91a68ae46bf07868963671e4d05611b179c2313301bd756a89ad4e3b3db2325b", size = 1409953, upload-time = "2026-01-18T04:59:44.357Z" }, + { url = "https://files.pythonhosted.org/packages/c7/5a/d62ebf4d8f5e3a1daa54adaab94c107b57be1b1a2f115a0249b41931e188/black-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:be5e2fe860b9bd9edbf676d5b60a9282994c03fbbd40fe8f5e75d194f96064ca", size = 1217707, upload-time = "2026-01-18T04:59:45.719Z" }, + { url = "https://files.pythonhosted.org/packages/6a/83/be35a175aacfce4b05584ac415fd317dd6c24e93a0af2dcedce0f686f5d8/black-26.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:9dc8c71656a79ca49b8d3e2ce8103210c9481c57798b48deeb3a8bb02db5f115", size = 1871864, upload-time = "2026-01-18T04:59:47.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/f5/d33696c099450b1274d925a42b7a030cd3ea1f56d72e5ca8bbed5f52759c/black-26.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b22b3810451abe359a964cc88121d57f7bce482b53a066de0f1584988ca36e79", size = 1701009, upload-time = "2026-01-18T04:59:49.443Z" }, + { url = "https://files.pythonhosted.org/packages/1b/87/670dd888c537acb53a863bc15abbd85b22b429237d9de1b77c0ed6b79c42/black-26.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:53c62883b3f999f14e5d30b5a79bd437236658ad45b2f853906c7cbe79de00af", size = 1767806, upload-time = "2026-01-18T04:59:50.769Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9c/cd3deb79bfec5bcf30f9d2100ffeec63eecce826eb63e3961708b9431ff1/black-26.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:f016baaadc423dc960cdddf9acae679e71ee02c4c341f78f3179d7e4819c095f", size = 1433217, upload-time = "2026-01-18T04:59:52.218Z" }, + { url = "https://files.pythonhosted.org/packages/4e/29/f3be41a1cf502a283506f40f5d27203249d181f7a1a2abce1c6ce188035a/black-26.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:66912475200b67ef5a0ab665011964bf924745103f51977a78b4fb92a9fc1bf0", size = 1245773, upload-time = "2026-01-18T04:59:54.457Z" }, + { url = "https://files.pythonhosted.org/packages/e4/3d/51bdb3ecbfadfaf825ec0c75e1de6077422b4afa2091c6c9ba34fbfc0c2d/black-26.1.0-py3-none-any.whl", hash = "sha256:1054e8e47ebd686e078c0bb0eaf31e6ce69c966058d122f2c0c950311f9f3ede", size = 204010, upload-time = "2026-01-18T04:50:09.978Z" }, +] + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44", size = 184283, upload-time = "2025-09-08T23:22:08.01Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49", size = 180504, upload-time = "2025-09-08T23:22:10.637Z" }, + { url = "https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811, upload-time = "2025-09-08T23:22:12.267Z" }, + { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402, upload-time = "2025-09-08T23:22:13.455Z" }, + { url = "https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217, upload-time = "2025-09-08T23:22:14.596Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079, upload-time = "2025-09-08T23:22:15.769Z" }, + { url = "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475, upload-time = "2025-09-08T23:22:17.427Z" }, + { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829, upload-time = "2025-09-08T23:22:19.069Z" }, + { url = "https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211, upload-time = "2025-09-08T23:22:20.588Z" }, + { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size 
= 218036, upload-time = "2025-09-08T23:22:22.143Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184, upload-time = "2025-09-08T23:22:23.328Z" }, + { url = "https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790, upload-time = "2025-09-08T23:22:24.752Z" }, + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "cryptography" +version = "46.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, + { name = 
"typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, + { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, + { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, + { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = 
"2026-02-10T19:17:15.618Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, + { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, + { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, + { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, + { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, + { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, + { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, + { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, + { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, + { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, + { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, + { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, + { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, + { url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" }, + { url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = 
"sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", 
size = 366583, upload-time = "2026-01-07T13:41:07.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "mcp" +version = "1.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = 
"sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" }, +] + +[[package]] +name = "mcp-server-supabase" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "httpx" }, + { name = "mcp" }, + { name = "pydantic" }, + { name = "volcengine-python-sdk" }, +] + +[package.optional-dependencies] +dev = [ + { name = "black" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "ruff" }, +] +legacy = [ + { name = "psycopg2-binary" }, +] + +[package.metadata] +requires-dist = [ + { name = "black", marker = "extra == 'dev'", specifier = ">=24.0.0" }, + { name = "httpx", specifier = ">=0.27.0" }, + { name = "mcp", specifier = ">=1.1.2" }, + { name = "psycopg2-binary", marker = "extra == 'legacy'", specifier = ">=2.9.0" }, + { name = "pydantic", specifier = ">=2.0.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.3.0" }, + { name = "volcengine-python-sdk", git = "https://code.byted.org/iaasng/volcengine-python-sdk.git?rev=aidap-Python-2025-10-01-online-2306-2026_02_27_11_45_12" }, +] +provides-extras = ["dev", "legacy"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = 
"2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "pathspec" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.9.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/1b/04/fea538adf7dbbd6d186f551d595961e564a3b6715bdf276b477460858672/platformdirs-4.9.2.tar.gz", hash = "sha256:9a33809944b9db043ad67ca0db94b14bf452cc6aeaac46a88ea55b26e2e9d291", size = 28394, upload-time = "2026-02-16T03:56:10.574Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/31/05e764397056194206169869b50cf2fee4dbbbc71b344705b9c0d878d4d8/platformdirs-4.9.2-py3-none-any.whl", hash = "sha256:9170634f126f8efdae22fb58ae8a0eaa86f38365bc57897a6c4f781d1f5875bd", size = 21168, upload-time = "2026-02-16T03:56:08.891Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/f2/8e377d29c2ecf99f6062d35ea606b036e8800720eccfec5fe3dd672c2b24/psycopg2_binary-2.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6fe6b47d0b42ce1c9f1fa3e35bb365011ca22e39db37074458f27921dca40f2", size = 3756506, upload-time = 
"2025-10-10T11:10:30.144Z" }, + { url = "https://files.pythonhosted.org/packages/24/cc/dc143ea88e4ec9d386106cac05023b69668bd0be20794c613446eaefafe5/psycopg2_binary-2.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c0e4262e089516603a09474ee13eabf09cb65c332277e39af68f6233911087", size = 3863943, upload-time = "2025-10-10T11:10:34.586Z" }, + { url = "https://files.pythonhosted.org/packages/8c/df/16848771155e7c419c60afeb24950b8aaa3ab09c0a091ec3ccca26a574d0/psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c47676e5b485393f069b4d7a811267d3168ce46f988fa602658b8bb901e9e64d", size = 4410873, upload-time = "2025-10-10T11:10:38.951Z" }, + { url = "https://files.pythonhosted.org/packages/43/79/5ef5f32621abd5a541b89b04231fe959a9b327c874a1d41156041c75494b/psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a28d8c01a7b27a1e3265b11250ba7557e5f72b5ee9e5f3a2fa8d2949c29bf5d2", size = 4468016, upload-time = "2025-10-10T11:10:43.319Z" }, + { url = "https://files.pythonhosted.org/packages/f0/9b/d7542d0f7ad78f57385971f426704776d7b310f5219ed58da5d605b1892e/psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f3f2732cf504a1aa9e9609d02f79bea1067d99edf844ab92c247bbca143303b", size = 4164996, upload-time = "2025-10-10T11:10:46.705Z" }, + { url = "https://files.pythonhosted.org/packages/14/ed/e409388b537fa7414330687936917c522f6a77a13474e4238219fcfd9a84/psycopg2_binary-2.9.11-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:865f9945ed1b3950d968ec4690ce68c55019d79e4497366d36e090327ce7db14", size = 3981881, upload-time = "2025-10-30T02:54:57.182Z" }, + { url = "https://files.pythonhosted.org/packages/bf/30/50e330e63bb05efc6fa7c1447df3e08954894025ca3dcb396ecc6739bc26/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91537a8df2bde69b1c1db01d6d944c831ca793952e4f57892600e96cee95f2cd", size 
= 3650857, upload-time = "2025-10-10T11:10:50.112Z" }, + { url = "https://files.pythonhosted.org/packages/f0/e0/4026e4c12bb49dd028756c5b0bc4c572319f2d8f1c9008e0dad8cc9addd7/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4dca1f356a67ecb68c81a7bc7809f1569ad9e152ce7fd02c2f2036862ca9f66b", size = 3296063, upload-time = "2025-10-10T11:10:54.089Z" }, + { url = "https://files.pythonhosted.org/packages/2c/34/eb172be293c886fef5299fe5c3fcf180a05478be89856067881007934a7c/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0da4de5c1ac69d94ed4364b6cbe7190c1a70d325f112ba783d83f8440285f152", size = 3043464, upload-time = "2025-10-30T02:55:02.483Z" }, + { url = "https://files.pythonhosted.org/packages/18/1c/532c5d2cb11986372f14b798a95f2eaafe5779334f6a80589a68b5fcf769/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37d8412565a7267f7d79e29ab66876e55cb5e8e7b3bbf94f8206f6795f8f7e7e", size = 3345378, upload-time = "2025-10-10T11:11:01.039Z" }, + { url = "https://files.pythonhosted.org/packages/70/e7/de420e1cf16f838e1fa17b1120e83afff374c7c0130d088dba6286fcf8ea/psycopg2_binary-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:c665f01ec8ab273a61c62beeb8cce3014c214429ced8a308ca1fc410ecac3a39", size = 2713904, upload-time = "2025-10-10T11:11:04.81Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, + { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, + { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = "2025-10-30T02:55:07.332Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, + { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = 
"2025-10-10T11:11:40.432Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" }, + { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, + { url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" }, + { 
url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a8/a2709681b3ac11b0b1786def10006b8995125ba268c9a54bea6f5ae8bd3e/psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c", size = 3756572, upload-time = "2025-10-10T11:12:32.873Z" }, + { url = "https://files.pythonhosted.org/packages/62/e1/c2b38d256d0dafd32713e9f31982a5b028f4a3651f446be70785f484f472/psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee", size = 3864529, upload-time = "2025-10-10T11:12:36.791Z" }, + { url = "https://files.pythonhosted.org/packages/11/32/b2ffe8f3853c181e88f0a157c5fb4e383102238d73c52ac6d93a5c8bffe6/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0", size = 4411242, upload-time = "2025-10-10T11:12:42.388Z" }, + { url = "https://files.pythonhosted.org/packages/10/04/6ca7477e6160ae258dc96f67c371157776564679aefd247b66f4661501a2/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766", size = 4468258, upload-time = "2025-10-10T11:12:48.654Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/7e/6a1a38f86412df101435809f225d57c1a021307dd0689f7a5e7fe83588b1/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3", size = 4166295, upload-time = "2025-10-10T11:12:52.525Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7d/c07374c501b45f3579a9eb761cbf2604ddef3d96ad48679112c2c5aa9c25/psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f", size = 3983133, upload-time = "2025-10-30T02:55:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/82/56/993b7104cb8345ad7d4516538ccf8f0d0ac640b1ebd8c754a7b024e76878/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4", size = 3652383, upload-time = "2025-10-10T11:12:56.387Z" }, + { url = "https://files.pythonhosted.org/packages/2d/ac/eaeb6029362fd8d454a27374d84c6866c82c33bfc24587b4face5a8e43ef/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c", size = 3298168, upload-time = "2025-10-10T11:13:00.403Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/50c3facc66bded9ada5cbc0de867499a703dc6bca6be03070b4e3b65da6c/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60", size = 3044712, upload-time = "2025-10-30T02:55:27.975Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8e/b7de019a1f562f72ada81081a12823d3c1590bedc48d7d2559410a2763fe/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1", size = 3347549, upload-time = "2025-10-10T11:13:03.971Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/2d/1bb683f64737bbb1f86c82b7359db1eb2be4e2c0c13b947f80efefa7d3e5/psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa", size = 2714215, upload-time = "2025-10-10T11:13:07.14Z" }, + { url = "https://files.pythonhosted.org/packages/64/12/93ef0098590cf51d9732b4f139533732565704f45bdc1ffa741b7c95fb54/psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1", size = 3756567, upload-time = "2025-10-10T11:13:11.885Z" }, + { url = "https://files.pythonhosted.org/packages/7c/a9/9d55c614a891288f15ca4b5209b09f0f01e3124056924e17b81b9fa054cc/psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f", size = 3864755, upload-time = "2025-10-10T11:13:17.727Z" }, + { url = "https://files.pythonhosted.org/packages/13/1e/98874ce72fd29cbde93209977b196a2edae03f8490d1bd8158e7f1daf3a0/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5", size = 4411646, upload-time = "2025-10-10T11:13:24.432Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bd/a335ce6645334fb8d758cc358810defca14a1d19ffbc8a10bd38a2328565/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8", size = 4468701, upload-time = "2025-10-10T11:13:29.266Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/c8b4f53f34e295e45709b7568bf9b9407a612ea30387d35eb9fa84f269b4/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c", size = 4166293, upload-time = "2025-10-10T11:13:33.336Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/e0/f8cc36eadd1b716ab36bb290618a3292e009867e5c97ce4aba908cb99644/psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f", size = 3983184, upload-time = "2025-10-30T02:55:32.483Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/2a8fe18a4e61cfb3417da67b6318e12691772c0696d79434184a511906dc/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747", size = 3652650, upload-time = "2025-10-10T11:13:38.181Z" }, + { url = "https://files.pythonhosted.org/packages/76/36/03801461b31b29fe58d228c24388f999fe814dfc302856e0d17f97d7c54d/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f", size = 3298663, upload-time = "2025-10-10T11:13:44.878Z" }, + { url = "https://files.pythonhosted.org/packages/97/77/21b0ea2e1a73aa5fa9222b2a6b8ba325c43c3a8d54272839c991f2345656/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b", size = 3044737, upload-time = "2025-10-30T02:55:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/67/69/f36abe5f118c1dca6d3726ceae164b9356985805480731ac6712a63f24f0/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d", size = 3347643, upload-time = "2025-10-10T11:13:53.499Z" }, + { url = "https://files.pythonhosted.org/packages/e1/36/9c0c326fe3a4227953dfb29f5d0c8ae3b8eb8c1cd2967aa569f50cb3c61f/psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316", size = 2803913, upload-time = "2025-10-10T11:13:57.058Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" }, + { url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956, upload-time = "2025-11-04T13:39:15.889Z" }, + { 
url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = "2025-11-04T13:39:17.403Z" }, + { url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" }, + { url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = "2025-11-04T13:39:21Z" }, + { url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" }, + { url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = 
"2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = 
"2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = 
"2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" }, + { url = "https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" }, + { url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" }, + { url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" }, + { url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" }, + { url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name 
= "pydantic-settings" +version = "2.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/6d/fffca34caecc4a3f97bda81b2098da5e8ab7efc9a66e819074a11955d87e/pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025", size = 223826, upload-time = "2026-02-19T13:45:08.055Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/4b/ccc026168948fec4f7555b9164c724cf4125eac006e176541483d2c959be/pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237", size = 58929, upload-time = "2026-02-19T13:45:06.034Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyjwt" +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5a/b46fa56bf322901eee5b0454a34343cdbdae202cd421775a8ee4e42fd519/pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623", size = 98019, upload-time = "2026-01-30T19:59:55.694Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469", size = 28224, upload-time = "2026-01-30T19:59:54.539Z" }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { url 
= "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" }, +] + +[[package]] +name = "pytokens" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/34/b4e015b99031667a7b960f888889c5bd34ef585c85e1cb56a594b92836ac/pytokens-0.4.1.tar.gz", hash = "sha256:292052fe80923aae2260c073f822ceba21f3872ced9a68bb7953b348e561179a", size = 23015, upload-time = "2026-01-30T01:03:45.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/24/f206113e05cb8ef51b3850e7ef88f20da6f4bf932190ceb48bd3da103e10/pytokens-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a44ed93ea23415c54f3face3b65ef2b844d96aeb3455b8a69b3df6beab6acc5", size = 161522, upload-time = "2026-01-30T01:02:50.393Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e9/06a6bf1b90c2ed81a9c7d2544232fe5d2891d1cd480e8a1809ca354a8eb2/pytokens-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:add8bf86b71a5d9fb5b89f023a80b791e04fba57960aa790cc6125f7f1d39dfe", size = 246945, upload-time = "2026-01-30T01:02:52.399Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/f6fb1007a4c3d8b682d5d65b7c1fb33257587a5f782647091e3408abe0b8/pytokens-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:670d286910b531c7b7e3c0b453fd8156f250adb140146d234a82219459b9640c", size = 259525, upload-time = 
"2026-01-30T01:02:53.737Z" }, + { url = "https://files.pythonhosted.org/packages/04/92/086f89b4d622a18418bac74ab5db7f68cf0c21cf7cc92de6c7b919d76c88/pytokens-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4e691d7f5186bd2842c14813f79f8884bb03f5995f0575272009982c5ac6c0f7", size = 262693, upload-time = "2026-01-30T01:02:54.871Z" }, + { url = "https://files.pythonhosted.org/packages/b4/7b/8b31c347cf94a3f900bdde750b2e9131575a61fdb620d3d3c75832262137/pytokens-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:27b83ad28825978742beef057bfe406ad6ed524b2d28c252c5de7b4a6dd48fa2", size = 103567, upload-time = "2026-01-30T01:02:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/3d/92/790ebe03f07b57e53b10884c329b9a1a308648fc083a6d4a39a10a28c8fc/pytokens-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d70e77c55ae8380c91c0c18dea05951482e263982911fc7410b1ffd1dadd3440", size = 160864, upload-time = "2026-01-30T01:02:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/13/25/a4f555281d975bfdd1eba731450e2fe3a95870274da73fb12c40aeae7625/pytokens-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a58d057208cb9075c144950d789511220b07636dd2e4708d5645d24de666bdc", size = 248565, upload-time = "2026-01-30T01:02:59.912Z" }, + { url = "https://files.pythonhosted.org/packages/17/50/bc0394b4ad5b1601be22fa43652173d47e4c9efbf0044c62e9a59b747c56/pytokens-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b49750419d300e2b5a3813cf229d4e5a4c728dae470bcc89867a9ad6f25a722d", size = 260824, upload-time = "2026-01-30T01:03:01.471Z" }, + { url = "https://files.pythonhosted.org/packages/4e/54/3e04f9d92a4be4fc6c80016bc396b923d2a6933ae94b5f557c939c460ee0/pytokens-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9907d61f15bf7261d7e775bd5d7ee4d2930e04424bab1972591918497623a16", size = 264075, upload-time = "2026-01-30T01:03:04.143Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/1b/44b0326cb5470a4375f37988aea5d61b5cc52407143303015ebee94abfd6/pytokens-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee44d0f85b803321710f9239f335aafe16553b39106384cef8e6de40cb4ef2f6", size = 103323, upload-time = "2026-01-30T01:03:05.412Z" }, + { url = "https://files.pythonhosted.org/packages/41/5d/e44573011401fb82e9d51e97f1290ceb377800fb4eed650b96f4753b499c/pytokens-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:140709331e846b728475786df8aeb27d24f48cbcf7bcd449f8de75cae7a45083", size = 160663, upload-time = "2026-01-30T01:03:06.473Z" }, + { url = "https://files.pythonhosted.org/packages/f0/e6/5bbc3019f8e6f21d09c41f8b8654536117e5e211a85d89212d59cbdab381/pytokens-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d6c4268598f762bc8e91f5dbf2ab2f61f7b95bdc07953b602db879b3c8c18e1", size = 255626, upload-time = "2026-01-30T01:03:08.177Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3c/2d5297d82286f6f3d92770289fd439956b201c0a4fc7e72efb9b2293758e/pytokens-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24afde1f53d95348b5a0eb19488661147285ca4dd7ed752bbc3e1c6242a304d1", size = 269779, upload-time = "2026-01-30T01:03:09.756Z" }, + { url = "https://files.pythonhosted.org/packages/20/01/7436e9ad693cebda0551203e0bf28f7669976c60ad07d6402098208476de/pytokens-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5ad948d085ed6c16413eb5fec6b3e02fa00dc29a2534f088d3302c47eb59adf9", size = 268076, upload-time = "2026-01-30T01:03:10.957Z" }, + { url = "https://files.pythonhosted.org/packages/2e/df/533c82a3c752ba13ae7ef238b7f8cdd272cf1475f03c63ac6cf3fcfb00b6/pytokens-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:3f901fe783e06e48e8cbdc82d631fca8f118333798193e026a50ce1b3757ea68", size = 103552, upload-time = "2026-01-30T01:03:12.066Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/dc/08b1a080372afda3cceb4f3c0a7ba2bde9d6a5241f1edb02a22a019ee147/pytokens-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8bdb9d0ce90cbf99c525e75a2fa415144fd570a1ba987380190e8b786bc6ef9b", size = 160720, upload-time = "2026-01-30T01:03:13.843Z" }, + { url = "https://files.pythonhosted.org/packages/64/0c/41ea22205da480837a700e395507e6a24425151dfb7ead73343d6e2d7ffe/pytokens-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5502408cab1cb18e128570f8d598981c68a50d0cbd7c61312a90507cd3a1276f", size = 254204, upload-time = "2026-01-30T01:03:14.886Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d2/afe5c7f8607018beb99971489dbb846508f1b8f351fcefc225fcf4b2adc0/pytokens-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29d1d8fb1030af4d231789959f21821ab6325e463f0503a61d204343c9b355d1", size = 268423, upload-time = "2026-01-30T01:03:15.936Z" }, + { url = "https://files.pythonhosted.org/packages/68/d4/00ffdbd370410c04e9591da9220a68dc1693ef7499173eb3e30d06e05ed1/pytokens-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:970b08dd6b86058b6dc07efe9e98414f5102974716232d10f32ff39701e841c4", size = 266859, upload-time = "2026-01-30T01:03:17.458Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c9/c3161313b4ca0c601eeefabd3d3b576edaa9afdefd32da97210700e47652/pytokens-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:9bd7d7f544d362576be74f9d5901a22f317efc20046efe2034dced238cbbfe78", size = 103520, upload-time = "2026-01-30T01:03:18.652Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a7/b470f672e6fc5fee0a01d9e75005a0e617e162381974213a945fcd274843/pytokens-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4a14d5f5fc78ce85e426aa159489e2d5961acf0e47575e08f35584009178e321", size = 160821, upload-time = "2026-01-30T01:03:19.684Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/98/e83a36fe8d170c911f864bfded690d2542bfcfacb9c649d11a9e6eb9dc41/pytokens-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f50fd18543be72da51dd505e2ed20d2228c74e0464e4262e4899797803d7fa", size = 254263, upload-time = "2026-01-30T01:03:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/0f/95/70d7041273890f9f97a24234c00b746e8da86df462620194cef1d411ddeb/pytokens-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc74c035f9bfca0255c1af77ddd2d6ae8419012805453e4b0e7513e17904545d", size = 268071, upload-time = "2026-01-30T01:03:21.888Z" }, + { url = "https://files.pythonhosted.org/packages/da/79/76e6d09ae19c99404656d7db9c35dfd20f2086f3eb6ecb496b5b31163bad/pytokens-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f66a6bbe741bd431f6d741e617e0f39ec7257ca1f89089593479347cc4d13324", size = 271716, upload-time = "2026-01-30T01:03:23.633Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/482e55fa1602e0a7ff012661d8c946bafdc05e480ea5a32f4f7e336d4aa9/pytokens-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:b35d7e5ad269804f6697727702da3c517bb8a5228afa450ab0fa787732055fc9", size = 104539, upload-time = "2026-01-30T01:03:24.788Z" }, + { url = "https://files.pythonhosted.org/packages/30/e8/20e7db907c23f3d63b0be3b8a4fd1927f6da2395f5bcc7f72242bb963dfe/pytokens-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8fcb9ba3709ff77e77f1c7022ff11d13553f3c30299a9fe246a166903e9091eb", size = 168474, upload-time = "2026-01-30T01:03:26.428Z" }, + { url = "https://files.pythonhosted.org/packages/d6/81/88a95ee9fafdd8f5f3452107748fd04c24930d500b9aba9738f3ade642cc/pytokens-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79fc6b8699564e1f9b521582c35435f1bd32dd06822322ec44afdeba666d8cb3", size = 290473, upload-time = "2026-01-30T01:03:27.415Z" }, + { 
url = "https://files.pythonhosted.org/packages/cf/35/3aa899645e29b6375b4aed9f8d21df219e7c958c4c186b465e42ee0a06bf/pytokens-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d31b97b3de0f61571a124a00ffe9a81fb9939146c122c11060725bd5aea79975", size = 303485, upload-time = "2026-01-30T01:03:28.558Z" }, + { url = "https://files.pythonhosted.org/packages/52/a0/07907b6ff512674d9b201859f7d212298c44933633c946703a20c25e9d81/pytokens-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:967cf6e3fd4adf7de8fc73cd3043754ae79c36475c1c11d514fc72cf5490094a", size = 306698, upload-time = "2026-01-30T01:03:29.653Z" }, + { url = "https://files.pythonhosted.org/packages/39/2a/cbbf9250020a4a8dd53ba83a46c097b69e5eb49dd14e708f496f548c6612/pytokens-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:584c80c24b078eec1e227079d56dc22ff755e0ba8654d8383b2c549107528918", size = 116287, upload-time = "2026-01-30T01:03:30.912Z" }, + { url = "https://files.pythonhosted.org/packages/c6/78/397db326746f0a342855b81216ae1f0a32965deccfd7c830a2dbc66d2483/pytokens-0.4.1-py3-none-any.whl", hash = "sha256:26cef14744a8385f35d0e095dc8b3a7583f6c953c2e3d269c7f82484bf5ad2de", size = 13729, upload-time = "2026-01-30T01:03:45.029Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = 
"sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "referencing" +version = "0.37.0" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/0c/0c411a0ec64ccb6d104dcabe0e713e05e153a9a2c3c2bd2b32ce412166fe/rpds_py-0.30.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:679ae98e00c0e8d68a7fda324e16b90fd5260945b45d3b824c892cec9eea3288", size = 370490, upload-time = "2025-11-30T20:21:33.256Z" }, + { url = "https://files.pythonhosted.org/packages/19/6a/4ba3d0fb7297ebae71171822554abe48d7cab29c28b8f9f2c04b79988c05/rpds_py-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4cc2206b76b4f576934f0ed374b10d7ca5f457858b157ca52064bdfc26b9fc00", size = 359751, upload-time = "2025-11-30T20:21:34.591Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/7c/e4933565ef7f7a0818985d87c15d9d273f1a649afa6a52ea35ad011195ea/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:389a2d49eded1896c3d48b0136ead37c48e221b391c052fba3f4055c367f60a6", size = 389696, upload-time = "2025-11-30T20:21:36.122Z" }, + { url = "https://files.pythonhosted.org/packages/5e/01/6271a2511ad0815f00f7ed4390cf2567bec1d4b1da39e2c27a41e6e3b4de/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:32c8528634e1bf7121f3de08fa85b138f4e0dc47657866630611b03967f041d7", size = 403136, upload-time = "2025-11-30T20:21:37.728Z" }, + { url = "https://files.pythonhosted.org/packages/55/64/c857eb7cd7541e9b4eee9d49c196e833128a55b89a9850a9c9ac33ccf897/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f207f69853edd6f6700b86efb84999651baf3789e78a466431df1331608e5324", size = 524699, upload-time = "2025-11-30T20:21:38.92Z" }, + { url = "https://files.pythonhosted.org/packages/9c/ed/94816543404078af9ab26159c44f9e98e20fe47e2126d5d32c9d9948d10a/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67b02ec25ba7a9e8fa74c63b6ca44cf5707f2fbfadae3ee8e7494297d56aa9df", size = 412022, upload-time = "2025-11-30T20:21:40.407Z" }, + { url = "https://files.pythonhosted.org/packages/61/b5/707f6cf0066a6412aacc11d17920ea2e19e5b2f04081c64526eb35b5c6e7/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0e95f6819a19965ff420f65578bacb0b00f251fefe2c8b23347c37174271f3", size = 390522, upload-time = "2025-11-30T20:21:42.17Z" }, + { url = "https://files.pythonhosted.org/packages/13/4e/57a85fda37a229ff4226f8cbcf09f2a455d1ed20e802ce5b2b4a7f5ed053/rpds_py-0.30.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:a452763cc5198f2f98898eb98f7569649fe5da666c2dc6b5ddb10fde5a574221", size = 404579, upload-time = "2025-11-30T20:21:43.769Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/da/c9339293513ec680a721e0e16bf2bac3db6e5d7e922488de471308349bba/rpds_py-0.30.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e0b65193a413ccc930671c55153a03ee57cecb49e6227204b04fae512eb657a7", size = 421305, upload-time = "2025-11-30T20:21:44.994Z" }, + { url = "https://files.pythonhosted.org/packages/f9/be/522cb84751114f4ad9d822ff5a1aa3c98006341895d5f084779b99596e5c/rpds_py-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:858738e9c32147f78b3ac24dc0edb6610000e56dc0f700fd5f651d0a0f0eb9ff", size = 572503, upload-time = "2025-11-30T20:21:46.91Z" }, + { url = "https://files.pythonhosted.org/packages/a2/9b/de879f7e7ceddc973ea6e4629e9b380213a6938a249e94b0cdbcc325bb66/rpds_py-0.30.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:da279aa314f00acbb803da1e76fa18666778e8a8f83484fba94526da5de2cba7", size = 598322, upload-time = "2025-11-30T20:21:48.709Z" }, + { url = "https://files.pythonhosted.org/packages/48/ac/f01fc22efec3f37d8a914fc1b2fb9bcafd56a299edbe96406f3053edea5a/rpds_py-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7c64d38fb49b6cdeda16ab49e35fe0da2e1e9b34bc38bd78386530f218b37139", size = 560792, upload-time = "2025-11-30T20:21:50.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/da/4e2b19d0f131f35b6146425f846563d0ce036763e38913d917187307a671/rpds_py-0.30.0-cp310-cp310-win32.whl", hash = "sha256:6de2a32a1665b93233cde140ff8b3467bdb9e2af2b91079f0333a0974d12d464", size = 221901, upload-time = "2025-11-30T20:21:51.32Z" }, + { url = "https://files.pythonhosted.org/packages/96/cb/156d7a5cf4f78a7cc571465d8aec7a3c447c94f6749c5123f08438bcf7bc/rpds_py-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:1726859cd0de969f88dc8673bdd954185b9104e05806be64bcd87badbe313169", size = 235823, upload-time = "2025-11-30T20:21:52.505Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/6e/f964e88b3d2abee2a82c1ac8366da848fce1c6d834dc2132c3fda3970290/rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425", size = 370157, upload-time = "2025-11-30T20:21:53.789Z" }, + { url = "https://files.pythonhosted.org/packages/94/ba/24e5ebb7c1c82e74c4e4f33b2112a5573ddc703915b13a073737b59b86e0/rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d", size = 359676, upload-time = "2025-11-30T20:21:55.475Z" }, + { url = "https://files.pythonhosted.org/packages/84/86/04dbba1b087227747d64d80c3b74df946b986c57af0a9f0c98726d4d7a3b/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4", size = 389938, upload-time = "2025-11-30T20:21:57.079Z" }, + { url = "https://files.pythonhosted.org/packages/42/bb/1463f0b1722b7f45431bdd468301991d1328b16cffe0b1c2918eba2c4eee/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f", size = 402932, upload-time = "2025-11-30T20:21:58.47Z" }, + { url = "https://files.pythonhosted.org/packages/99/ee/2520700a5c1f2d76631f948b0736cdf9b0acb25abd0ca8e889b5c62ac2e3/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4", size = 525830, upload-time = "2025-11-30T20:21:59.699Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ad/bd0331f740f5705cc555a5e17fdf334671262160270962e69a2bdef3bf76/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97", size = 412033, upload-time = "2025-11-30T20:22:00.991Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/1e/372195d326549bb51f0ba0f2ecb9874579906b97e08880e7a65c3bef1a99/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89", size = 390828, upload-time = "2025-11-30T20:22:02.723Z" }, + { url = "https://files.pythonhosted.org/packages/ab/2b/d88bb33294e3e0c76bc8f351a3721212713629ffca1700fa94979cb3eae8/rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d", size = 404683, upload-time = "2025-11-30T20:22:04.367Z" }, + { url = "https://files.pythonhosted.org/packages/50/32/c759a8d42bcb5289c1fac697cd92f6fe01a018dd937e62ae77e0e7f15702/rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038", size = 421583, upload-time = "2025-11-30T20:22:05.814Z" }, + { url = "https://files.pythonhosted.org/packages/2b/81/e729761dbd55ddf5d84ec4ff1f47857f4374b0f19bdabfcf929164da3e24/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7", size = 572496, upload-time = "2025-11-30T20:22:07.713Z" }, + { url = "https://files.pythonhosted.org/packages/14/f6/69066a924c3557c9c30baa6ec3a0aa07526305684c6f86c696b08860726c/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed", size = 598669, upload-time = "2025-11-30T20:22:09.312Z" }, + { url = "https://files.pythonhosted.org/packages/5f/48/905896b1eb8a05630d20333d1d8ffd162394127b74ce0b0784ae04498d32/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85", size = 561011, upload-time = "2025-11-30T20:22:11.309Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/16/cd3027c7e279d22e5eb431dd3c0fbc677bed58797fe7581e148f3f68818b/rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c", size = 221406, upload-time = "2025-11-30T20:22:13.101Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5b/e7b7aa136f28462b344e652ee010d4de26ee9fd16f1bfd5811f5153ccf89/rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825", size = 236024, upload-time = "2025-11-30T20:22:14.853Z" }, + { url = "https://files.pythonhosted.org/packages/14/a6/364bba985e4c13658edb156640608f2c9e1d3ea3c81b27aa9d889fff0e31/rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229", size = 229069, upload-time = "2025-11-30T20:22:16.577Z" }, + { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086, upload-time = "2025-11-30T20:22:17.93Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053, upload-time = "2025-11-30T20:22:19.297Z" }, + { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763, upload-time = "2025-11-30T20:22:21.661Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951, upload-time = "2025-11-30T20:22:23.408Z" }, + { url = "https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", size = 514622, upload-time = "2025-11-30T20:22:25.16Z" }, + { url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492, upload-time = "2025-11-30T20:22:26.505Z" }, + { url = "https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080, upload-time = "2025-11-30T20:22:27.934Z" }, + { url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 408680, upload-time = "2025-11-30T20:22:29.341Z" }, + { url = "https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589, upload-time = "2025-11-30T20:22:31.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289, upload-time = "2025-11-30T20:22:32.997Z" }, + { url = "https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737, upload-time = "2025-11-30T20:22:34.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120, upload-time = "2025-11-30T20:22:35.903Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782, upload-time = "2025-11-30T20:22:37.271Z" }, + { url = "https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463, upload-time = "2025-11-30T20:22:39.021Z" }, + { url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868, upload-time = "2025-11-30T20:22:40.493Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/dc/d61221eb88ff410de3c49143407f6f3147acf2538c86f2ab7ce65ae7d5f9/rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2", size = 374887, upload-time = "2025-11-30T20:22:41.812Z" }, + { url = "https://files.pythonhosted.org/packages/fd/32/55fb50ae104061dbc564ef15cc43c013dc4a9f4527a1f4d99baddf56fe5f/rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8", size = 358904, upload-time = "2025-11-30T20:22:43.479Z" }, + { url = "https://files.pythonhosted.org/packages/58/70/faed8186300e3b9bdd138d0273109784eea2396c68458ed580f885dfe7ad/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4", size = 389945, upload-time = "2025-11-30T20:22:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/bd/a8/073cac3ed2c6387df38f71296d002ab43496a96b92c823e76f46b8af0543/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136", size = 407783, upload-time = "2025-11-30T20:22:46.103Z" }, + { url = "https://files.pythonhosted.org/packages/77/57/5999eb8c58671f1c11eba084115e77a8899d6e694d2a18f69f0ba471ec8b/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7", size = 515021, upload-time = "2025-11-30T20:22:47.458Z" }, + { url = "https://files.pythonhosted.org/packages/e0/af/5ab4833eadc36c0a8ed2bc5c0de0493c04f6c06de223170bd0798ff98ced/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2", size = 414589, upload-time = "2025-11-30T20:22:48.872Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/de/f7192e12b21b9e9a68a6d0f249b4af3fdcdff8418be0767a627564afa1f1/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6", size = 394025, upload-time = "2025-11-30T20:22:50.196Z" }, + { url = "https://files.pythonhosted.org/packages/91/c4/fc70cd0249496493500e7cc2de87504f5aa6509de1e88623431fec76d4b6/rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e", size = 408895, upload-time = "2025-11-30T20:22:51.87Z" }, + { url = "https://files.pythonhosted.org/packages/58/95/d9275b05ab96556fefff73a385813eb66032e4c99f411d0795372d9abcea/rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d", size = 422799, upload-time = "2025-11-30T20:22:53.341Z" }, + { url = "https://files.pythonhosted.org/packages/06/c1/3088fc04b6624eb12a57eb814f0d4997a44b0d208d6cace713033ff1a6ba/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7", size = 572731, upload-time = "2025-11-30T20:22:54.778Z" }, + { url = "https://files.pythonhosted.org/packages/d8/42/c612a833183b39774e8ac8fecae81263a68b9583ee343db33ab571a7ce55/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31", size = 599027, upload-time = "2025-11-30T20:22:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/525a50f45b01d70005403ae0e25f43c0384369ad24ffe46e8d9068b50086/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95", size = 563020, upload-time = "2025-11-30T20:22:58.2Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/5d/47c4655e9bcd5ca907148535c10e7d489044243cc9941c16ed7cd53be91d/rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d", size = 223139, upload-time = "2025-11-30T20:23:00.209Z" }, + { url = "https://files.pythonhosted.org/packages/f2/e1/485132437d20aa4d3e1d8b3fb5a5e65aa8139f1e097080c2a8443201742c/rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15", size = 240224, upload-time = "2025-11-30T20:23:02.008Z" }, + { url = "https://files.pythonhosted.org/packages/24/95/ffd128ed1146a153d928617b0ef673960130be0009c77d8fbf0abe306713/rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1", size = 230645, upload-time = "2025-11-30T20:23:03.43Z" }, + { url = "https://files.pythonhosted.org/packages/ff/1b/b10de890a0def2a319a2626334a7f0ae388215eb60914dbac8a3bae54435/rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a", size = 364443, upload-time = "2025-11-30T20:23:04.878Z" }, + { url = "https://files.pythonhosted.org/packages/0d/bf/27e39f5971dc4f305a4fb9c672ca06f290f7c4e261c568f3dea16a410d47/rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e", size = 353375, upload-time = "2025-11-30T20:23:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/40/58/442ada3bba6e8e6615fc00483135c14a7538d2ffac30e2d933ccf6852232/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000", size = 383850, upload-time = "2025-11-30T20:23:07.825Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/14/f59b0127409a33c6ef6f5c1ebd5ad8e32d7861c9c7adfa9a624fc3889f6c/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db", size = 392812, upload-time = "2025-11-30T20:23:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/66/e0be3e162ac299b3a22527e8913767d869e6cc75c46bd844aa43fb81ab62/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2", size = 517841, upload-time = "2025-11-30T20:23:11.186Z" }, + { url = "https://files.pythonhosted.org/packages/3d/55/fa3b9cf31d0c963ecf1ba777f7cf4b2a2c976795ac430d24a1f43d25a6ba/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa", size = 408149, upload-time = "2025-11-30T20:23:12.864Z" }, + { url = "https://files.pythonhosted.org/packages/60/ca/780cf3b1a32b18c0f05c441958d3758f02544f1d613abf9488cd78876378/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083", size = 383843, upload-time = "2025-11-30T20:23:14.638Z" }, + { url = "https://files.pythonhosted.org/packages/82/86/d5f2e04f2aa6247c613da0c1dd87fcd08fa17107e858193566048a1e2f0a/rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9", size = 396507, upload-time = "2025-11-30T20:23:16.105Z" }, + { url = "https://files.pythonhosted.org/packages/4b/9a/453255d2f769fe44e07ea9785c8347edaf867f7026872e76c1ad9f7bed92/rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0", size = 414949, upload-time = "2025-11-30T20:23:17.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/31/622a86cdc0c45d6df0e9ccb6becdba5074735e7033c20e401a6d9d0e2ca0/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94", size = 565790, upload-time = "2025-11-30T20:23:19.029Z" }, + { url = "https://files.pythonhosted.org/packages/1c/5d/15bbf0fb4a3f58a3b1c67855ec1efcc4ceaef4e86644665fff03e1b66d8d/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08", size = 590217, upload-time = "2025-11-30T20:23:20.885Z" }, + { url = "https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806, upload-time = "2025-11-30T20:23:22.488Z" }, + { url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341, upload-time = "2025-11-30T20:23:24.449Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768, upload-time = "2025-11-30T20:23:25.908Z" }, + { url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099, upload-time = "2025-11-30T20:23:27.316Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192, upload-time = "2025-11-30T20:23:29.151Z" }, + { url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080, upload-time = "2025-11-30T20:23:30.785Z" }, + { url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841, upload-time = "2025-11-30T20:23:32.209Z" }, + { url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670, upload-time = "2025-11-30T20:23:33.742Z" }, + { url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005, upload-time = "2025-11-30T20:23:35.253Z" }, + { url = "https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112, upload-time = "2025-11-30T20:23:36.842Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049, upload-time = "2025-11-30T20:23:38.343Z" }, + { url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661, upload-time = "2025-11-30T20:23:40.263Z" }, + { url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606, upload-time = "2025-11-30T20:23:42.186Z" }, + { url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126, upload-time = "2025-11-30T20:23:44.086Z" }, + { url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371, upload-time = "2025-11-30T20:23:46.004Z" }, + { url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298, upload-time = "2025-11-30T20:23:47.696Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604, upload-time = "2025-11-30T20:23:49.501Z" }, + { url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391, upload-time = "2025-11-30T20:23:50.96Z" }, + { url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868, upload-time = "2025-11-30T20:23:52.494Z" }, + { url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747, upload-time = "2025-11-30T20:23:54.036Z" }, + { url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795, upload-time = "2025-11-30T20:23:55.556Z" }, + { url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330, upload-time = "2025-11-30T20:23:57.033Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194, upload-time = "2025-11-30T20:23:58.637Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340, upload-time = "2025-11-30T20:24:00.2Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765, upload-time = "2025-11-30T20:24:01.759Z" }, + { url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834, upload-time = "2025-11-30T20:24:03.687Z" }, + { url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470, upload-time = "2025-11-30T20:24:05.232Z" }, + { url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630, upload-time = "2025-11-30T20:24:06.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148, upload-time = "2025-11-30T20:24:08.445Z" }, + { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" }, + { url = "https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" }, + { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/69/71/3f34339ee70521864411f8b6992e7ab13ac30d8e4e3309e07c7361767d91/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58", size = 372292, upload-time = "2025-11-30T20:24:16.537Z" }, + { url = "https://files.pythonhosted.org/packages/57/09/f183df9b8f2d66720d2ef71075c59f7e1b336bec7ee4c48f0a2b06857653/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a", size = 362128, upload-time = "2025-11-30T20:24:18.086Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/68/5c2594e937253457342e078f0cc1ded3dd7b2ad59afdbf2d354869110a02/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb", size = 391542, upload-time = "2025-11-30T20:24:20.092Z" }, + { url = "https://files.pythonhosted.org/packages/49/5c/31ef1afd70b4b4fbdb2800249f34c57c64beb687495b10aec0365f53dfc4/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c", size = 404004, upload-time = "2025-11-30T20:24:22.231Z" }, + { url = "https://files.pythonhosted.org/packages/e3/63/0cfbea38d05756f3440ce6534d51a491d26176ac045e2707adc99bb6e60a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3", size = 527063, upload-time = "2025-11-30T20:24:24.302Z" }, + { url = "https://files.pythonhosted.org/packages/42/e6/01e1f72a2456678b0f618fc9a1a13f882061690893c192fcad9f2926553a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5", size = 413099, upload-time = "2025-11-30T20:24:25.916Z" }, + { url = "https://files.pythonhosted.org/packages/b8/25/8df56677f209003dcbb180765520c544525e3ef21ea72279c98b9aa7c7fb/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738", size = 392177, upload-time = "2025-11-30T20:24:27.834Z" }, + { url = "https://files.pythonhosted.org/packages/4a/b4/0a771378c5f16f8115f796d1f437950158679bcd2a7c68cf251cfb00ed5b/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f", size = 406015, upload-time = 
"2025-11-30T20:24:29.457Z" }, + { url = "https://files.pythonhosted.org/packages/36/d8/456dbba0af75049dc6f63ff295a2f92766b9d521fa00de67a2bd6427d57a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877", size = 423736, upload-time = "2025-11-30T20:24:31.22Z" }, + { url = "https://files.pythonhosted.org/packages/13/64/b4d76f227d5c45a7e0b796c674fd81b0a6c4fbd48dc29271857d8219571c/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a", size = 573981, upload-time = "2025-11-30T20:24:32.934Z" }, + { url = "https://files.pythonhosted.org/packages/20/91/092bacadeda3edf92bf743cc96a7be133e13a39cdbfd7b5082e7ab638406/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4", size = 599782, upload-time = "2025-11-30T20:24:35.169Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b7/b95708304cd49b7b6f82fdd039f1748b66ec2b21d6a45180910802f1abf1/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e", size = 562191, upload-time = "2025-11-30T20:24:36.853Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/31/d6e536cdebb6568ae75a7f00e4b4819ae0ad2640c3604c305a0428680b0c/ruff-0.15.4.tar.gz", hash = "sha256:3412195319e42d634470cc97aa9803d07e9d5c9223b99bcb1518f0c725f26ae1", size = 4569550, upload-time = "2026-02-26T20:04:14.959Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/82/c11a03cfec3a4d26a0ea1e571f0f44be5993b923f905eeddfc397c13d360/ruff-0.15.4-py3-none-linux_armv6l.whl", hash = "sha256:a1810931c41606c686bae8b5b9a8072adac2f611bb433c0ba476acba17a332e0", size = 10453333, upload-time = 
"2026-02-26T20:04:20.093Z" }, + { url = "https://files.pythonhosted.org/packages/ce/5d/6a1f271f6e31dffb31855996493641edc3eef8077b883eaf007a2f1c2976/ruff-0.15.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5a1632c66672b8b4d3e1d1782859e98d6e0b4e70829530666644286600a33992", size = 10853356, upload-time = "2026-02-26T20:04:05.808Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d8/0fab9f8842b83b1a9c2bf81b85063f65e93fb512e60effa95b0be49bfc54/ruff-0.15.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4386ba2cd6c0f4ff75252845906acc7c7c8e1ac567b7bc3d373686ac8c222ba", size = 10187434, upload-time = "2026-02-26T20:03:54.656Z" }, + { url = "https://files.pythonhosted.org/packages/85/cc/cc220fd9394eff5db8d94dec199eec56dd6c9f3651d8869d024867a91030/ruff-0.15.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2496488bdfd3732747558b6f95ae427ff066d1fcd054daf75f5a50674411e75", size = 10535456, upload-time = "2026-02-26T20:03:52.738Z" }, + { url = "https://files.pythonhosted.org/packages/fa/0f/bced38fa5cf24373ec767713c8e4cadc90247f3863605fb030e597878661/ruff-0.15.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f1c4893841ff2d54cbda1b2860fa3260173df5ddd7b95d370186f8a5e66a4ac", size = 10287772, upload-time = "2026-02-26T20:04:08.138Z" }, + { url = "https://files.pythonhosted.org/packages/2b/90/58a1802d84fed15f8f281925b21ab3cecd813bde52a8ca033a4de8ab0e7a/ruff-0.15.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:820b8766bd65503b6c30aaa6331e8ef3a6e564f7999c844e9a547c40179e440a", size = 11049051, upload-time = "2026-02-26T20:04:03.53Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ac/b7ad36703c35f3866584564dc15f12f91cb1a26a897dc2fd13d7cb3ae1af/ruff-0.15.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9fb74bab47139c1751f900f857fa503987253c3ef89129b24ed375e72873e85", size = 11890494, upload-time = "2026-02-26T20:04:10.497Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/3d/3eb2f47a39a8b0da99faf9c54d3eb24720add1e886a5309d4d1be73a6380/ruff-0.15.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f80c98765949c518142b3a50a5db89343aa90f2c2bf7799de9986498ae6176db", size = 11326221, upload-time = "2026-02-26T20:04:12.84Z" }, + { url = "https://files.pythonhosted.org/packages/ff/90/bf134f4c1e5243e62690e09d63c55df948a74084c8ac3e48a88468314da6/ruff-0.15.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451a2e224151729b3b6c9ffb36aed9091b2996fe4bdbd11f47e27d8f2e8888ec", size = 11168459, upload-time = "2026-02-26T20:04:00.969Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e5/a64d27688789b06b5d55162aafc32059bb8c989c61a5139a36e1368285eb/ruff-0.15.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8f157f2e583c513c4f5f896163a93198297371f34c04220daf40d133fdd4f7f", size = 11104366, upload-time = "2026-02-26T20:03:48.099Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f6/32d1dcb66a2559763fc3027bdd65836cad9eb09d90f2ed6a63d8e9252b02/ruff-0.15.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:917cc68503357021f541e69b35361c99387cdbbf99bd0ea4aa6f28ca99ff5338", size = 10510887, upload-time = "2026-02-26T20:03:45.771Z" }, + { url = "https://files.pythonhosted.org/packages/ff/92/22d1ced50971c5b6433aed166fcef8c9343f567a94cf2b9d9089f6aa80fe/ruff-0.15.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e9737c8161da79fd7cfec19f1e35620375bd8b2a50c3e77fa3d2c16f574105cc", size = 10285939, upload-time = "2026-02-26T20:04:22.42Z" }, + { url = "https://files.pythonhosted.org/packages/e6/f4/7c20aec3143837641a02509a4668fb146a642fd1211846634edc17eb5563/ruff-0.15.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:291258c917539e18f6ba40482fe31d6f5ac023994ee11d7bdafd716f2aab8a68", size = 10765471, upload-time = "2026-02-26T20:03:58.924Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/09/6d2f7586f09a16120aebdff8f64d962d7c4348313c77ebb29c566cefc357/ruff-0.15.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3f83c45911da6f2cd5936c436cf86b9f09f09165f033a99dcf7477e34041cbc3", size = 11263382, upload-time = "2026-02-26T20:04:24.424Z" }, + { url = "https://files.pythonhosted.org/packages/1b/fa/2ef715a1cd329ef47c1a050e10dee91a9054b7ce2fcfdd6a06d139afb7ec/ruff-0.15.4-py3-none-win32.whl", hash = "sha256:65594a2d557d4ee9f02834fcdf0a28daa8b3b9f6cb2cb93846025a36db47ef22", size = 10506664, upload-time = "2026-02-26T20:03:50.56Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a8/c688ef7e29983976820d18710f955751d9f4d4eb69df658af3d006e2ba3e/ruff-0.15.4-py3-none-win_amd64.whl", hash = "sha256:04196ad44f0df220c2ece5b0e959c2f37c777375ec744397d21d15b50a75264f", size = 11651048, upload-time = "2026-02-26T20:04:17.191Z" }, + { url = "https://files.pythonhosted.org/packages/3e/0a/9e1be9035b37448ce2e68c978f0591da94389ade5a5abafa4cf99985d1b2/ruff-0.15.4-py3-none-win_arm64.whl", hash = "sha256:60d5177e8cfc70e51b9c5fad936c634872a74209f934c1e79107d11787ad5453", size = 10966776, upload-time = "2026-02-26T20:03:56.908Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "sse-starlette" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "anyio" }, + { name = "starlette" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/9f/c3695c2d2d4ef70072c3a06992850498b01c6bc9be531950813716b426fa/sse_starlette-3.3.2.tar.gz", hash = "sha256:678fca55a1945c734d8472a6cad186a55ab02840b4f6786f5ee8770970579dcd", size = 32326, upload-time = "2026-02-28T11:24:34.36Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/28/8cb142d3fe80c4a2d8af54ca0b003f47ce0ba920974e7990fa6e016402d1/sse_starlette-3.3.2-py3-none-any.whl", hash = "sha256:5c3ea3dad425c601236726af2f27689b74494643f57017cafcb6f8c9acfbb862", size = 14270, upload-time = "2026-02-28T11:24:32.984Z" }, +] + +[[package]] +name = "starlette" +version = "0.52.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, +] + +[[package]] +name = "tomli" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, + { url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, + { url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, + { url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, + { url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.41.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/32/ce/eeb58ae4ac36fe09e3842eb02e0eb676bf2c53ae062b98f1b2531673efdd/uvicorn-0.41.0.tar.gz", hash = "sha256:09d11cf7008da33113824ee5a1c6422d89fbc2ff476540d69a34c87fab8b571a", size = 82633, upload-time = "2026-02-16T23:07:24.1Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/83/e4/d04a086285c20886c0daad0e026f250869201013d18f81d9ff5eada73a88/uvicorn-0.41.0-py3-none-any.whl", hash = "sha256:29e35b1d2c36a04b9e180d4007ede3bcb32a85fbdfd6c6aeb3f26839de088187", size = 68783, upload-time = "2026-02-16T23:07:22.357Z" }, +] + +[[package]] +name = "volcengine-python-sdk" +version = "5.0.12" +source = { git = "https://code.byted.org/iaasng/volcengine-python-sdk.git?rev=aidap-Python-2025-10-01-online-2306-2026_02_27_11_45_12#e0c7b5980e5fa6d3621df3823e59fae957d68e82" } +dependencies = [ + { name = "certifi" }, + { name = "python-dateutil" }, + { name = "six" }, + { name = "urllib3" }, +] From 9e29d34909c13c17cfe3186ee14cb655d75e3858 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Wed, 4 Mar 2026 14:18:26 +0800 Subject: [PATCH 02/32] Update pyproject.toml with uvx script entry and package path --- server/mcp_server_supabase/pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/server/mcp_server_supabase/pyproject.toml b/server/mcp_server_supabase/pyproject.toml index a5419db9..8bdbb3f5 100644 --- a/server/mcp_server_supabase/pyproject.toml +++ b/server/mcp_server_supabase/pyproject.toml @@ -28,6 +28,7 @@ legacy = [ [project.scripts] mcp-server-supabase = "mcp_server_supabase.server:main" +supabase-aidap = "mcp_server_supabase.server:main" [build-system] requires = ["hatchling"] From f5ff80b57313232fab839a53ea98597b3435d394 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Thu, 5 Mar 2026 19:15:35 +0800 Subject: [PATCH 03/32] chore(supabase): switch volcengine sdk to github source --- server/mcp_server_supabase/pyproject.toml | 2 +- server/mcp_server_supabase/uv.lock | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/server/mcp_server_supabase/pyproject.toml b/server/mcp_server_supabase/pyproject.toml index 8bdbb3f5..b1348b7c 100644 --- a/server/mcp_server_supabase/pyproject.toml +++ b/server/mcp_server_supabase/pyproject.toml @@ -12,7 +12,7 @@ dependencies = [ 
"mcp>=1.1.2", "httpx>=0.27.0", "pydantic>=2.0.0", - "volcengine-python-sdk @ git+https://code.byted.org/iaasng/volcengine-python-sdk.git@aidap-Python-2025-10-01-online-2306-2026_02_27_11_45_12", + "volcengine-python-sdk @ git+https://github.com/sjcsjcsjc/volcengine-python-sdk.git", ] [project.optional-dependencies] diff --git a/server/mcp_server_supabase/uv.lock b/server/mcp_server_supabase/uv.lock index dceb75fe..ef598d6c 100644 --- a/server/mcp_server_supabase/uv.lock +++ b/server/mcp_server_supabase/uv.lock @@ -419,7 +419,7 @@ requires-dist = [ { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" }, { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23.0" }, { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.3.0" }, - { name = "volcengine-python-sdk", git = "https://code.byted.org/iaasng/volcengine-python-sdk.git?rev=aidap-Python-2025-10-01-online-2306-2026_02_27_11_45_12" }, + { name = "volcengine-python-sdk", git = "https://github.com/sjcsjcsjc/volcengine-python-sdk.git" }, ] provides-extras = ["dev", "legacy"] @@ -1129,8 +1129,8 @@ wheels = [ [[package]] name = "volcengine-python-sdk" -version = "5.0.12" -source = { git = "https://code.byted.org/iaasng/volcengine-python-sdk.git?rev=aidap-Python-2025-10-01-online-2306-2026_02_27_11_45_12#e0c7b5980e5fa6d3621df3823e59fae957d68e82" } +version = "5.0.14" +source = { git = "https://github.com/sjcsjcsjc/volcengine-python-sdk.git#9905a8853a0e5fd26fdae93eefb4f201e8bef539" } dependencies = [ { name = "certifi" }, { name = "python-dateutil" }, From 3726e922bacb0bcfd7fe1857517ee2f9f2f9c88e Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Thu, 5 Mar 2026 21:57:04 +0800 Subject: [PATCH 04/32] fix(supabase): support aidap workspace filter signature variants --- .../tools/workspace_tools.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py 
b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index 826c7f86..d601504f 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -2,6 +2,7 @@ import json import logging +import inspect from typing import Optional logger = logging.getLogger(__name__) @@ -23,14 +24,14 @@ async def list_workspaces(self) -> str: try: from volcenginesdkaidap.models import DescribeWorkspacesRequest, FilterForDescribeWorkspacesInput - # 添加过滤条件,只查询 Supabase 类型的 workspace - filters = [ - FilterForDescribeWorkspacesInput( - name="DBEngineVersion", - value="Supabase_1_24", - mode="Exact" - ) - ] + parameters = inspect.signature(FilterForDescribeWorkspacesInput).parameters + filter_kwargs = { + "name": "DBEngineVersion", + "value": "Supabase_1_24", + } + if "mode" in parameters: + filter_kwargs["mode"] = "Exact" + filters = [FilterForDescribeWorkspacesInput(**filter_kwargs)] request = DescribeWorkspacesRequest(filters=filters) response = self.aidap_client.client.describe_workspaces(request) From c0b06304dc8cef4eb8822dfda835a656857d1494 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Thu, 5 Mar 2026 21:57:51 +0800 Subject: [PATCH 05/32] =?UTF-8?q?fix:=E6=B3=A8=E5=86=8C=E6=89=80=E6=9C=89?= =?UTF-8?q?=E5=B7=A5=E5=85=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 3 +- server/mcp_server_supabase/README_zh.md | 6 +- .../platform/aidap_client.py | 78 ++++++++++++++++++- .../src/mcp_server_supabase/server.py | 38 +++++++++ .../tools/database_tools.py | 13 ++++ .../tools/storage_tools.py | 19 ++++- .../tools/workspace_tools.py | 74 ++++++++++++++++++ 7 files changed, 223 insertions(+), 8 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index 7106fd73..4153ef6d 100644 --- a/server/mcp_server_supabase/README.md 
+++ b/server/mcp_server_supabase/README.md @@ -143,7 +143,7 @@ mv .env_example .env # 填写环境变量 - `get_storage_config` - `update_storage_config` -### 工作空间管理(12) +### 工作空间管理(13) - `list_workspaces` - `get_workspace` - `create_workspace` @@ -156,6 +156,7 @@ mv .env_example .env # 填写环境变量 - `modify_workspace_settings` - `modify_workspace_deletion_protection` - `reset_workspace_password` +- `reset_branch` (official-aligned: reset migrations of a development branch) --- diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index daba9ca6..8fe69a34 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -23,11 +23,11 @@ ### 前置要求 -⚠️ **重要**: 此 MCP server 依赖内部的 volcengine-python-sdk(包含 AIDAP 模块),需要先安装: +⚠️ **重要**: 此 MCP server 依赖 volcengine-python-sdk(包含 AIDAP 模块),需要先安装: ```bash # 克隆 SDK 仓库 -git clone https://code.byted.org/iaasng/volcengine-python-sdk.git -b aidap-Python-2025-10-01-online-2306-2026_02_27_11_45_12 +git clone https://github.com/sjcsjcsjc/volcengine-python-sdk.git # 安装 SDK cd volcengine-python-sdk @@ -179,7 +179,7 @@ clear_default_branch_cache() - `get_branch_detail` - 获取分支详情 - `create_branch` - 创建新分支 - `delete_branch` - 删除分支 -- `reset_branch` - 重置分支 +- `reset_branch` - 重置分支(对齐官方:重置开发分支迁移;当前 AIDAP SDK 不支持 `migration_version` 参数) - `restart_branch` - 重启分支 - `restore_branch` - 恢复分支 - `set_default_branch` - 设置默认分支 diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py index 9914cc53..f9f39471 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py @@ -1,5 +1,5 @@ import logging -from typing import Optional, Dict, Any +from typing import Optional from ..config import ( VOLCENGINE_ACCESS_KEY, VOLCENGINE_SECRET_KEY, @@ -18,7 +18,10 @@ DescribeBranchesRequest, 
DescribeWorkspaceEndpointRequest, DescribeAPIKeysRequest, - DescribeComputesRequest, + ResetBranchRequest, + CreateBranchRequest, + DeleteBranchRequest, + BranchSettingsForCreateBranchInput, ) except ImportError: logger.error("volcengine-python-sdk not installed") @@ -61,6 +64,65 @@ async def get_default_branch_id(self, workspace_id: str, use_cache: bool = True) logger.error(f"Error getting default branch: {e}") return None + async def list_branches(self, workspace_id: str) -> list[dict]: + try: + request = DescribeBranchesRequest(workspace_id=workspace_id) + response = self.client.describe_branches(request) + + branches = [] + if hasattr(response, 'branches') and response.branches: + for branch in response.branches: + branches.append({ + "branch_id": getattr(branch, 'branch_id', None), + "name": getattr(branch, 'name', None), + "status": getattr(branch, 'status', None), + "default": getattr(branch, 'default', False), + "parent_id": getattr(branch, 'parent_id', None), + }) + return branches + except Exception as e: + logger.error(f"Error listing branches: {e}") + return [] + + async def create_branch(self, workspace_id: str, name: str = "develop") -> dict: + try: + request = CreateBranchRequest( + workspace_id=workspace_id, + branch_settings=BranchSettingsForCreateBranchInput(name=name), + ) + response = self.client.create_branch(request) + + branch_id = getattr(response, 'branch_id', None) + if not branch_id and hasattr(response, 'branch'): + branch_id = getattr(response.branch, 'branch_id', None) + + return { + "success": True, + "branch_id": branch_id, + "workspace_id": workspace_id, + } + except Exception as e: + logger.error(f"Error creating branch: {e}") + return { + "success": False, + "error": str(e), + } + + async def delete_branch(self, workspace_id: str, branch_id: str) -> dict: + try: + request = DeleteBranchRequest( + workspace_id=workspace_id, + branch_id=branch_id, + ) + self.client.delete_branch(request) + return {"success": True} + except Exception 
as e: + logger.error(f"Error deleting branch: {e}") + return { + "success": False, + "error": str(e), + } + async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, use_cache: bool = True) -> Optional[str]: # 检查缓存 cache_key = f"{workspace_id}:{branch_id}" if branch_id else workspace_id @@ -105,6 +167,18 @@ async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, logger.error(f"Error getting endpoint: {e}") return None + async def reset_branch(self, workspace_id: str, branch_id: str) -> bool: + try: + request = ResetBranchRequest( + workspace_id=workspace_id, + branch_id=branch_id, + ) + self.client.reset_branch(request) + return True + except Exception as e: + logger.error(f"Error resetting branch: {e}") + return False + async def get_api_key(self, workspace_id: str, key_type: str = "service_role", branch_id: Optional[str] = None, use_cache: bool = True) -> Optional[str]: # 检查缓存 diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index de4a477d..84c52d07 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -113,6 +113,14 @@ async def get_storage_config(workspace_id: str = None) -> str: return await storage_tools.get_storage_config(workspace_id) +@mcp.tool() +async def update_storage_config(config: str, workspace_id: str = None) -> str: + """Updates the storage configuration for a workspace.""" + import json + parsed_config = json.loads(config) + return await storage_tools.update_storage_config(parsed_config, workspace_id) + + @mcp.tool() async def execute_sql(query: str, workspace_id: str = None) -> str: """Executes raw SQL in the Postgres database.""" @@ -132,6 +140,12 @@ async def list_migrations(workspace_id: str = None) -> str: return await database_tools.list_migrations(workspace_id) +@mcp.tool() +async def list_extensions(workspace_id: str 
= None) -> str: + """Lists all PostgreSQL extensions in the database.""" + return await database_tools.list_extensions(workspace_id) + + @mcp.tool() async def apply_migration(name: str, query: str, workspace_id: str = None) -> str: """Applies a migration to the database.""" @@ -150,6 +164,30 @@ async def get_workspace(workspace_id: str) -> str: return await workspace_tools.get_workspace(workspace_id) +@mcp.tool() +async def list_branches(workspace_id: str = None) -> str: + """Lists all development branches of a workspace.""" + return await workspace_tools.list_branches(workspace_id) + + +@mcp.tool() +async def create_branch(name: str = "develop", workspace_id: str = None) -> str: + """Creates a development branch.""" + return await workspace_tools.create_branch(name, workspace_id) + + +@mcp.tool() +async def delete_branch(branch_id: str, workspace_id: str = None) -> str: + """Deletes a development branch.""" + return await workspace_tools.delete_branch(branch_id, workspace_id) + + +@mcp.tool() +async def reset_branch(branch_id: str, migration_version: str = None, workspace_id: str = None) -> str: + """Resets migrations of a development branch. 
Any untracked data or schema changes will be lost.""" + return await workspace_tools.reset_branch(branch_id, migration_version, workspace_id) + + def main(): parser = argparse.ArgumentParser(description="Supabase MCP Server") parser.add_argument("--port", type=int, default=8000, help="Port to run the server on") diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py index c2d831a8..3dccfe24 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py @@ -59,6 +59,19 @@ async def list_migrations(self, workspace_id: Optional[str] = None) -> List[dict except Exception as e: logger.warning(f"Failed to list migrations: {e}") return [] + + @handle_errors + async def list_extensions(self, workspace_id: Optional[str] = None) -> List[dict]: + query = """ + SELECT + e.extname AS name, + n.nspname AS schema, + e.extversion AS version + FROM pg_extension e + JOIN pg_namespace n ON n.oid = e.extnamespace + ORDER BY e.extname + """ + return await self.execute_sql(query, workspace_id) @handle_errors @read_only_check diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py index 3d3a6b67..3368a58f 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py @@ -1,8 +1,8 @@ -from typing import Optional, List +from typing import Optional, List, Dict, Any import logging from .base import BaseTools from ..utils import handle_errors, read_only_check -from ..models import StorageBucket, StorageConfig +from ..models import StorageConfig logger = logging.getLogger(__name__) @@ -65,3 +65,18 @@ async def get_storage_config(self, workspace_id: Optional[str] = None) -> 
Storag client = await self._get_client(ws_id) result = await client.call_api("/storage/v1/config") return StorageConfig(**result) + + @handle_errors + @read_only_check + async def update_storage_config( + self, + config: Dict[str, Any], + workspace_id: Optional[str] = None, + ) -> dict: + if not isinstance(config, dict) or not config: + raise ValueError("config must be a non-empty object") + + ws_id = self._get_workspace_id(workspace_id) + client = await self._get_client(ws_id) + await client.call_api("/storage/v1/config", method="PUT", json_data=config) + return {"success": True} diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index d601504f..c762e56e 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -5,6 +5,8 @@ import inspect from typing import Optional +from ..utils import read_only_check + logger = logging.getLogger(__name__) @@ -109,3 +111,75 @@ async def get_workspace(self, workspace_id: str) -> str: "success": False, "error": str(e) }, indent=2) + + @read_only_check + async def create_branch( + self, + name: str = "develop", + workspace_id: Optional[str] = None, + ) -> str: + ws_id = workspace_id or self.default_workspace_id + if not ws_id: + return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + + result = await self.aidap_client.create_branch(ws_id, name) + return json.dumps(result, indent=2) + + async def list_branches(self, workspace_id: Optional[str] = None) -> str: + ws_id = workspace_id or self.default_workspace_id + if not ws_id: + return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + + branches = await self.aidap_client.list_branches(ws_id) + return json.dumps({"branches": branches}, indent=2) + + @read_only_check + async def delete_branch(self, 
branch_id: str, workspace_id: Optional[str] = None) -> str: + ws_id = workspace_id or self.default_workspace_id + if not ws_id: + return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + + result = await self.aidap_client.delete_branch(ws_id, branch_id) + return json.dumps(result, indent=2) + + @read_only_check + async def reset_branch( + self, + branch_id: str, + migration_version: Optional[str] = None, + workspace_id: Optional[str] = None, + ) -> str: + """Resets migrations of a development branch. + + Args: + branch_id: Branch ID to reset + migration_version: Target migration version (official schema field, not supported by current AIDAP SDK) + workspace_id: The workspace ID (optional) + + Returns: + JSON string containing operation result + """ + ws_id = workspace_id or self.default_workspace_id + if not ws_id: + return json.dumps({ + "success": False, + "error": "workspace_id is required" + }, indent=2) + + if migration_version: + return json.dumps({ + "success": False, + "error": "migration_version is not supported by current AIDAP reset_branch API" + }, indent=2) + + try: + success = await self.aidap_client.reset_branch(ws_id, branch_id) + return json.dumps({ + "success": success + }, indent=2) + except Exception as e: + logger.error(f"Error resetting branch: {e}") + return json.dumps({ + "success": False, + "error": str(e) + }, indent=2) From 46ae7685d680ddaca4fab31fe638c86ac0fd2df6 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Thu, 5 Mar 2026 22:22:29 +0800 Subject: [PATCH 06/32] =?UTF-8?q?fix:=E6=B3=A8=E5=86=8C=E6=89=80=E6=9C=89?= =?UTF-8?q?=E5=B7=A5=E5=85=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../platform/aidap_client.py | 52 +++++++++++++------ .../platform/supabase_client.py | 27 +++++++--- .../src/mcp_server_supabase/tools/base.py | 2 +- .../tools/database_tools.py | 43 ++++++++++++--- .../tools/edge_function_tools.py | 37 +++++++------ 
.../tools/storage_tools.py | 6 ++- 6 files changed, 121 insertions(+), 46 deletions(-) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py index f9f39471..e2542b1f 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py @@ -1,4 +1,6 @@ import logging +import asyncio +import os from typing import Optional from ..config import ( VOLCENGINE_ACCESS_KEY, @@ -10,6 +12,7 @@ ) logger = logging.getLogger(__name__) +ENDPOINT_SCHEME = os.getenv("SUPABASE_ENDPOINT_SCHEME", "http").strip().lower() or "http" try: import volcenginesdkcore @@ -109,19 +112,32 @@ async def create_branch(self, workspace_id: str, name: str = "develop") -> dict: } async def delete_branch(self, workspace_id: str, branch_id: str) -> dict: - try: - request = DeleteBranchRequest( - workspace_id=workspace_id, - branch_id=branch_id, - ) - self.client.delete_branch(request) - return {"success": True} - except Exception as e: - logger.error(f"Error deleting branch: {e}") - return { - "success": False, - "error": str(e), - } + max_attempts = 6 + delay_seconds = 2 + for attempt in range(1, max_attempts + 1): + try: + request = DeleteBranchRequest( + workspace_id=workspace_id, + branch_id=branch_id, + ) + self.client.delete_branch(request) + return {"success": True} + except Exception as e: + error_text = str(e) + if "BranchNotFound" in error_text: + return {"success": True} + if "OperationDenied_BranchNotReady" in error_text and attempt < max_attempts: + await asyncio.sleep(delay_seconds) + continue + logger.error(f"Error deleting branch: {e}") + return { + "success": False, + "error": error_text, + } + return { + "success": False, + "error": "delete_branch failed after retries", + } async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, use_cache: bool = True) 
-> Optional[str]: # 检查缓存 @@ -153,12 +169,18 @@ async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, for domain in domains: if 'volces.com' in domain and 'ivolces.com' not in domain: - result = f"http://{domain}:80" + if ENDPOINT_SCHEME == "https": + result = f"https://{domain}" + else: + result = f"http://{domain}:80" endpoint_cache[cache_key] = result return result if domains: - result = f"http://{domains[0]}:80" + if ENDPOINT_SCHEME == "https": + result = f"https://{domains[0]}" + else: + result = f"http://{domains[0]}:80" endpoint_cache[cache_key] = result return result diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py index ae5e1f72..d4bfc739 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py @@ -1,5 +1,6 @@ import httpx import logging +import json from typing import Optional, Dict, Any logger = logging.getLogger(__name__) @@ -63,15 +64,27 @@ async def call_api( if response.status_code == 204 or not response.content: return {"success": True} - return response.json() + content_type = response.headers.get("content-type", "") + if "application/json" in content_type: + return response.json() + return {"raw": response.text} except httpx.HTTPStatusError as e: - # 对于 HTTP 错误,尝试返回响应体 + response = e.response + payload: Any try: - error_body = e.response.json() - return error_body - except: - error_details = f"{str(e)}" - raise Exception(f"{error_details} [endpoint: {self.endpoint}, path: {path}]") from e + payload = response.json() + except Exception: + payload = response.text + error_message = json.dumps( + { + "status_code": response.status_code, + "path": path, + "endpoint": self.endpoint, + "error": payload, + }, + ensure_ascii=False, + ) + raise Exception(error_message) from e except Exception 
as e: error_details = f"{str(e)}" if hasattr(e, '__cause__') and e.__cause__: diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py index 6ad539e7..89731b50 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py @@ -30,7 +30,7 @@ async def _get_client(self, workspace_id: str) -> SupabaseClient: raise ValueError(f"Could not get endpoint for workspace {workspace_id}") api_key = await self.aidap.get_api_key(workspace_id, "service_role") - logger.info(f"[DEBUG] Got API key for {workspace_id}: {api_key[:20] if api_key else None}...") + logger.info(f"[DEBUG] Got API key for {workspace_id}: {'yes' if api_key else 'no'}") if not api_key: raise ValueError(f"Could not get API key for workspace {workspace_id}") diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py index 3dccfe24..8f029baf 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py @@ -1,5 +1,6 @@ from typing import Optional, List import logging +from datetime import datetime, timezone from .base import BaseTools from ..utils import handle_errors, read_only_check @@ -50,15 +51,17 @@ async def list_tables(self, schemas: List[str] = None, workspace_id: Optional[st @handle_errors async def list_migrations(self, workspace_id: Optional[str] = None) -> List[dict]: query = """ + CREATE SCHEMA IF NOT EXISTS supabase_migrations; + CREATE TABLE IF NOT EXISTS supabase_migrations.schema_migrations ( + version text PRIMARY KEY, + name text NOT NULL, + inserted_at timestamptz NOT NULL DEFAULT now() + ); SELECT version, name FROM supabase_migrations.schema_migrations ORDER BY version DESC """ - try: - return await 
self.execute_sql(query, workspace_id) - except Exception as e: - logger.warning(f"Failed to list migrations: {e}") - return [] + return await self.execute_sql(query, workspace_id) @handle_errors async def list_extensions(self, workspace_id: Optional[str] = None) -> List[dict]: @@ -76,5 +79,31 @@ async def list_extensions(self, workspace_id: Optional[str] = None) -> List[dict @handle_errors @read_only_check async def apply_migration(self, name: str, query: str, workspace_id: Optional[str] = None) -> dict: - await self.execute_sql(query, workspace_id) - return {"success": True, "message": f"Migration {name} applied successfully"} + if not name or not name.strip(): + raise ValueError("Migration name cannot be empty") + if not query or not query.strip(): + raise ValueError("Migration SQL cannot be empty") + + migration_name = name.strip().replace("'", "''") + migration_version = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S%f") + migration_sql = f""" + BEGIN; + CREATE SCHEMA IF NOT EXISTS supabase_migrations; + CREATE TABLE IF NOT EXISTS supabase_migrations.schema_migrations ( + version text PRIMARY KEY, + name text NOT NULL, + inserted_at timestamptz NOT NULL DEFAULT now() + ); + {query} + INSERT INTO supabase_migrations.schema_migrations (version, name) + VALUES ('{migration_version}', '{migration_name}') + ON CONFLICT (version) DO UPDATE SET name = EXCLUDED.name; + COMMIT; + """ + await self.execute_sql(migration_sql, workspace_id) + return { + "success": True, + "message": f"Migration {name} applied successfully", + "version": migration_version, + "name": name.strip(), + } diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py index 9d61d0c3..0965c546 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py @@ -2,6 +2,9 @@ 
import logging import json import html +import os +import re +from urllib.parse import quote from .base import BaseTools from ..utils import handle_errors, read_only_check from ..models import EdgeFunction @@ -36,6 +39,7 @@ RESERVED_SLUGS = {"deploy", "body", "health", "metrics"} MAX_SLUG_LENGTH = 127 MAX_CODE_SIZE = 10 * 1024 * 1024 # 10MB +PROJECT_SLUG = os.getenv("SUPABASE_PROJECT_SLUG", "default").strip() or "default" class EdgeFunctionTools(BaseTools): @@ -50,6 +54,9 @@ def _validate_function_name(self, function_name: str) -> None: if function_name in RESERVED_SLUGS: raise ValueError(f"Function name '{function_name}' is reserved") + if not re.match(r"^[a-z0-9][a-z0-9-]*$", function_name): + raise ValueError("Function name must match ^[a-z0-9][a-z0-9-]*$") + def _validate_runtime(self, runtime: str) -> None: """验证运行时""" if runtime not in RUNTIME_CONFIG: @@ -86,8 +93,7 @@ async def list_edge_functions(self, workspace_id: Optional[str] = None) -> List[ logger.info(f"Listing edge functions for workspace {ws_id}") client = await self._get_client(ws_id) - # AIDAP 使用不同的 API 路径 - result = await client.call_api("/v1/projects/default/functions") + result = await client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions") functions = [EdgeFunction(**func) for func in result] logger.info(f"Found {len(functions)} edge functions") @@ -95,12 +101,13 @@ async def list_edge_functions(self, workspace_id: Optional[str] = None) -> List[ @handle_errors async def get_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> EdgeFunction: + self._validate_function_name(function_name) ws_id = self._get_workspace_id(workspace_id) logger.info(f"Getting edge function '{function_name}' from workspace {ws_id}") client = await self._get_client(ws_id) - # AIDAP 使用不同的 API 路径 - result = await client.call_api(f"/v1/projects/default/functions/{function_name}") + encoded_name = quote(function_name, safe="") + result = await 
client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions/{encoded_name}") return EdgeFunction(**result) @handle_errors @@ -133,6 +140,7 @@ async def deploy_edge_function( """ # 验证输入 self._validate_function_name(function_name) + self._validate_runtime(runtime) if not source_code or not source_code.strip(): raise ValueError("Source code cannot be empty") @@ -141,10 +149,10 @@ async def deploy_edge_function( source_code = html.unescape(source_code) self._validate_code_size(source_code) + self._validate_runtime_compatibility(runtime, source_code) ws_id = self._get_workspace_id(workspace_id) - # AIDAP 默认使用 Deno 运行时,entrypoint 固定为 index.ts - entrypoint = "index.ts" + entrypoint = self._get_entrypoint(runtime) logger.info( "Deploying edge function", @@ -160,10 +168,7 @@ async def deploy_edge_function( client = await self._get_client(ws_id) - # AIDAP 使用不同的请求格式和 API 路径 - # URL 编码 function_name 防止特殊字符问题 - from urllib.parse import quote - encoded_name = quote(function_name) + encoded_name = quote(function_name, safe="") data = { "metadata": { @@ -194,7 +199,7 @@ async def deploy_edge_function( # AIDAP 部署 API 路径 result = await client.call_api( - f"/v1/projects/default/functions/deploy?slug={encoded_name}", + f"/v1/projects/{PROJECT_SLUG}/functions/deploy?slug={encoded_name}", method="POST", json_data=data ) @@ -209,12 +214,13 @@ async def deploy_edge_function( @handle_errors @read_only_check async def delete_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> dict: + self._validate_function_name(function_name) ws_id = self._get_workspace_id(workspace_id) logger.info(f"Deleting edge function '{function_name}' from workspace {ws_id}") client = await self._get_client(ws_id) - # AIDAP 使用不同的 API 路径 - await client.call_api(f"/v1/projects/default/functions/{function_name}", method="DELETE") + encoded_name = quote(function_name, safe="") + await client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions/{encoded_name}", method="DELETE") 
logger.info(f"Successfully deleted edge function '{function_name}'") return {"success": True, "message": "Edge function deleted successfully"} @@ -227,6 +233,7 @@ async def invoke_edge_function( method: str = "POST", workspace_id: Optional[str] = None ) -> dict: + self._validate_function_name(function_name) ws_id = self._get_workspace_id(workspace_id) logger.info( f"Invoking edge function '{function_name}'", @@ -242,9 +249,9 @@ async def invoke_edge_function( except json.JSONDecodeError as e: raise ValueError(f"Invalid payload JSON: {e}") - # AIDAP 调用 edge function 使用 /functions/v1/{slug} 路径 + encoded_name = quote(function_name, safe="") result = await client.call_api( - f"/functions/v1/{function_name}", + f"/functions/v1/{encoded_name}", method=method, json_data=json_data, timeout=60.0 diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py index 3368a58f..0d12ec21 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py @@ -54,9 +54,13 @@ async def create_storage_bucket( @handle_errors @read_only_check async def delete_storage_bucket(self, bucket_name: str, workspace_id: Optional[str] = None) -> dict: + if not bucket_name or not bucket_name.strip(): + raise ValueError("Bucket name cannot be empty") ws_id = self._get_workspace_id(workspace_id) client = await self._get_client(ws_id) - await client.call_api(f"/storage/v1/bucket/{bucket_name}", method="DELETE") + response = await client.call_api(f"/storage/v1/bucket/{bucket_name}", method="DELETE") + if isinstance(response, dict) and "error" in response: + raise ValueError(response["error"]) return {"success": True, "message": "Bucket deleted successfully"} @handle_errors From 3aa9c5038331d2eba658c3f02d8eff1b97fc4227 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Thu, 5 Mar 2026 22:39:02 +0800 
Subject: [PATCH 07/32] =?UTF-8?q?fix:=E6=B3=A8=E5=86=8C=E6=89=80=E6=9C=89?= =?UTF-8?q?=E5=B7=A5=E5=85=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../platform/aidap_client.py | 36 +++++++---- .../platform/supabase_client.py | 37 ++++++++--- .../tools/edge_function_tools.py | 64 ++++++++++++++++--- .../tools/storage_tools.py | 8 ++- .../tools/workspace_tools.py | 16 ++--- 5 files changed, 120 insertions(+), 41 deletions(-) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py index e2542b1f..f0bc75ed 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py @@ -189,17 +189,31 @@ async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, logger.error(f"Error getting endpoint: {e}") return None - async def reset_branch(self, workspace_id: str, branch_id: str) -> bool: - try: - request = ResetBranchRequest( - workspace_id=workspace_id, - branch_id=branch_id, - ) - self.client.reset_branch(request) - return True - except Exception as e: - logger.error(f"Error resetting branch: {e}") - return False + async def reset_branch(self, workspace_id: str, branch_id: str) -> dict: + max_attempts = 6 + delay_seconds = 2 + for attempt in range(1, max_attempts + 1): + try: + request = ResetBranchRequest( + workspace_id=workspace_id, + branch_id=branch_id, + ) + self.client.reset_branch(request) + return {"success": True} + except Exception as e: + error_text = str(e) + if "OperationDenied_BranchNotReady" in error_text and attempt < max_attempts: + await asyncio.sleep(delay_seconds) + continue + logger.error(f"Error resetting branch: {e}") + return { + "success": False, + "error": error_text, + } + return { + "success": False, + "error": "reset_branch failed after retries", + } async 
def get_api_key(self, workspace_id: str, key_type: str = "service_role", branch_id: Optional[str] = None, use_cache: bool = True) -> Optional[str]: diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py index d4bfc739..83d22b36 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py @@ -6,6 +6,25 @@ logger = logging.getLogger(__name__) +class SupabaseApiError(Exception): + def __init__(self, status_code: int, path: str, endpoint: str, payload: Any): + self.status_code = status_code + self.path = path + self.endpoint = endpoint + self.payload = payload + super().__init__( + json.dumps( + { + "status_code": status_code, + "path": path, + "endpoint": endpoint, + "error": payload, + }, + ensure_ascii=False, + ) + ) + + class SupabaseClient: def __init__(self, endpoint: str, api_key: str): self.endpoint = endpoint @@ -75,17 +94,15 @@ async def call_api( payload = response.json() except Exception: payload = response.text - error_message = json.dumps( - { - "status_code": response.status_code, - "path": path, - "endpoint": self.endpoint, - "error": payload, - }, - ensure_ascii=False, - ) - raise Exception(error_message) from e + raise SupabaseApiError( + status_code=response.status_code, + path=path, + endpoint=self.endpoint, + payload=payload, + ) from e except Exception as e: + if isinstance(e, SupabaseApiError): + raise error_details = f"{str(e)}" if hasattr(e, '__cause__') and e.__cause__: error_details += f" | Cause: {str(e.__cause__)}" diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py index 0965c546..b28897c3 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py +++ 
b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py @@ -8,6 +8,7 @@ from .base import BaseTools from ..utils import handle_errors, read_only_check from ..models import EdgeFunction +from ..platform.supabase_client import SupabaseApiError logger = logging.getLogger(__name__) @@ -87,6 +88,17 @@ def _validate_runtime_compatibility(self, runtime: str, source_code: str) -> Non if not any(keyword in source_code for keyword in ["def ", "import ", "from "]): logger.warning("Python code may be invalid - no function definitions or imports found") + def _is_function_metadata(self, result: dict) -> bool: + if not isinstance(result, dict): + return False + required_keys = {"id", "slug", "name", "status", "version", "entrypoint_path"} + return required_keys.issubset(set(result.keys())) + + def _extract_error_text(self, payload: object) -> str: + if isinstance(payload, dict): + return json.dumps(payload, ensure_ascii=False) + return str(payload) + @handle_errors async def list_edge_functions(self, workspace_id: Optional[str] = None) -> List[EdgeFunction]: ws_id = self._get_workspace_id(workspace_id) @@ -107,7 +119,13 @@ async def get_edge_function(self, function_name: str, workspace_id: Optional[str client = await self._get_client(ws_id) encoded_name = quote(function_name, safe="") - result = await client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions/{encoded_name}") + try: + result = await client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions/{encoded_name}") + except SupabaseApiError as e: + payload_text = self._extract_error_text(e.payload).lower() + if "function not found" in payload_text or "not found" in payload_text: + raise ValueError(f"Edge function '{function_name}' not found") + raise return EdgeFunction(**result) @handle_errors @@ -241,6 +259,9 @@ async def invoke_edge_function( ) client = await self._get_client(ws_id) + http_method = method.upper().strip() if method else "POST" + if http_method not in {"GET", "POST", "PUT", "PATCH", 
"DELETE"}: + raise ValueError(f"Unsupported method '{method}'") json_data = None if payload: @@ -250,12 +271,37 @@ async def invoke_edge_function( raise ValueError(f"Invalid payload JSON: {e}") encoded_name = quote(function_name, safe="") - result = await client.call_api( - f"/functions/v1/{encoded_name}", - method=method, - json_data=json_data, - timeout=60.0 + primary_path = f"/functions/v1/{encoded_name}" + fallback_path = f"/v1/projects/{PROJECT_SLUG}/functions/{encoded_name}/invoke" + + try: + primary_result = await client.call_api( + primary_path, + method=http_method, + json_data=json_data, + timeout=60.0 + ) + if not self._is_function_metadata(primary_result): + logger.debug(f"Edge function '{function_name}' invoked successfully via {primary_path}") + return primary_result + except SupabaseApiError as e: + payload_text = self._extract_error_text(e.payload).lower() + if e.status_code not in {404, 405} and "route" not in payload_text: + raise + + try: + fallback_result = await client.call_api( + fallback_path, + method=http_method, + json_data=json_data, + timeout=60.0 + ) + if not self._is_function_metadata(fallback_result): + logger.debug(f"Edge function '{function_name}' invoked successfully via {fallback_path}") + return fallback_result + except SupabaseApiError: + pass + + raise ValueError( + "Edge function invocation is not supported by current AIDAP workspace endpoint" ) - - logger.debug(f"Edge function '{function_name}' invoked successfully") - return result diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py index 0d12ec21..2fd8f8f3 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py @@ -3,6 +3,7 @@ from .base import BaseTools from ..utils import handle_errors, read_only_check from ..models import StorageConfig +from 
..platform.supabase_client import SupabaseApiError logger = logging.getLogger(__name__) @@ -82,5 +83,10 @@ async def update_storage_config( ws_id = self._get_workspace_id(workspace_id) client = await self._get_client(ws_id) - await client.call_api("/storage/v1/config", method="PUT", json_data=config) + try: + await client.call_api("/storage/v1/config", method="PUT", json_data=config) + except SupabaseApiError as e: + if e.status_code == 404 and e.path == "/storage/v1/config": + raise ValueError("Updating storage config is not supported by current AIDAP workspace endpoint") + raise return {"success": True} diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index c762e56e..cddd70ba 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -166,17 +166,13 @@ async def reset_branch( "error": "workspace_id is required" }, indent=2) - if migration_version: - return json.dumps({ - "success": False, - "error": "migration_version is not supported by current AIDAP reset_branch API" - }, indent=2) - try: - success = await self.aidap_client.reset_branch(ws_id, branch_id) - return json.dumps({ - "success": success - }, indent=2) + result = await self.aidap_client.reset_branch(ws_id, branch_id) + if not isinstance(result, dict): + result = {"success": bool(result)} + if migration_version: + result["warning"] = "migration_version is ignored because current AIDAP reset_branch API does not support version-targeted reset" + return json.dumps(result, indent=2) except Exception as e: logger.error(f"Error resetting branch: {e}") return json.dumps({ From 5886b277429dfabe40c2a5958b0a88b424db0fd8 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Thu, 5 Mar 2026 23:06:27 +0800 Subject: [PATCH 08/32] =?UTF-8?q?fix:=E6=B3=A8=E5=86=8C=E6=89=80=E6=9C=89?= 
=?UTF-8?q?=E5=B7=A5=E5=85=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../platform/aidap_client.py | 92 +++++++++++- .../src/mcp_server_supabase/server.py | 41 ++++++ .../tools/database_tools.py | 124 ++++++++++++++++ .../tools/edge_function_tools.py | 9 +- .../tools/workspace_tools.py | 134 ++++++++++++++++-- 5 files changed, 381 insertions(+), 19 deletions(-) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py index f0bc75ed..665ed4ef 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py @@ -25,6 +25,12 @@ CreateBranchRequest, DeleteBranchRequest, BranchSettingsForCreateBranchInput, + CreateWorkspaceRequest, + WorkspaceSettingsForCreateWorkspaceInput, + BranchSettingsForCreateWorkspaceInput, + ComputeSettingsForCreateWorkspaceInput, + StartWorkspaceRequest, + StopWorkspaceRequest, ) except ImportError: logger.error("volcengine-python-sdk not installed") @@ -85,7 +91,7 @@ async def list_branches(self, workspace_id: str) -> list[dict]: return branches except Exception as e: logger.error(f"Error listing branches: {e}") - return [] + raise RuntimeError(str(e)) async def create_branch(self, workspace_id: str, name: str = "develop") -> dict: try: @@ -111,6 +117,66 @@ async def create_branch(self, workspace_id: str, name: str = "develop") -> dict: "error": str(e), } + async def create_workspace( + self, + workspace_name: str, + engine_type: str = "Supabase", + engine_version: str = "Supabase_1_24", + ) -> dict: + try: + request = CreateWorkspaceRequest( + workspace_name=workspace_name, + engine_type=engine_type, + engine_version=engine_version, + branch_settings=BranchSettingsForCreateWorkspaceInput(branch_name="main"), + compute_settings=ComputeSettingsForCreateWorkspaceInput( + 
auto_scaling_limit_min_cu=0.25, + auto_scaling_limit_max_cu=1, + suspend_timeout_seconds=300 + ), + workspace_settings=WorkspaceSettingsForCreateWorkspaceInput( + public_connection=False, + deletion_protection=False + ), + ) + response = self.client.create_workspace(request) + + workspace_id = getattr(response, 'workspace_id', None) + if not workspace_id and hasattr(response, 'workspace'): + workspace_id = getattr(response.workspace, 'workspace_id', None) + + return { + "success": True, + "workspace_id": workspace_id, + "workspace_name": workspace_name, + "engine_type": engine_type, + "engine_version": engine_version, + } + except Exception as e: + logger.error(f"Error creating workspace: {e}") + return { + "success": False, + "error": str(e), + } + + async def start_workspace(self, workspace_id: str) -> dict: + try: + request = StartWorkspaceRequest(workspace_id=workspace_id) + self.client.start_workspace(request) + return {"success": True, "workspace_id": workspace_id, "status": "starting"} + except Exception as e: + logger.error(f"Error starting workspace: {e}") + return {"success": False, "error": str(e)} + + async def stop_workspace(self, workspace_id: str) -> dict: + try: + request = StopWorkspaceRequest(workspace_id=workspace_id) + self.client.stop_workspace(request) + return {"success": True, "workspace_id": workspace_id, "status": "stopping"} + except Exception as e: + logger.error(f"Error stopping workspace: {e}") + return {"success": False, "error": str(e)} + async def delete_branch(self, workspace_id: str, branch_id: str) -> dict: max_attempts = 6 delay_seconds = 2 @@ -125,7 +191,7 @@ async def delete_branch(self, workspace_id: str, branch_id: str) -> dict: except Exception as e: error_text = str(e) if "BranchNotFound" in error_text: - return {"success": True} + return {"success": False, "error": error_text} if "OperationDenied_BranchNotReady" in error_text and attempt < max_attempts: await asyncio.sleep(delay_seconds) continue @@ -254,3 +320,25 @@ 
async def get_api_key(self, workspace_id: str, key_type: str = "service_role", except Exception as e: logger.error(f"Error getting API key: {e}") return None + + async def get_api_keys(self, workspace_id: str, branch_id: Optional[str] = None) -> list[dict]: + if not branch_id: + branch_id = await self.get_default_branch_id(workspace_id) + if not branch_id: + raise RuntimeError(f"Could not get default branch for workspace {workspace_id}") + + request = DescribeAPIKeysRequest( + workspace_id=workspace_id, + branch_id=branch_id + ) + response = self.client.describe_api_keys(request) + + keys = [] + if hasattr(response, 'api_keys') and response.api_keys: + for key in response.api_keys: + keys.append({ + "type": getattr(key, "type", None), + "key": getattr(key, "key", None), + "description": getattr(key, "description", None), + }) + return keys diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index 84c52d07..914e9256 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -152,6 +152,13 @@ async def apply_migration(name: str, query: str, workspace_id: str = None) -> st return await database_tools.apply_migration(name, query, workspace_id) +@mcp.tool() +async def generate_typescript_types(schemas: str = "public", workspace_id: str = None) -> str: + """Generates TypeScript definitions from database schema.""" + schema_list = [s.strip() for s in schemas.split(",") if s.strip()] + return await database_tools.generate_typescript_types(schema_list, workspace_id) + + @mcp.tool() async def list_workspaces() -> str: """Lists all available workspaces.""" @@ -164,6 +171,40 @@ async def get_workspace(workspace_id: str) -> str: return await workspace_tools.get_workspace(workspace_id) +@mcp.tool() +async def create_workspace( + workspace_name: str, + engine_version: str = "Supabase_1_24", + engine_type: str = 
"Supabase" +) -> str: + """Creates a new workspace.""" + return await workspace_tools.create_workspace(workspace_name, engine_version, engine_type) + + +@mcp.tool() +async def start_workspace(workspace_id: str = None) -> str: + """Starts a workspace.""" + return await workspace_tools.start_workspace(workspace_id) + + +@mcp.tool() +async def stop_workspace(workspace_id: str = None) -> str: + """Stops a workspace.""" + return await workspace_tools.stop_workspace(workspace_id) + + +@mcp.tool() +async def get_workspace_endpoints(workspace_id: str = None) -> str: + """Gets API endpoint URL for a workspace.""" + return await workspace_tools.get_workspace_endpoints(workspace_id) + + +@mcp.tool() +async def get_workspace_api_keys(workspace_id: str = None) -> str: + """Gets API keys for a workspace.""" + return await workspace_tools.get_workspace_api_keys(workspace_id) + + @mcp.tool() async def list_branches(workspace_id: str = None) -> str: """Lists all development branches of a workspace.""" diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py index 8f029baf..c42cb1d3 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py @@ -107,3 +107,127 @@ async def apply_migration(self, name: str, query: str, workspace_id: Optional[st "version": migration_version, "name": name.strip(), } + + def _to_ts_type(self, data_type: str, udt_name: str) -> str: + normalized_data_type = (data_type or "").lower() + normalized_udt_name = (udt_name or "").lower() + if normalized_data_type in {"smallint", "integer", "bigint", "numeric", "decimal", "real", "double precision"}: + return "number" + if normalized_data_type in {"boolean"}: + return "boolean" + if normalized_data_type in {"json", "jsonb"}: + return "Json" + if normalized_data_type in {"date", "timestamp without time 
zone", "timestamp with time zone", "time without time zone", "time with time zone"}: + return "string" + if normalized_data_type in {"bytea"}: + return "string" + if normalized_data_type == "array": + base = normalized_udt_name[1:] if normalized_udt_name.startswith("_") else normalized_udt_name + item_type = self._to_ts_type(base, base) + return f"{item_type}[]" + if normalized_udt_name in {"uuid", "varchar", "text", "bpchar", "name", "citext", "inet"}: + return "string" + if normalized_udt_name in {"int2", "int4", "int8", "float4", "float8"}: + return "number" + if normalized_udt_name in {"bool"}: + return "boolean" + if normalized_udt_name in {"json", "jsonb"}: + return "Json" + return "string" + + def _to_ts_key(self, key: str) -> str: + if key.replace("_", "").isalnum() and not key[0].isdigit(): + return key + escaped = key.replace("\\", "\\\\").replace("'", "\\'") + return f"'{escaped}'" + + @handle_errors + async def generate_typescript_types( + self, + schemas: List[str] = None, + workspace_id: Optional[str] = None + ) -> str: + if schemas is None: + schemas = ["public"] + for schema in schemas: + if not schema.replace('_', '').isalnum(): + raise ValueError(f"Invalid schema name: {schema}") + + schema_list = "', '".join(schemas) + query = f""" + SELECT + table_schema, + table_name, + column_name, + is_nullable, + data_type, + udt_name, + column_default + FROM information_schema.columns + WHERE table_schema IN ('{schema_list}') + ORDER BY table_schema, table_name, ordinal_position + """ + columns = await self.execute_sql(query, workspace_id) + + grouped: dict[str, dict[str, list[dict]]] = {} + for column in columns: + schema_name = column.get("table_schema") + table_name = column.get("table_name") + grouped.setdefault(schema_name, {}) + grouped[schema_name].setdefault(table_name, []) + grouped[schema_name][table_name].append(column) + + lines: list[str] = [] + lines.append("export type Json = string | number | boolean | null | { [key: string]: Json | 
undefined } | Json[]") + lines.append("") + lines.append("export type Database = {") + + for schema_name in sorted(grouped.keys()): + tables = grouped[schema_name] + lines.append(f" {self._to_ts_key(schema_name)}: {{") + lines.append(" Tables: {") + + for table_name in sorted(tables.keys()): + table_columns = tables[table_name] + lines.append(f" {self._to_ts_key(table_name)}: {{") + lines.append(" Row: {") + for column in table_columns: + col_name = column.get("column_name") + ts_key = self._to_ts_key(col_name) + base_type = self._to_ts_type(column.get("data_type", ""), column.get("udt_name", "")) + nullable = column.get("is_nullable") == "YES" + row_type = f"{base_type} | null" if nullable else base_type + lines.append(f" {ts_key}: {row_type}") + lines.append(" }") + lines.append(" Insert: {") + for column in table_columns: + col_name = column.get("column_name") + ts_key = self._to_ts_key(col_name) + base_type = self._to_ts_type(column.get("data_type", ""), column.get("udt_name", "")) + nullable = column.get("is_nullable") == "YES" + has_default = column.get("column_default") is not None + optional = nullable or has_default + insert_type = f"{base_type} | null" if nullable else base_type + suffix = "?" 
if optional else "" + lines.append(f" {ts_key}{suffix}: {insert_type}") + lines.append(" }") + lines.append(" Update: {") + for column in table_columns: + col_name = column.get("column_name") + ts_key = self._to_ts_key(col_name) + base_type = self._to_ts_type(column.get("data_type", ""), column.get("udt_name", "")) + nullable = column.get("is_nullable") == "YES" + update_type = f"{base_type} | null" if nullable else base_type + lines.append(f" {ts_key}?: {update_type}") + lines.append(" }") + lines.append(" }") + + lines.append(" }") + lines.append(" Views: {}") + lines.append(" Functions: {}") + lines.append(" Enums: {}") + lines.append(" CompositeTypes: {}") + lines.append(" }") + + lines.append("}") + return "\n".join(lines) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py index b28897c3..f0c526e8 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py @@ -76,14 +76,7 @@ def _validate_code_size(self, source_code: str) -> None: def _validate_runtime_compatibility(self, runtime: str, source_code: str) -> None: """验证运行时和代码的兼容性""" - if runtime.startswith("native-node"): - # 检查是否使用了 Deno 特有的 API - if "Deno." in source_code: - raise ValueError( - f"Code contains Deno-specific APIs (Deno.*) but runtime is {runtime}. " - "Please use Node.js compatible code or switch to a Deno runtime." 
- ) - elif runtime.startswith("native-python"): + if runtime.startswith("native-python"): # 基本的 Python 语法检查 if not any(keyword in source_code for keyword in ["def ", "import ", "from "]): logger.warning("Python code may be invalid - no function definitions or imports found") diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index cddd70ba..87d418fa 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -1,5 +1,6 @@ """Workspace management tools for Supabase MCP Server""" +import asyncio import json import logging import inspect @@ -17,6 +18,9 @@ def __init__(self, aidap_client, default_workspace_id: Optional[str] = None): self.aidap_client = aidap_client self.default_workspace_id = default_workspace_id + def _resolve_workspace_id(self, workspace_id: Optional[str] = None) -> Optional[str]: + return workspace_id or self.default_workspace_id + async def list_workspaces(self) -> str: """Lists all available workspaces. 
@@ -112,13 +116,45 @@ async def get_workspace(self, workspace_id: str) -> str: "error": str(e) }, indent=2) + @read_only_check + async def create_workspace( + self, + workspace_name: str, + engine_version: str = "Supabase_1_24", + engine_type: str = "Supabase", + ) -> str: + if not workspace_name or not workspace_name.strip(): + return json.dumps({"success": False, "error": "workspace_name is required"}, indent=2) + result = await self.aidap_client.create_workspace( + workspace_name=workspace_name.strip(), + engine_type=engine_type, + engine_version=engine_version + ) + return json.dumps(result, indent=2) + + @read_only_check + async def start_workspace(self, workspace_id: Optional[str] = None) -> str: + ws_id = self._resolve_workspace_id(workspace_id) + if not ws_id: + return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + result = await self.aidap_client.start_workspace(ws_id) + return json.dumps(result, indent=2) + + @read_only_check + async def stop_workspace(self, workspace_id: Optional[str] = None) -> str: + ws_id = self._resolve_workspace_id(workspace_id) + if not ws_id: + return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + result = await self.aidap_client.stop_workspace(ws_id) + return json.dumps(result, indent=2) + @read_only_check async def create_branch( self, name: str = "develop", workspace_id: Optional[str] = None, ) -> str: - ws_id = workspace_id or self.default_workspace_id + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) @@ -126,21 +162,101 @@ async def create_branch( return json.dumps(result, indent=2) async def list_branches(self, workspace_id: Optional[str] = None) -> str: - ws_id = workspace_id or self.default_workspace_id + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) - - 
branches = await self.aidap_client.list_branches(ws_id) - return json.dumps({"branches": branches}, indent=2) + try: + branches = await self.aidap_client.list_branches(ws_id) + return json.dumps({"success": True, "branches": branches}, indent=2) + except Exception as e: + logger.error(f"Error listing branches: {e}") + return json.dumps({"success": False, "error": str(e)}, indent=2) @read_only_check async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None) -> str: - ws_id = workspace_id or self.default_workspace_id + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + if not branch_id or not branch_id.strip(): + return json.dumps({"success": False, "error": "branch_id is required"}, indent=2) + normalized_branch_id = branch_id.strip() - result = await self.aidap_client.delete_branch(ws_id, branch_id) - return json.dumps(result, indent=2) + try: + branches = await self.aidap_client.list_branches(ws_id) + exists = any(b.get("branch_id") == normalized_branch_id for b in branches) + if not exists: + return json.dumps({ + "success": False, + "error": f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'" + }, indent=2) + except Exception as e: + logger.error(f"Error checking branch before delete: {e}") + return json.dumps({"success": False, "error": str(e)}, indent=2) + + result = await self.aidap_client.delete_branch(ws_id, normalized_branch_id) + if not result.get("success"): + return json.dumps(result, indent=2) + + for _ in range(10): + await asyncio.sleep(1) + branches = await self.aidap_client.list_branches(ws_id) + exists = any(b.get("branch_id") == normalized_branch_id for b in branches) + if not exists: + return json.dumps({"success": True, "branch_id": normalized_branch_id}, indent=2) + + return json.dumps({ + "success": False, + "error": f"Delete requested for branch '{normalized_branch_id}' but branch still exists" + }, 
indent=2) + + async def get_workspace_endpoints(self, workspace_id: Optional[str] = None) -> str: + ws_id = self._resolve_workspace_id(workspace_id) + if not ws_id: + return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + + endpoint = await self.aidap_client.get_endpoint(ws_id) + if not endpoint: + return json.dumps({ + "success": False, + "error": f"Could not get endpoint for workspace {ws_id}" + }, indent=2) + + return json.dumps({ + "success": True, + "workspace_id": ws_id, + "project_url": endpoint, + "api_url": endpoint + }, indent=2) + + async def get_workspace_api_keys(self, workspace_id: Optional[str] = None) -> str: + ws_id = self._resolve_workspace_id(workspace_id) + if not ws_id: + return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + + try: + keys = await self.aidap_client.get_api_keys(ws_id) + publishable_key = None + anon_key = None + service_role_key = None + for key in keys: + key_type = (key.get("type") or "").lower() + value = key.get("key") + if key_type == "public": + publishable_key = value + anon_key = value + if key_type == "service": + service_role_key = value + return json.dumps({ + "success": True, + "workspace_id": ws_id, + "publishable_key": publishable_key, + "anon_key": anon_key, + "service_role_key": service_role_key, + "keys": keys + }, indent=2) + except Exception as e: + logger.error(f"Error getting api keys: {e}") + return json.dumps({"success": False, "error": str(e)}, indent=2) @read_only_check async def reset_branch( @@ -159,7 +275,7 @@ async def reset_branch( Returns: JSON string containing operation result """ - ws_id = workspace_id or self.default_workspace_id + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return json.dumps({ "success": False, From e5e606b7a7af063645a5a6fa5a741c4a9538ca6b Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Thu, 5 Mar 2026 23:25:17 +0800 Subject: [PATCH 09/32] 
=?UTF-8?q?fix:=E6=B3=A8=E5=86=8C=E6=89=80=E6=9C=89?= =?UTF-8?q?=E5=B7=A5=E5=85=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../platform/aidap_client.py | 46 ++++++-- .../src/mcp_server_supabase/server.py | 4 +- .../tools/database_tools.py | 24 +++-- .../tools/workspace_tools.py | 102 +++++++++++++++--- 4 files changed, 139 insertions(+), 37 deletions(-) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py index 665ed4ef..27398ac9 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py @@ -1,6 +1,7 @@ import logging import asyncio import os +import random from typing import Optional from ..config import ( VOLCENGINE_ACCESS_KEY, @@ -46,6 +47,23 @@ def __init__(self) -> None: api_client = volcenginesdkcore.ApiClient(configuration) self.client = AIDAPApi(api_client) + + def _branch_error_code(self, error_text: str) -> str: + if "OperationDenied_BranchNotReady" in error_text: + return "OperationDenied_BranchNotReady" + if "BranchNotFound" in error_text: + return "BranchNotFound" + return "AIDAPError" + + async def _sleep_backoff( + self, + attempt: int, + base_seconds: float = 1.0, + max_seconds: float = 10.0, + ) -> None: + delay = min(max_seconds, base_seconds * (2 ** max(attempt - 1, 0))) + jitter = random.uniform(0.0, delay * 0.2) + await asyncio.sleep(delay + jitter) async def get_default_branch_id(self, workspace_id: str, use_cache: bool = True) -> Optional[str]: cache = get_branch_cache() @@ -178,8 +196,7 @@ async def stop_workspace(self, workspace_id: str) -> dict: return {"success": False, "error": str(e)} async def delete_branch(self, workspace_id: str, branch_id: str) -> dict: - max_attempts = 6 - delay_seconds = 2 + max_attempts = 8 for attempt in range(1, max_attempts + 1): try: 
request = DeleteBranchRequest( @@ -190,19 +207,23 @@ async def delete_branch(self, workspace_id: str, branch_id: str) -> dict: return {"success": True} except Exception as e: error_text = str(e) - if "BranchNotFound" in error_text: - return {"success": False, "error": error_text} - if "OperationDenied_BranchNotReady" in error_text and attempt < max_attempts: - await asyncio.sleep(delay_seconds) + code = self._branch_error_code(error_text) + retriable = code == "OperationDenied_BranchNotReady" + if retriable and attempt < max_attempts: + await self._sleep_backoff(attempt) continue logger.error(f"Error deleting branch: {e}") return { "success": False, "error": error_text, + "code": code, + "retriable": retriable, } return { "success": False, "error": "delete_branch failed after retries", + "code": "OperationDenied_BranchNotReady", + "retriable": True, } async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, use_cache: bool = True) -> Optional[str]: @@ -256,8 +277,7 @@ async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, return None async def reset_branch(self, workspace_id: str, branch_id: str) -> dict: - max_attempts = 6 - delay_seconds = 2 + max_attempts = 8 for attempt in range(1, max_attempts + 1): try: request = ResetBranchRequest( @@ -268,17 +288,23 @@ async def reset_branch(self, workspace_id: str, branch_id: str) -> dict: return {"success": True} except Exception as e: error_text = str(e) - if "OperationDenied_BranchNotReady" in error_text and attempt < max_attempts: - await asyncio.sleep(delay_seconds) + code = self._branch_error_code(error_text) + retriable = code == "OperationDenied_BranchNotReady" + if retriable and attempt < max_attempts: + await self._sleep_backoff(attempt) continue logger.error(f"Error resetting branch: {e}") return { "success": False, "error": error_text, + "code": code, + "retriable": retriable, } return { "success": False, "error": "reset_branch failed after retries", + "code": 
"OperationDenied_BranchNotReady", + "retriable": True, } async def get_api_key(self, workspace_id: str, key_type: str = "service_role", diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index 914e9256..cb4bbab1 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -200,9 +200,9 @@ async def get_workspace_endpoints(workspace_id: str = None) -> str: @mcp.tool() -async def get_workspace_api_keys(workspace_id: str = None) -> str: +async def get_workspace_api_keys(workspace_id: str = None, reveal: bool = False) -> str: """Gets API keys for a workspace.""" - return await workspace_tools.get_workspace_api_keys(workspace_id) + return await workspace_tools.get_workspace_api_keys(workspace_id, reveal) @mcp.tool() diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py index c42cb1d3..bbbca43a 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py @@ -9,8 +9,7 @@ class DatabaseTools(BaseTools): """使用 REST API 方式执行 SQL""" - @handle_errors - async def execute_sql(self, query: str, workspace_id: Optional[str] = None) -> List[dict]: + async def _execute_sql_raw(self, query: str, workspace_id: Optional[str] = None) -> List[dict]: if not query or not query.strip(): raise ValueError("SQL query cannot be empty") @@ -23,8 +22,17 @@ async def execute_sql(self, query: str, workspace_id: Optional[str] = None) -> L client = await self._get_client(ws_id) result = await client.call_api("/pg/query", method="POST", json_data={"query": query}) - logger.debug(f"SQL query returned {len(result) if isinstance(result, list) else 'N/A'} rows") + if isinstance(result, dict) and isinstance(result.get("data"), 
list): + result = result["data"] + if not isinstance(result, list): + raise TypeError(f"Unexpected SQL result type: {type(result).__name__}") + + logger.debug(f"SQL query returned {len(result)} rows") return result + + @handle_errors + async def execute_sql(self, query: str, workspace_id: Optional[str] = None) -> List[dict]: + return await self._execute_sql_raw(query, workspace_id) @handle_errors async def list_tables(self, schemas: List[str] = None, workspace_id: Optional[str] = None) -> List[dict]: @@ -46,7 +54,7 @@ async def list_tables(self, schemas: List[str] = None, workspace_id: Optional[st ORDER BY schemaname, tablename """ - return await self.execute_sql(query, workspace_id) + return await self._execute_sql_raw(query, workspace_id) @handle_errors async def list_migrations(self, workspace_id: Optional[str] = None) -> List[dict]: @@ -61,7 +69,7 @@ async def list_migrations(self, workspace_id: Optional[str] = None) -> List[dict FROM supabase_migrations.schema_migrations ORDER BY version DESC """ - return await self.execute_sql(query, workspace_id) + return await self._execute_sql_raw(query, workspace_id) @handle_errors async def list_extensions(self, workspace_id: Optional[str] = None) -> List[dict]: @@ -74,7 +82,7 @@ async def list_extensions(self, workspace_id: Optional[str] = None) -> List[dict JOIN pg_namespace n ON n.oid = e.extnamespace ORDER BY e.extname """ - return await self.execute_sql(query, workspace_id) + return await self._execute_sql_raw(query, workspace_id) @handle_errors @read_only_check @@ -100,7 +108,7 @@ async def apply_migration(self, name: str, query: str, workspace_id: Optional[st ON CONFLICT (version) DO UPDATE SET name = EXCLUDED.name; COMMIT; """ - await self.execute_sql(migration_sql, workspace_id) + await self._execute_sql_raw(migration_sql, workspace_id) return { "success": True, "message": f"Migration {name} applied successfully", @@ -167,7 +175,7 @@ async def generate_typescript_types( WHERE table_schema IN ('{schema_list}') 
ORDER BY table_schema, table_name, ordinal_position """ - columns = await self.execute_sql(query, workspace_id) + columns = await self._execute_sql_raw(query, workspace_id) grouped: dict[str, dict[str, list[dict]]] = {} for column in columns: diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index 87d418fa..851acf3a 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -21,6 +21,22 @@ def __init__(self, aidap_client, default_workspace_id: Optional[str] = None): def _resolve_workspace_id(self, workspace_id: Optional[str] = None) -> Optional[str]: return workspace_id or self.default_workspace_id + def _error_detail(self, code: str, message: str, retriable: bool = False) -> dict: + return { + "code": code, + "message": message, + "retriable": retriable, + } + + def _mask_key(self, value: Optional[str], reveal: bool) -> Optional[str]: + if value is None: + return None + if reveal: + return value + if len(value) <= 12: + return "*" * len(value) + return f"{value[:6]}...{value[-4:]}" + async def list_workspaces(self) -> str: """Lists all available workspaces. 
@@ -176,9 +192,17 @@ async def list_branches(self, workspace_id: Optional[str] = None) -> str: async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None) -> str: ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: - return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + return json.dumps({ + "success": False, + "error": "workspace_id is required", + "error_detail": self._error_detail("MissingWorkspaceId", "workspace_id is required", False), + }, indent=2) if not branch_id or not branch_id.strip(): - return json.dumps({"success": False, "error": "branch_id is required"}, indent=2) + return json.dumps({ + "success": False, + "error": "branch_id is required", + "error_detail": self._error_detail("MissingBranchId", "branch_id is required", False), + }, indent=2) normalized_branch_id = branch_id.strip() try: @@ -187,26 +211,64 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None if not exists: return json.dumps({ "success": False, - "error": f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'" + "error": f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'", + "error_detail": self._error_detail( + "BranchNotFound", + f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'", + False + ), }, indent=2) except Exception as e: logger.error(f"Error checking branch before delete: {e}") - return json.dumps({"success": False, "error": str(e)}, indent=2) + return json.dumps({ + "success": False, + "error": str(e), + "error_detail": self._error_detail("ListBranchesFailed", str(e), True), + }, indent=2) result = await self.aidap_client.delete_branch(ws_id, normalized_branch_id) if not result.get("success"): - return json.dumps(result, indent=2) + error_text = result.get("error", "delete branch failed") + return json.dumps({ + "success": False, + "error": error_text, + "error_detail": self._error_detail( + result.get("code", "DeleteBranchFailed"), 
+ error_text, + bool(result.get("retriable", False)) + ), + }, indent=2) - for _ in range(10): + max_confirm_attempts = 20 + last_list_error: Optional[str] = None + for _ in range(max_confirm_attempts): await asyncio.sleep(1) - branches = await self.aidap_client.list_branches(ws_id) - exists = any(b.get("branch_id") == normalized_branch_id for b in branches) - if not exists: - return json.dumps({"success": True, "branch_id": normalized_branch_id}, indent=2) - + try: + branches = await self.aidap_client.list_branches(ws_id) + exists = any(b.get("branch_id") == normalized_branch_id for b in branches) + if not exists: + return json.dumps({"success": True, "branch_id": normalized_branch_id}, indent=2) + except Exception as e: + last_list_error = str(e) + + if last_list_error: + return json.dumps({ + "success": False, + "error": f"Delete requested for branch '{normalized_branch_id}' but verification failed: {last_list_error}", + "error_detail": self._error_detail( + "DeleteBranchVerifyFailed", + f"Delete requested for branch '{normalized_branch_id}' but verification failed: {last_list_error}", + True + ), + }, indent=2) return json.dumps({ "success": False, - "error": f"Delete requested for branch '{normalized_branch_id}' but branch still exists" + "error": f"Delete requested for branch '{normalized_branch_id}' but branch still exists", + "error_detail": self._error_detail( + "BranchStillExists", + f"Delete requested for branch '{normalized_branch_id}' but branch still exists", + True + ), }, indent=2) async def get_workspace_endpoints(self, workspace_id: Optional[str] = None) -> str: @@ -228,7 +290,7 @@ async def get_workspace_endpoints(self, workspace_id: Optional[str] = None) -> s "api_url": endpoint }, indent=2) - async def get_workspace_api_keys(self, workspace_id: Optional[str] = None) -> str: + async def get_workspace_api_keys(self, workspace_id: Optional[str] = None, reveal: bool = False) -> str: ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: 
return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) @@ -238,6 +300,7 @@ async def get_workspace_api_keys(self, workspace_id: Optional[str] = None) -> st publishable_key = None anon_key = None service_role_key = None + masked_keys = [] for key in keys: key_type = (key.get("type") or "").lower() value = key.get("key") @@ -246,13 +309,18 @@ async def get_workspace_api_keys(self, workspace_id: Optional[str] = None) -> st anon_key = value if key_type == "service": service_role_key = value + masked_keys.append({ + **key, + "key": self._mask_key(value, reveal), + }) return json.dumps({ "success": True, "workspace_id": ws_id, - "publishable_key": publishable_key, - "anon_key": anon_key, - "service_role_key": service_role_key, - "keys": keys + "reveal": reveal, + "publishable_key": self._mask_key(publishable_key, reveal), + "anon_key": self._mask_key(anon_key, reveal), + "service_role_key": self._mask_key(service_role_key, reveal), + "keys": masked_keys }, indent=2) except Exception as e: logger.error(f"Error getting api keys: {e}") From 4c76f3e31e6b62189bc9bf05059e7dd13f3435fd Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Thu, 5 Mar 2026 23:51:26 +0800 Subject: [PATCH 10/32] =?UTF-8?q?fix:=E6=B3=A8=E5=86=8C=E6=89=80=E6=9C=89?= =?UTF-8?q?=E5=B7=A5=E5=85=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/mcp_server_supabase/server.py | 188 +++++++++++------- 1 file changed, 117 insertions(+), 71 deletions(-) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index cb4bbab1..4719bfd3 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -18,24 +18,24 @@ mcp = FastMCP("Supabase MCP Server (AIDAP)", port=int(os.getenv("PORT", "8000"))) aidap_client = AidapClient() -default_workspace_id = 
os.getenv("DEFAULT_WORKSPACE_ID") +default_project_id = os.getenv("DEFAULT_PROJECT_ID") or os.getenv("DEFAULT_WORKSPACE_ID") -edge_tools = EdgeFunctionTools(aidap_client, default_workspace_id) -storage_tools = StorageTools(aidap_client, default_workspace_id) -database_tools = DatabaseTools(aidap_client, default_workspace_id) -workspace_tools = WorkspaceTools(aidap_client, default_workspace_id) +edge_tools = EdgeFunctionTools(aidap_client, default_project_id) +storage_tools = StorageTools(aidap_client, default_project_id) +database_tools = DatabaseTools(aidap_client, default_project_id) +workspace_tools = WorkspaceTools(aidap_client, default_project_id) @mcp.tool() -async def list_edge_functions(workspace_id: str = None) -> str: - """Lists all Edge Functions in a workspace.""" - return await edge_tools.list_edge_functions(workspace_id) +async def list_edge_functions(project_id: str = None) -> str: + """Lists all Edge Functions in a project.""" + return await edge_tools.list_edge_functions(project_id) @mcp.tool() -async def get_edge_function(function_name: str, workspace_id: str = None) -> str: +async def get_edge_function(function_name: str, project_id: str = None) -> str: """Retrieves the source code and configuration for an Edge Function.""" - return await edge_tools.get_edge_function(function_name, workspace_id) + return await edge_tools.get_edge_function(function_name, project_id) @mcp.tool() @@ -45,7 +45,7 @@ async def deploy_edge_function( verify_jwt: bool = True, runtime: str = "native-node20/v1", import_map: str = None, - workspace_id: str = None + project_id: str = None ) -> str: """Deploys a new Edge Function or updates an existing one. 
@@ -57,17 +57,17 @@ async def deploy_edge_function( Options: native-node20/v1, native-python3.9/v1, native-python3.10/v1, native-python3.12/v1 import_map: Optional import map JSON for dependencies - workspace_id: The workspace ID (optional) + project_id: The project ID (optional) """ return await edge_tools.deploy_edge_function( - function_name, source_code, verify_jwt, runtime, import_map, workspace_id + function_name, source_code, verify_jwt, runtime, import_map, project_id ) @mcp.tool() -async def delete_edge_function(function_name: str, workspace_id: str = None) -> str: +async def delete_edge_function(function_name: str, project_id: str = None) -> str: """Deletes an Edge Function.""" - return await edge_tools.delete_edge_function(function_name, workspace_id) + return await edge_tools.delete_edge_function(function_name, project_id) @mcp.tool() @@ -75,16 +75,16 @@ async def invoke_edge_function( function_name: str, payload: str = None, method: str = "POST", - workspace_id: str = None + project_id: str = None ) -> str: """Invokes an Edge Function.""" - return await edge_tools.invoke_edge_function(function_name, payload, method, workspace_id) + return await edge_tools.invoke_edge_function(function_name, payload, method, project_id) @mcp.tool() -async def list_storage_buckets(workspace_id: str = None) -> str: - """Lists all storage buckets in a workspace.""" - return await storage_tools.list_storage_buckets(workspace_id) +async def list_storage_buckets(project_id: str = None) -> str: + """Lists all storage buckets in a project.""" + return await storage_tools.list_storage_buckets(project_id) @mcp.tool() @@ -93,70 +93,70 @@ async def create_storage_bucket( public: bool = False, file_size_limit: int = None, allowed_mime_types: str = None, - workspace_id: str = None + project_id: str = None ) -> str: """Creates a new storage bucket.""" return await storage_tools.create_storage_bucket( - bucket_name, public, file_size_limit, allowed_mime_types, workspace_id + 
bucket_name, public, file_size_limit, allowed_mime_types, project_id ) @mcp.tool() -async def delete_storage_bucket(bucket_name: str, workspace_id: str = None) -> str: +async def delete_storage_bucket(bucket_name: str, project_id: str = None) -> str: """Deletes a storage bucket.""" - return await storage_tools.delete_storage_bucket(bucket_name, workspace_id) + return await storage_tools.delete_storage_bucket(bucket_name, project_id) @mcp.tool() -async def get_storage_config(workspace_id: str = None) -> str: - """Gets the storage configuration for a workspace.""" - return await storage_tools.get_storage_config(workspace_id) +async def get_storage_config(project_id: str = None) -> str: + """Gets the storage configuration for a project.""" + return await storage_tools.get_storage_config(project_id) @mcp.tool() -async def update_storage_config(config: str, workspace_id: str = None) -> str: - """Updates the storage configuration for a workspace.""" +async def update_storage_config(config: str, project_id: str = None) -> str: + """Updates the storage configuration for a project.""" import json parsed_config = json.loads(config) - return await storage_tools.update_storage_config(parsed_config, workspace_id) + return await storage_tools.update_storage_config(parsed_config, project_id) @mcp.tool() -async def execute_sql(query: str, workspace_id: str = None) -> str: +async def execute_sql(query: str, project_id: str = None) -> str: """Executes raw SQL in the Postgres database.""" - return await database_tools.execute_sql(query, workspace_id) + return await database_tools.execute_sql(query, project_id) @mcp.tool() -async def list_tables(schemas: str = "public", workspace_id: str = None) -> str: +async def list_tables(schemas: str = "public", project_id: str = None) -> str: """Lists all tables in one or more schemas.""" schema_list = [s.strip() for s in schemas.split(",")] - return await database_tools.list_tables(schema_list, workspace_id) + return await 
database_tools.list_tables(schema_list, project_id) @mcp.tool() -async def list_migrations(workspace_id: str = None) -> str: +async def list_migrations(project_id: str = None) -> str: """Lists all migrations in the database.""" - return await database_tools.list_migrations(workspace_id) + return await database_tools.list_migrations(project_id) @mcp.tool() -async def list_extensions(workspace_id: str = None) -> str: +async def list_extensions(project_id: str = None) -> str: """Lists all PostgreSQL extensions in the database.""" - return await database_tools.list_extensions(workspace_id) + return await database_tools.list_extensions(project_id) @mcp.tool() -async def apply_migration(name: str, query: str, workspace_id: str = None) -> str: +async def apply_migration(name: str, query: str, project_id: str = None) -> str: """Applies a migration to the database.""" - return await database_tools.apply_migration(name, query, workspace_id) + return await database_tools.apply_migration(name, query, project_id) @mcp.tool() -async def generate_typescript_types(schemas: str = "public", workspace_id: str = None) -> str: +async def generate_typescript_types(schemas: str = "public", project_id: str = None) -> str: """Generates TypeScript definitions from database schema.""" schema_list = [s.strip() for s in schemas.split(",") if s.strip()] - return await database_tools.generate_typescript_types(schema_list, workspace_id) + return await database_tools.generate_typescript_types(schema_list, project_id) @mcp.tool() @@ -166,67 +166,113 @@ async def list_workspaces() -> str: @mcp.tool() -async def get_workspace(workspace_id: str) -> str: - """Gets details for a specific workspace.""" - return await workspace_tools.get_workspace(workspace_id) +async def get_workspace(project_id: str) -> str: + """Gets details for a specific project.""" + return await workspace_tools.get_workspace(project_id) @mcp.tool() async def create_workspace( - workspace_name: str, + project_name: str, 
engine_version: str = "Supabase_1_24", engine_type: str = "Supabase" ) -> str: - """Creates a new workspace.""" - return await workspace_tools.create_workspace(workspace_name, engine_version, engine_type) + """Creates a new project.""" + return await workspace_tools.create_workspace(project_name, engine_version, engine_type) @mcp.tool() -async def start_workspace(workspace_id: str = None) -> str: - """Starts a workspace.""" - return await workspace_tools.start_workspace(workspace_id) +async def start_workspace(project_id: str = None) -> str: + """Starts a project.""" + return await workspace_tools.start_workspace(project_id) @mcp.tool() -async def stop_workspace(workspace_id: str = None) -> str: - """Stops a workspace.""" - return await workspace_tools.stop_workspace(workspace_id) +async def stop_workspace(project_id: str = None) -> str: + """Stops a project.""" + return await workspace_tools.stop_workspace(project_id) @mcp.tool() -async def get_workspace_endpoints(workspace_id: str = None) -> str: - """Gets API endpoint URL for a workspace.""" - return await workspace_tools.get_workspace_endpoints(workspace_id) +async def get_workspace_endpoints(project_id: str = None) -> str: + """Gets API endpoint URL for a project.""" + return await workspace_tools.get_workspace_endpoints(project_id) @mcp.tool() -async def get_workspace_api_keys(workspace_id: str = None, reveal: bool = False) -> str: - """Gets API keys for a workspace.""" - return await workspace_tools.get_workspace_api_keys(workspace_id, reveal) +async def get_workspace_api_keys(project_id: str = None, reveal: bool = False) -> str: + """Gets API keys for a project.""" + return await workspace_tools.get_workspace_api_keys(project_id, reveal) @mcp.tool() -async def list_branches(workspace_id: str = None) -> str: - """Lists all development branches of a workspace.""" - return await workspace_tools.list_branches(workspace_id) +async def list_branches(project_id: str = None) -> str: + """Lists all development 
branches of a project.""" + return await workspace_tools.list_branches(project_id) @mcp.tool() -async def create_branch(name: str = "develop", workspace_id: str = None) -> str: +async def create_branch(name: str = "develop", project_id: str = None) -> str: """Creates a development branch.""" - return await workspace_tools.create_branch(name, workspace_id) + return await workspace_tools.create_branch(name, project_id) @mcp.tool() -async def delete_branch(branch_id: str, workspace_id: str = None) -> str: +async def delete_branch(branch_id: str, project_id: str = None) -> str: """Deletes a development branch.""" - return await workspace_tools.delete_branch(branch_id, workspace_id) + return await workspace_tools.delete_branch(branch_id, project_id) @mcp.tool() -async def reset_branch(branch_id: str, migration_version: str = None, workspace_id: str = None) -> str: +async def reset_branch(branch_id: str, migration_version: str = None, project_id: str = None) -> str: """Resets migrations of a development branch. 
Any untracked data or schema changes will be lost.""" - return await workspace_tools.reset_branch(branch_id, migration_version, workspace_id) + return await workspace_tools.reset_branch(branch_id, migration_version, project_id) + + +@mcp.tool() +async def list_projects() -> str: + """Lists all available projects.""" + return await workspace_tools.list_workspaces() + + +@mcp.tool() +async def get_project(project_id: str) -> str: + """Gets details for a specific project.""" + return await workspace_tools.get_workspace(project_id) + + +@mcp.tool() +async def create_project( + project_name: str, + engine_version: str = "Supabase_1_24", + engine_type: str = "Supabase" +) -> str: + """Creates a new project.""" + return await workspace_tools.create_workspace(project_name, engine_version, engine_type) + + +@mcp.tool() +async def pause_project(project_id: str = None) -> str: + """Pauses a project.""" + return await workspace_tools.stop_workspace(project_id) + + +@mcp.tool() +async def restore_project(project_id: str = None) -> str: + """Restores a project.""" + return await workspace_tools.start_workspace(project_id) + + +@mcp.tool() +async def get_project_url(project_id: str = None) -> str: + """Gets API endpoint URL for a project.""" + return await workspace_tools.get_workspace_endpoints(project_id) + + +@mcp.tool() +async def get_publishable_keys(project_id: str = None, reveal: bool = False) -> str: + """Gets API keys for a project.""" + return await workspace_tools.get_workspace_api_keys(project_id, reveal) def main(): @@ -236,8 +282,8 @@ def main(): logger.info(f"Starting Supabase MCP Server on port {args.port}") logger.info(f"Read-only mode: {READ_ONLY}") - if default_workspace_id: - logger.info(f"Default workspace ID: {default_workspace_id}") + if default_project_id: + logger.info(f"Default project ID: {default_project_id}") mcp.run() From f5555084dc50cd4b335fcf37af115af978519cb2 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 11:37:02 +0800 
Subject: [PATCH 11/32] =?UTF-8?q?fix:=E6=B3=A8=E5=86=8C=E6=89=80=E6=9C=89?= =?UTF-8?q?=E5=B7=A5=E5=85=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 3 +- server/mcp_server_supabase/README_zh.md | 7 +- .../src/mcp_server_supabase/server.py | 25 +- .../tools/edge_function_tools.py | 69 --- .../tools/workspace_tools.py | 448 ++++++++++++------ 5 files changed, 311 insertions(+), 241 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index 4153ef6d..dbbee8c2 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -125,12 +125,11 @@ mv .env_example .env # 填写环境变量 - `create_database` - `drop_database` -### Edge Functions(6) +### Edge Functions(5) - `list_edge_functions` - `get_edge_function` - `deploy_edge_function` - `delete_edge_function` -- `invoke_edge_function` - `get_edge_function_logs` ### 存储管理(8) diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index 8fe69a34..38c1a9e2 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -12,7 +12,7 @@ - ✅ **工作空间管理** - 列出、创建、启动/停止工作空间,管理设置 - ✅ **数据库管理** - 列出表、执行 SQL、应用迁移、管理数据库和账户 ✨ 增强 -- ✅ **Edge Functions** - 部署、获取代码、调用和管理 Edge Functions ✨ 增强 +- ✅ **Edge Functions** - 部署、获取代码和管理 Edge Functions ✨ 增强 - ✅ **调试工具** - 获取多服务日志和性能/安全建议 ✨ 新增 - ✅ **开发工具** - 生成 TypeScript 类型、获取 API URL 和密钥 ✨ 新增 - ✅ **存储管理** - 管理存储桶和对象 @@ -114,7 +114,7 @@ clear_default_branch_cache() ``` -## 可用工具(54 个) +## 可用工具(53 个) ### 数据库操作(8 个) - `list_tables` - 列出数据库表 @@ -126,12 +126,11 @@ clear_default_branch_cache() - `create_database` - 创建新数据库 - `drop_database` - 删除数据库 -### Edge Functions(6 个) +### Edge Functions(5 个) - `list_edge_functions` - 列出 Edge Functions - `get_edge_function` - 获取 Edge Function 源代码 ✨ 新增 - `deploy_edge_function` - 部署或更新 Edge Function ✨ 新增 - `delete_edge_function` - 删除 
Edge Function ✨ 新增 -- `invoke_edge_function` - 调用 Edge Function - `get_edge_function_logs` - 获取函数日志 ### 调试工具(2 个) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index 4719bfd3..f19bc17f 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -70,17 +70,6 @@ async def delete_edge_function(function_name: str, project_id: str = None) -> st return await edge_tools.delete_edge_function(function_name, project_id) -@mcp.tool() -async def invoke_edge_function( - function_name: str, - payload: str = None, - method: str = "POST", - project_id: str = None -) -> str: - """Invokes an Edge Function.""" - return await edge_tools.invoke_edge_function(function_name, payload, method, project_id) - - @mcp.tool() async def list_storage_buckets(project_id: str = None) -> str: """Lists all storage buckets in a project.""" @@ -232,13 +221,13 @@ async def reset_branch(branch_id: str, migration_version: str = None, project_id @mcp.tool() async def list_projects() -> str: """Lists all available projects.""" - return await workspace_tools.list_workspaces() + return await workspace_tools.list_projects() @mcp.tool() async def get_project(project_id: str) -> str: """Gets details for a specific project.""" - return await workspace_tools.get_workspace(project_id) + return await workspace_tools.get_project(project_id) @mcp.tool() @@ -248,31 +237,31 @@ async def create_project( engine_type: str = "Supabase" ) -> str: """Creates a new project.""" - return await workspace_tools.create_workspace(project_name, engine_version, engine_type) + return await workspace_tools.create_project(project_name, engine_version, engine_type) @mcp.tool() async def pause_project(project_id: str = None) -> str: """Pauses a project.""" - return await workspace_tools.stop_workspace(project_id) + return await 
workspace_tools.pause_project(project_id) @mcp.tool() async def restore_project(project_id: str = None) -> str: """Restores a project.""" - return await workspace_tools.start_workspace(project_id) + return await workspace_tools.restore_project(project_id) @mcp.tool() async def get_project_url(project_id: str = None) -> str: """Gets API endpoint URL for a project.""" - return await workspace_tools.get_workspace_endpoints(project_id) + return await workspace_tools.get_project_url(project_id) @mcp.tool() async def get_publishable_keys(project_id: str = None, reveal: bool = False) -> str: """Gets API keys for a project.""" - return await workspace_tools.get_workspace_api_keys(project_id, reveal) + return await workspace_tools.get_publishable_keys(project_id, reveal) def main(): diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py index f0c526e8..e692697b 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py @@ -81,12 +81,6 @@ def _validate_runtime_compatibility(self, runtime: str, source_code: str) -> Non if not any(keyword in source_code for keyword in ["def ", "import ", "from "]): logger.warning("Python code may be invalid - no function definitions or imports found") - def _is_function_metadata(self, result: dict) -> bool: - if not isinstance(result, dict): - return False - required_keys = {"id", "slug", "name", "status", "version", "entrypoint_path"} - return required_keys.issubset(set(result.keys())) - def _extract_error_text(self, payload: object) -> str: if isinstance(payload, dict): return json.dumps(payload, ensure_ascii=False) @@ -235,66 +229,3 @@ async def delete_edge_function(self, function_name: str, workspace_id: Optional[ logger.info(f"Successfully deleted edge function '{function_name}'") return {"success": 
True, "message": "Edge function deleted successfully"} - - @handle_errors - async def invoke_edge_function( - self, - function_name: str, - payload: Optional[str] = None, - method: str = "POST", - workspace_id: Optional[str] = None - ) -> dict: - self._validate_function_name(function_name) - ws_id = self._get_workspace_id(workspace_id) - logger.info( - f"Invoking edge function '{function_name}'", - extra={"method": method, "has_payload": payload is not None} - ) - - client = await self._get_client(ws_id) - http_method = method.upper().strip() if method else "POST" - if http_method not in {"GET", "POST", "PUT", "PATCH", "DELETE"}: - raise ValueError(f"Unsupported method '{method}'") - - json_data = None - if payload: - try: - json_data = json.loads(payload) - except json.JSONDecodeError as e: - raise ValueError(f"Invalid payload JSON: {e}") - - encoded_name = quote(function_name, safe="") - primary_path = f"/functions/v1/{encoded_name}" - fallback_path = f"/v1/projects/{PROJECT_SLUG}/functions/{encoded_name}/invoke" - - try: - primary_result = await client.call_api( - primary_path, - method=http_method, - json_data=json_data, - timeout=60.0 - ) - if not self._is_function_metadata(primary_result): - logger.debug(f"Edge function '{function_name}' invoked successfully via {primary_path}") - return primary_result - except SupabaseApiError as e: - payload_text = self._extract_error_text(e.payload).lower() - if e.status_code not in {404, 405} and "route" not in payload_text: - raise - - try: - fallback_result = await client.call_api( - fallback_path, - method=http_method, - json_data=json_data, - timeout=60.0 - ) - if not self._is_function_metadata(fallback_result): - logger.debug(f"Edge function '{function_name}' invoked successfully via {fallback_path}") - return fallback_result - except SupabaseApiError: - pass - - raise ValueError( - "Edge function invocation is not supported by current AIDAP workspace endpoint" - ) diff --git 
a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index 851acf3a..2a3dcf6d 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -4,7 +4,7 @@ import json import logging import inspect -from typing import Optional +from typing import Any, Optional from ..utils import read_only_check @@ -21,6 +21,98 @@ def __init__(self, aidap_client, default_workspace_id: Optional[str] = None): def _resolve_workspace_id(self, workspace_id: Optional[str] = None) -> Optional[str]: return workspace_id or self.default_workspace_id + def _to_json(self, payload: dict) -> str: + return json.dumps(payload, indent=2, ensure_ascii=False) + + def _compact(self, payload: dict) -> dict: + return {k: v for k, v in payload.items() if v is not None} + + def _pick(self, source: Any, *field_names: str) -> Any: + source_dict = source.to_dict() if hasattr(source, "to_dict") else source if isinstance(source, dict) else {} + for field_name in field_names: + value = None + if isinstance(source, dict): + value = source.get(field_name) + else: + value = getattr(source, field_name, None) + if value is None and isinstance(source_dict, dict): + value = source_dict.get(field_name) + if isinstance(value, str): + value = value.strip() + if not value: + value = None + if value is not None: + return value + return None + + def _workspace_view(self, source: Any) -> dict: + workspace_id = self._pick(source, "workspace_id") + workspace_name = self._pick(source, "workspace_name") + project_name = self._pick(source, "project_name") + payload = { + "workspace_id": workspace_id, + "workspace_name": workspace_name, + "project_name": project_name or workspace_name, + "status": self._pick(source, "workspace_status", "status"), + "region": self._pick(source, "region_id", "region"), + "created_at": 
self._pick(source, "create_time", "created_at"), + "updated_at": self._pick(source, "update_time", "updated_at"), + "engine_type": self._pick(source, "engine_type"), + "engine_version": self._pick(source, "engine_version"), + "deletion_protection_status": self._pick(source, "deletion_protection_status"), + } + return self._compact(payload) + + def _project_view(self, source: Any) -> dict: + workspace_payload = self._workspace_view(source) + project_name = workspace_payload.get("project_name") or workspace_payload.get("workspace_name") + payload = { + "project_id": workspace_payload.get("workspace_id"), + "project_name": project_name, + "workspace_id": workspace_payload.get("workspace_id"), + "workspace_name": workspace_payload.get("workspace_name"), + "status": workspace_payload.get("status"), + "region": workspace_payload.get("region"), + "created_at": workspace_payload.get("created_at"), + "updated_at": workspace_payload.get("updated_at"), + "engine_type": workspace_payload.get("engine_type"), + "engine_version": workspace_payload.get("engine_version"), + "deletion_protection_status": workspace_payload.get("deletion_protection_status"), + } + return self._compact(payload) + + def _with_project_alias(self, payload: dict, project_id: Optional[str] = None, project_name: Optional[str] = None) -> dict: + result = dict(payload) + workspace_id = result.get("workspace_id") or project_id + workspace_name = result.get("workspace_name") or project_name + if workspace_id: + result["workspace_id"] = workspace_id + result["project_id"] = workspace_id + if workspace_name: + result["workspace_name"] = workspace_name + result["project_name"] = workspace_name + return result + + def _describe_workspaces_response(self): + from volcenginesdkaidap.models import DescribeWorkspacesRequest, FilterForDescribeWorkspacesInput + + parameters = inspect.signature(FilterForDescribeWorkspacesInput).parameters + filter_kwargs = { + "name": "DBEngineVersion", + "value": "Supabase_1_24", + } + if 
"mode" in parameters: + filter_kwargs["mode"] = "Exact" + filters = [FilterForDescribeWorkspacesInput(**filter_kwargs)] + request = DescribeWorkspacesRequest(filters=filters) + return self.aidap_client.client.describe_workspaces(request) + + def _describe_workspace_detail_response(self, workspace_id: str): + from volcenginesdkaidap.models import DescribeWorkspaceDetailRequest + + request = DescribeWorkspaceDetailRequest(workspace_id=workspace_id) + return self.aidap_client.client.describe_workspace_detail(request) + def _error_detail(self, code: str, message: str, retriable: bool = False) -> dict: return { "code": code, @@ -38,99 +130,83 @@ def _mask_key(self, value: Optional[str], reveal: bool) -> Optional[str]: return f"{value[:6]}...{value[-4:]}" async def list_workspaces(self) -> str: - """Lists all available workspaces. - - Returns: - JSON string containing list of workspaces - """ try: - from volcenginesdkaidap.models import DescribeWorkspacesRequest, FilterForDescribeWorkspacesInput - - parameters = inspect.signature(FilterForDescribeWorkspacesInput).parameters - filter_kwargs = { - "name": "DBEngineVersion", - "value": "Supabase_1_24", - } - if "mode" in parameters: - filter_kwargs["mode"] = "Exact" - filters = [FilterForDescribeWorkspacesInput(**filter_kwargs)] - - request = DescribeWorkspacesRequest(filters=filters) - response = self.aidap_client.client.describe_workspaces(request) - - if hasattr(response, 'workspaces') and response.workspaces: - workspaces = [] - for ws in response.workspaces: - workspace_info = { - "workspace_id": getattr(ws, 'workspace_id', None), - "workspace_name": getattr(ws, 'workspace_name', None), - "status": getattr(ws, 'status', None), - "region": getattr(ws, 'region', None), - } - workspaces.append(workspace_info) - - return json.dumps({ - "success": True, - "workspaces": workspaces, - "count": len(workspaces) - }, indent=2) - - return json.dumps({ + response = self._describe_workspaces_response() + raw_workspaces = 
list(getattr(response, "workspaces", []) or []) + workspaces = [self._workspace_view(ws) for ws in raw_workspaces] + return self._to_json({ "success": True, - "workspaces": [], - "count": 0 - }, indent=2) + "workspaces": workspaces, + "count": len(workspaces) + }) except Exception as e: logger.error(f"Error listing workspaces: {e}") - return json.dumps({ + return self._to_json({ "success": False, "error": str(e) - }, indent=2) + }) async def get_workspace(self, workspace_id: str) -> str: - """Gets details for a specific workspace. - - Args: - workspace_id: The workspace ID - - Returns: - JSON string containing workspace details - """ try: - # 使用正确的 API 方法名 - from volcenginesdkaidap.models import DescribeWorkspaceDetailRequest - - request = DescribeWorkspaceDetailRequest(workspace_id=workspace_id) - response = self.aidap_client.client.describe_workspace_detail(request) - - if hasattr(response, 'workspace'): - ws = response.workspace - workspace_info = { - "workspace_id": getattr(ws, 'workspace_id', None), - "workspace_name": getattr(ws, 'workspace_name', None), - "status": getattr(ws, 'status', None), - "region": getattr(ws, 'region', None), - "created_at": getattr(ws, 'created_at', None), - "updated_at": getattr(ws, 'updated_at', None), - } - - return json.dumps({ + response = self._describe_workspace_detail_response(workspace_id) + ws = getattr(response, "workspace", None) + if ws is not None: + workspace_info = self._workspace_view(ws) + return self._to_json({ "success": True, "workspace": workspace_info - }, indent=2) + }) - return json.dumps({ + return self._to_json({ "success": False, "error": "Workspace not found" - }, indent=2) + }) except Exception as e: logger.error(f"Error getting workspace: {e}") - return json.dumps({ + return self._to_json({ + "success": False, + "error": str(e) + }) + + async def list_projects(self) -> str: + try: + response = self._describe_workspaces_response() + raw_workspaces = list(getattr(response, "workspaces", []) or []) + 
projects = [self._project_view(ws) for ws in raw_workspaces] + return self._to_json({ + "success": True, + "projects": projects, + "count": len(projects) + }) + except Exception as e: + logger.error(f"Error listing projects: {e}") + return self._to_json({ + "success": False, + "error": str(e) + }) + + async def get_project(self, project_id: str) -> str: + try: + response = self._describe_workspace_detail_response(project_id) + ws = getattr(response, "workspace", None) + if ws is not None: + project_info = self._project_view(ws) + return self._to_json({ + "success": True, + "project": project_info + }) + return self._to_json({ + "success": False, + "error": "Project not found" + }) + except Exception as e: + logger.error(f"Error getting project: {e}") + return self._to_json({ "success": False, "error": str(e) - }, indent=2) + }) @read_only_check async def create_workspace( @@ -140,29 +216,78 @@ async def create_workspace( engine_type: str = "Supabase", ) -> str: if not workspace_name or not workspace_name.strip(): - return json.dumps({"success": False, "error": "workspace_name is required"}, indent=2) + return self._to_json({"success": False, "error": "workspace_name is required"}) result = await self.aidap_client.create_workspace( workspace_name=workspace_name.strip(), engine_type=engine_type, engine_version=engine_version ) - return json.dumps(result, indent=2) + return self._to_json(result) + + @read_only_check + async def create_project( + self, + project_name: str, + engine_version: str = "Supabase_1_24", + engine_type: str = "Supabase", + ) -> str: + if not project_name or not project_name.strip(): + return self._to_json({"success": False, "error": "project_name is required"}) + result = await self.aidap_client.create_workspace( + workspace_name=project_name.strip(), + engine_type=engine_type, + engine_version=engine_version + ) + if not isinstance(result, dict): + return self._to_json({"success": False, "error": "Unexpected create project response"}) + if 
result.get("success"): + mapped = { + "success": True, + "project_id": result.get("workspace_id"), + "project_name": result.get("workspace_name") or project_name.strip(), + "workspace_id": result.get("workspace_id"), + "workspace_name": result.get("workspace_name") or project_name.strip(), + "engine_type": result.get("engine_type"), + "engine_version": result.get("engine_version"), + } + return self._to_json(self._compact(mapped)) + return self._to_json(result) @read_only_check async def start_workspace(self, workspace_id: Optional[str] = None) -> str: ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: - return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + return self._to_json({"success": False, "error": "workspace_id is required"}) result = await self.aidap_client.start_workspace(ws_id) - return json.dumps(result, indent=2) + return self._to_json(result) @read_only_check async def stop_workspace(self, workspace_id: Optional[str] = None) -> str: ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: - return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + return self._to_json({"success": False, "error": "workspace_id is required"}) + result = await self.aidap_client.stop_workspace(ws_id) + return self._to_json(result) + + @read_only_check + async def restore_project(self, project_id: Optional[str] = None) -> str: + ws_id = self._resolve_workspace_id(project_id) + if not ws_id: + return self._to_json({"success": False, "error": "project_id is required"}) + result = await self.aidap_client.start_workspace(ws_id) + if isinstance(result, dict): + result = self._with_project_alias(result, ws_id) + return self._to_json(result) + + @read_only_check + async def pause_project(self, project_id: Optional[str] = None) -> str: + ws_id = self._resolve_workspace_id(project_id) + if not ws_id: + return self._to_json({"success": False, "error": "project_id is required"}) result = await 
self.aidap_client.stop_workspace(ws_id) - return json.dumps(result, indent=2) + if isinstance(result, dict): + result = self._with_project_alias(result, ws_id) + return self._to_json(result) @read_only_check async def create_branch( @@ -172,44 +297,44 @@ async def create_branch( ) -> str: ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: - return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + return self._to_json({"success": False, "error": "workspace_id is required"}) result = await self.aidap_client.create_branch(ws_id, name) - return json.dumps(result, indent=2) + return self._to_json(result) async def list_branches(self, workspace_id: Optional[str] = None) -> str: ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: - return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + return self._to_json({"success": False, "error": "workspace_id is required"}) try: branches = await self.aidap_client.list_branches(ws_id) - return json.dumps({"success": True, "branches": branches}, indent=2) + return self._to_json({"success": True, "branches": branches}) except Exception as e: logger.error(f"Error listing branches: {e}") - return json.dumps({"success": False, "error": str(e)}, indent=2) + return self._to_json({"success": False, "error": str(e)}) @read_only_check async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None) -> str: ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: - return json.dumps({ + return self._to_json({ "success": False, "error": "workspace_id is required", "error_detail": self._error_detail("MissingWorkspaceId", "workspace_id is required", False), - }, indent=2) + }) if not branch_id or not branch_id.strip(): - return json.dumps({ + return self._to_json({ "success": False, "error": "branch_id is required", "error_detail": self._error_detail("MissingBranchId", "branch_id is required", False), - }, indent=2) + }) normalized_branch_id 
= branch_id.strip() try: branches = await self.aidap_client.list_branches(ws_id) exists = any(b.get("branch_id") == normalized_branch_id for b in branches) if not exists: - return json.dumps({ + return self._to_json({ "success": False, "error": f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'", "error_detail": self._error_detail( @@ -217,19 +342,19 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'", False ), - }, indent=2) + }) except Exception as e: logger.error(f"Error checking branch before delete: {e}") - return json.dumps({ + return self._to_json({ "success": False, "error": str(e), "error_detail": self._error_detail("ListBranchesFailed", str(e), True), - }, indent=2) + }) result = await self.aidap_client.delete_branch(ws_id, normalized_branch_id) if not result.get("success"): error_text = result.get("error", "delete branch failed") - return json.dumps({ + return self._to_json({ "success": False, "error": error_text, "error_detail": self._error_detail( @@ -237,7 +362,7 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None error_text, bool(result.get("retriable", False)) ), - }, indent=2) + }) max_confirm_attempts = 20 last_list_error: Optional[str] = None @@ -247,12 +372,12 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None branches = await self.aidap_client.list_branches(ws_id) exists = any(b.get("branch_id") == normalized_branch_id for b in branches) if not exists: - return json.dumps({"success": True, "branch_id": normalized_branch_id}, indent=2) + return self._to_json({"success": True, "branch_id": normalized_branch_id}) except Exception as e: last_list_error = str(e) if last_list_error: - return json.dumps({ + return self._to_json({ "success": False, "error": f"Delete requested for branch '{normalized_branch_id}' but verification failed: {last_list_error}", "error_detail": 
self._error_detail( @@ -260,8 +385,8 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None f"Delete requested for branch '{normalized_branch_id}' but verification failed: {last_list_error}", True ), - }, indent=2) - return json.dumps({ + }) + return self._to_json({ "success": False, "error": f"Delete requested for branch '{normalized_branch_id}' but branch still exists", "error_detail": self._error_detail( @@ -269,62 +394,99 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None f"Delete requested for branch '{normalized_branch_id}' but branch still exists", True ), - }, indent=2) + }) async def get_workspace_endpoints(self, workspace_id: Optional[str] = None) -> str: ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: - return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + return self._to_json({"success": False, "error": "workspace_id is required"}) endpoint = await self.aidap_client.get_endpoint(ws_id) if not endpoint: - return json.dumps({ + return self._to_json({ "success": False, "error": f"Could not get endpoint for workspace {ws_id}" - }, indent=2) + }) + + return self._to_json({ + "success": True, + "workspace_id": ws_id, + "project_url": endpoint, + "api_url": endpoint + }) - return json.dumps({ + async def get_project_url(self, project_id: Optional[str] = None) -> str: + ws_id = self._resolve_workspace_id(project_id) + if not ws_id: + return self._to_json({"success": False, "error": "project_id is required"}) + + endpoint = await self.aidap_client.get_endpoint(ws_id) + if not endpoint: + return self._to_json({ + "success": False, + "error": f"Could not get endpoint for project {ws_id}" + }) + + return self._to_json({ "success": True, + "project_id": ws_id, "workspace_id": ws_id, "project_url": endpoint, "api_url": endpoint - }, indent=2) + }) + + async def _get_api_keys_payload(self, workspace_id: str, reveal: bool = False) -> dict: + keys = await 
self.aidap_client.get_api_keys(workspace_id) + publishable_key = None + anon_key = None + service_role_key = None + masked_keys = [] + for key in keys: + key_type = (key.get("type") or "").lower() + value = key.get("key") + if key_type == "public": + publishable_key = value + anon_key = value + if key_type == "service": + service_role_key = value + masked_keys.append({ + **key, + "key": self._mask_key(value, reveal), + }) + return { + "success": True, + "workspace_id": workspace_id, + "reveal": reveal, + "publishable_key": self._mask_key(publishable_key, reveal), + "anon_key": self._mask_key(anon_key, reveal), + "service_role_key": self._mask_key(service_role_key, reveal), + "keys": masked_keys + } async def get_workspace_api_keys(self, workspace_id: Optional[str] = None, reveal: bool = False) -> str: ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: - return json.dumps({"success": False, "error": "workspace_id is required"}, indent=2) + return self._to_json({"success": False, "error": "workspace_id is required"}) try: - keys = await self.aidap_client.get_api_keys(ws_id) - publishable_key = None - anon_key = None - service_role_key = None - masked_keys = [] - for key in keys: - key_type = (key.get("type") or "").lower() - value = key.get("key") - if key_type == "public": - publishable_key = value - anon_key = value - if key_type == "service": - service_role_key = value - masked_keys.append({ - **key, - "key": self._mask_key(value, reveal), - }) - return json.dumps({ - "success": True, - "workspace_id": ws_id, - "reveal": reveal, - "publishable_key": self._mask_key(publishable_key, reveal), - "anon_key": self._mask_key(anon_key, reveal), - "service_role_key": self._mask_key(service_role_key, reveal), - "keys": masked_keys - }, indent=2) + payload = await self._get_api_keys_payload(ws_id, reveal) + return self._to_json(payload) except Exception as e: logger.error(f"Error getting api keys: {e}") - return json.dumps({"success": False, "error": str(e)}, 
indent=2) + return self._to_json({"success": False, "error": str(e)}) + + async def get_publishable_keys(self, project_id: Optional[str] = None, reveal: bool = False) -> str: + ws_id = self._resolve_workspace_id(project_id) + if not ws_id: + return self._to_json({"success": False, "error": "project_id is required"}) + + try: + payload = await self._get_api_keys_payload(ws_id, reveal) + payload = self._with_project_alias(payload, ws_id) + return self._to_json(payload) + except Exception as e: + logger.error(f"Error getting publishable keys: {e}") + return self._to_json({"success": False, "error": str(e)}) @read_only_check async def reset_branch( @@ -333,22 +495,12 @@ async def reset_branch( migration_version: Optional[str] = None, workspace_id: Optional[str] = None, ) -> str: - """Resets migrations of a development branch. - - Args: - branch_id: Branch ID to reset - migration_version: Target migration version (official schema field, not supported by current AIDAP SDK) - workspace_id: The workspace ID (optional) - - Returns: - JSON string containing operation result - """ ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: - return json.dumps({ + return self._to_json({ "success": False, "error": "workspace_id is required" - }, indent=2) + }) try: result = await self.aidap_client.reset_branch(ws_id, branch_id) @@ -356,10 +508,10 @@ async def reset_branch( result = {"success": bool(result)} if migration_version: result["warning"] = "migration_version is ignored because current AIDAP reset_branch API does not support version-targeted reset" - return json.dumps(result, indent=2) + return self._to_json(result) except Exception as e: logger.error(f"Error resetting branch: {e}") - return json.dumps({ + return self._to_json({ "success": False, "error": str(e) - }, indent=2) + }) From 7b4fbbf7044331622a39e29a00bf072367b0d6d6 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 11:37:15 +0800 Subject: [PATCH 12/32] 
=?UTF-8?q?fix:=E6=B3=A8=E5=86=8C=E6=89=80=E6=9C=89?= =?UTF-8?q?=E5=B7=A5=E5=85=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/supabase-mcp | 1 + 1 file changed, 1 insertion(+) create mode 160000 server/mcp_server_supabase/supabase-mcp diff --git a/server/mcp_server_supabase/supabase-mcp b/server/mcp_server_supabase/supabase-mcp new file mode 160000 index 00000000..462dad3f --- /dev/null +++ b/server/mcp_server_supabase/supabase-mcp @@ -0,0 +1 @@ +Subproject commit 462dad3fd7e247019944aa1f6791eda3d4fd4942 From 7399970150c4fe28f8745e5908af5874515d3133 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 12:00:59 +0800 Subject: [PATCH 13/32] =?UTF-8?q?fix:=E6=B3=A8=E5=86=8C=E6=89=80=E6=9C=89?= =?UTF-8?q?=E5=B7=A5=E5=85=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/supabase-mcp | 1 - 1 file changed, 1 deletion(-) delete mode 160000 server/mcp_server_supabase/supabase-mcp diff --git a/server/mcp_server_supabase/supabase-mcp b/server/mcp_server_supabase/supabase-mcp deleted file mode 160000 index 462dad3f..00000000 --- a/server/mcp_server_supabase/supabase-mcp +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 462dad3fd7e247019944aa1f6791eda3d4fd4942 From c5cd99418daeee715e290716e238f70f2da8c451 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 15:20:23 +0800 Subject: [PATCH 14/32] =?UTF-8?q?fix:=E6=B3=A8=E4=BC=98=E5=8C=96=E5=A4=87?= =?UTF-8?q?=E4=BB=BD=E5=B7=A5=E5=8D=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 34 +- server/mcp_server_supabase/README_zh.md | 36 +-- .../src/mcp_server_supabase/config.py | 45 ++- .../platform/aidap_client.py | 143 ++++++-- .../platform/supabase_client.py | 89 ++--- .../src/mcp_server_supabase/runtime.py | 30 ++ .../src/mcp_server_supabase/server.py | 267 
+-------------- .../src/mcp_server_supabase/tool_registry.py | 201 ++++++++++++ .../src/mcp_server_supabase/tools/base.py | 29 +- .../tools/database_tools.py | 10 +- .../tools/edge_function_tools.py | 95 ++++-- .../tools/storage_tools.py | 61 +++- .../tools/workspace_tools.py | 304 +++++++----------- .../src/mcp_server_supabase/utils/__init__.py | 15 +- .../src/mcp_server_supabase/utils/common.py | 29 ++ .../mcp_server_supabase/utils/decorators.py | 8 +- .../src/mcp_server_supabase/utils/targets.py | 12 + 17 files changed, 816 insertions(+), 592 deletions(-) create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/runtime.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/utils/common.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index dbbee8c2..51e80a35 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -1,6 +1,6 @@ # Supabase MCP Server -**Supabase MCP Server** 是一款基于模型上下文协议(Model Context Protocol, MCP)的服务器,实现了对 AIDAP Supabase 服务的全链路智能化管理。通过自然语言指令,用户可以对工作空间、数据库、Edge Functions、存储等资源进行创建、查询、修改、删除等操作,从而大幅提升 Supabase 开发与运维的效率。 +**Supabase MCP Server** 是一款基于模型上下文协议(Model Context Protocol, MCP)的服务器,实现了对 AIDAP Supabase 服务的全链路智能化管理。通过自然语言指令,用户可以对项目、数据库、Edge Functions、存储等资源进行创建、查询、修改、删除等操作,从而大幅提升 Supabase 开发与运维的效率。 --- @@ -15,8 +15,8 @@ --- ## 关键特性 -- **自动默认分支解析**:`branch_id` 参数可选,系统会自动使用工作空间的默认分支。 -- **完整工具集合**:提供 53 个高阶工具,覆盖数据库、Edge Functions、存储、工作空间等全方位能力。 +- **自动默认分支解析**:`branch_id` 参数可选,系统会自动使用项目的默认分支。 +- **完整工具集合**:提供高阶工具,覆盖数据库、Edge Functions、存储、项目与分支等核心能力。 - **安全与审计**:只读模式、凭证管理、细粒度日志查询与安全建议。 - **跨语言支持**:兼容 Python、Node.js、Go 等多语言客户端。 @@ -142,20 +142,18 @@ mv .env_example .env # 填写环境变量 - `get_storage_config` - `update_storage_config` -### 工作空间管理(13) -- `list_workspaces` -- 
`get_workspace` -- `create_workspace` -- `delete_workspace` -- `start_workspace` -- `stop_workspace` -- `get_workspace_endpoints` -- `get_workspace_api_keys` -- `modify_workspace_name` -- `modify_workspace_settings` -- `modify_workspace_deletion_protection` -- `reset_workspace_password` -- `reset_branch` (official-aligned: reset migrations of a development branch) +### 项目管理(11) +- `list_projects` +- `get_project` +- `create_project` +- `pause_project` +- `restore_project` +- `get_project_url` +- `get_publishable_keys` +- `list_branches` +- `create_branch` +- `delete_branch` +- `reset_branch` --- @@ -163,7 +161,7 @@ mv .env_example .env # 填写环境变量 - **数据库**:`"列出我的数据库表"`、`"查询 users 表的所有数据"` - **Edge Functions**:`"列出所有 Edge Functions"`、`"部署一个新的 Edge Function"` - **存储**:`"列出所有存储桶"`、`"创建一个公开存储桶"` -- **工作空间**:`"列出我的所有工作空间"`、`"创建一个新的工作空间"` +- **项目**:`"列出我的所有项目"`、`"创建一个新的项目"` --- diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index 38c1a9e2..7246d455 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -10,7 +10,7 @@ ### 支持的功能 -- ✅ **工作空间管理** - 列出、创建、启动/停止工作空间,管理设置 +- ✅ **项目管理** - 列出、创建、暂停/恢复项目,管理分支与访问入口 - ✅ **数据库管理** - 列出表、执行 SQL、应用迁移、管理数据库和账户 ✨ 增强 - ✅ **Edge Functions** - 部署、获取代码和管理 Edge Functions ✨ 增强 - ✅ **调试工具** - 获取多服务日志和性能/安全建议 ✨ 新增 @@ -83,11 +83,11 @@ python -m mcp_server_supabase.server ## 🎯 自动默认分支解析 -**新功能!** 现在大部分工具的 `branch_id` 参数都是可选的。如果不提供 `branch_id`,系统会自动使用工作空间的默认分支。 +**新功能!** 现在大部分工具的 `branch_id` 参数都是可选的。如果不提供 `branch_id`,系统会自动使用项目的默认分支。 ### 工作原理 -1. **自动获取**:首次调用时,系统自动查询工作空间的默认分支 +1. **自动获取**:首次调用时,系统自动查询项目的默认分支 2. **智能缓存**:默认分支 ID 会被缓存,避免重复 API 调用 3. 
**自动刷新**:当设置新的默认分支时,缓存会自动清除 @@ -139,8 +139,8 @@ clear_default_branch_cache() ### 开发工具(3 个) - `generate_typescript_types` - 根据数据库 schema 生成 TypeScript 类型定义 ✨ 新增 -- `get_project_url` - 获取项目 API URL(别名:get_workspace_endpoints) -- `get_publishable_keys` - 获取可发布的 API 密钥(别名:get_workspace_api_keys) +- `get_project_url` - 获取项目 API URL +- `get_publishable_keys` - 获取可发布的 API 密钥 ### 存储管理(8 个) - `list_storage_buckets` - 列出存储桶 @@ -152,20 +152,18 @@ clear_default_branch_cache() - `get_storage_config` - 获取存储配置 ✨ 新增 - `update_storage_config` - 更新存储配置(需要付费计划) ✨ 新增 -### 工作空间管理(12 个) -- `list_workspaces` - 列出所有工作空间 -- `get_workspace` - 获取工作空间详情 -- `create_workspace` - 创建新工作空间 -- `delete_workspace` - 删除工作空间 -- `start_workspace` - 启动工作空间 -- `stop_workspace` - 停止工作空间 -- `get_workspace_endpoints` - 获取工作空间端点 -- `get_workspace_api_keys` - 获取 API 密钥 -- `modify_workspace_name` - 修改工作空间名称 -- `modify_workspace_settings` - 修改工作空间设置 -- `modify_workspace_deletion_protection` - 修改删除保护策略 -- `reset_workspace_password` - 重置管理员密码 -- `get_workspace_usage_stats` - 获取使用统计 +### 项目管理(11 个) +- `list_projects` - 列出所有项目 +- `get_project` - 获取项目详情 +- `create_project` - 创建新项目 +- `pause_project` - 暂停项目 +- `restore_project` - 恢复项目 +- `get_project_url` - 获取项目端点 +- `get_publishable_keys` - 获取项目 API 密钥 +- `list_branches` - 列出项目分支 +- `create_branch` - 创建项目分支 +- `delete_branch` - 删除项目分支 +- `reset_branch` - 重置项目分支 ### 数据库账户管理(4 个) - `list_db_accounts` - 列出数据库账户 diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/config.py b/server/mcp_server_supabase/src/mcp_server_supabase/config.py index ca79c075..c9520ea3 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/config.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/config.py @@ -18,6 +18,7 @@ _default_branch_cache = {} _endpoint_cache = {} _api_key_cache = {} +_branch_workspace_cache = {} def get_branch_cache(): @@ -32,6 +33,10 @@ def get_api_key_cache(): return _api_key_cache +def get_branch_workspace_cache(): + return 
_branch_workspace_cache + + def clear_branch_cache(workspace_id: str = None): if workspace_id: _default_branch_cache.pop(workspace_id, None) @@ -39,22 +44,44 @@ def clear_branch_cache(workspace_id: str = None): _default_branch_cache.clear() -def clear_endpoint_cache(workspace_id: str = None): - if workspace_id: +def clear_endpoint_cache(workspace_id: str = None, branch_id: str = None): + if workspace_id and branch_id: + _endpoint_cache.pop(f"{workspace_id}:{branch_id}", None) + elif workspace_id: _endpoint_cache.pop(workspace_id, None) + keys_to_delete = [key for key in _endpoint_cache if key.startswith(f"{workspace_id}:")] + for key in keys_to_delete: + _endpoint_cache.pop(key, None) else: _endpoint_cache.clear() -def clear_api_key_cache(workspace_id: str = None): - if workspace_id: - _api_key_cache.pop(workspace_id, None) +def clear_api_key_cache(workspace_id: str = None, branch_id: str = None): + if workspace_id and branch_id: + keys_to_delete = [key for key in _api_key_cache if key.startswith(f"{workspace_id}:") and key.endswith(f":{branch_id}")] + for key in keys_to_delete: + _api_key_cache.pop(key, None) + elif workspace_id: + keys_to_delete = [key for key in _api_key_cache if key == workspace_id or key.startswith(f"{workspace_id}:")] + for key in keys_to_delete: + _api_key_cache.pop(key, None) else: _api_key_cache.clear() -def clear_all_caches(workspace_id: str = None): - """Clear all caches for a workspace or all workspaces""" +def clear_branch_workspace_cache(workspace_id: str = None, branch_id: str = None): + if branch_id: + _branch_workspace_cache.pop(branch_id, None) + elif workspace_id: + branch_ids = [key for key, value in _branch_workspace_cache.items() if value == workspace_id] + for key in branch_ids: + _branch_workspace_cache.pop(key, None) + else: + _branch_workspace_cache.clear() + + +def clear_all_caches(workspace_id: str = None, branch_id: str = None): clear_branch_cache(workspace_id) - clear_endpoint_cache(workspace_id) - 
clear_api_key_cache(workspace_id) + clear_endpoint_cache(workspace_id, branch_id) + clear_api_key_cache(workspace_id, branch_id) + clear_branch_workspace_cache(workspace_id, branch_id) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py index 27398ac9..de038c82 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py @@ -2,15 +2,18 @@ import asyncio import os import random -from typing import Optional +from typing import Any, Optional from ..config import ( VOLCENGINE_ACCESS_KEY, VOLCENGINE_SECRET_KEY, VOLCENGINE_REGION, get_branch_cache, + get_branch_workspace_cache, get_endpoint_cache, - get_api_key_cache + get_api_key_cache, + clear_all_caches, ) +from ..utils import pick_value logger = logging.getLogger(__name__) ENDPOINT_SCHEME = os.getenv("SUPABASE_ENDPOINT_SCHEME", "http").strip().lower() or "http" @@ -20,6 +23,7 @@ from volcenginesdkaidap import AIDAPApi from volcenginesdkaidap.models import ( DescribeBranchesRequest, + DescribeWorkspacesRequest, DescribeWorkspaceEndpointRequest, DescribeAPIKeysRequest, ResetBranchRequest, @@ -51,10 +55,95 @@ def __init__(self) -> None: def _branch_error_code(self, error_text: str) -> str: if "OperationDenied_BranchNotReady" in error_text: return "OperationDenied_BranchNotReady" + if "BranchStatusNotMatch" in error_text: + return "BranchStatusNotMatch" if "BranchNotFound" in error_text: return "BranchNotFound" return "AIDAPError" + def _pick_value(self, source: Any, *field_names: str) -> Any: + return pick_value(source, *field_names) + + def _looks_like_branch_id(self, value: Optional[str]) -> bool: + return bool(value and value.strip().startswith("br-")) + + def _cache_branch_workspace(self, workspace_id: Optional[str], branch_id: Optional[str]) -> None: + if workspace_id and branch_id: + 
get_branch_workspace_cache()[branch_id] = workspace_id + + def _workspace_ids_from_response(self, response: Any) -> list[str]: + workspace_ids = [] + for workspace in list(getattr(response, "workspaces", []) or []): + workspace_id = self._pick_value(workspace, "workspace_id") + if workspace_id: + workspace_ids.append(workspace_id) + return workspace_ids + + def _branch_payload(self, branch: Any, fallback_name: Optional[str] = None) -> dict: + parent_branch = self._pick_value(branch, "parent_branch") + parent_id = self._pick_value(parent_branch, "branch_id", "parent_id") + payload = { + "branch_id": self._pick_value(branch, "branch_id"), + "name": self._pick_value(branch, "name", "branch_name") or fallback_name, + "status": self._pick_value(branch, "status", "branch_status"), + "default": bool(self._pick_value(branch, "default", "is_default") or False), + "parent_id": parent_id or self._pick_value(branch, "parent_id", "parent_branch_id"), + "workspace_id": self._pick_value(branch, "workspace_id"), + "archived": self._pick_value(branch, "archived"), + "protected": self._pick_value(branch, "protected"), + "created_at": self._pick_value(branch, "create_time", "created_at"), + "updated_at": self._pick_value(branch, "update_time", "updated_at"), + } + result = {key: value for key, value in payload.items() if value is not None} + self._cache_branch_workspace(result.get("workspace_id"), result.get("branch_id")) + return result + + def _describe_supabase_workspaces_response(self): + request = DescribeWorkspacesRequest() + return self.client.describe_workspaces(request) + + async def _find_branch( + self, + workspace_id: str, + branch_id: Optional[str] = None, + name: Optional[str] = None, + max_attempts: int = 6, + ) -> Optional[dict]: + for attempt in range(1, max_attempts + 1): + branches = await self.list_branches(workspace_id) + for branch in branches: + if branch_id and branch.get("branch_id") == branch_id: + return branch + if name and branch.get("name") == name: + 
return branch + if attempt < max_attempts: + await self._sleep_backoff(attempt, base_seconds=0.5, max_seconds=3.0) + return None + + async def _find_workspace_id_for_branch(self, branch_id: str) -> Optional[str]: + cached_workspace_id = get_branch_workspace_cache().get(branch_id) + if cached_workspace_id: + return cached_workspace_id + response = self._describe_supabase_workspaces_response() + for workspace_id in self._workspace_ids_from_response(response): + branch = await self._find_branch(workspace_id, branch_id=branch_id, max_attempts=1) + if branch: + self._cache_branch_workspace(workspace_id, branch_id) + return workspace_id + return None + + async def resolve_workspace_and_branch(self, workspace_or_branch_id: str) -> tuple[str, Optional[str]]: + normalized_id = workspace_or_branch_id.strip() + if not self._looks_like_branch_id(normalized_id): + return normalized_id, None + workspace_id = await self._find_workspace_id_for_branch(normalized_id) + if not workspace_id: + raise ValueError(f"Could not resolve workspace for branch {normalized_id}") + return workspace_id, normalized_id + + async def get_branch(self, workspace_id: str, branch_id: str) -> Optional[dict]: + return await self._find_branch(workspace_id, branch_id=branch_id, max_attempts=1) + async def _sleep_backoff( self, attempt: int, @@ -79,11 +168,13 @@ async def get_default_branch_id(self, workspace_id: str, use_cache: bool = True) if getattr(branch, 'default', False): branch_id = branch.branch_id cache[workspace_id] = branch_id + self._cache_branch_workspace(workspace_id, branch_id) return branch_id first_branch = response.branches[0] branch_id = first_branch.branch_id cache[workspace_id] = branch_id + self._cache_branch_workspace(workspace_id, branch_id) return branch_id return None @@ -99,13 +190,7 @@ async def list_branches(self, workspace_id: str) -> list[dict]: branches = [] if hasattr(response, 'branches') and response.branches: for branch in response.branches: - branches.append({ - 
"branch_id": getattr(branch, 'branch_id', None), - "name": getattr(branch, 'name', None), - "status": getattr(branch, 'status', None), - "default": getattr(branch, 'default', False), - "parent_id": getattr(branch, 'parent_id', None), - }) + branches.append(self._branch_payload(branch)) return branches except Exception as e: logger.error(f"Error listing branches: {e}") @@ -123,11 +208,22 @@ async def create_branch(self, workspace_id: str, name: str = "develop") -> dict: if not branch_id and hasattr(response, 'branch'): branch_id = getattr(response.branch, 'branch_id', None) - return { + branch_payload = None + if branch_id or name: + try: + branch_payload = await self._find_branch(workspace_id, branch_id, name) + except Exception as lookup_error: + logger.warning(f"Error loading created branch details: {lookup_error}") + + result = { "success": True, "branch_id": branch_id, "workspace_id": workspace_id, + "name": name, } + if branch_payload: + result.update(branch_payload) + return result except Exception as e: logger.error(f"Error creating branch: {e}") return { @@ -153,8 +249,8 @@ async def create_workspace( suspend_timeout_seconds=300 ), workspace_settings=WorkspaceSettingsForCreateWorkspaceInput( - public_connection=False, - deletion_protection=False + public_connection="Disabled", + deletion_protection="Disabled" ), ) response = self.client.create_workspace(request) @@ -204,11 +300,12 @@ async def delete_branch(self, workspace_id: str, branch_id: str) -> dict: branch_id=branch_id, ) self.client.delete_branch(request) + clear_all_caches(workspace_id, branch_id) return {"success": True} except Exception as e: error_text = str(e) code = self._branch_error_code(error_text) - retriable = code == "OperationDenied_BranchNotReady" + retriable = code in {"OperationDenied_BranchNotReady", "BranchStatusNotMatch"} if retriable and attempt < max_attempts: await self._sleep_backoff(attempt) continue @@ -289,7 +386,7 @@ async def reset_branch(self, workspace_id: str, 
branch_id: str) -> dict: except Exception as e: error_text = str(e) code = self._branch_error_code(error_text) - retriable = code == "OperationDenied_BranchNotReady" + retriable = code in {"OperationDenied_BranchNotReady", "BranchStatusNotMatch"} if retriable and attempt < max_attempts: await self._sleep_backoff(attempt) continue @@ -347,16 +444,22 @@ async def get_api_key(self, workspace_id: str, key_type: str = "service_role", logger.error(f"Error getting API key: {e}") return None - async def get_api_keys(self, workspace_id: str, branch_id: Optional[str] = None) -> list[dict]: - if not branch_id: + async def get_api_keys( + self, + workspace_id: str, + branch_id: Optional[str] = None, + use_default_branch: bool = False, + ) -> list[dict]: + if use_default_branch and not branch_id: branch_id = await self.get_default_branch_id(workspace_id) if not branch_id: raise RuntimeError(f"Could not get default branch for workspace {workspace_id}") - request = DescribeAPIKeysRequest( - workspace_id=workspace_id, - branch_id=branch_id - ) + request_kwargs = {"workspace_id": workspace_id} + if branch_id: + request_kwargs["branch_id"] = branch_id + + request = DescribeAPIKeysRequest(**request_kwargs) response = self.client.describe_api_keys(request) keys = [] diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py index 83d22b36..f28387a5 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py @@ -1,3 +1,4 @@ +import asyncio import httpx import logging import json @@ -56,7 +57,6 @@ async def call_api( timeout: float = 30.0 ) -> Any: url = f"{self.endpoint}{path}" - logger.info(f"[DEBUG] Calling API: method={method}, url={url}, path={path}") default_headers = { @@ -67,43 +67,54 @@ async def call_api( default_headers.update(headers) client = 
await self._get_client() - try: - if content: - response = await client.request( - method, url, content=content, headers=default_headers, - params=params, timeout=timeout - ) - else: - response = await client.request( - method, url, json=json_data, headers=default_headers, - params=params, timeout=timeout - ) - response.raise_for_status() + for attempt in range(3): + try: + if content: + response = await client.request( + method, url, content=content, headers=default_headers, + params=params, timeout=timeout + ) + else: + response = await client.request( + method, url, json=json_data, headers=default_headers, + params=params, timeout=timeout + ) + response.raise_for_status() - if response.status_code == 204 or not response.content: - return {"success": True} + if response.status_code == 204 or not response.content: + return {"success": True} - content_type = response.headers.get("content-type", "") - if "application/json" in content_type: - return response.json() - return {"raw": response.text} - except httpx.HTTPStatusError as e: - response = e.response - payload: Any - try: - payload = response.json() - except Exception: - payload = response.text - raise SupabaseApiError( - status_code=response.status_code, - path=path, - endpoint=self.endpoint, - payload=payload, - ) from e - except Exception as e: - if isinstance(e, SupabaseApiError): - raise - error_details = f"{str(e)}" - if hasattr(e, '__cause__') and e.__cause__: - error_details += f" | Cause: {str(e.__cause__)}" - raise Exception(f"{error_details} [endpoint: {self.endpoint}, path: {path}]") from e + content_type = response.headers.get("content-type", "") + if "application/json" in content_type: + return response.json() + return {"raw": response.text} + except httpx.HTTPStatusError as e: + response = e.response + if response.status_code in {502, 503, 504} and attempt < 2: + await asyncio.sleep(0.5 * (attempt + 1)) + continue + payload: Any + try: + payload = response.json() + except Exception: + payload = 
response.text + raise SupabaseApiError( + status_code=response.status_code, + path=path, + endpoint=self.endpoint, + payload=payload, + ) from e + except httpx.TransportError as e: + if attempt < 2: + await asyncio.sleep(0.5 * (attempt + 1)) + continue + detail = str(e) or type(e).__name__ + raise Exception(f"{detail} [endpoint: {self.endpoint}, path: {path}]") from e + except Exception as e: + if isinstance(e, SupabaseApiError): + raise + detail = str(e) or type(e).__name__ + if hasattr(e, "__cause__") and e.__cause__: + cause_detail = str(e.__cause__) or type(e.__cause__).__name__ + detail += f" | Cause: {cause_detail}" + raise Exception(f"{detail} [endpoint: {self.endpoint}, path: {path}]") from e diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py b/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py new file mode 100644 index 00000000..bffe946a --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py @@ -0,0 +1,30 @@ +from dataclasses import dataclass +from typing import Optional + +from .platform import AidapClient +from .tools import DatabaseTools, EdgeFunctionTools, StorageTools, WorkspaceTools + + +@dataclass(slots=True) +class SupabaseRuntime: + aidap_client: AidapClient + default_project_id: Optional[str] + edge_tools: EdgeFunctionTools + storage_tools: StorageTools + database_tools: DatabaseTools + workspace_tools: WorkspaceTools + + +def create_runtime( + default_project_id: Optional[str] = None, + aidap_client: Optional[AidapClient] = None, +) -> SupabaseRuntime: + client = aidap_client or AidapClient() + return SupabaseRuntime( + aidap_client=client, + default_project_id=default_project_id, + edge_tools=EdgeFunctionTools(client, default_project_id), + storage_tools=StorageTools(client, default_project_id), + database_tools=DatabaseTools(client, default_project_id), + workspace_tools=WorkspaceTools(client, default_project_id), + ) diff --git 
a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index f19bc17f..588cbcab 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -1,13 +1,12 @@ -"""Supabase MCP Server - Refactored Version""" - import argparse import logging import os + from mcp.server.fastmcp import FastMCP from .config import READ_ONLY -from .platform import AidapClient -from .tools import EdgeFunctionTools, StorageTools, DatabaseTools, WorkspaceTools +from .runtime import create_runtime +from .tool_registry import register_tools logger = logging.getLogger(__name__) logging.basicConfig( @@ -15,266 +14,34 @@ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" ) -mcp = FastMCP("Supabase MCP Server (AIDAP)", port=int(os.getenv("PORT", "8000"))) - -aidap_client = AidapClient() default_project_id = os.getenv("DEFAULT_PROJECT_ID") or os.getenv("DEFAULT_WORKSPACE_ID") -edge_tools = EdgeFunctionTools(aidap_client, default_project_id) -storage_tools = StorageTools(aidap_client, default_project_id) -database_tools = DatabaseTools(aidap_client, default_project_id) -workspace_tools = WorkspaceTools(aidap_client, default_project_id) - - -@mcp.tool() -async def list_edge_functions(project_id: str = None) -> str: - """Lists all Edge Functions in a project.""" - return await edge_tools.list_edge_functions(project_id) - - -@mcp.tool() -async def get_edge_function(function_name: str, project_id: str = None) -> str: - """Retrieves the source code and configuration for an Edge Function.""" - return await edge_tools.get_edge_function(function_name, project_id) - - -@mcp.tool() -async def deploy_edge_function( - function_name: str, - source_code: str, - verify_jwt: bool = True, - runtime: str = "native-node20/v1", - import_map: str = None, - project_id: str = None -) -> str: - """Deploys a new Edge Function or updates an existing one. 
- - Args: - function_name: Name of the function to deploy - source_code: Source code for the function - verify_jwt: Whether to verify JWT tokens (default: True) - runtime: Runtime environment (default: native-node20/v1) - Options: native-node20/v1, native-python3.9/v1, - native-python3.10/v1, native-python3.12/v1 - import_map: Optional import map JSON for dependencies - project_id: The project ID (optional) - """ - return await edge_tools.deploy_edge_function( - function_name, source_code, verify_jwt, runtime, import_map, project_id - ) - - -@mcp.tool() -async def delete_edge_function(function_name: str, project_id: str = None) -> str: - """Deletes an Edge Function.""" - return await edge_tools.delete_edge_function(function_name, project_id) - - -@mcp.tool() -async def list_storage_buckets(project_id: str = None) -> str: - """Lists all storage buckets in a project.""" - return await storage_tools.list_storage_buckets(project_id) - - -@mcp.tool() -async def create_storage_bucket( - bucket_name: str, - public: bool = False, - file_size_limit: int = None, - allowed_mime_types: str = None, - project_id: str = None -) -> str: - """Creates a new storage bucket.""" - return await storage_tools.create_storage_bucket( - bucket_name, public, file_size_limit, allowed_mime_types, project_id - ) - - -@mcp.tool() -async def delete_storage_bucket(bucket_name: str, project_id: str = None) -> str: - """Deletes a storage bucket.""" - return await storage_tools.delete_storage_bucket(bucket_name, project_id) - - -@mcp.tool() -async def get_storage_config(project_id: str = None) -> str: - """Gets the storage configuration for a project.""" - return await storage_tools.get_storage_config(project_id) - - -@mcp.tool() -async def update_storage_config(config: str, project_id: str = None) -> str: - """Updates the storage configuration for a project.""" - import json - parsed_config = json.loads(config) - return await storage_tools.update_storage_config(parsed_config, project_id) - - 
-@mcp.tool() -async def execute_sql(query: str, project_id: str = None) -> str: - """Executes raw SQL in the Postgres database.""" - return await database_tools.execute_sql(query, project_id) - - -@mcp.tool() -async def list_tables(schemas: str = "public", project_id: str = None) -> str: - """Lists all tables in one or more schemas.""" - schema_list = [s.strip() for s in schemas.split(",")] - return await database_tools.list_tables(schema_list, project_id) - - -@mcp.tool() -async def list_migrations(project_id: str = None) -> str: - """Lists all migrations in the database.""" - return await database_tools.list_migrations(project_id) - - -@mcp.tool() -async def list_extensions(project_id: str = None) -> str: - """Lists all PostgreSQL extensions in the database.""" - return await database_tools.list_extensions(project_id) - - -@mcp.tool() -async def apply_migration(name: str, query: str, project_id: str = None) -> str: - """Applies a migration to the database.""" - return await database_tools.apply_migration(name, query, project_id) - - -@mcp.tool() -async def generate_typescript_types(schemas: str = "public", project_id: str = None) -> str: - """Generates TypeScript definitions from database schema.""" - schema_list = [s.strip() for s in schemas.split(",") if s.strip()] - return await database_tools.generate_typescript_types(schema_list, project_id) - +def create_mcp( + port: int | None = None, + default_target_id: str | None = None, +) -> FastMCP: + resolved_port = port if port is not None else int(os.getenv("PORT", "8000")) + resolved_default_target_id = default_target_id if default_target_id is not None else default_project_id + runtime = create_runtime(resolved_default_target_id) + mcp = FastMCP("Supabase MCP Server (AIDAP)", port=resolved_port) + register_tools(mcp, runtime) + return mcp -@mcp.tool() -async def list_workspaces() -> str: - """Lists all available workspaces.""" - return await workspace_tools.list_workspaces() - -@mcp.tool() -async def 
get_workspace(project_id: str) -> str: - """Gets details for a specific project.""" - return await workspace_tools.get_workspace(project_id) - - -@mcp.tool() -async def create_workspace( - project_name: str, - engine_version: str = "Supabase_1_24", - engine_type: str = "Supabase" -) -> str: - """Creates a new project.""" - return await workspace_tools.create_workspace(project_name, engine_version, engine_type) - - -@mcp.tool() -async def start_workspace(project_id: str = None) -> str: - """Starts a project.""" - return await workspace_tools.start_workspace(project_id) - - -@mcp.tool() -async def stop_workspace(project_id: str = None) -> str: - """Stops a project.""" - return await workspace_tools.stop_workspace(project_id) - - -@mcp.tool() -async def get_workspace_endpoints(project_id: str = None) -> str: - """Gets API endpoint URL for a project.""" - return await workspace_tools.get_workspace_endpoints(project_id) - - -@mcp.tool() -async def get_workspace_api_keys(project_id: str = None, reveal: bool = False) -> str: - """Gets API keys for a project.""" - return await workspace_tools.get_workspace_api_keys(project_id, reveal) - - -@mcp.tool() -async def list_branches(project_id: str = None) -> str: - """Lists all development branches of a project.""" - return await workspace_tools.list_branches(project_id) - - -@mcp.tool() -async def create_branch(name: str = "develop", project_id: str = None) -> str: - """Creates a development branch.""" - return await workspace_tools.create_branch(name, project_id) - - -@mcp.tool() -async def delete_branch(branch_id: str, project_id: str = None) -> str: - """Deletes a development branch.""" - return await workspace_tools.delete_branch(branch_id, project_id) - - -@mcp.tool() -async def reset_branch(branch_id: str, migration_version: str = None, project_id: str = None) -> str: - """Resets migrations of a development branch. 
Any untracked data or schema changes will be lost.""" - return await workspace_tools.reset_branch(branch_id, migration_version, project_id) - - -@mcp.tool() -async def list_projects() -> str: - """Lists all available projects.""" - return await workspace_tools.list_projects() - - -@mcp.tool() -async def get_project(project_id: str) -> str: - """Gets details for a specific project.""" - return await workspace_tools.get_project(project_id) - - -@mcp.tool() -async def create_project( - project_name: str, - engine_version: str = "Supabase_1_24", - engine_type: str = "Supabase" -) -> str: - """Creates a new project.""" - return await workspace_tools.create_project(project_name, engine_version, engine_type) - - -@mcp.tool() -async def pause_project(project_id: str = None) -> str: - """Pauses a project.""" - return await workspace_tools.pause_project(project_id) - - -@mcp.tool() -async def restore_project(project_id: str = None) -> str: - """Restores a project.""" - return await workspace_tools.restore_project(project_id) - - -@mcp.tool() -async def get_project_url(project_id: str = None) -> str: - """Gets API endpoint URL for a project.""" - return await workspace_tools.get_project_url(project_id) - - -@mcp.tool() -async def get_publishable_keys(project_id: str = None, reveal: bool = False) -> str: - """Gets API keys for a project.""" - return await workspace_tools.get_publishable_keys(project_id, reveal) +mcp = create_mcp() def main(): parser = argparse.ArgumentParser(description="Supabase MCP Server") parser.add_argument("--port", type=int, default=8000, help="Port to run the server on") args = parser.parse_args() - + logger.info(f"Starting Supabase MCP Server on port {args.port}") logger.info(f"Read-only mode: {READ_ONLY}") if default_project_id: logger.info(f"Default project ID: {default_project_id}") - - mcp.run() + + create_mcp(port=args.port).run() if __name__ == "__main__": diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py 
b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py new file mode 100644 index 00000000..baa42932 --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py @@ -0,0 +1,201 @@ +import json + +from mcp.server.fastmcp import FastMCP + +from .runtime import SupabaseRuntime + + +def register_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: + _register_edge_tools(mcp, runtime) + _register_storage_tools(mcp, runtime) + _register_database_tools(mcp, runtime) + _register_project_tools(mcp, runtime) + + +def _register_edge_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: + edge_tools = runtime.edge_tools + + @mcp.tool() + async def list_edge_functions(project_id: str = None) -> str: + """Lists all Edge Functions in a project.""" + return await edge_tools.list_edge_functions(project_id) + + @mcp.tool() + async def get_edge_function(function_name: str, project_id: str = None) -> str: + """Retrieves the source code and configuration for an Edge Function.""" + return await edge_tools.get_edge_function(function_name, project_id) + + @mcp.tool() + async def deploy_edge_function( + function_name: str, + source_code: str, + verify_jwt: bool = True, + runtime: str = "native-node20/v1", + import_map: str = None, + project_id: str = None, + ) -> str: + """Deploys a new Edge Function or updates an existing one. 
+ + Args: + function_name: Name of the function to deploy + source_code: Source code for the function + verify_jwt: Whether to verify JWT tokens (default: True) + runtime: Runtime environment (default: native-node20/v1) + Options: native-node20/v1, native-python3.9/v1, + native-python3.10/v1, native-python3.12/v1 + import_map: Optional import map JSON for dependencies + project_id: The project ID (optional) + """ + return await edge_tools.deploy_edge_function( + function_name, + source_code, + verify_jwt, + runtime, + import_map, + project_id, + ) + + @mcp.tool() + async def delete_edge_function(function_name: str, project_id: str = None) -> str: + """Deletes an Edge Function.""" + return await edge_tools.delete_edge_function(function_name, project_id) + + +def _register_storage_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: + storage_tools = runtime.storage_tools + + @mcp.tool() + async def list_storage_buckets(project_id: str = None) -> str: + """Lists all storage buckets in a project.""" + return await storage_tools.list_storage_buckets(project_id) + + @mcp.tool() + async def create_storage_bucket( + bucket_name: str, + public: bool = False, + file_size_limit: int = None, + allowed_mime_types: str | list[str] = None, + project_id: str = None, + ) -> str: + """Creates a new storage bucket.""" + return await storage_tools.create_storage_bucket( + bucket_name, + public, + file_size_limit, + allowed_mime_types, + project_id, + ) + + @mcp.tool() + async def delete_storage_bucket(bucket_name: str, project_id: str = None) -> str: + """Deletes a storage bucket.""" + return await storage_tools.delete_storage_bucket(bucket_name, project_id) + + @mcp.tool() + async def get_storage_config(project_id: str = None) -> str: + """Gets the storage configuration for a project.""" + return await storage_tools.get_storage_config(project_id) + + @mcp.tool() + async def update_storage_config(config: str, project_id: str = None) -> str: + """Updates the storage configuration 
for a project.""" + return await storage_tools.update_storage_config(json.loads(config), project_id) + + +def _register_database_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: + database_tools = runtime.database_tools + + @mcp.tool() + async def execute_sql(query: str, project_id: str = None) -> str: + """Executes raw SQL in the Postgres database.""" + return await database_tools.execute_sql(query, project_id) + + @mcp.tool() + async def list_tables(schemas: str = "public", project_id: str = None) -> str: + """Lists all tables in one or more schemas.""" + schema_list = [schema.strip() for schema in schemas.split(",")] + return await database_tools.list_tables(schema_list, project_id) + + @mcp.tool() + async def list_migrations(project_id: str = None) -> str: + """Lists all migrations in the database.""" + return await database_tools.list_migrations(project_id) + + @mcp.tool() + async def list_extensions(project_id: str = None) -> str: + """Lists all PostgreSQL extensions in the database.""" + return await database_tools.list_extensions(project_id) + + @mcp.tool() + async def apply_migration(name: str, query: str, project_id: str = None) -> str: + """Applies a migration to the database.""" + return await database_tools.apply_migration(name, query, project_id) + + @mcp.tool() + async def generate_typescript_types(schemas: str = "public", project_id: str = None) -> str: + """Generates TypeScript definitions from database schema.""" + schema_list = [schema.strip() for schema in schemas.split(",") if schema.strip()] + return await database_tools.generate_typescript_types(schema_list, project_id) + + +def _register_project_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: + workspace_tools = runtime.workspace_tools + + @mcp.tool() + async def list_projects() -> str: + """Lists all available projects.""" + return await workspace_tools.list_projects() + + @mcp.tool() + async def get_project(project_id: str) -> str: + """Gets details for a specific project.""" 
+ return await workspace_tools.get_project(project_id) + + @mcp.tool() + async def create_project( + project_name: str, + engine_version: str = "Supabase_1_24", + engine_type: str = "Supabase", + ) -> str: + """Creates a new project.""" + return await workspace_tools.create_project(project_name, engine_version, engine_type) + + @mcp.tool() + async def pause_project(project_id: str = None) -> str: + """Pauses a project.""" + return await workspace_tools.pause_project(project_id) + + @mcp.tool() + async def restore_project(project_id: str = None) -> str: + """Restores a project.""" + return await workspace_tools.restore_project(project_id) + + @mcp.tool() + async def get_project_url(project_id: str = None) -> str: + """Gets API endpoint URL for a project.""" + return await workspace_tools.get_project_url(project_id) + + @mcp.tool() + async def get_publishable_keys(project_id: str = None, reveal: bool = False) -> str: + """Gets API keys for a project.""" + return await workspace_tools.get_publishable_keys(project_id, reveal) + + @mcp.tool() + async def list_branches(project_id: str = None) -> str: + """Lists all development branches of a project.""" + return await workspace_tools.list_branches(project_id) + + @mcp.tool() + async def create_branch(name: str = "develop", project_id: str = None) -> str: + """Creates a development branch.""" + return await workspace_tools.create_branch(name, project_id) + + @mcp.tool() + async def delete_branch(branch_id: str, project_id: str = None) -> str: + """Deletes a development branch.""" + return await workspace_tools.delete_branch(branch_id, project_id) + + @mcp.tool() + async def reset_branch(branch_id: str, migration_version: str = None, project_id: str = None) -> str: + """Resets migrations of a development branch. 
Any untracked data or schema changes will be lost.""" + return await workspace_tools.reset_branch(branch_id, migration_version, project_id) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py index 89731b50..677e5fc2 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py @@ -1,5 +1,6 @@ from typing import Optional from ..platform import AidapClient, SupabaseClient +from ..utils import resolve_target, select_target_id class BaseTools: @@ -11,7 +12,7 @@ def __init__(self, aidap_client: AidapClient, workspace_id: Optional[str] = None def _get_workspace_id(self, workspace_id: Optional[str]) -> str: """Get workspace ID from parameter or default""" - result = workspace_id or self.default_workspace_id + result = select_target_id(workspace_id, self.default_workspace_id) if not result: raise ValueError( "workspace_id is required: not provided as parameter and no default workspace_id configured. " @@ -19,19 +20,31 @@ def _get_workspace_id(self, workspace_id: Optional[str]) -> str: ) return result - async def _get_client(self, workspace_id: str) -> SupabaseClient: + async def _resolve_target(self, workspace_id: Optional[str]) -> tuple[str, Optional[str]]: + target = self._get_workspace_id(workspace_id) + resolved_workspace_id, branch_id = await resolve_target(self.aidap, target, None) + if not resolved_workspace_id: + raise ValueError( + "workspace_id is required: not provided as parameter and no default workspace_id configured. " + "Please provide workspace_id or set DEFAULT_WORKSPACE_ID environment variable." 
+ ) + return resolved_workspace_id, branch_id + + async def _get_client(self, workspace_id: str, branch_id: Optional[str] = None) -> SupabaseClient: """Get Supabase client for workspace""" import logging logger = logging.getLogger(__name__) - endpoint = await self.aidap.get_endpoint(workspace_id) - logger.info(f"[DEBUG] Got endpoint for {workspace_id}: {endpoint}") + endpoint = await self.aidap.get_endpoint(workspace_id, branch_id=branch_id) + logger.info(f"[DEBUG] Got endpoint for {workspace_id} branch={branch_id}: {endpoint}") if not endpoint: - raise ValueError(f"Could not get endpoint for workspace {workspace_id}") + target = branch_id or workspace_id + raise ValueError(f"Could not get endpoint for target {target}") - api_key = await self.aidap.get_api_key(workspace_id, "service_role") - logger.info(f"[DEBUG] Got API key for {workspace_id}: {'yes' if api_key else 'no'}") + api_key = await self.aidap.get_api_key(workspace_id, "service_role", branch_id=branch_id) + logger.info(f"[DEBUG] Got API key for {workspace_id} branch={branch_id}: {'yes' if api_key else 'no'}") if not api_key: - raise ValueError(f"Could not get API key for workspace {workspace_id}") + target = branch_id or workspace_id + raise ValueError(f"Could not get API key for target {target}") return SupabaseClient(endpoint, api_key) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py index bbbca43a..f6ba117e 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py @@ -13,13 +13,13 @@ async def _execute_sql_raw(self, query: str, workspace_id: Optional[str] = None) if not query or not query.strip(): raise ValueError("SQL query cannot be empty") - ws_id = self._get_workspace_id(workspace_id) + ws_id, branch_id = await self._resolve_target(workspace_id) logger.info( "Executing SQL 
query", - extra={"workspace_id": ws_id, "query_length": len(query)} + extra={"workspace_id": ws_id, "branch_id": branch_id, "query_length": len(query)} ) - client = await self._get_client(ws_id) + client = await self._get_client(ws_id, branch_id) result = await client.call_api("/pg/query", method="POST", json_data={"query": query}) if isinstance(result, dict) and isinstance(result.get("data"), list): @@ -168,6 +168,7 @@ async def generate_typescript_types( table_name, column_name, is_nullable, + is_identity, data_type, udt_name, column_default @@ -214,7 +215,8 @@ async def generate_typescript_types( base_type = self._to_ts_type(column.get("data_type", ""), column.get("udt_name", "")) nullable = column.get("is_nullable") == "YES" has_default = column.get("column_default") is not None - optional = nullable or has_default + is_identity = column.get("is_identity") == "YES" + optional = nullable or has_default or is_identity insert_type = f"{base_type} | null" if nullable else base_type suffix = "?" 
if optional else "" lines.append(f" {ts_key}{suffix}: {insert_type}") diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py index e692697b..438e35a7 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py @@ -44,6 +44,60 @@ class EdgeFunctionTools(BaseTools): + def _needs_handler_wrapper(self, runtime: str, source_code: str) -> bool: + if runtime != "native-node20/v1": + return False + if "Deno.serve" in source_code: + return False + return bool(re.search(r"export\s+default\s+(async\s+)?function", source_code) or re.search(r"export\s+default\s*\(", source_code)) + + def _build_deployment_payload(self, runtime: str, source_code: str, verify_jwt: bool, function_name: str) -> dict: + entrypoint = self._get_entrypoint(runtime) + files = [{ + "name": entrypoint, + "content": source_code + }] + if self._needs_handler_wrapper(runtime, source_code): + files = [ + { + "name": "handler.ts", + "content": source_code + }, + { + "name": entrypoint, + "content": "import handler from './handler.ts'\nDeno.serve((req) => handler(req))\n" + } + ] + return { + "metadata": { + "name": function_name, + "slug": function_name, + "entrypoint_path": entrypoint, + "verify_jwt": verify_jwt + }, + "files": files + } + + def _normalize_function_payload(self, payload: object) -> object: + if not isinstance(payload, dict): + return payload + result = dict(payload) + files = result.get("files") + entrypoint_path = result.get("entrypoint_path") + if isinstance(files, list): + source_code = None + for file_info in files: + if not isinstance(file_info, dict): + continue + if entrypoint_path and file_info.get("name") == entrypoint_path and isinstance(file_info.get("content"), str): + source_code = file_info.get("content") + break + if source_code is None and 
isinstance(file_info.get("content"), str): + source_code = file_info.get("content") + if source_code is not None: + result["source_code"] = source_code + return result + def _validate_function_name(self, function_name: str) -> None: """验证函数名称""" if not function_name: @@ -88,10 +142,10 @@ def _extract_error_text(self, payload: object) -> str: @handle_errors async def list_edge_functions(self, workspace_id: Optional[str] = None) -> List[EdgeFunction]: - ws_id = self._get_workspace_id(workspace_id) + ws_id, branch_id = await self._resolve_target(workspace_id) logger.info(f"Listing edge functions for workspace {ws_id}") - client = await self._get_client(ws_id) + client = await self._get_client(ws_id, branch_id) result = await client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions") functions = [EdgeFunction(**func) for func in result] @@ -99,12 +153,12 @@ async def list_edge_functions(self, workspace_id: Optional[str] = None) -> List[ return functions @handle_errors - async def get_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> EdgeFunction: + async def get_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> dict: self._validate_function_name(function_name) - ws_id = self._get_workspace_id(workspace_id) + ws_id, branch_id = await self._resolve_target(workspace_id) logger.info(f"Getting edge function '{function_name}' from workspace {ws_id}") - client = await self._get_client(ws_id) + client = await self._get_client(ws_id, branch_id) encoded_name = quote(function_name, safe="") try: result = await client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions/{encoded_name}") @@ -113,7 +167,10 @@ async def get_edge_function(self, function_name: str, workspace_id: Optional[str if "function not found" in payload_text or "not found" in payload_text: raise ValueError(f"Edge function '{function_name}' not found") raise - return EdgeFunction(**result) + normalized_result = self._normalize_function_payload(result) + if 
isinstance(normalized_result, dict): + return normalized_result + return EdgeFunction(**result).model_dump() @handle_errors @read_only_check @@ -156,7 +213,7 @@ async def deploy_edge_function( self._validate_code_size(source_code) self._validate_runtime_compatibility(runtime, source_code) - ws_id = self._get_workspace_id(workspace_id) + ws_id, branch_id = await self._resolve_target(workspace_id) entrypoint = self._get_entrypoint(runtime) logger.info( @@ -164,6 +221,7 @@ async def deploy_edge_function( extra={ "function_name": function_name, "workspace_id": ws_id, + "branch_id": branch_id, "runtime": runtime, "verify_jwt": verify_jwt, "entrypoint": entrypoint, @@ -171,24 +229,11 @@ async def deploy_edge_function( } ) - client = await self._get_client(ws_id) + client = await self._get_client(ws_id, branch_id) encoded_name = quote(function_name, safe="") - data = { - "metadata": { - "name": function_name, - "slug": function_name, - "entrypoint_path": entrypoint, - "verify_jwt": verify_jwt - }, - "files": [ - { - "name": entrypoint, - "content": source_code - } - ] - } + data = self._build_deployment_payload(runtime, source_code, verify_jwt, function_name) if import_map: try: @@ -214,16 +259,18 @@ async def deploy_edge_function( extra={"function_id": result.get("id"), "version": result.get("version")} ) + if isinstance(result, dict) and not result.get("runtime"): + result["runtime"] = runtime return result @handle_errors @read_only_check async def delete_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> dict: self._validate_function_name(function_name) - ws_id = self._get_workspace_id(workspace_id) + ws_id, branch_id = await self._resolve_target(workspace_id) logger.info(f"Deleting edge function '{function_name}' from workspace {ws_id}") - client = await self._get_client(ws_id) + client = await self._get_client(ws_id, branch_id) encoded_name = quote(function_name, safe="") await 
client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions/{encoded_name}", method="DELETE") diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py index 2fd8f8f3..5845b896 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py @@ -1,5 +1,6 @@ from typing import Optional, List, Dict, Any import logging +import json from .base import BaseTools from ..utils import handle_errors, read_only_check from ..models import StorageConfig @@ -9,12 +10,36 @@ class StorageTools(BaseTools): + def _normalize_allowed_mime_types(self, allowed_mime_types: Optional[str | list[str]]) -> Optional[list[str]]: + if allowed_mime_types is None: + return None + values: list[str] + if isinstance(allowed_mime_types, list): + values = allowed_mime_types + elif isinstance(allowed_mime_types, str): + text = allowed_mime_types.strip() + if not text: + return None + if text.startswith("["): + parsed = json.loads(text) + if not isinstance(parsed, list): + raise ValueError("allowed_mime_types JSON value must be a list of strings") + values = parsed + else: + values = text.split(",") + else: + raise ValueError("allowed_mime_types must be a string, JSON array string, or list of strings") + result = [value.strip() for value in values if isinstance(value, str) and value.strip()] + if not result: + return None + return result + @handle_errors async def list_storage_buckets(self, workspace_id: Optional[str] = None) -> List[dict]: - ws_id = self._get_workspace_id(workspace_id) + ws_id, branch_id = await self._resolve_target(workspace_id) logger.info(f"Listing storage buckets for workspace {ws_id}") - client = await self._get_client(ws_id) + client = await self._get_client(ws_id, branch_id) result = await client.call_api("/storage/v1/bucket") logger.info(f"Found {len(result)} storage buckets") 
@@ -27,19 +52,19 @@ async def create_storage_bucket( bucket_name: str, public: bool = False, file_size_limit: Optional[int] = None, - allowed_mime_types: Optional[str] = None, + allowed_mime_types: Optional[str | list[str]] = None, workspace_id: Optional[str] = None ) -> dict: if not bucket_name or not bucket_name.strip(): raise ValueError("Bucket name cannot be empty") - ws_id = self._get_workspace_id(workspace_id) + ws_id, branch_id = await self._resolve_target(workspace_id) logger.info( f"Creating storage bucket '{bucket_name}'", - extra={"workspace_id": ws_id, "public": public} + extra={"workspace_id": ws_id, "branch_id": branch_id, "public": public} ) - client = await self._get_client(ws_id) + client = await self._get_client(ws_id, branch_id) data = { "name": bucket_name, @@ -47,8 +72,9 @@ async def create_storage_bucket( } if file_size_limit: data["file_size_limit"] = file_size_limit - if allowed_mime_types: - data["allowed_mime_types"] = allowed_mime_types.split(",") + normalized_mime_types = self._normalize_allowed_mime_types(allowed_mime_types) + if normalized_mime_types: + data["allowed_mime_types"] = normalized_mime_types return await client.call_api("/storage/v1/bucket", method="POST", json_data=data) @@ -57,8 +83,8 @@ async def create_storage_bucket( async def delete_storage_bucket(self, bucket_name: str, workspace_id: Optional[str] = None) -> dict: if not bucket_name or not bucket_name.strip(): raise ValueError("Bucket name cannot be empty") - ws_id = self._get_workspace_id(workspace_id) - client = await self._get_client(ws_id) + ws_id, branch_id = await self._resolve_target(workspace_id) + client = await self._get_client(ws_id, branch_id) response = await client.call_api(f"/storage/v1/bucket/{bucket_name}", method="DELETE") if isinstance(response, dict) and "error" in response: raise ValueError(response["error"]) @@ -66,8 +92,8 @@ async def delete_storage_bucket(self, bucket_name: str, workspace_id: Optional[s @handle_errors async def 
get_storage_config(self, workspace_id: Optional[str] = None) -> StorageConfig: - ws_id = self._get_workspace_id(workspace_id) - client = await self._get_client(ws_id) + ws_id, branch_id = await self._resolve_target(workspace_id) + client = await self._get_client(ws_id, branch_id) result = await client.call_api("/storage/v1/config") return StorageConfig(**result) @@ -81,12 +107,17 @@ async def update_storage_config( if not isinstance(config, dict) or not config: raise ValueError("config must be a non-empty object") - ws_id = self._get_workspace_id(workspace_id) - client = await self._get_client(ws_id) + ws_id, branch_id = await self._resolve_target(workspace_id) + client = await self._get_client(ws_id, branch_id) try: await client.call_api("/storage/v1/config", method="PUT", json_data=config) except SupabaseApiError as e: if e.status_code == 404 and e.path == "/storage/v1/config": - raise ValueError("Updating storage config is not supported by current AIDAP workspace endpoint") + return { + "success": False, + "supported": False, + "code": "UnsupportedOperation", + "error": "Updating storage config is not supported by current AIDAP workspace endpoint" + } raise return {"success": True} diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index 2a3dcf6d..5856fd36 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -1,12 +1,9 @@ -"""Workspace management tools for Supabase MCP Server""" - import asyncio -import json import logging import inspect from typing import Any, Optional -from ..utils import read_only_check +from ..utils import compact_dict, pick_value, read_only_check, resolve_target, to_json logger = logging.getLogger(__name__) @@ -18,32 +15,17 @@ def __init__(self, aidap_client, default_workspace_id: Optional[str] = None): 
self.aidap_client = aidap_client self.default_workspace_id = default_workspace_id - def _resolve_workspace_id(self, workspace_id: Optional[str] = None) -> Optional[str]: - return workspace_id or self.default_workspace_id - def _to_json(self, payload: dict) -> str: - return json.dumps(payload, indent=2, ensure_ascii=False) + return to_json(payload) def _compact(self, payload: dict) -> dict: - return {k: v for k, v in payload.items() if v is not None} + return compact_dict(payload) def _pick(self, source: Any, *field_names: str) -> Any: - source_dict = source.to_dict() if hasattr(source, "to_dict") else source if isinstance(source, dict) else {} - for field_name in field_names: - value = None - if isinstance(source, dict): - value = source.get(field_name) - else: - value = getattr(source, field_name, None) - if value is None and isinstance(source_dict, dict): - value = source_dict.get(field_name) - if isinstance(value, str): - value = value.strip() - if not value: - value = None - if value is not None: - return value - return None + return pick_value(source, *field_names) + + async def _resolve_target(self, target_id: Optional[str]) -> tuple[Optional[str], Optional[str]]: + return await resolve_target(self.aidap_client, target_id, self.default_workspace_id) def _workspace_view(self, source: Any) -> dict: workspace_id = self._pick(source, "workspace_id") @@ -63,14 +45,31 @@ def _workspace_view(self, source: Any) -> dict: } return self._compact(payload) + def _branch_view(self, branch: dict, workspace_payload: Optional[dict] = None) -> dict: + workspace_payload = workspace_payload or {} + payload = { + "branch_id": branch.get("branch_id"), + "branch_name": branch.get("name"), + "status": branch.get("status") or workspace_payload.get("status"), + "default": branch.get("default"), + "parent_id": branch.get("parent_id"), + "root_project_id": workspace_payload.get("workspace_id") or branch.get("workspace_id"), + "root_project_name": workspace_payload.get("workspace_name"), 
+ "created_at": branch.get("created_at") or workspace_payload.get("created_at"), + "updated_at": branch.get("updated_at") or workspace_payload.get("updated_at"), + "engine_type": workspace_payload.get("engine_type"), + "engine_version": workspace_payload.get("engine_version"), + "deletion_protection_status": workspace_payload.get("deletion_protection_status"), + "target_type": "branch", + } + return self._compact(payload) + def _project_view(self, source: Any) -> dict: workspace_payload = self._workspace_view(source) project_name = workspace_payload.get("project_name") or workspace_payload.get("workspace_name") payload = { "project_id": workspace_payload.get("workspace_id"), "project_name": project_name, - "workspace_id": workspace_payload.get("workspace_id"), - "workspace_name": workspace_payload.get("workspace_name"), "status": workspace_payload.get("status"), "region": workspace_payload.get("region"), "created_at": workspace_payload.get("created_at"), @@ -82,15 +81,13 @@ def _project_view(self, source: Any) -> dict: return self._compact(payload) def _with_project_alias(self, payload: dict, project_id: Optional[str] = None, project_name: Optional[str] = None) -> dict: - result = dict(payload) - workspace_id = result.get("workspace_id") or project_id - workspace_name = result.get("workspace_name") or project_name - if workspace_id: - result["workspace_id"] = workspace_id - result["project_id"] = workspace_id - if workspace_name: - result["workspace_name"] = workspace_name - result["project_name"] = workspace_name + result = {key: value for key, value in dict(payload).items() if key not in {"workspace_id", "workspace_name"}} + resolved_project_id = result.get("project_id") or project_id + resolved_project_name = result.get("project_name") or project_name + if resolved_project_id: + result["project_id"] = resolved_project_id + if resolved_project_name: + result["project_name"] = resolved_project_name return result def _describe_workspaces_response(self): @@ -107,11 
+104,12 @@ def _describe_workspaces_response(self): request = DescribeWorkspacesRequest(filters=filters) return self.aidap_client.client.describe_workspaces(request) - def _describe_workspace_detail_response(self, workspace_id: str): - from volcenginesdkaidap.models import DescribeWorkspaceDetailRequest - - request = DescribeWorkspaceDetailRequest(workspace_id=workspace_id) - return self.aidap_client.client.describe_workspace_detail(request) + def _find_workspace_source(self, workspace_id: str) -> Optional[Any]: + response = self._describe_workspaces_response() + for workspace in list(getattr(response, "workspaces", []) or []): + if self._pick(workspace, "workspace_id") == workspace_id: + return workspace + return None def _error_detail(self, code: str, message: str, retriable: bool = False) -> dict: return { @@ -129,47 +127,6 @@ def _mask_key(self, value: Optional[str], reveal: bool) -> Optional[str]: return "*" * len(value) return f"{value[:6]}...{value[-4:]}" - async def list_workspaces(self) -> str: - try: - response = self._describe_workspaces_response() - raw_workspaces = list(getattr(response, "workspaces", []) or []) - workspaces = [self._workspace_view(ws) for ws in raw_workspaces] - return self._to_json({ - "success": True, - "workspaces": workspaces, - "count": len(workspaces) - }) - - except Exception as e: - logger.error(f"Error listing workspaces: {e}") - return self._to_json({ - "success": False, - "error": str(e) - }) - - async def get_workspace(self, workspace_id: str) -> str: - try: - response = self._describe_workspace_detail_response(workspace_id) - ws = getattr(response, "workspace", None) - if ws is not None: - workspace_info = self._workspace_view(ws) - return self._to_json({ - "success": True, - "workspace": workspace_info - }) - - return self._to_json({ - "success": False, - "error": "Workspace not found" - }) - - except Exception as e: - logger.error(f"Error getting workspace: {e}") - return self._to_json({ - "success": False, - "error": 
str(e) - }) - async def list_projects(self) -> str: try: response = self._describe_workspaces_response() @@ -189,10 +146,24 @@ async def list_projects(self) -> str: async def get_project(self, project_id: str) -> str: try: - response = self._describe_workspace_detail_response(project_id) - ws = getattr(response, "workspace", None) + ws_id, branch_id = await self._resolve_target(project_id) + if not ws_id: + return self._to_json({ + "success": False, + "error": "project_id is required" + }) + ws = self._find_workspace_source(ws_id) if ws is not None: project_info = self._project_view(ws) + if branch_id: + branch = await self.aidap_client.get_branch(ws_id, branch_id) + if branch: + branch_view = self._branch_view(branch, self._workspace_view(ws)) + project_info.update({ + "project_id": branch_id, + "project_name": branch.get("name") or project_info.get("project_name"), + **branch_view, + }) return self._to_json({ "success": True, "project": project_info @@ -208,22 +179,6 @@ async def get_project(self, project_id: str) -> str: "error": str(e) }) - @read_only_check - async def create_workspace( - self, - workspace_name: str, - engine_version: str = "Supabase_1_24", - engine_type: str = "Supabase", - ) -> str: - if not workspace_name or not workspace_name.strip(): - return self._to_json({"success": False, "error": "workspace_name is required"}) - result = await self.aidap_client.create_workspace( - workspace_name=workspace_name.strip(), - engine_type=engine_type, - engine_version=engine_version - ) - return self._to_json(result) - @read_only_check async def create_project( self, @@ -245,33 +200,15 @@ async def create_project( "success": True, "project_id": result.get("workspace_id"), "project_name": result.get("workspace_name") or project_name.strip(), - "workspace_id": result.get("workspace_id"), - "workspace_name": result.get("workspace_name") or project_name.strip(), "engine_type": result.get("engine_type"), "engine_version": result.get("engine_version"), } return 
self._to_json(self._compact(mapped)) return self._to_json(result) - @read_only_check - async def start_workspace(self, workspace_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) - result = await self.aidap_client.start_workspace(ws_id) - return self._to_json(result) - - @read_only_check - async def stop_workspace(self, workspace_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) - result = await self.aidap_client.stop_workspace(ws_id) - return self._to_json(result) - @read_only_check async def restore_project(self, project_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(project_id) + ws_id, _ = await self._resolve_target(project_id) if not ws_id: return self._to_json({"success": False, "error": "project_id is required"}) result = await self.aidap_client.start_workspace(ws_id) @@ -281,7 +218,7 @@ async def restore_project(self, project_id: Optional[str] = None) -> str: @read_only_check async def pause_project(self, project_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(project_id) + ws_id, _ = await self._resolve_target(project_id) if not ws_id: return self._to_json({"success": False, "error": "project_id is required"}) result = await self.aidap_client.stop_workspace(ws_id) @@ -293,34 +230,60 @@ async def pause_project(self, project_id: Optional[str] = None) -> str: async def create_branch( self, name: str = "develop", - workspace_id: Optional[str] = None, + project_id: Optional[str] = None, ) -> str: - ws_id = self._resolve_workspace_id(workspace_id) + ws_id, _ = await self._resolve_target(project_id) if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) + return self._to_json({"success": False, "error": "project_id is 
required"}) result = await self.aidap_client.create_branch(ws_id, name) + if result.get("success") and result.get("branch_id"): + branch_id = result["branch_id"] + result.pop("workspace_id", None) + result.pop("workspace_name", None) + result.update({ + "project_id": branch_id, + "project_name": result.get("name") or name, + "root_project_id": ws_id, + "target_type": "branch", + }) + endpoint = await self.aidap_client.get_endpoint(ws_id, branch_id=branch_id, use_cache=False) + if endpoint: + result["project_url"] = endpoint + result["api_url"] = endpoint return self._to_json(result) - async def list_branches(self, workspace_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(workspace_id) + async def list_branches(self, project_id: Optional[str] = None) -> str: + ws_id, _ = await self._resolve_target(project_id) if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) + return self._to_json({"success": False, "error": "project_id is required"}) try: branches = await self.aidap_client.list_branches(ws_id) - return self._to_json({"success": True, "branches": branches}) + normalized_branches = [] + for branch in branches: + normalized_branch = dict(branch) + root_project_id = normalized_branch.pop("workspace_id", None) + normalized_branch.pop("workspace_name", None) + if normalized_branch.get("branch_id"): + normalized_branch["project_id"] = normalized_branch["branch_id"] + normalized_branch["project_name"] = normalized_branch.get("name") + normalized_branch["target_type"] = "branch" + if root_project_id: + normalized_branch["root_project_id"] = root_project_id + normalized_branches.append(normalized_branch) + return self._to_json({"success": True, "branches": normalized_branches}) except Exception as e: logger.error(f"Error listing branches: {e}") return self._to_json({"success": False, "error": str(e)}) @read_only_check - async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None) -> str: 
- ws_id = self._resolve_workspace_id(workspace_id) + async def delete_branch(self, branch_id: str, project_id: Optional[str] = None) -> str: + ws_id, _ = await self._resolve_target(project_id) if not ws_id: return self._to_json({ "success": False, - "error": "workspace_id is required", - "error_detail": self._error_detail("MissingWorkspaceId", "workspace_id is required", False), + "error": "project_id is required", + "error_detail": self._error_detail("MissingProjectId", "project_id is required", False), }) if not branch_id or not branch_id.strip(): return self._to_json({ @@ -336,10 +299,10 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None if not exists: return self._to_json({ "success": False, - "error": f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'", + "error": f"Branch '{normalized_branch_id}' not found in project '{ws_id}'", "error_detail": self._error_detail( "BranchNotFound", - f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'", + f"Branch '{normalized_branch_id}' not found in project '{ws_id}'", False ), }) @@ -396,47 +359,34 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None ), }) - async def get_workspace_endpoints(self, workspace_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) - - endpoint = await self.aidap_client.get_endpoint(ws_id) - if not endpoint: - return self._to_json({ - "success": False, - "error": f"Could not get endpoint for workspace {ws_id}" - }) - - return self._to_json({ - "success": True, - "workspace_id": ws_id, - "project_url": endpoint, - "api_url": endpoint - }) - async def get_project_url(self, project_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(project_id) + ws_id, branch_id = await self._resolve_target(project_id) if not ws_id: return self._to_json({"success": False, 
"error": "project_id is required"}) - endpoint = await self.aidap_client.get_endpoint(ws_id) + endpoint = await self.aidap_client.get_endpoint(ws_id, branch_id=branch_id) if not endpoint: return self._to_json({ "success": False, - "error": f"Could not get endpoint for project {ws_id}" + "error": f"Could not get endpoint for project {ws_id if not branch_id else branch_id}" }) - return self._to_json({ + payload = { "success": True, - "project_id": ws_id, - "workspace_id": ws_id, + "project_id": branch_id or ws_id, "project_url": endpoint, "api_url": endpoint - }) + } + if branch_id: + payload.update({ + "branch_id": branch_id, + "root_project_id": ws_id, + "target_type": "branch", + }) + return self._to_json(payload) - async def _get_api_keys_payload(self, workspace_id: str, reveal: bool = False) -> dict: - keys = await self.aidap_client.get_api_keys(workspace_id) + async def _get_api_keys_payload(self, workspace_id: str, branch_id: Optional[str] = None, reveal: bool = False) -> dict: + keys = await self.aidap_client.get_api_keys(workspace_id, branch_id=branch_id) publishable_key = None anon_key = None service_role_key = None @@ -453,36 +403,28 @@ async def _get_api_keys_payload(self, workspace_id: str, reveal: bool = False) - **key, "key": self._mask_key(value, reveal), }) - return { + payload = { "success": True, - "workspace_id": workspace_id, + "project_id": branch_id or workspace_id, "reveal": reveal, "publishable_key": self._mask_key(publishable_key, reveal), "anon_key": self._mask_key(anon_key, reveal), "service_role_key": self._mask_key(service_role_key, reveal), "keys": masked_keys } - - async def get_workspace_api_keys(self, workspace_id: Optional[str] = None, reveal: bool = False) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) - - try: - payload = await self._get_api_keys_payload(ws_id, reveal) - return self._to_json(payload) - except Exception as e: 
- logger.error(f"Error getting api keys: {e}") - return self._to_json({"success": False, "error": str(e)}) + if branch_id: + payload["branch_id"] = branch_id + payload["root_project_id"] = workspace_id + payload["target_type"] = "branch" + return payload async def get_publishable_keys(self, project_id: Optional[str] = None, reveal: bool = False) -> str: - ws_id = self._resolve_workspace_id(project_id) + ws_id, branch_id = await self._resolve_target(project_id) if not ws_id: return self._to_json({"success": False, "error": "project_id is required"}) try: - payload = await self._get_api_keys_payload(ws_id, reveal) - payload = self._with_project_alias(payload, ws_id) + payload = await self._get_api_keys_payload(ws_id, branch_id=branch_id, reveal=reveal) return self._to_json(payload) except Exception as e: logger.error(f"Error getting publishable keys: {e}") @@ -493,13 +435,13 @@ async def reset_branch( self, branch_id: str, migration_version: Optional[str] = None, - workspace_id: Optional[str] = None, + project_id: Optional[str] = None, ) -> str: - ws_id = self._resolve_workspace_id(workspace_id) + ws_id, _ = await self._resolve_target(project_id) if not ws_id: return self._to_json({ "success": False, - "error": "workspace_id is required" + "error": "project_id is required" }) try: diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py b/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py index cfc41494..e43d67e1 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py @@ -1,3 +1,14 @@ -from .decorators import handle_errors, read_only_check, format_error +from .common import compact_dict, pick_value, to_json +from .decorators import format_error, handle_errors, read_only_check +from .targets import resolve_target, select_target_id -__all__ = ['handle_errors', 'read_only_check', 'format_error'] +__all__ = [ + 'compact_dict', + 
'format_error', + 'handle_errors', + 'pick_value', + 'read_only_check', + 'resolve_target', + 'select_target_id', + 'to_json', +] diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/utils/common.py b/server/mcp_server_supabase/src/mcp_server_supabase/utils/common.py new file mode 100644 index 00000000..17700102 --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/utils/common.py @@ -0,0 +1,29 @@ +import json +from typing import Any + + +def to_json(payload: Any) -> str: + return json.dumps(payload, indent=2, ensure_ascii=False) + + +def compact_dict(payload: dict) -> dict: + return {key: value for key, value in payload.items() if value is not None} + + +def pick_value(source: Any, *field_names: str) -> Any: + source_dict = source.to_dict() if hasattr(source, "to_dict") else source if isinstance(source, dict) else {} + for field_name in field_names: + value = None + if isinstance(source, dict): + value = source.get(field_name) + else: + value = getattr(source, field_name, None) + if value is None and isinstance(source_dict, dict): + value = source_dict.get(field_name) + if isinstance(value, str): + value = value.strip() + if not value: + value = None + if value is not None: + return value + return None diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/utils/decorators.py b/server/mcp_server_supabase/src/mcp_server_supabase/utils/decorators.py index 5a8070a8..aee00dcc 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/utils/decorators.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/utils/decorators.py @@ -3,6 +3,8 @@ from functools import wraps from typing import Any, Callable +from .common import to_json + logger = logging.getLogger(__name__) @@ -23,11 +25,11 @@ async def wrapper(*args, **kwargs) -> str: result = [item.model_dump() for item in result] elif hasattr(result, 'model_dump'): result = result.model_dump() - return json.dumps(result, indent=2, ensure_ascii=False) + return to_json(result) 
except Exception as e: error_msg = format_error(e) logger.error(f"Error in {func.__name__}: {error_msg}") - return json.dumps({"error": error_msg}, ensure_ascii=False) + return to_json({"error": error_msg}) return wrapper @@ -36,6 +38,6 @@ def read_only_check(func: Callable) -> Callable: async def wrapper(*args, **kwargs) -> Any: from ..config import READ_ONLY if READ_ONLY: - return json.dumps({"error": f"Cannot execute {func.__name__} in read-only mode"}) + return to_json({"error": f"Cannot execute {func.__name__} in read-only mode"}) return await func(*args, **kwargs) return wrapper diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py b/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py new file mode 100644 index 00000000..a5f1126e --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py @@ -0,0 +1,12 @@ +from typing import Optional + + +def select_target_id(target_id: Optional[str], default_target_id: Optional[str]) -> Optional[str]: + return target_id or default_target_id + + +async def resolve_target(aidap_client, target_id: Optional[str], default_target_id: Optional[str]) -> tuple[Optional[str], Optional[str]]: + resolved_id = select_target_id(target_id, default_target_id) + if not resolved_id: + return None, None + return await aidap_client.resolve_workspace_and_branch(resolved_id) From 8c2fb078fba08863d9c64f0ae262bd0a3e38cf74 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 15:25:46 +0800 Subject: [PATCH 15/32] =?UTF-8?q?fix:=E6=B3=A8=E4=BC=98=E5=8C=96=E5=A4=87?= =?UTF-8?q?=E4=BB=BD=E5=B7=A5=E5=8D=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 254 ++++++++------- server/mcp_server_supabase/README_zh.md | 398 ++++++++++-------------- 2 files changed, 299 insertions(+), 353 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md 
index 51e80a35..68cd70cf 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -1,48 +1,113 @@ -# Supabase MCP Server +# MCP Server: Supabase +> Manage AIDAP Supabase projects, Postgres schema, Edge Functions, and Storage directly from any MCP client. -**Supabase MCP Server** 是一款基于模型上下文协议(Model Context Protocol, MCP)的服务器,实现了对 AIDAP Supabase 服务的全链路智能化管理。通过自然语言指令,用户可以对项目、数据库、Edge Functions、存储等资源进行创建、查询、修改、删除等操作,从而大幅提升 Supabase 开发与运维的效率。 +English | [简体中文](./README_zh.md) ---- +| Item | Details | +| ---- | ------- | +| Version | v0.1.0 | +| Description | An MCP server for AIDAP Supabase that exposes project, database, Edge Functions, and Storage operations to AI assistants. | +| Category | Database | +| Tags | Supabase, PostgreSQL, AIDAP, Edge Functions, Storage | +| Docs | [Volcengine AIDAP Supabase](https://www.volcengine.com/docs/87275/2105900) | -## 项目概览 -| 项目 | 详情 | -| ---- | ---- | -| **版本** | v1.0.0 | -| **描述** | 基于 MCP 管理 AIDAP Supabase 资源,支持智能化数据库与应用开发 | -| **分类** | 数据库与应用开发 | -| **标签** | Supabase, PostgreSQL, Edge Functions, BaaS | +## Core Capabilities ---- +### 1. Project and Branch Management +- `list_projects` + List all available AIDAP Supabase projects. +- `get_project` + Get details for a specific project. You can pass either a workspace ID or a branch ID. +- `create_project` + Create a new Supabase project in AIDAP. +- `pause_project` + Pause a project. +- `restore_project` + Resume a paused project. +- `get_project_url` + Get the project API endpoint resolved from the current workspace or branch. +- `get_publishable_keys` + Get publishable and service role keys for a project. +- `list_branches` + List development branches under a project. +- `create_branch` + Create a new development branch. +- `delete_branch` + Delete a development branch. +- `reset_branch` + Reset a branch to the latest state supported by AIDAP. 
-## 关键特性 -- **自动默认分支解析**:`branch_id` 参数可选,系统会自动使用项目的默认分支。 -- **完整工具集合**:提供高阶工具,覆盖数据库、Edge Functions、存储、项目与分支等核心能力。 -- **安全与审计**:只读模式、凭证管理、细粒度日志查询与安全建议。 -- **跨语言支持**:兼容 Python、Node.js、Go 等多语言客户端。 +### 2. Database Development +- `execute_sql` + Execute raw SQL against the target Postgres database. +- `list_tables` + List tables from one or more schemas. +- `list_migrations` + List migration history stored in `supabase_migrations.schema_migrations`. +- `list_extensions` + List installed PostgreSQL extensions. +- `apply_migration` + Execute SQL and record the migration metadata. +- `generate_typescript_types` + Generate TypeScript definitions from database schemas. ---- +### 3. Edge Functions +- `list_edge_functions` + List all deployed Edge Functions. +- `get_edge_function` + Get function source code and metadata. +- `deploy_edge_function` + Deploy or update a function with Node.js or Python runtime. +- `delete_edge_function` + Delete a function by name. -## 快速开始 -### 系统依赖 +### 4. Storage +- `list_storage_buckets` + List storage buckets in the target project. +- `create_storage_bucket` + Create a storage bucket with optional public access, size limit, and MIME type restrictions. +- `delete_storage_bucket` + Delete a storage bucket. +- `get_storage_config` + Fetch storage service configuration from the workspace endpoint. +- `update_storage_config` + Update storage configuration when the current AIDAP endpoint supports it. + +## Compatibility Notes + +- The official Supabase MCP server is built around the Supabase Management API. AIDAP does not provide the same Management API, so this server maps compatible operations onto AIDAP workspace APIs and Supabase workspace endpoints. +- In AIDAP, `workspace` is the equivalent of a Supabase `project`. Tool parameters keep the name `project_id` for MCP compatibility. +- For most project-scoped tools, `project_id` accepts either a workspace ID or a branch ID. 
If a branch ID such as `br-xxx` is passed, the server resolves the parent workspace automatically. +- When `project_id` is omitted, the server uses `DEFAULT_PROJECT_ID` or `DEFAULT_WORKSPACE_ID` if configured. +- `reset_branch` accepts `migration_version` for compatibility, but the current AIDAP API ignores that field. +- `update_storage_config` may return `supported: false` if the current AIDAP workspace endpoint does not expose that capability. + +## Integration Guide + +### 1. Requirements - Python 3.10+ -- 推荐使用 `uv` 包管理器 +- [uv](https://github.com/astral-sh/uv) -### 安装 `uv` -```bash -curl -LsSf https://astral.sh/uv/install.sh | sh -``` +### 2. Credentials +Get `VOLCENGINE_ACCESS_KEY` and `VOLCENGINE_SECRET_KEY` from the [Volcengine Access Key Console](https://console.volcengine.com/iam/keymanage/). -### 本地开发(推荐) -在项目根目录执行: -```bash -uv sync -source .venv/bin/activate -mv .env_example .env # 填写环境变量 -``` +### 3. Environment Variables + +| Variable | Required | Description | +| -------- | -------- | ----------- | +| `VOLCENGINE_ACCESS_KEY` | Yes | Volcengine access key ID | +| `VOLCENGINE_SECRET_KEY` | Yes | Volcengine secret access key | +| `VOLCENGINE_REGION` | No | Region code, default `cn-beijing` | +| `DEFAULT_PROJECT_ID` | No | Default project ID used when `project_id` is omitted | +| `DEFAULT_WORKSPACE_ID` | No | Same purpose as `DEFAULT_PROJECT_ID` | +| `READ_ONLY` | No | Set to `true` to block write operations | +| `SUPABASE_ENDPOINT_SCHEME` | No | Endpoint scheme for workspace API URLs, default `http` | +| `SUPABASE_PROJECT_SLUG` | No | Edge Functions project slug, default `default` | + +## Quick Deployment + +### Method 1: Run with `uvx` -### 运行方式 -#### 方式一:使用 `uvx`(推荐) -在 MCP 客户端配置文件中添加: ```json { "mcpServers": { @@ -50,50 +115,62 @@ mv .env_example .env # 填写环境变量 "command": "uvx", "args": [ "--from", - "git+https://github.com/volcengine/mcp-server#subdirectory=server/mcp_server_supabase", + 
"git+https://github.com/volcengine/mcp-server.git#subdirectory=server/mcp_server_supabase", "mcp-server-supabase" ], "env": { - "VOLCENGINE_ACCESS_KEY": "your-access-key", - "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", + "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", "VOLCENGINE_REGION": "cn-beijing" } } } } ``` -#### 方式二:本地直接运行 + +### Method 2: Run from local source with `uv` + +```bash +cd /absolute/path/to/mcp-server/server/mcp_server_supabase +uv sync +``` + ```json { "mcpServers": { - "supabase-dev": { + "supabase": { "command": "uv", "args": [ "--directory", - "/ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase", + "/absolute/path/to/mcp-server/server/mcp_server_supabase", "run", "mcp-server-supabase" ], "env": { - "VOLCENGINE_ACCESS_KEY": "your-access-key", - "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", + "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", "VOLCENGINE_REGION": "cn-beijing", - "READ_ONLY": "true" + "DEFAULT_PROJECT_ID": "ws-xxxxxxxx" } } } } ``` -#### 方式三:Python 直接执行 + +### Method 3: Run with `python3` + ```json { "mcpServers": { "supabase": { - "command": "python", - "args": ["-m", "mcp_server_supabase.server"], + "command": "python3", + "args": [ + "-m", + "mcp_server_supabase.server" + ], "env": { - "VOLCENGINE_ACCESS_KEY": "your-access-key", - "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", + "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", "VOLCENGINE_REGION": "cn-beijing" } } @@ -101,78 +178,21 @@ mv .env_example .env # 填写环境变量 } ``` ---- - -## 配置说明 -主要配置文件位于 `server/mcp_server_supabase/src/mcp_server_supabase/config/config.yaml`,常用字段: -- `transport`:`sse`、`StreamableHTTP`、`stdio`(默认 `sse`) -- `auth`:`oauth`、`none` -- `credential`:`env`(从环境变量读取 AK/SK)或 `token` -- `credential.env`:`VOLCENGINE_ACCESS_KEY`、`VOLCENGINE_SECRET_KEY`、`VOLCENGINE_REGION` - ---- - -## 核心工具一览 -> **注**:以下为常用工具示例,完整列表请参见文档章节 
"Tools"。 - -### 数据库操作(8) -- `list_tables` -- `execute_sql` -- `list_extensions` -- `list_migrations` -- `apply_migration` -- `list_databases` -- `create_database` -- `drop_database` - -### Edge Functions(5) -- `list_edge_functions` -- `get_edge_function` -- `deploy_edge_function` -- `delete_edge_function` -- `get_edge_function_logs` - -### 存储管理(8) -- `list_storage_buckets` -- `create_storage_bucket` -- `delete_storage_bucket` -- `list_storage_objects` -- `delete_storage_object` -- `get_storage_object_info` -- `get_storage_config` -- `update_storage_config` - -### 项目管理(11) -- `list_projects` -- `get_project` -- `create_project` -- `pause_project` -- `restore_project` -- `get_project_url` -- `get_publishable_keys` -- `list_branches` -- `create_branch` -- `delete_branch` -- `reset_branch` - ---- - -## 常用 Prompt 示例 -- **数据库**:`"列出我的数据库表"`、`"查询 users 表的所有数据"` -- **Edge Functions**:`"列出所有 Edge Functions"`、`"部署一个新的 Edge Function"` -- **存储**:`"列出所有存储桶"`、`"创建一个公开存储桶"` -- **项目**:`"列出我的所有项目"`、`"创建一个新的项目"` +## Prompt Examples ---- +- `List all my Supabase projects` +- `Show all branches for project ws-xxxx` +- `Execute SQL: select * from public.users limit 10` +- `Generate TypeScript types for schemas public,auth` +- `Deploy an Edge Function named webhook-handler` +- `List all storage buckets in project ws-xxxx` -## 文档与资源 -- [火山引擎 AIDAP Supabase 官方文档](https://www.volcengine.com/docs/87275/2105900) -- [Model Context Protocol 介绍](https://modelcontextprotocol.io/introduction) -- [Supabase 官方文档](https://supabase.com/docs) +## Notes ---- +- Most MCP desktop clients use `stdio`, so the JSON examples above are the recommended setup. +- Write tools are disabled when `READ_ONLY=true`. +- The server uses the default branch automatically when the target endpoint or key needs a branch and none is provided explicitly. 
## License -本项目遵循 MIT 许可证: -[MIT License](https://github.com/volcengine/mcp-server/blob/main/LICENSE) +volcengine/mcp-server is licensed under the [MIT License](https://github.com/volcengine/mcp-server/blob/main/LICENSE). diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index 7246d455..96dbcffb 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -1,68 +1,176 @@ -# Supabase MCP Server (Python 版本) - -> 将 Supabase/AIDAP 项目连接到 Cursor、Claude、Windsurf 等 AI 助手 +# MCP Server: Supabase +> 在 MCP 客户端中直接管理 AIDAP Supabase 项目、Postgres 数据库、Edge Functions 和 Storage。 [English](./README.md) | 简体中文 -## 功能特性 - -通过 [Model Context Protocol](https://modelcontextprotocol.io/introduction) (MCP),AI 助手可以直接与你的 Supabase/AIDAP 项目交互。 - -### 支持的功能 +| 项目 | 详情 | +| ---- | ---- | +| 版本 | v0.1.0 | +| 描述 | 面向 AIDAP Supabase 的 MCP Server,为 AI 助手提供项目、数据库、Edge Functions 和存储管理能力。 | +| 分类 | 数据库 | +| 标签 | Supabase, PostgreSQL, AIDAP, Edge Functions, Storage | +| 文档 | [火山引擎 AIDAP Supabase](https://www.volcengine.com/docs/87275/2105900) | + +## 核心能力 + +### 1. 项目与分支管理 +- `list_projects` + 列出当前账号下可用的 AIDAP Supabase 项目。 +- `get_project` + 获取指定项目详情,支持传入工作空间 ID 或分支 ID。 +- `create_project` + 在 AIDAP 中创建新的 Supabase 项目。 +- `pause_project` + 暂停项目。 +- `restore_project` + 恢复已暂停项目。 +- `get_project_url` + 获取当前工作空间或分支对应的项目访问地址。 +- `get_publishable_keys` + 获取项目的 publishable key 和 service role key。 +- `list_branches` + 列出项目下的开发分支。 +- `create_branch` + 创建开发分支。 +- `delete_branch` + 删除开发分支。 +- `reset_branch` + 将分支重置到 AIDAP 当前支持的最新状态。 + +### 2. 数据库开发 +- `execute_sql` + 直接执行 Postgres SQL。 +- `list_tables` + 按 schema 列出数据表。 +- `list_migrations` + 查看 `supabase_migrations.schema_migrations` 中记录的迁移历史。 +- `list_extensions` + 列出已安装的 PostgreSQL 扩展。 +- `apply_migration` + 执行 SQL 并写入迁移记录。 +- `generate_typescript_types` + 基于数据库 schema 生成 TypeScript 类型定义。 + +### 3. 
Edge Functions +- `list_edge_functions` + 列出当前项目中的 Edge Functions。 +- `get_edge_function` + 获取函数源码和元数据。 +- `deploy_edge_function` + 使用 Node.js 或 Python 运行时部署或更新函数。 +- `delete_edge_function` + 按名称删除函数。 + +### 4. Storage +- `list_storage_buckets` + 列出项目中的存储桶。 +- `create_storage_bucket` + 创建存储桶,支持配置公开访问、文件大小限制和 MIME 类型限制。 +- `delete_storage_bucket` + 删除存储桶。 +- `get_storage_config` + 获取当前工作空间端点暴露的存储配置。 +- `update_storage_config` + 在当前端点支持时更新存储配置。 + +## 兼容性说明 + +- 官方 Supabase MCP Server 主要基于 Supabase Management API 实现。AIDAP 没有同等的 Management API,所以这里是通过 AIDAP 的 workspace API 和 Supabase 工作空间端点来实现兼容能力。 +- 在 AIDAP 里,`workspace` 对应 Supabase 的 `project`。为了兼容 MCP 侧常见命名,工具参数仍然使用 `project_id`。 +- 大多数项目级工具里的 `project_id` 同时支持传入工作空间 ID 或分支 ID。传入 `br-xxx` 这类分支 ID 时,服务端会自动解析其所属 workspace。 +- 未显式传入 `project_id` 时,服务端会优先使用 `DEFAULT_PROJECT_ID` 或 `DEFAULT_WORKSPACE_ID`。 +- `reset_branch` 为兼容接口保留了 `migration_version` 参数,但当前 AIDAP API 会忽略它。 +- `update_storage_config` 在部分 AIDAP 工作空间端点上可能返回 `supported: false`,表示当前端点暂不支持该能力。 + +## 接入指南 + +### 1. 环境依赖 +- Python 3.10+ +- [uv](https://github.com/astral-sh/uv) + +### 2. 获取凭证 +从[火山引擎访问密钥控制台](https://console.volcengine.com/iam/keymanage/)获取 `VOLCENGINE_ACCESS_KEY` 和 `VOLCENGINE_SECRET_KEY`。 + +### 3. 
环境变量 -- ✅ **项目管理** - 列出、创建、暂停/恢复项目,管理分支与访问入口 -- ✅ **数据库管理** - 列出表、执行 SQL、应用迁移、管理数据库和账户 ✨ 增强 -- ✅ **Edge Functions** - 部署、获取代码和管理 Edge Functions ✨ 增强 -- ✅ **调试工具** - 获取多服务日志和性能/安全建议 ✨ 新增 -- ✅ **开发工具** - 生成 TypeScript 类型、获取 API URL 和密钥 ✨ 新增 -- ✅ **存储管理** - 管理存储桶和对象 -- ✅ **分支管理** - 创建、删除、重置、恢复分支 -- ✅ **AIDAP 集成** - 火山引擎工作空间管理(workspace = project) - -## 快速开始 - -### 前置要求 +| 变量名 | 必需 | 说明 | +| ------ | ---- | ---- | +| `VOLCENGINE_ACCESS_KEY` | 是 | 火山引擎 Access Key ID | +| `VOLCENGINE_SECRET_KEY` | 是 | 火山引擎 Secret Access Key | +| `VOLCENGINE_REGION` | 否 | 区域,默认 `cn-beijing` | +| `DEFAULT_PROJECT_ID` | 否 | 未传 `project_id` 时使用的默认项目 ID | +| `DEFAULT_WORKSPACE_ID` | 否 | 与 `DEFAULT_PROJECT_ID` 作用相同 | +| `READ_ONLY` | 否 | 设置为 `true` 后禁止写操作 | +| `SUPABASE_ENDPOINT_SCHEME` | 否 | 工作空间端点协议,默认 `http` | +| `SUPABASE_PROJECT_SLUG` | 否 | Edge Functions 使用的项目 slug,默认 `default` | -⚠️ **重要**: 此 MCP server 依赖 volcengine-python-sdk(包含 AIDAP 模块),需要先安装: +## 快速部署 -```bash -# 克隆 SDK 仓库 -git clone https://github.com/sjcsjcsjc/volcengine-python-sdk.git +### 方式一:使用 `uvx` -# 安装 SDK -cd volcengine-python-sdk -pip install -e . +```json +{ + "mcpServers": { + "supabase": { + "command": "uvx", + "args": [ + "--from", + "git+https://github.com/volcengine/mcp-server.git#subdirectory=server/mcp_server_supabase", + "mcp-server-supabase" + ], + "env": { + "VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", + "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", + "VOLCENGINE_REGION": "cn-beijing" + } + } + } +} ``` -### 本地安装运行 - -安装完 volcengine-python-sdk 后: +### 方式二:本地源码运行 ```bash -# 安装 mcp-server-supabase -cd /path/to/mcp-server/server/mcp_server_supabase -pip install -e . 
- -# 运行服务器 -python -m mcp_server_supabase.server +cd /absolute/path/to/mcp-server/server/mcp_server_supabase +uv sync ``` -### MCP 客户端配置 +```json +{ + "mcpServers": { + "supabase": { + "command": "uv", + "args": [ + "--directory", + "/absolute/path/to/mcp-server/server/mcp_server_supabase", + "run", + "mcp-server-supabase" + ], + "env": { + "VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", + "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", + "VOLCENGINE_REGION": "cn-beijing", + "DEFAULT_PROJECT_ID": "ws-xxxxxxxx" + } + } + } +} +``` -在 MCP 客户端(如 Claude Desktop、Cursor、Windsurf)中配置: +### 方式三:使用 `python3` ```json { "mcpServers": { "supabase": { - "command": "python", + "command": "python3", "args": [ "-m", "mcp_server_supabase.server" ], "env": { - "VOLCENGINE_ACCESS_KEY": "your-access-key", - "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", + "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", "VOLCENGINE_REGION": "cn-beijing" } } @@ -70,203 +178,21 @@ python -m mcp_server_supabase.server } ``` +## Prompt 示例 -## 环境变量配置 - -| 变量名 | 必需 | 说明 | -|--------|------|------| -| `VOLCENGINE_ACCESS_KEY` | ✅ | 火山引擎访问密钥 | -| `VOLCENGINE_SECRET_KEY` | ✅ | 火山引擎私密密钥 | -| `VOLCENGINE_REGION` | ⭕ | 区域(默认:cn-beijing) | -| `READ_ONLY` | ⭕ | 只读模式(设置为 "true" 启用) | - - -## 🎯 自动默认分支解析 - -**新功能!** 现在大部分工具的 `branch_id` 参数都是可选的。如果不提供 `branch_id`,系统会自动使用项目的默认分支。 - -### 工作原理 - -1. **自动获取**:首次调用时,系统自动查询项目的默认分支 -2. **智能缓存**:默认分支 ID 会被缓存,避免重复 API 调用 -3. 
**自动刷新**:当设置新的默认分支时,缓存会自动清除 - -### 使用示例 - -```python -# 之前:必须提供 branch_id -execute_sql(workspace_id="xxx", branch_id="br-xxx", query="SELECT * FROM users") - -# 现在:branch_id 可选,自动使用默认分支 -execute_sql(workspace_id="xxx", query="SELECT * FROM users") -``` - -### 缓存管理 - -如果需要手动清除缓存(例如更改了默认分支): - -```python -# 清除特定工作空间的缓存 -clear_default_branch_cache(workspace_id="xxx") - -# 清除所有缓存 -clear_default_branch_cache() -``` - - -## 可用工具(53 个) - -### 数据库操作(8 个) -- `list_tables` - 列出数据库表 -- `execute_sql` - 执行 SQL 查询 -- `list_extensions` - 列出数据库扩展 -- `list_migrations` - 列出迁移历史 ✨ 新增 -- `apply_migration` - 应用数据库迁移并记录到 schema_migrations ✨ 新增 -- `list_databases` - 列出所有数据库 -- `create_database` - 创建新数据库 -- `drop_database` - 删除数据库 - -### Edge Functions(5 个) -- `list_edge_functions` - 列出 Edge Functions -- `get_edge_function` - 获取 Edge Function 源代码 ✨ 新增 -- `deploy_edge_function` - 部署或更新 Edge Function ✨ 新增 -- `delete_edge_function` - 删除 Edge Function ✨ 新增 -- `get_edge_function_logs` - 获取函数日志 - -### 调试工具(2 个) -- `get_logs` - 获取服务日志 (postgres/api/auth/storage/realtime/functions) ✨ 新增 -- `get_advisors` - 获取性能和安全建议 (检查缺失索引、未使用索引、缺失主键等) ✨ 新增 - -### 开发工具(3 个) -- `generate_typescript_types` - 根据数据库 schema 生成 TypeScript 类型定义 ✨ 新增 -- `get_project_url` - 获取项目 API URL -- `get_publishable_keys` - 获取可发布的 API 密钥 - -### 存储管理(8 个) -- `list_storage_buckets` - 列出存储桶 -- `create_storage_bucket` - 创建存储桶 -- `delete_storage_bucket` - 删除存储桶 -- `list_storage_objects` - 列出存储对象 -- `delete_storage_object` - 删除存储对象 -- `get_storage_object_info` - 获取对象元数据 -- `get_storage_config` - 获取存储配置 ✨ 新增 -- `update_storage_config` - 更新存储配置(需要付费计划) ✨ 新增 - -### 项目管理(11 个) -- `list_projects` - 列出所有项目 -- `get_project` - 获取项目详情 -- `create_project` - 创建新项目 -- `pause_project` - 暂停项目 -- `restore_project` - 恢复项目 -- `get_project_url` - 获取项目端点 -- `get_publishable_keys` - 获取项目 API 密钥 -- `list_branches` - 列出项目分支 -- `create_branch` - 创建项目分支 -- `delete_branch` - 删除项目分支 -- `reset_branch` - 重置项目分支 - -### 数据库账户管理(4 个) -- `list_db_accounts` - 列出数据库账户 
-- `create_db_account` - 创建数据库账户 -- `delete_db_account` - 删除数据库账户 -- `reset_db_account_password` - 重置账户密码 - -### 分支管理(10 个) -- `list_branches` - 列出所有分支 -- `get_branch_detail` - 获取分支详情 -- `create_branch` - 创建新分支 -- `delete_branch` - 删除分支 -- `reset_branch` - 重置分支(对齐官方:重置开发分支迁移;当前 AIDAP SDK 不支持 `migration_version` 参数) -- `restart_branch` - 重启分支 -- `restore_branch` - 恢复分支 -- `set_default_branch` - 设置默认分支 -- `list_restorable_branches` - 列出可恢复的分支 -- `clear_default_branch_cache` - 清除默认分支缓存 ✨ 新增 - -## 使用示例 - -配置完成后,在 AI 助手中可以这样使用: - -``` -"帮我查看数据库中的所有表" -"执行 SQL: SELECT * FROM users LIMIT 10" -"生成数据库的 TypeScript 类型定义" -"部署一个新的 Edge Function" -"查看最近的 API 日志" -"列出所有组织和项目" -"列出所有 AIDAP 工作空间" -"获取工作空间的 API 密钥" -``` - -## 安全建议 - -⚠️ 连接 LLM 到数据源存在固有风险,请遵循以下最佳实践: - -1. **不要连接生产环境** - 使用开发项目,避免暴露真实数据 -2. **启用只读模式** - 设置 `READ_ONLY=true` 限制写操作 -3. **项目范围限制** - 设置 `SUPABASE_PROJECT_REF` 限制访问范围 -4. **审查工具调用** - 始终在 MCP 客户端中审查并批准工具调用 - -## 本地开发 - -```bash -# 克隆仓库 -git clone https://github.com/volcengine/mcp-server.git -cd mcp-server/server/mcp_server_supabase - -# 安装依赖 -uv pip install -e ".[dev]" - -# 运行测试 -pytest - -# 代码格式化 -black src/ -ruff check src/ -``` - -## 项目结构 - -``` -mcp_server_supabase/ -├── pyproject.toml # 项目配置 -├── README.md # 英文文档 -├── README_zh.md # 中文文档 -└── src/ - └── mcp_server_supabase/ - ├── __init__.py - └── server.py # 主入口(FastMCP 实现) -``` - -## 常见问题 - -### Q: 如何获取 Supabase Access Token? - -A: 访问 [Supabase Dashboard](https://supabase.com/dashboard/account/tokens) 生成个人访问令牌。 - -### Q: 如何获取 AIDAP 密钥? - -A: 登录火山引擎控制台,在 [访问控制](https://console.volcengine.com/iam/keymanage/) 页面创建 Access Key。 - -### Q: AIDAP 中的 workspace 和 Supabase 的 project 有什么区别? - -A: 在 AIDAP 中,workspace 就是 Supabase 的 project。两者是等价的概念,只是名称不同。 - -### Q: 只读模式有什么限制? - -A: 只读模式下,只能执行 SELECT、WITH、EXPLAIN 查询,无法执行 INSERT、UPDATE、DELETE、CREATE 等写操作。 - -### Q: 如何更新到最新版本? 
- -A: 使用 `uvx` 会自动使用最新版本,无需手动更新。 +- `列出我所有的 Supabase 项目` +- `查看项目 ws-xxxx 下的所有分支` +- `执行 SQL: select * from public.users limit 10` +- `为 public,auth schema 生成 TypeScript 类型` +- `部署一个名为 webhook-handler 的 Edge Function` +- `列出项目 ws-xxxx 下的所有存储桶` -## 相关资源 +## 补充说明 -- [Model Context Protocol 文档](https://modelcontextprotocol.io/introduction) -- [Supabase 文档](https://supabase.com/docs) -- [AIDAP 文档](https://www.volcengine.com/docs/6431/1181698) -- [火山引擎 MCP Server 仓库](https://github.com/volcengine/mcp-server) +- 大多数 MCP 桌面客户端默认使用 `stdio`,上面的配置方式最通用。 +- 设置 `READ_ONLY=true` 后,所有写入类工具都会被拦截。 +- 当端点解析或 API Key 获取依赖分支,而你又没有显式指定分支时,服务端会自动使用默认分支。 -## 许可证 +## License -Apache 2.0 - 详见 [LICENSE](../../LICENSE) 文件 +volcengine/mcp-server 采用 [MIT 许可证](https://github.com/volcengine/mcp-server/blob/main/LICENSE) 授权。 From 69c818224fefd2ae3a7d682322da8d159bc90401 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 15:28:12 +0800 Subject: [PATCH 16/32] =?UTF-8?q?fix:=E6=B3=A8=E4=BC=98=E5=8C=96=E5=A4=87?= =?UTF-8?q?=E4=BB=BD=E5=B7=A5=E5=8D=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 235 +++++----------- server/mcp_server_supabase/README_zh.md | 233 +++++----------- .../src/mcp_server_supabase/runtime.py | 14 +- .../src/mcp_server_supabase/server.py | 9 +- .../src/mcp_server_supabase/tool_registry.py | 144 +++++----- .../tools/edge_function_tools.py | 10 +- .../tools/workspace_tools.py | 257 +++++++----------- 7 files changed, 332 insertions(+), 570 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index 68cd70cf..d6807de8 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -1,198 +1,105 @@ -# MCP Server: Supabase -> Manage AIDAP Supabase projects, Postgres schema, Edge Functions, and Storage directly from any MCP client. 
- -English | [简体中文](./README_zh.md) - -| Item | Details | -| ---- | ------- | -| Version | v0.1.0 | -| Description | An MCP server for AIDAP Supabase that exposes project, database, Edge Functions, and Storage operations to AI assistants. | -| Category | Database | -| Tags | Supabase, PostgreSQL, AIDAP, Edge Functions, Storage | -| Docs | [Volcengine AIDAP Supabase](https://www.volcengine.com/docs/87275/2105900) | - -## Core Capabilities - -### 1. Project and Branch Management -- `list_projects` - List all available AIDAP Supabase projects. -- `get_project` - Get details for a specific project. You can pass either a workspace ID or a branch ID. -- `create_project` - Create a new Supabase project in AIDAP. -- `pause_project` - Pause a project. -- `restore_project` - Resume a paused project. -- `get_project_url` - Get the project API endpoint resolved from the current workspace or branch. -- `get_publishable_keys` - Get publishable and service role keys for a project. -- `list_branches` - List development branches under a project. -- `create_branch` - Create a new development branch. -- `delete_branch` - Delete a development branch. -- `reset_branch` - Reset a branch to the latest state supported by AIDAP. - -### 2. Database Development -- `execute_sql` - Execute raw SQL against the target Postgres database. -- `list_tables` - List tables from one or more schemas. -- `list_migrations` - List migration history stored in `supabase_migrations.schema_migrations`. -- `list_extensions` - List installed PostgreSQL extensions. -- `apply_migration` - Execute SQL and record the migration metadata. -- `generate_typescript_types` - Generate TypeScript definitions from database schemas. - -### 3. Edge Functions -- `list_edge_functions` - List all deployed Edge Functions. -- `get_edge_function` - Get function source code and metadata. -- `deploy_edge_function` - Deploy or update a function with Node.js or Python runtime. -- `delete_edge_function` - Delete a function by name. 
- -### 4. Storage -- `list_storage_buckets` - List storage buckets in the target project. -- `create_storage_bucket` - Create a storage bucket with optional public access, size limit, and MIME type restrictions. -- `delete_storage_bucket` - Delete a storage bucket. -- `get_storage_config` - Fetch storage service configuration from the workspace endpoint. -- `update_storage_config` - Update storage configuration when the current AIDAP endpoint supports it. +# Supabase MCP Server -## Compatibility Notes +Supabase MCP server for AIDAP workspaces. -- The official Supabase MCP server is built around the Supabase Management API. AIDAP does not provide the same Management API, so this server maps compatible operations onto AIDAP workspace APIs and Supabase workspace endpoints. -- In AIDAP, `workspace` is the equivalent of a Supabase `project`. Tool parameters keep the name `project_id` for MCP compatibility. -- For most project-scoped tools, `project_id` accepts either a workspace ID or a branch ID. If a branch ID such as `br-xxx` is passed, the server resolves the parent workspace automatically. -- When `project_id` is omitted, the server uses `DEFAULT_PROJECT_ID` or `DEFAULT_WORKSPACE_ID` if configured. -- `reset_branch` accepts `migration_version` for compatibility, but the current AIDAP API ignores that field. -- `update_storage_config` may return `supported: false` if the current AIDAP workspace endpoint does not expose that capability. +## Overview -## Integration Guide +This server exposes Supabase capabilities through MCP and uses AIDAP workspaces as the primary resource model. -### 1. Requirements -- Python 3.10+ -- [uv](https://github.com/astral-sh/uv) +Supported areas: -### 2. Credentials -Get `VOLCENGINE_ACCESS_KEY` and `VOLCENGINE_SECRET_KEY` from the [Volcengine Access Key Console](https://console.volcengine.com/iam/keymanage/). +- workspace lifecycle +- branch lifecycle +- database access +- Edge Functions +- storage +- TypeScript type generation -### 3. 
Environment Variables +## Environment Variables -| Variable | Required | Description | -| -------- | -------- | ----------- | -| `VOLCENGINE_ACCESS_KEY` | Yes | Volcengine access key ID | -| `VOLCENGINE_SECRET_KEY` | Yes | Volcengine secret access key | -| `VOLCENGINE_REGION` | No | Region code, default `cn-beijing` | -| `DEFAULT_PROJECT_ID` | No | Default project ID used when `project_id` is omitted | -| `DEFAULT_WORKSPACE_ID` | No | Same purpose as `DEFAULT_PROJECT_ID` | +| Name | Required | Description | +| --- | --- | --- | +| `VOLCENGINE_ACCESS_KEY` | Yes | Volcengine access key | +| `VOLCENGINE_SECRET_KEY` | Yes | Volcengine secret key | +| `VOLCENGINE_REGION` | No | Region, default `cn-beijing` | | `READ_ONLY` | No | Set to `true` to block write operations | -| `SUPABASE_ENDPOINT_SCHEME` | No | Endpoint scheme for workspace API URLs, default `http` | -| `SUPABASE_PROJECT_SLUG` | No | Edge Functions project slug, default `default` | - -## Quick Deployment - -### Method 1: Run with `uvx` - -```json -{ - "mcpServers": { - "supabase": { - "command": "uvx", - "args": [ - "--from", - "git+https://github.com/volcengine/mcp-server.git#subdirectory=server/mcp_server_supabase", - "mcp-server-supabase" - ], - "env": { - "VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", - "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", - "VOLCENGINE_REGION": "cn-beijing" - } - } - } -} -``` +| `DEFAULT_WORKSPACE_ID` | No | Default workspace used when `workspace_id` is omitted | +| `SUPABASE_WORKSPACE_SLUG` | No | Edge Functions slug, default `default` | +| `SUPABASE_ENDPOINT_SCHEME` | No | `http` or `https`, default `http` | -### Method 2: Run from local source with `uv` +## Run ```bash -cd /absolute/path/to/mcp-server/server/mcp_server_supabase -uv sync -``` - -```json -{ - "mcpServers": { - "supabase": { - "command": "uv", - "args": [ - "--directory", - "/absolute/path/to/mcp-server/server/mcp_server_supabase", - "run", - "mcp-server-supabase" - ], - "env": { - "VOLCENGINE_ACCESS_KEY": 
"your_volcengine_ak", - "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", - "VOLCENGINE_REGION": "cn-beijing", - "DEFAULT_PROJECT_ID": "ws-xxxxxxxx" - } - } - } -} +python -m mcp_server_supabase.server ``` -### Method 3: Run with `python3` +## MCP Client Example ```json { "mcpServers": { "supabase": { - "command": "python3", + "command": "python", "args": [ "-m", "mcp_server_supabase.server" ], "env": { - "VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", - "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", - "VOLCENGINE_REGION": "cn-beijing" + "VOLCENGINE_ACCESS_KEY": "your-access-key", + "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_REGION": "cn-beijing", + "DEFAULT_WORKSPACE_ID": "ws-xxxxxxxx" } } } } ``` -## Prompt Examples +## Tools + +### Workspace + +- `list_workspaces` +- `get_workspace` +- `create_workspace` +- `pause_workspace` +- `restore_workspace` +- `get_workspace_url` +- `get_publishable_keys` + +### Branch + +- `list_branches` +- `create_branch` +- `delete_branch` +- `reset_branch` + +### Database + +- `execute_sql` +- `list_tables` +- `list_migrations` +- `list_extensions` +- `apply_migration` +- `generate_typescript_types` + +### Edge Functions -- `List all my Supabase projects` -- `Show all branches for project ws-xxxx` -- `Execute SQL: select * from public.users limit 10` -- `Generate TypeScript types for schemas public,auth` -- `Deploy an Edge Function named webhook-handler` -- `List all storage buckets in project ws-xxxx` +- `list_edge_functions` +- `get_edge_function` +- `deploy_edge_function` +- `delete_edge_function` -## Notes +### Storage -- Most MCP desktop clients use `stdio`, so the JSON examples above are the recommended setup. -- Write tools are disabled when `READ_ONLY=true`. -- The server uses the default branch automatically when the target endpoint or key needs a branch and none is provided explicitly. 
+- `list_storage_buckets` +- `create_storage_bucket` +- `delete_storage_bucket` +- `get_storage_config` +- `update_storage_config` -## License +## Usage Notes -volcengine/mcp-server is licensed under the [MIT License](https://github.com/volcengine/mcp-server/blob/main/LICENSE). +- Any `workspace_id` parameter can also accept a branch ID. +- When `workspace_id` is omitted, the server uses `DEFAULT_WORKSPACE_ID` if configured. +- `get_publishable_keys` resolves the default branch automatically when needed. diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index 96dbcffb..051b844b 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -1,198 +1,105 @@ -# MCP Server: Supabase -> 在 MCP 客户端中直接管理 AIDAP Supabase 项目、Postgres 数据库、Edge Functions 和 Storage。 - -[English](./README.md) | 简体中文 - -| 项目 | 详情 | -| ---- | ---- | -| 版本 | v0.1.0 | -| 描述 | 面向 AIDAP Supabase 的 MCP Server,为 AI 助手提供项目、数据库、Edge Functions 和存储管理能力。 | -| 分类 | 数据库 | -| 标签 | Supabase, PostgreSQL, AIDAP, Edge Functions, Storage | -| 文档 | [火山引擎 AIDAP Supabase](https://www.volcengine.com/docs/87275/2105900) | - -## 核心能力 - -### 1. 项目与分支管理 -- `list_projects` - 列出当前账号下可用的 AIDAP Supabase 项目。 -- `get_project` - 获取指定项目详情,支持传入工作空间 ID 或分支 ID。 -- `create_project` - 在 AIDAP 中创建新的 Supabase 项目。 -- `pause_project` - 暂停项目。 -- `restore_project` - 恢复已暂停项目。 -- `get_project_url` - 获取当前工作空间或分支对应的项目访问地址。 -- `get_publishable_keys` - 获取项目的 publishable key 和 service role key。 -- `list_branches` - 列出项目下的开发分支。 -- `create_branch` - 创建开发分支。 -- `delete_branch` - 删除开发分支。 -- `reset_branch` - 将分支重置到 AIDAP 当前支持的最新状态。 - -### 2. 数据库开发 -- `execute_sql` - 直接执行 Postgres SQL。 -- `list_tables` - 按 schema 列出数据表。 -- `list_migrations` - 查看 `supabase_migrations.schema_migrations` 中记录的迁移历史。 -- `list_extensions` - 列出已安装的 PostgreSQL 扩展。 -- `apply_migration` - 执行 SQL 并写入迁移记录。 -- `generate_typescript_types` - 基于数据库 schema 生成 TypeScript 类型定义。 - -### 3. 
Edge Functions -- `list_edge_functions` - 列出当前项目中的 Edge Functions。 -- `get_edge_function` - 获取函数源码和元数据。 -- `deploy_edge_function` - 使用 Node.js 或 Python 运行时部署或更新函数。 -- `delete_edge_function` - 按名称删除函数。 - -### 4. Storage -- `list_storage_buckets` - 列出项目中的存储桶。 -- `create_storage_bucket` - 创建存储桶,支持配置公开访问、文件大小限制和 MIME 类型限制。 -- `delete_storage_bucket` - 删除存储桶。 -- `get_storage_config` - 获取当前工作空间端点暴露的存储配置。 -- `update_storage_config` - 在当前端点支持时更新存储配置。 +# Supabase MCP Server -## 兼容性说明 +面向 AIDAP 工作区的 Supabase MCP 服务。 -- 官方 Supabase MCP Server 主要基于 Supabase Management API 实现。AIDAP 没有同等的 Management API,所以这里是通过 AIDAP 的 workspace API 和 Supabase 工作空间端点来实现兼容能力。 -- 在 AIDAP 里,`workspace` 对应 Supabase 的 `project`。为了兼容 MCP 侧常见命名,工具参数仍然使用 `project_id`。 -- 大多数项目级工具里的 `project_id` 同时支持传入工作空间 ID 或分支 ID。传入 `br-xxx` 这类分支 ID 时,服务端会自动解析其所属 workspace。 -- 未显式传入 `project_id` 时,服务端会优先使用 `DEFAULT_PROJECT_ID` 或 `DEFAULT_WORKSPACE_ID`。 -- `reset_branch` 为兼容接口保留了 `migration_version` 参数,但当前 AIDAP API 会忽略它。 -- `update_storage_config` 在部分 AIDAP 工作空间端点上可能返回 `supported: false`,表示当前端点暂不支持该能力。 +## 概览 -## 接入指南 +这个服务通过 MCP 暴露 Supabase 能力,并统一使用 AIDAP `workspace` 作为核心资源模型。 -### 1. 环境依赖 -- Python 3.10+ -- [uv](https://github.com/astral-sh/uv) +支持范围: -### 2. 获取凭证 -从[火山引擎访问密钥控制台](https://console.volcengine.com/iam/keymanage/)获取 `VOLCENGINE_ACCESS_KEY` 和 `VOLCENGINE_SECRET_KEY`。 +- 工作区生命周期管理 +- 分支生命周期管理 +- 数据库访问 +- Edge Functions +- 存储管理 +- TypeScript 类型生成 -### 3. 
环境变量 +## 环境变量 | 变量名 | 必需 | 说明 | -| ------ | ---- | ---- | -| `VOLCENGINE_ACCESS_KEY` | 是 | 火山引擎 Access Key ID | -| `VOLCENGINE_SECRET_KEY` | 是 | 火山引擎 Secret Access Key | +| --- | --- | --- | +| `VOLCENGINE_ACCESS_KEY` | 是 | 火山引擎访问密钥 | +| `VOLCENGINE_SECRET_KEY` | 是 | 火山引擎私密密钥 | | `VOLCENGINE_REGION` | 否 | 区域,默认 `cn-beijing` | -| `DEFAULT_PROJECT_ID` | 否 | 未传 `project_id` 时使用的默认项目 ID | -| `DEFAULT_WORKSPACE_ID` | 否 | 与 `DEFAULT_PROJECT_ID` 作用相同 | -| `READ_ONLY` | 否 | 设置为 `true` 后禁止写操作 | -| `SUPABASE_ENDPOINT_SCHEME` | 否 | 工作空间端点协议,默认 `http` | -| `SUPABASE_PROJECT_SLUG` | 否 | Edge Functions 使用的项目 slug,默认 `default` | - -## 快速部署 - -### 方式一:使用 `uvx` - -```json -{ - "mcpServers": { - "supabase": { - "command": "uvx", - "args": [ - "--from", - "git+https://github.com/volcengine/mcp-server.git#subdirectory=server/mcp_server_supabase", - "mcp-server-supabase" - ], - "env": { - "VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", - "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", - "VOLCENGINE_REGION": "cn-beijing" - } - } - } -} -``` +| `READ_ONLY` | 否 | 设为 `true` 时禁止写操作 | +| `DEFAULT_WORKSPACE_ID` | 否 | 未传 `workspace_id` 时使用的默认工作区 | +| `SUPABASE_WORKSPACE_SLUG` | 否 | Edge Functions 使用的 slug,默认 `default` | +| `SUPABASE_ENDPOINT_SCHEME` | 否 | `http` 或 `https`,默认 `http` | -### 方式二:本地源码运行 +## 启动 ```bash -cd /absolute/path/to/mcp-server/server/mcp_server_supabase -uv sync -``` - -```json -{ - "mcpServers": { - "supabase": { - "command": "uv", - "args": [ - "--directory", - "/absolute/path/to/mcp-server/server/mcp_server_supabase", - "run", - "mcp-server-supabase" - ], - "env": { - "VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", - "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", - "VOLCENGINE_REGION": "cn-beijing", - "DEFAULT_PROJECT_ID": "ws-xxxxxxxx" - } - } - } -} +python -m mcp_server_supabase.server ``` -### 方式三:使用 `python3` +## MCP 客户端配置示例 ```json { "mcpServers": { "supabase": { - "command": "python3", + "command": "python", "args": [ "-m", "mcp_server_supabase.server" ], "env": { - 
"VOLCENGINE_ACCESS_KEY": "your_volcengine_ak", - "VOLCENGINE_SECRET_KEY": "your_volcengine_sk", - "VOLCENGINE_REGION": "cn-beijing" + "VOLCENGINE_ACCESS_KEY": "your-access-key", + "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_REGION": "cn-beijing", + "DEFAULT_WORKSPACE_ID": "ws-xxxxxxxx" } } } } ``` -## Prompt 示例 +## 工具列表 + +### 工作区 + +- `list_workspaces` +- `get_workspace` +- `create_workspace` +- `pause_workspace` +- `restore_workspace` +- `get_workspace_url` +- `get_publishable_keys` + +### 分支 + +- `list_branches` +- `create_branch` +- `delete_branch` +- `reset_branch` + +### 数据库 + +- `execute_sql` +- `list_tables` +- `list_migrations` +- `list_extensions` +- `apply_migration` +- `generate_typescript_types` + +### Edge Functions -- `列出我所有的 Supabase 项目` -- `查看项目 ws-xxxx 下的所有分支` -- `执行 SQL: select * from public.users limit 10` -- `为 public,auth schema 生成 TypeScript 类型` -- `部署一个名为 webhook-handler 的 Edge Function` -- `列出项目 ws-xxxx 下的所有存储桶` +- `list_edge_functions` +- `get_edge_function` +- `deploy_edge_function` +- `delete_edge_function` -## 补充说明 +### 存储 -- 大多数 MCP 桌面客户端默认使用 `stdio`,上面的配置方式最通用。 -- 设置 `READ_ONLY=true` 后,所有写入类工具都会被拦截。 -- 当端点解析或 API Key 获取依赖分支,而你又没有显式指定分支时,服务端会自动使用默认分支。 +- `list_storage_buckets` +- `create_storage_bucket` +- `delete_storage_bucket` +- `get_storage_config` +- `update_storage_config` -## License +## 使用说明 -volcengine/mcp-server 采用 [MIT 许可证](https://github.com/volcengine/mcp-server/blob/main/LICENSE) 授权。 +- 所有 `workspace_id` 参数都可以直接传分支 ID。 +- 未传 `workspace_id` 时,如果配置了 `DEFAULT_WORKSPACE_ID`,服务会自动使用它。 +- `get_publishable_keys` 在需要时会自动解析默认分支。 diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py b/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py index bffe946a..d4f103c4 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py @@ -8,7 +8,7 @@ @dataclass(slots=True) class SupabaseRuntime: aidap_client: AidapClient 
- default_project_id: Optional[str] + default_workspace_id: Optional[str] edge_tools: EdgeFunctionTools storage_tools: StorageTools database_tools: DatabaseTools @@ -16,15 +16,15 @@ class SupabaseRuntime: def create_runtime( - default_project_id: Optional[str] = None, + default_workspace_id: Optional[str] = None, aidap_client: Optional[AidapClient] = None, ) -> SupabaseRuntime: client = aidap_client or AidapClient() return SupabaseRuntime( aidap_client=client, - default_project_id=default_project_id, - edge_tools=EdgeFunctionTools(client, default_project_id), - storage_tools=StorageTools(client, default_project_id), - database_tools=DatabaseTools(client, default_project_id), - workspace_tools=WorkspaceTools(client, default_project_id), + default_workspace_id=default_workspace_id, + edge_tools=EdgeFunctionTools(client, default_workspace_id), + storage_tools=StorageTools(client, default_workspace_id), + database_tools=DatabaseTools(client, default_workspace_id), + workspace_tools=WorkspaceTools(client, default_workspace_id), ) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index 588cbcab..554b2b82 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -14,14 +14,15 @@ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" ) -default_project_id = os.getenv("DEFAULT_PROJECT_ID") or os.getenv("DEFAULT_WORKSPACE_ID") +default_workspace_id = os.getenv("DEFAULT_WORKSPACE_ID") + def create_mcp( port: int | None = None, default_target_id: str | None = None, ) -> FastMCP: resolved_port = port if port is not None else int(os.getenv("PORT", "8000")) - resolved_default_target_id = default_target_id if default_target_id is not None else default_project_id + resolved_default_target_id = default_target_id if default_target_id is not None else default_workspace_id runtime = 
create_runtime(resolved_default_target_id) mcp = FastMCP("Supabase MCP Server (AIDAP)", port=resolved_port) register_tools(mcp, runtime) @@ -38,8 +39,8 @@ def main(): logger.info(f"Starting Supabase MCP Server on port {args.port}") logger.info(f"Read-only mode: {READ_ONLY}") - if default_project_id: - logger.info(f"Default project ID: {default_project_id}") + if default_workspace_id: + logger.info(f"Default workspace ID: {default_workspace_id}") create_mcp(port=args.port).run() diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py index baa42932..55384234 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py @@ -9,21 +9,21 @@ def register_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: _register_edge_tools(mcp, runtime) _register_storage_tools(mcp, runtime) _register_database_tools(mcp, runtime) - _register_project_tools(mcp, runtime) + _register_workspace_tools(mcp, runtime) def _register_edge_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: edge_tools = runtime.edge_tools @mcp.tool() - async def list_edge_functions(project_id: str = None) -> str: - """Lists all Edge Functions in a project.""" - return await edge_tools.list_edge_functions(project_id) + async def list_edge_functions(workspace_id: str = None) -> str: + """Lists all Edge Functions in a workspace or branch.""" + return await edge_tools.list_edge_functions(workspace_id) @mcp.tool() - async def get_edge_function(function_name: str, project_id: str = None) -> str: + async def get_edge_function(function_name: str, workspace_id: str = None) -> str: """Retrieves the source code and configuration for an Edge Function.""" - return await edge_tools.get_edge_function(function_name, project_id) + return await edge_tools.get_edge_function(function_name, workspace_id) @mcp.tool() async def 
deploy_edge_function( @@ -32,19 +32,17 @@ async def deploy_edge_function( verify_jwt: bool = True, runtime: str = "native-node20/v1", import_map: str = None, - project_id: str = None, + workspace_id: str = None, ) -> str: """Deploys a new Edge Function or updates an existing one. Args: function_name: Name of the function to deploy source_code: Source code for the function - verify_jwt: Whether to verify JWT tokens (default: True) - runtime: Runtime environment (default: native-node20/v1) - Options: native-node20/v1, native-python3.9/v1, - native-python3.10/v1, native-python3.12/v1 + verify_jwt: Whether to verify JWT tokens + runtime: Runtime environment import_map: Optional import map JSON for dependencies - project_id: The project ID (optional) + workspace_id: The workspace ID or branch ID """ return await edge_tools.deploy_edge_function( function_name, @@ -52,22 +50,22 @@ async def deploy_edge_function( verify_jwt, runtime, import_map, - project_id, + workspace_id, ) @mcp.tool() - async def delete_edge_function(function_name: str, project_id: str = None) -> str: + async def delete_edge_function(function_name: str, workspace_id: str = None) -> str: """Deletes an Edge Function.""" - return await edge_tools.delete_edge_function(function_name, project_id) + return await edge_tools.delete_edge_function(function_name, workspace_id) def _register_storage_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: storage_tools = runtime.storage_tools @mcp.tool() - async def list_storage_buckets(project_id: str = None) -> str: - """Lists all storage buckets in a project.""" - return await storage_tools.list_storage_buckets(project_id) + async def list_storage_buckets(workspace_id: str = None) -> str: + """Lists all storage buckets in a workspace or branch.""" + return await storage_tools.list_storage_buckets(workspace_id) @mcp.tool() async def create_storage_bucket( @@ -75,7 +73,7 @@ async def create_storage_bucket( public: bool = False, file_size_limit: int = None, 
allowed_mime_types: str | list[str] = None, - project_id: str = None, + workspace_id: str = None, ) -> str: """Creates a new storage bucket.""" return await storage_tools.create_storage_bucket( @@ -83,119 +81,119 @@ async def create_storage_bucket( public, file_size_limit, allowed_mime_types, - project_id, + workspace_id, ) @mcp.tool() - async def delete_storage_bucket(bucket_name: str, project_id: str = None) -> str: + async def delete_storage_bucket(bucket_name: str, workspace_id: str = None) -> str: """Deletes a storage bucket.""" - return await storage_tools.delete_storage_bucket(bucket_name, project_id) + return await storage_tools.delete_storage_bucket(bucket_name, workspace_id) @mcp.tool() - async def get_storage_config(project_id: str = None) -> str: - """Gets the storage configuration for a project.""" - return await storage_tools.get_storage_config(project_id) + async def get_storage_config(workspace_id: str = None) -> str: + """Gets the storage configuration for a workspace or branch.""" + return await storage_tools.get_storage_config(workspace_id) @mcp.tool() - async def update_storage_config(config: str, project_id: str = None) -> str: - """Updates the storage configuration for a project.""" - return await storage_tools.update_storage_config(json.loads(config), project_id) + async def update_storage_config(config: str, workspace_id: str = None) -> str: + """Updates the storage configuration for a workspace or branch.""" + return await storage_tools.update_storage_config(json.loads(config), workspace_id) def _register_database_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: database_tools = runtime.database_tools @mcp.tool() - async def execute_sql(query: str, project_id: str = None) -> str: + async def execute_sql(query: str, workspace_id: str = None) -> str: """Executes raw SQL in the Postgres database.""" - return await database_tools.execute_sql(query, project_id) + return await database_tools.execute_sql(query, workspace_id) @mcp.tool() - 
async def list_tables(schemas: str = "public", project_id: str = None) -> str: + async def list_tables(schemas: str = "public", workspace_id: str = None) -> str: """Lists all tables in one or more schemas.""" schema_list = [schema.strip() for schema in schemas.split(",")] - return await database_tools.list_tables(schema_list, project_id) + return await database_tools.list_tables(schema_list, workspace_id) @mcp.tool() - async def list_migrations(project_id: str = None) -> str: + async def list_migrations(workspace_id: str = None) -> str: """Lists all migrations in the database.""" - return await database_tools.list_migrations(project_id) + return await database_tools.list_migrations(workspace_id) @mcp.tool() - async def list_extensions(project_id: str = None) -> str: + async def list_extensions(workspace_id: str = None) -> str: """Lists all PostgreSQL extensions in the database.""" - return await database_tools.list_extensions(project_id) + return await database_tools.list_extensions(workspace_id) @mcp.tool() - async def apply_migration(name: str, query: str, project_id: str = None) -> str: + async def apply_migration(name: str, query: str, workspace_id: str = None) -> str: """Applies a migration to the database.""" - return await database_tools.apply_migration(name, query, project_id) + return await database_tools.apply_migration(name, query, workspace_id) @mcp.tool() - async def generate_typescript_types(schemas: str = "public", project_id: str = None) -> str: + async def generate_typescript_types(schemas: str = "public", workspace_id: str = None) -> str: """Generates TypeScript definitions from database schema.""" schema_list = [schema.strip() for schema in schemas.split(",") if schema.strip()] - return await database_tools.generate_typescript_types(schema_list, project_id) + return await database_tools.generate_typescript_types(schema_list, workspace_id) -def _register_project_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: +def 
_register_workspace_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: workspace_tools = runtime.workspace_tools @mcp.tool() - async def list_projects() -> str: - """Lists all available projects.""" - return await workspace_tools.list_projects() + async def list_workspaces() -> str: + """Lists all available workspaces.""" + return await workspace_tools.list_workspaces() @mcp.tool() - async def get_project(project_id: str) -> str: - """Gets details for a specific project.""" - return await workspace_tools.get_project(project_id) + async def get_workspace(workspace_id: str) -> str: + """Gets details for a specific workspace or branch target.""" + return await workspace_tools.get_workspace(workspace_id) @mcp.tool() - async def create_project( - project_name: str, + async def create_workspace( + workspace_name: str, engine_version: str = "Supabase_1_24", engine_type: str = "Supabase", ) -> str: - """Creates a new project.""" - return await workspace_tools.create_project(project_name, engine_version, engine_type) + """Creates a new workspace.""" + return await workspace_tools.create_workspace(workspace_name, engine_version, engine_type) @mcp.tool() - async def pause_project(project_id: str = None) -> str: - """Pauses a project.""" - return await workspace_tools.pause_project(project_id) + async def pause_workspace(workspace_id: str = None) -> str: + """Pauses a workspace.""" + return await workspace_tools.pause_workspace(workspace_id) @mcp.tool() - async def restore_project(project_id: str = None) -> str: - """Restores a project.""" - return await workspace_tools.restore_project(project_id) + async def restore_workspace(workspace_id: str = None) -> str: + """Restores a workspace.""" + return await workspace_tools.restore_workspace(workspace_id) @mcp.tool() - async def get_project_url(project_id: str = None) -> str: - """Gets API endpoint URL for a project.""" - return await workspace_tools.get_project_url(project_id) + async def get_workspace_url(workspace_id: str = 
None) -> str: + """Gets API endpoint URL for a workspace or branch.""" + return await workspace_tools.get_workspace_url(workspace_id) @mcp.tool() - async def get_publishable_keys(project_id: str = None, reveal: bool = False) -> str: - """Gets API keys for a project.""" - return await workspace_tools.get_publishable_keys(project_id, reveal) + async def get_publishable_keys(workspace_id: str = None, reveal: bool = False) -> str: + """Gets API keys for a workspace or branch.""" + return await workspace_tools.get_publishable_keys(workspace_id, reveal) @mcp.tool() - async def list_branches(project_id: str = None) -> str: - """Lists all development branches of a project.""" - return await workspace_tools.list_branches(project_id) + async def list_branches(workspace_id: str = None) -> str: + """Lists all development branches of a workspace.""" + return await workspace_tools.list_branches(workspace_id) @mcp.tool() - async def create_branch(name: str = "develop", project_id: str = None) -> str: + async def create_branch(name: str = "develop", workspace_id: str = None) -> str: """Creates a development branch.""" - return await workspace_tools.create_branch(name, project_id) + return await workspace_tools.create_branch(name, workspace_id) @mcp.tool() - async def delete_branch(branch_id: str, project_id: str = None) -> str: + async def delete_branch(branch_id: str, workspace_id: str = None) -> str: """Deletes a development branch.""" - return await workspace_tools.delete_branch(branch_id, project_id) + return await workspace_tools.delete_branch(branch_id, workspace_id) @mcp.tool() - async def reset_branch(branch_id: str, migration_version: str = None, project_id: str = None) -> str: - """Resets migrations of a development branch. 
Any untracked data or schema changes will be lost.""" - return await workspace_tools.reset_branch(branch_id, migration_version, project_id) + async def reset_branch(branch_id: str, migration_version: str = None, workspace_id: str = None) -> str: + """Resets a development branch. Any untracked data or schema changes will be lost.""" + return await workspace_tools.reset_branch(branch_id, migration_version, workspace_id) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py index 438e35a7..5345d895 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py @@ -40,7 +40,7 @@ RESERVED_SLUGS = {"deploy", "body", "health", "metrics"} MAX_SLUG_LENGTH = 127 MAX_CODE_SIZE = 10 * 1024 * 1024 # 10MB -PROJECT_SLUG = os.getenv("SUPABASE_PROJECT_SLUG", "default").strip() or "default" +WORKSPACE_SLUG = os.getenv("SUPABASE_WORKSPACE_SLUG", "default").strip() or "default" class EdgeFunctionTools(BaseTools): @@ -146,7 +146,7 @@ async def list_edge_functions(self, workspace_id: Optional[str] = None) -> List[ logger.info(f"Listing edge functions for workspace {ws_id}") client = await self._get_client(ws_id, branch_id) - result = await client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions") + result = await client.call_api(f"/v1/projects/{WORKSPACE_SLUG}/functions") functions = [EdgeFunction(**func) for func in result] logger.info(f"Found {len(functions)} edge functions") @@ -161,7 +161,7 @@ async def get_edge_function(self, function_name: str, workspace_id: Optional[str client = await self._get_client(ws_id, branch_id) encoded_name = quote(function_name, safe="") try: - result = await client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions/{encoded_name}") + result = await client.call_api(f"/v1/projects/{WORKSPACE_SLUG}/functions/{encoded_name}") 
except SupabaseApiError as e: payload_text = self._extract_error_text(e.payload).lower() if "function not found" in payload_text or "not found" in payload_text: @@ -249,7 +249,7 @@ async def deploy_edge_function( # AIDAP 部署 API 路径 result = await client.call_api( - f"/v1/projects/{PROJECT_SLUG}/functions/deploy?slug={encoded_name}", + f"/v1/projects/{WORKSPACE_SLUG}/functions/deploy?slug={encoded_name}", method="POST", json_data=data ) @@ -272,7 +272,7 @@ async def delete_edge_function(self, function_name: str, workspace_id: Optional[ client = await self._get_client(ws_id, branch_id) encoded_name = quote(function_name, safe="") - await client.call_api(f"/v1/projects/{PROJECT_SLUG}/functions/{encoded_name}", method="DELETE") + await client.call_api(f"/v1/projects/{WORKSPACE_SLUG}/functions/{encoded_name}", method="DELETE") logger.info(f"Successfully deleted edge function '{function_name}'") return {"success": True, "message": "Edge function deleted successfully"} diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index 5856fd36..c82899e2 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -1,6 +1,6 @@ import asyncio -import logging import inspect +import logging from typing import Any, Optional from ..utils import compact_dict, pick_value, read_only_check, resolve_target, to_json @@ -9,8 +9,6 @@ class WorkspaceTools: - """Tools for managing workspaces""" - def __init__(self, aidap_client, default_workspace_id: Optional[str] = None): self.aidap_client = aidap_client self.default_workspace_id = default_workspace_id @@ -28,13 +26,9 @@ async def _resolve_target(self, target_id: Optional[str]) -> tuple[Optional[str] return await resolve_target(self.aidap_client, target_id, self.default_workspace_id) def _workspace_view(self, source: Any) 
-> dict: - workspace_id = self._pick(source, "workspace_id") - workspace_name = self._pick(source, "workspace_name") - project_name = self._pick(source, "project_name") payload = { - "workspace_id": workspace_id, - "workspace_name": workspace_name, - "project_name": project_name or workspace_name, + "workspace_id": self._pick(source, "workspace_id"), + "workspace_name": self._pick(source, "workspace_name"), "status": self._pick(source, "workspace_status", "status"), "region": self._pick(source, "region_id", "region"), "created_at": self._pick(source, "create_time", "created_at"), @@ -49,12 +43,12 @@ def _branch_view(self, branch: dict, workspace_payload: Optional[dict] = None) - workspace_payload = workspace_payload or {} payload = { "branch_id": branch.get("branch_id"), - "branch_name": branch.get("name"), + "branch_name": branch.get("branch_name") or branch.get("name"), "status": branch.get("status") or workspace_payload.get("status"), "default": branch.get("default"), "parent_id": branch.get("parent_id"), - "root_project_id": workspace_payload.get("workspace_id") or branch.get("workspace_id"), - "root_project_name": workspace_payload.get("workspace_name"), + "workspace_id": workspace_payload.get("workspace_id") or branch.get("workspace_id"), + "workspace_name": workspace_payload.get("workspace_name"), "created_at": branch.get("created_at") or workspace_payload.get("created_at"), "updated_at": branch.get("updated_at") or workspace_payload.get("updated_at"), "engine_type": workspace_payload.get("engine_type"), @@ -64,32 +58,6 @@ def _branch_view(self, branch: dict, workspace_payload: Optional[dict] = None) - } return self._compact(payload) - def _project_view(self, source: Any) -> dict: - workspace_payload = self._workspace_view(source) - project_name = workspace_payload.get("project_name") or workspace_payload.get("workspace_name") - payload = { - "project_id": workspace_payload.get("workspace_id"), - "project_name": project_name, - "status": 
workspace_payload.get("status"), - "region": workspace_payload.get("region"), - "created_at": workspace_payload.get("created_at"), - "updated_at": workspace_payload.get("updated_at"), - "engine_type": workspace_payload.get("engine_type"), - "engine_version": workspace_payload.get("engine_version"), - "deletion_protection_status": workspace_payload.get("deletion_protection_status"), - } - return self._compact(payload) - - def _with_project_alias(self, payload: dict, project_id: Optional[str] = None, project_name: Optional[str] = None) -> dict: - result = {key: value for key, value in dict(payload).items() if key not in {"workspace_id", "workspace_name"}} - resolved_project_id = result.get("project_id") or project_id - resolved_project_name = result.get("project_name") or project_name - if resolved_project_id: - result["project_id"] = resolved_project_id - if resolved_project_name: - result["project_name"] = resolved_project_name - return result - def _describe_workspaces_response(self): from volcenginesdkaidap.models import DescribeWorkspacesRequest, FilterForDescribeWorkspacesInput @@ -127,79 +95,74 @@ def _mask_key(self, value: Optional[str], reveal: bool) -> Optional[str]: return "*" * len(value) return f"{value[:6]}...{value[-4:]}" - async def list_projects(self) -> str: + async def list_workspaces(self) -> str: try: response = self._describe_workspaces_response() raw_workspaces = list(getattr(response, "workspaces", []) or []) - projects = [self._project_view(ws) for ws in raw_workspaces] + workspaces = [self._workspace_view(workspace) for workspace in raw_workspaces] return self._to_json({ "success": True, - "projects": projects, - "count": len(projects) + "workspaces": workspaces, + "count": len(workspaces), }) except Exception as e: - logger.error(f"Error listing projects: {e}") + logger.error(f"Error listing workspaces: {e}") return self._to_json({ "success": False, - "error": str(e) + "error": str(e), }) - async def get_project(self, project_id: str) -> 
str: + async def get_workspace(self, workspace_id: str) -> str: try: - ws_id, branch_id = await self._resolve_target(project_id) + ws_id, branch_id = await self._resolve_target(workspace_id) if not ws_id: return self._to_json({ "success": False, - "error": "project_id is required" + "error": "workspace_id is required", }) - ws = self._find_workspace_source(ws_id) - if ws is not None: - project_info = self._project_view(ws) - if branch_id: - branch = await self.aidap_client.get_branch(ws_id, branch_id) - if branch: - branch_view = self._branch_view(branch, self._workspace_view(ws)) - project_info.update({ - "project_id": branch_id, - "project_name": branch.get("name") or project_info.get("project_name"), - **branch_view, - }) + workspace_source = self._find_workspace_source(ws_id) + if workspace_source is None: return self._to_json({ - "success": True, - "project": project_info + "success": False, + "error": "Workspace not found", }) + workspace_info = self._workspace_view(workspace_source) + if branch_id: + branch = await self.aidap_client.get_branch(ws_id, branch_id) + if branch: + workspace_info.update(self._branch_view(branch, workspace_info)) return self._to_json({ - "success": False, - "error": "Project not found" + "success": True, + "workspace": workspace_info, }) except Exception as e: - logger.error(f"Error getting project: {e}") + logger.error(f"Error getting workspace: {e}") return self._to_json({ "success": False, - "error": str(e) + "error": str(e), }) @read_only_check - async def create_project( + async def create_workspace( self, - project_name: str, + workspace_name: str, engine_version: str = "Supabase_1_24", engine_type: str = "Supabase", ) -> str: - if not project_name or not project_name.strip(): - return self._to_json({"success": False, "error": "project_name is required"}) + if not workspace_name or not workspace_name.strip(): + return self._to_json({"success": False, "error": "workspace_name is required"}) result = await 
self.aidap_client.create_workspace( - workspace_name=project_name.strip(), + workspace_name=workspace_name.strip(), engine_type=engine_type, - engine_version=engine_version + engine_version=engine_version, ) if not isinstance(result, dict): - return self._to_json({"success": False, "error": "Unexpected create project response"}) + return self._to_json({"success": False, "error": "Unexpected create workspace response"}) if result.get("success"): mapped = { "success": True, - "project_id": result.get("workspace_id"), - "project_name": result.get("workspace_name") or project_name.strip(), + "workspace_id": result.get("workspace_id"), + "workspace_name": result.get("workspace_name") or workspace_name.strip(), "engine_type": result.get("engine_type"), "engine_version": result.get("engine_version"), } @@ -207,83 +170,68 @@ async def create_project( return self._to_json(result) @read_only_check - async def restore_project(self, project_id: Optional[str] = None) -> str: - ws_id, _ = await self._resolve_target(project_id) + async def restore_workspace(self, workspace_id: Optional[str] = None) -> str: + ws_id, _ = await self._resolve_target(workspace_id) if not ws_id: - return self._to_json({"success": False, "error": "project_id is required"}) + return self._to_json({"success": False, "error": "workspace_id is required"}) result = await self.aidap_client.start_workspace(ws_id) - if isinstance(result, dict): - result = self._with_project_alias(result, ws_id) - return self._to_json(result) + return self._to_json(result if isinstance(result, dict) else {"success": bool(result), "workspace_id": ws_id}) @read_only_check - async def pause_project(self, project_id: Optional[str] = None) -> str: - ws_id, _ = await self._resolve_target(project_id) + async def pause_workspace(self, workspace_id: Optional[str] = None) -> str: + ws_id, _ = await self._resolve_target(workspace_id) if not ws_id: - return self._to_json({"success": False, "error": "project_id is required"}) + return 
self._to_json({"success": False, "error": "workspace_id is required"}) result = await self.aidap_client.stop_workspace(ws_id) - if isinstance(result, dict): - result = self._with_project_alias(result, ws_id) - return self._to_json(result) + return self._to_json(result if isinstance(result, dict) else {"success": bool(result), "workspace_id": ws_id}) @read_only_check async def create_branch( self, name: str = "develop", - project_id: Optional[str] = None, + workspace_id: Optional[str] = None, ) -> str: - ws_id, _ = await self._resolve_target(project_id) + ws_id, _ = await self._resolve_target(workspace_id) if not ws_id: - return self._to_json({"success": False, "error": "project_id is required"}) + return self._to_json({"success": False, "error": "workspace_id is required"}) result = await self.aidap_client.create_branch(ws_id, name) if result.get("success") and result.get("branch_id"): - branch_id = result["branch_id"] - result.pop("workspace_id", None) - result.pop("workspace_name", None) - result.update({ - "project_id": branch_id, - "project_name": result.get("name") or name, - "root_project_id": ws_id, - "target_type": "branch", - }) - endpoint = await self.aidap_client.get_endpoint(ws_id, branch_id=branch_id, use_cache=False) + branch_payload = self._branch_view(result, {"workspace_id": ws_id}) + branch_payload["branch_name"] = branch_payload.get("branch_name") or name + response_payload = { + "success": True, + **branch_payload, + } + endpoint = await self.aidap_client.get_endpoint(ws_id, branch_id=result["branch_id"], use_cache=False) if endpoint: - result["project_url"] = endpoint - result["api_url"] = endpoint + response_payload["workspace_url"] = endpoint + response_payload["api_url"] = endpoint + return self._to_json(self._compact(response_payload)) return self._to_json(result) - async def list_branches(self, project_id: Optional[str] = None) -> str: - ws_id, _ = await self._resolve_target(project_id) + async def list_branches(self, workspace_id: 
Optional[str] = None) -> str: + ws_id, _ = await self._resolve_target(workspace_id) if not ws_id: - return self._to_json({"success": False, "error": "project_id is required"}) + return self._to_json({"success": False, "error": "workspace_id is required"}) try: + workspace_source = self._find_workspace_source(ws_id) + workspace_payload = self._workspace_view(workspace_source) if workspace_source is not None else {"workspace_id": ws_id} branches = await self.aidap_client.list_branches(ws_id) - normalized_branches = [] - for branch in branches: - normalized_branch = dict(branch) - root_project_id = normalized_branch.pop("workspace_id", None) - normalized_branch.pop("workspace_name", None) - if normalized_branch.get("branch_id"): - normalized_branch["project_id"] = normalized_branch["branch_id"] - normalized_branch["project_name"] = normalized_branch.get("name") - normalized_branch["target_type"] = "branch" - if root_project_id: - normalized_branch["root_project_id"] = root_project_id - normalized_branches.append(normalized_branch) + normalized_branches = [self._branch_view(branch, workspace_payload) for branch in branches] return self._to_json({"success": True, "branches": normalized_branches}) except Exception as e: logger.error(f"Error listing branches: {e}") return self._to_json({"success": False, "error": str(e)}) @read_only_check - async def delete_branch(self, branch_id: str, project_id: Optional[str] = None) -> str: - ws_id, _ = await self._resolve_target(project_id) + async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None) -> str: + ws_id, _ = await self._resolve_target(workspace_id) if not ws_id: return self._to_json({ "success": False, - "error": "project_id is required", - "error_detail": self._error_detail("MissingProjectId", "project_id is required", False), + "error": "workspace_id is required", + "error_detail": self._error_detail("MissingWorkspaceId", "workspace_id is required", False), }) if not branch_id or not 
branch_id.strip(): return self._to_json({ @@ -295,15 +243,15 @@ async def delete_branch(self, branch_id: str, project_id: Optional[str] = None) try: branches = await self.aidap_client.list_branches(ws_id) - exists = any(b.get("branch_id") == normalized_branch_id for b in branches) + exists = any(branch.get("branch_id") == normalized_branch_id for branch in branches) if not exists: return self._to_json({ "success": False, - "error": f"Branch '{normalized_branch_id}' not found in project '{ws_id}'", + "error": f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'", "error_detail": self._error_detail( "BranchNotFound", - f"Branch '{normalized_branch_id}' not found in project '{ws_id}'", - False + f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'", + False, ), }) except Exception as e: @@ -323,7 +271,7 @@ async def delete_branch(self, branch_id: str, project_id: Optional[str] = None) "error_detail": self._error_detail( result.get("code", "DeleteBranchFailed"), error_text, - bool(result.get("retriable", False)) + bool(result.get("retriable", False)), ), }) @@ -333,9 +281,9 @@ async def delete_branch(self, branch_id: str, project_id: Optional[str] = None) await asyncio.sleep(1) try: branches = await self.aidap_client.list_branches(ws_id) - exists = any(b.get("branch_id") == normalized_branch_id for b in branches) + exists = any(branch.get("branch_id") == normalized_branch_id for branch in branches) if not exists: - return self._to_json({"success": True, "branch_id": normalized_branch_id}) + return self._to_json({"success": True, "branch_id": normalized_branch_id, "workspace_id": ws_id}) except Exception as e: last_list_error = str(e) @@ -346,7 +294,7 @@ async def delete_branch(self, branch_id: str, project_id: Optional[str] = None) "error_detail": self._error_detail( "DeleteBranchVerifyFailed", f"Delete requested for branch '{normalized_branch_id}' but verification failed: {last_list_error}", - True + True, ), }) return self._to_json({ @@ 
-355,38 +303,37 @@ async def delete_branch(self, branch_id: str, project_id: Optional[str] = None) "error_detail": self._error_detail( "BranchStillExists", f"Delete requested for branch '{normalized_branch_id}' but branch still exists", - True + True, ), }) - async def get_project_url(self, project_id: Optional[str] = None) -> str: - ws_id, branch_id = await self._resolve_target(project_id) + async def get_workspace_url(self, workspace_id: Optional[str] = None) -> str: + ws_id, branch_id = await self._resolve_target(workspace_id) if not ws_id: - return self._to_json({"success": False, "error": "project_id is required"}) + return self._to_json({"success": False, "error": "workspace_id is required"}) endpoint = await self.aidap_client.get_endpoint(ws_id, branch_id=branch_id) if not endpoint: + target_id = branch_id or ws_id return self._to_json({ "success": False, - "error": f"Could not get endpoint for project {ws_id if not branch_id else branch_id}" + "error": f"Could not get endpoint for workspace {target_id}", }) payload = { "success": True, - "project_id": branch_id or ws_id, - "project_url": endpoint, - "api_url": endpoint + "workspace_id": ws_id, + "workspace_url": endpoint, + "api_url": endpoint, } if branch_id: - payload.update({ - "branch_id": branch_id, - "root_project_id": ws_id, - "target_type": "branch", - }) + payload["branch_id"] = branch_id + payload["target_type"] = "branch" return self._to_json(payload) async def _get_api_keys_payload(self, workspace_id: str, branch_id: Optional[str] = None, reveal: bool = False) -> dict: - keys = await self.aidap_client.get_api_keys(workspace_id, branch_id=branch_id) + resolved_branch_id = branch_id or await self.aidap_client.get_default_branch_id(workspace_id) + keys = await self.aidap_client.get_api_keys(workspace_id, branch_id=resolved_branch_id) publishable_key = None anon_key = None service_role_key = None @@ -405,23 +352,22 @@ async def _get_api_keys_payload(self, workspace_id: str, branch_id: Optional[str 
}) payload = { "success": True, - "project_id": branch_id or workspace_id, + "workspace_id": workspace_id, "reveal": reveal, "publishable_key": self._mask_key(publishable_key, reveal), "anon_key": self._mask_key(anon_key, reveal), "service_role_key": self._mask_key(service_role_key, reveal), - "keys": masked_keys + "keys": masked_keys, } - if branch_id: - payload["branch_id"] = branch_id - payload["root_project_id"] = workspace_id + if resolved_branch_id: + payload["branch_id"] = resolved_branch_id payload["target_type"] = "branch" return payload - async def get_publishable_keys(self, project_id: Optional[str] = None, reveal: bool = False) -> str: - ws_id, branch_id = await self._resolve_target(project_id) + async def get_publishable_keys(self, workspace_id: Optional[str] = None, reveal: bool = False) -> str: + ws_id, branch_id = await self._resolve_target(workspace_id) if not ws_id: - return self._to_json({"success": False, "error": "project_id is required"}) + return self._to_json({"success": False, "error": "workspace_id is required"}) try: payload = await self._get_api_keys_payload(ws_id, branch_id=branch_id, reveal=reveal) @@ -435,19 +381,22 @@ async def reset_branch( self, branch_id: str, migration_version: Optional[str] = None, - project_id: Optional[str] = None, + workspace_id: Optional[str] = None, ) -> str: - ws_id, _ = await self._resolve_target(project_id) + ws_id, _ = await self._resolve_target(workspace_id) if not ws_id: return self._to_json({ "success": False, - "error": "project_id is required" + "error": "workspace_id is required", }) try: result = await self.aidap_client.reset_branch(ws_id, branch_id) if not isinstance(result, dict): result = {"success": bool(result)} + if result.get("success"): + result.setdefault("workspace_id", ws_id) + result.setdefault("branch_id", branch_id) if migration_version: result["warning"] = "migration_version is ignored because current AIDAP reset_branch API does not support version-targeted reset" return 
self._to_json(result) @@ -455,5 +404,5 @@ async def reset_branch( logger.error(f"Error resetting branch: {e}") return self._to_json({ "success": False, - "error": str(e) + "error": str(e), }) From 7cbcb3855c2b260d57e8fe94380a088333387fb5 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 15:30:43 +0800 Subject: [PATCH 17/32] fix:supbase --- .../src/mcp_server_supabase/tools/workspace_tools.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index c82899e2..e2630ed3 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -333,6 +333,8 @@ async def get_workspace_url(self, workspace_id: Optional[str] = None) -> str: async def _get_api_keys_payload(self, workspace_id: str, branch_id: Optional[str] = None, reveal: bool = False) -> dict: resolved_branch_id = branch_id or await self.aidap_client.get_default_branch_id(workspace_id) + if not resolved_branch_id: + raise RuntimeError(f"Could not resolve default branch for workspace {workspace_id}") keys = await self.aidap_client.get_api_keys(workspace_id, branch_id=resolved_branch_id) publishable_key = None anon_key = None From 03d26cb6a49170127c6f488c6d89ecf332afe7fc Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 15:34:03 +0800 Subject: [PATCH 18/32] fix:supbase --- .../src/mcp_server_supabase/tools/edge_function_tools.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py index 5345d895..7a353679 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py +++ 
b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py @@ -109,9 +109,6 @@ def _validate_function_name(self, function_name: str) -> None: if function_name in RESERVED_SLUGS: raise ValueError(f"Function name '{function_name}' is reserved") - if not re.match(r"^[a-z0-9][a-z0-9-]*$", function_name): - raise ValueError("Function name must match ^[a-z0-9][a-z0-9-]*$") - def _validate_runtime(self, runtime: str) -> None: """验证运行时""" if runtime not in RUNTIME_CONFIG: From 5f383dbb6adf25702ca8c383503ef3635944a1da Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 15:35:07 +0800 Subject: [PATCH 19/32] fix:supbase --- .../mcp_server_supabase/tools/edge_function_tools.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py index 7a353679..b274c15c 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py @@ -99,15 +99,7 @@ def _normalize_function_payload(self, payload: object) -> object: return result def _validate_function_name(self, function_name: str) -> None: - """验证函数名称""" - if not function_name: - raise ValueError("Function name cannot be empty") - - if len(function_name) > MAX_SLUG_LENGTH: - raise ValueError(f"Function name too long (max {MAX_SLUG_LENGTH} characters)") - - if function_name in RESERVED_SLUGS: - raise ValueError(f"Function name '{function_name}' is reserved") + return def _validate_runtime(self, runtime: str) -> None: """验证运行时""" From e5c2fcde947e3d39ea7afbffb50002db0bc56f84 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 15:48:53 +0800 Subject: [PATCH 20/32] fix:supbase --- server/mcp_server_supabase/README.md | 184 +++++++++++------- server/mcp_server_supabase/README_zh.md | 184 
+++++++++++------- .../src/mcp_server_supabase/tool_registry.py | 8 - .../tools/storage_tools.py | 28 +-- 4 files changed, 239 insertions(+), 165 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index d6807de8..3c0e95ac 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -1,52 +1,104 @@ # Supabase MCP Server -Supabase MCP server for AIDAP workspaces. +English | [简体中文](README_zh.md) -## Overview +> Supabase MCP server for AIDAP workspaces. It exposes workspace, branch, database, Edge Functions, storage, and TypeScript type generation capabilities through MCP. -This server exposes Supabase capabilities through MCP and uses AIDAP workspaces as the primary resource model. +| Item | Details | +| ---- | ---- | +| Version | v0.1.0 | +| Description | Supabase MCP server built on top of AIDAP workspaces | +| Category | Database / Developer Tools | +| Tags | Supabase, PostgreSQL, Edge Functions, Storage, AIDAP | -Supported areas: +## Tools + +### Workspace and Branch + +| Tool | Description | +| ---- | ---- | +| `list_workspaces` | List all available Supabase workspaces in the current account | +| `get_workspace` | Get workspace details; branch IDs are also accepted | +| `create_workspace` | Create a new Supabase workspace | +| `pause_workspace` | Pause a workspace | +| `restore_workspace` | Resume a paused workspace | +| `get_workspace_url` | Get the API endpoint for a workspace or branch | +| `get_publishable_keys` | Get publishable, anon, and service role keys | +| `list_branches` | List branches under a workspace | +| `create_branch` | Create a development branch | +| `delete_branch` | Delete a development branch | +| `reset_branch` | Reset a branch to its baseline state | + +### Database + +| Tool | Description | +| ---- | ---- | +| `execute_sql` | Execute raw SQL against the Postgres database | +| `list_tables` | List tables in one or more schemas | +| `list_migrations` | 
List records from `supabase_migrations.schema_migrations` | +| `list_extensions` | List installed PostgreSQL extensions | +| `apply_migration` | Run migration SQL and record it in `supabase_migrations.schema_migrations` | +| `generate_typescript_types` | Generate TypeScript definitions from schema metadata | + +### Edge Functions + +| Tool | Description | +| ---- | ---- | +| `list_edge_functions` | List Edge Functions in a workspace or branch | +| `get_edge_function` | Get the source code and configuration of an Edge Function | +| `deploy_edge_function` | Create or update an Edge Function | +| `delete_edge_function` | Delete an Edge Function | + +### Storage -- workspace lifecycle -- branch lifecycle -- database access -- Edge Functions -- storage -- TypeScript type generation +| Tool | Description | +| ---- | ---- | +| `list_storage_buckets` | List storage buckets | +| `create_storage_bucket` | Create a new storage bucket | +| `delete_storage_bucket` | Delete a storage bucket | +| `get_storage_config` | Get storage configuration | + +## Authentication + +Use Volcengine AK/SK authentication. Obtain your credentials from the [Volcengine API Access Key console](https://console.volcengine.com/iam/keymanage/). 
## Environment Variables -| Name | Required | Description | -| --- | --- | --- | -| `VOLCENGINE_ACCESS_KEY` | Yes | Volcengine access key | -| `VOLCENGINE_SECRET_KEY` | Yes | Volcengine secret key | -| `VOLCENGINE_REGION` | No | Region, default `cn-beijing` | -| `READ_ONLY` | No | Set to `true` to block write operations | -| `DEFAULT_WORKSPACE_ID` | No | Default workspace used when `workspace_id` is omitted | -| `SUPABASE_WORKSPACE_SLUG` | No | Edge Functions slug, default `default` | -| `SUPABASE_ENDPOINT_SCHEME` | No | `http` or `https`, default `http` | +| Name | Required | Default | Description | +| ---- | ---- | ---- | ---- | +| `VOLCENGINE_ACCESS_KEY` | Yes | - | Volcengine access key | +| `VOLCENGINE_SECRET_KEY` | Yes | - | Volcengine secret key | +| `VOLCENGINE_REGION` | No | `cn-beijing` | Region used for the AIDAP API | +| `DEFAULT_WORKSPACE_ID` | No | - | Default target used when `workspace_id` is omitted | +| `READ_ONLY` | No | `false` | Set to `true` to block all mutating tools | +| `SUPABASE_WORKSPACE_SLUG` | No | `default` | Project slug used by Edge Functions APIs | +| `SUPABASE_ENDPOINT_SCHEME` | No | `http` | Endpoint scheme used when building workspace URLs | +| `PORT` | No | `8000` | Port used when running the server directly | + +## Deployment -## Run +### Run from a local checkout ```bash -python -m mcp_server_supabase.server +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase ``` -## MCP Client Example +### MCP client config with local source ```json { "mcpServers": { "supabase": { - "command": "python", + "command": "uv", "args": [ - "-m", - "mcp_server_supabase.server" + "--directory", + "/ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase", + "run", + "mcp-server-supabase" ], "env": { - "VOLCENGINE_ACCESS_KEY": "your-access-key", - "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_ACCESS_KEY": "", + "VOLCENGINE_SECRET_KEY": "", "VOLCENGINE_REGION": "cn-beijing", 
"DEFAULT_WORKSPACE_ID": "ws-xxxxxxxx" } @@ -55,51 +107,53 @@ python -m mcp_server_supabase.server } ``` -## Tools - -### Workspace +### MCP client config with `uvx` -- `list_workspaces` -- `get_workspace` -- `create_workspace` -- `pause_workspace` -- `restore_workspace` -- `get_workspace_url` -- `get_publishable_keys` - -### Branch +```json +{ + "mcpServers": { + "supabase": { + "command": "uvx", + "args": [ + "--from", + "git+https://github.com/volcengine/mcp-server#subdirectory=server/mcp_server_supabase", + "mcp-server-supabase" + ], + "env": { + "VOLCENGINE_ACCESS_KEY": "", + "VOLCENGINE_SECRET_KEY": "", + "VOLCENGINE_REGION": "cn-beijing", + "DEFAULT_WORKSPACE_ID": "ws-xxxxxxxx" + } + } + } +} +``` -- `list_branches` -- `create_branch` -- `delete_branch` -- `reset_branch` +### Direct Python entrypoint -### Database +```bash +python3 -m mcp_server_supabase.server --port 8000 +``` -- `execute_sql` -- `list_tables` -- `list_migrations` -- `list_extensions` -- `apply_migration` -- `generate_typescript_types` +The package exposes both `mcp-server-supabase` and `supabase-aidap`. The examples above use `mcp-server-supabase`. -### Edge Functions +## Usage Notes -- `list_edge_functions` -- `get_edge_function` -- `deploy_edge_function` -- `delete_edge_function` +- If `workspace_id` is omitted, the server falls back to `DEFAULT_WORKSPACE_ID` when configured. +- If a branch ID such as `br-xxxx` is provided, the server resolves the corresponding workspace automatically. +- `get_publishable_keys` resolves the default branch automatically when needed. +- `reset_branch` accepts `migration_version`, but the current AIDAP API ignores that value and performs a branch reset only. +- `deploy_edge_function` currently supports `native-node20/v1`, `native-python3.9/v1`, `native-python3.10/v1`, and `native-python3.12/v1`. 
-### Storage +## Compatible Clients -- `list_storage_buckets` -- `create_storage_bucket` -- `delete_storage_bucket` -- `get_storage_config` -- `update_storage_config` +- Cursor +- Claude Desktop +- Cline +- Trae +- Any MCP client that supports `stdio` -## Usage Notes +## License -- Any `workspace_id` parameter can also accept a branch ID. -- When `workspace_id` is omitted, the server uses `DEFAULT_WORKSPACE_ID` if configured. -- `get_publishable_keys` resolves the default branch automatically when needed. +volcengine/mcp-server is licensed under the [MIT License](https://github.com/volcengine/mcp-server/blob/main/LICENSE). diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index 051b844b..8079b28a 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -1,52 +1,104 @@ # Supabase MCP Server -面向 AIDAP 工作区的 Supabase MCP 服务。 +[English](README.md) | 简体中文 -## 概览 +> 面向 AIDAP workspace 的 Supabase MCP Server,通过 MCP 暴露工作区、分支、数据库、Edge Functions、Storage 和 TypeScript 类型生成能力。 -这个服务通过 MCP 暴露 Supabase 能力,并统一使用 AIDAP `workspace` 作为核心资源模型。 +| 项目 | 详情 | +| ---- | ---- | +| 版本 | v0.1.0 | +| 描述 | 基于 AIDAP workspace 的 Supabase MCP Server | +| 分类 | 数据库 / 开发工具 | +| 标签 | Supabase, PostgreSQL, Edge Functions, Storage, AIDAP | -支持范围: +## 工具列表 + +### 工作区与分支 + +| 工具 | 说明 | +| ---- | ---- | +| `list_workspaces` | 列出当前账号下可访问的 Supabase workspace | +| `get_workspace` | 查询 workspace 详情,也支持直接传 branch ID | +| `create_workspace` | 创建新的 Supabase workspace | +| `pause_workspace` | 暂停 workspace | +| `restore_workspace` | 恢复已暂停的 workspace | +| `get_workspace_url` | 获取 workspace 或 branch 的 API 地址 | +| `get_publishable_keys` | 获取 publishable、anon、service_role 等密钥 | +| `list_branches` | 列出 workspace 下的分支 | +| `create_branch` | 创建开发分支 | +| `delete_branch` | 删除开发分支 | +| `reset_branch` | 将分支重置到初始状态 | + +### 数据库 + +| 工具 | 说明 | +| ---- | ---- | +| `execute_sql` | 在 Postgres 数据库上执行原始 SQL | +| `list_tables` | 列出一个或多个 schema 
下的表 | +| `list_migrations` | 查询 `supabase_migrations.schema_migrations` 中的迁移记录 | +| `list_extensions` | 列出已安装的 PostgreSQL 扩展 | +| `apply_migration` | 执行迁移 SQL,并写入 `supabase_migrations.schema_migrations` | +| `generate_typescript_types` | 根据 schema 元数据生成 TypeScript 类型定义 | + +### Edge Functions + +| 工具 | 说明 | +| ---- | ---- | +| `list_edge_functions` | 列出 workspace 或 branch 下的 Edge Functions | +| `get_edge_function` | 获取 Edge Function 的代码和配置 | +| `deploy_edge_function` | 创建或更新 Edge Function | +| `delete_edge_function` | 删除 Edge Function | + +### Storage + +| 工具 | 说明 | +| ---- | ---- | +| `list_storage_buckets` | 列出存储桶 | +| `create_storage_bucket` | 创建新的存储桶 | +| `delete_storage_bucket` | 删除存储桶 | +| `get_storage_config` | 获取 Storage 配置 | -- 工作区生命周期管理 -- 分支生命周期管理 -- 数据库访问 -- Edge Functions -- 存储管理 -- TypeScript 类型生成 +## 鉴权方式 + +使用火山引擎 AK/SK 鉴权。可在[火山引擎 API 访问密钥控制台](https://console.volcengine.com/iam/keymanage/)获取凭证。 ## 环境变量 -| 变量名 | 必需 | 说明 | -| --- | --- | --- | -| `VOLCENGINE_ACCESS_KEY` | 是 | 火山引擎访问密钥 | -| `VOLCENGINE_SECRET_KEY` | 是 | 火山引擎私密密钥 | -| `VOLCENGINE_REGION` | 否 | 区域,默认 `cn-beijing` | -| `READ_ONLY` | 否 | 设为 `true` 时禁止写操作 | -| `DEFAULT_WORKSPACE_ID` | 否 | 未传 `workspace_id` 时使用的默认工作区 | -| `SUPABASE_WORKSPACE_SLUG` | 否 | Edge Functions 使用的 slug,默认 `default` | -| `SUPABASE_ENDPOINT_SCHEME` | 否 | `http` 或 `https`,默认 `http` | +| 变量名 | 必需 | 默认值 | 说明 | +| ---- | ---- | ---- | ---- | +| `VOLCENGINE_ACCESS_KEY` | 是 | - | 火山引擎 Access Key | +| `VOLCENGINE_SECRET_KEY` | 是 | - | 火山引擎 Secret Key | +| `VOLCENGINE_REGION` | 否 | `cn-beijing` | AIDAP API 所在地域 | +| `DEFAULT_WORKSPACE_ID` | 否 | - | 未传 `workspace_id` 时使用的默认目标 | +| `READ_ONLY` | 否 | `false` | 设为 `true` 后会禁止所有写操作工具 | +| `SUPABASE_WORKSPACE_SLUG` | 否 | `default` | Edge Functions API 使用的项目 slug | +| `SUPABASE_ENDPOINT_SCHEME` | 否 | `http` | 生成 workspace URL 时使用的协议 | +| `PORT` | 否 | `8000` | 直接启动服务时监听的端口 | + +## 部署 -## 启动 +### 在本地代码仓库中运行 ```bash -python -m mcp_server_supabase.server +uv --directory 
/ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase ``` -## MCP 客户端配置示例 +### 使用本地源码配置 MCP Client ```json { "mcpServers": { "supabase": { - "command": "python", + "command": "uv", "args": [ - "-m", - "mcp_server_supabase.server" + "--directory", + "/ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase", + "run", + "mcp-server-supabase" ], "env": { - "VOLCENGINE_ACCESS_KEY": "your-access-key", - "VOLCENGINE_SECRET_KEY": "your-secret-key", + "VOLCENGINE_ACCESS_KEY": "", + "VOLCENGINE_SECRET_KEY": "", "VOLCENGINE_REGION": "cn-beijing", "DEFAULT_WORKSPACE_ID": "ws-xxxxxxxx" } @@ -55,51 +107,53 @@ python -m mcp_server_supabase.server } ``` -## 工具列表 - -### 工作区 - -- `list_workspaces` -- `get_workspace` -- `create_workspace` -- `pause_workspace` -- `restore_workspace` -- `get_workspace_url` -- `get_publishable_keys` +### 使用 `uvx` 配置 MCP Client -### 分支 +```json +{ + "mcpServers": { + "supabase": { + "command": "uvx", + "args": [ + "--from", + "git+https://github.com/volcengine/mcp-server#subdirectory=server/mcp_server_supabase", + "mcp-server-supabase" + ], + "env": { + "VOLCENGINE_ACCESS_KEY": "", + "VOLCENGINE_SECRET_KEY": "", + "VOLCENGINE_REGION": "cn-beijing", + "DEFAULT_WORKSPACE_ID": "ws-xxxxxxxx" + } + } + } +} +``` -- `list_branches` -- `create_branch` -- `delete_branch` -- `reset_branch` +### 直接使用 Python 入口启动 -### 数据库 +```bash +python3 -m mcp_server_supabase.server --port 8000 +``` -- `execute_sql` -- `list_tables` -- `list_migrations` -- `list_extensions` -- `apply_migration` -- `generate_typescript_types` +这个包同时暴露了 `mcp-server-supabase` 和 `supabase-aidap` 两个入口,示例统一使用 `mcp-server-supabase`。 -### Edge Functions +## 使用说明 -- `list_edge_functions` -- `get_edge_function` -- `deploy_edge_function` -- `delete_edge_function` +- 如果没有显式传入 `workspace_id`,且配置了 `DEFAULT_WORKSPACE_ID`,服务会自动使用这个默认目标。 +- 如果传入的是 `br-xxxx` 这样的 branch ID,服务会自动解析所属 workspace。 +- `get_publishable_keys` 在需要时会自动解析默认分支。 +- `reset_branch` 虽然接收 `migration_version` 参数,但当前 AIDAP 
API 会忽略这个值,只执行分支重置。 +- `deploy_edge_function` 当前支持 `native-node20/v1`、`native-python3.9/v1`、`native-python3.10/v1`、`native-python3.12/v1`。 -### 存储 +## 可适配客户端 -- `list_storage_buckets` -- `create_storage_bucket` -- `delete_storage_bucket` -- `get_storage_config` -- `update_storage_config` +- Cursor +- Claude Desktop +- Cline +- Trae +- 所有支持 `stdio` 的 MCP Client -## 使用说明 +## License -- 所有 `workspace_id` 参数都可以直接传分支 ID。 -- 未传 `workspace_id` 时,如果配置了 `DEFAULT_WORKSPACE_ID`,服务会自动使用它。 -- `get_publishable_keys` 在需要时会自动解析默认分支。 +volcengine/mcp-server is licensed under the [MIT License](https://github.com/volcengine/mcp-server/blob/main/LICENSE). diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py index 55384234..789f9cc7 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py @@ -1,5 +1,3 @@ -import json - from mcp.server.fastmcp import FastMCP from .runtime import SupabaseRuntime @@ -94,12 +92,6 @@ async def get_storage_config(workspace_id: str = None) -> str: """Gets the storage configuration for a workspace or branch.""" return await storage_tools.get_storage_config(workspace_id) - @mcp.tool() - async def update_storage_config(config: str, workspace_id: str = None) -> str: - """Updates the storage configuration for a workspace or branch.""" - return await storage_tools.update_storage_config(json.loads(config), workspace_id) - - def _register_database_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: database_tools = runtime.database_tools diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py index 5845b896..18b6f10e 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py +++ 
b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py @@ -1,10 +1,9 @@ -from typing import Optional, List, Dict, Any +from typing import Optional, List import logging import json from .base import BaseTools from ..utils import handle_errors, read_only_check from ..models import StorageConfig -from ..platform.supabase_client import SupabaseApiError logger = logging.getLogger(__name__) @@ -96,28 +95,3 @@ async def get_storage_config(self, workspace_id: Optional[str] = None) -> Storag client = await self._get_client(ws_id, branch_id) result = await client.call_api("/storage/v1/config") return StorageConfig(**result) - - @handle_errors - @read_only_check - async def update_storage_config( - self, - config: Dict[str, Any], - workspace_id: Optional[str] = None, - ) -> dict: - if not isinstance(config, dict) or not config: - raise ValueError("config must be a non-empty object") - - ws_id, branch_id = await self._resolve_target(workspace_id) - client = await self._get_client(ws_id, branch_id) - try: - await client.call_api("/storage/v1/config", method="PUT", json_data=config) - except SupabaseApiError as e: - if e.status_code == 404 and e.path == "/storage/v1/config": - return { - "success": False, - "supported": False, - "code": "UnsupportedOperation", - "error": "Updating storage config is not supported by current AIDAP workspace endpoint" - } - raise - return {"success": True} From 8c990ae1511863a814fe7338b3ccd6e3463de124 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Fri, 6 Mar 2026 15:49:02 +0800 Subject: [PATCH 21/32] fix:supbase --- .../mcp_server_supabase/src/mcp_server_supabase/tool_registry.py | 1 + 1 file changed, 1 insertion(+) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py index 789f9cc7..6df7b161 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py +++ 
b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py @@ -92,6 +92,7 @@ async def get_storage_config(workspace_id: str = None) -> str: """Gets the storage configuration for a workspace or branch.""" return await storage_tools.get_storage_config(workspace_id) + def _register_database_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: database_tools = runtime.database_tools From 31de61661ba1f3afae00c050e27e73ed987955f8 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Tue, 10 Mar 2026 10:37:10 +0800 Subject: [PATCH 22/32] =?UTF-8?q?feat:=E6=94=AF=E6=8C=81sse=E5=92=8Cstream?= =?UTF-8?q?able?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 31 ++++- server/mcp_server_supabase/README_zh.md | 31 ++++- server/mcp_server_supabase/pyproject.toml | 2 + .../src/mcp_server_supabase/server.py | 119 ++++++++++++++++-- 4 files changed, 167 insertions(+), 16 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index 3c0e95ac..01e99893 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -73,7 +73,13 @@ Use Volcengine AK/SK authentication. 
Obtain your credentials from the [Volcengin | `READ_ONLY` | No | `false` | Set to `true` to block all mutating tools | | `SUPABASE_WORKSPACE_SLUG` | No | `default` | Project slug used by Edge Functions APIs | | `SUPABASE_ENDPOINT_SCHEME` | No | `http` | Endpoint scheme used when building workspace URLs | -| `PORT` | No | `8000` | Port used when running the server directly | +| `MCP_SERVER_HOST` | No | `0.0.0.0` | Host used by `sse` and `streamable-http` transports | +| `MCP_SERVER_PORT` | No | `8000` | Preferred port variable for network transports | +| `PORT` | No | `8000` | Backward-compatible port variable | +| `MCP_MOUNT_PATH` | No | `/` | Base mount path for HTTP transports | +| `MCP_SSE_PATH` | No | `/sse` | SSE endpoint path | +| `MCP_MESSAGE_PATH` | No | `/messages/` | SSE message POST path | +| `STREAMABLE_HTTP_PATH` | No | `/mcp` | Streamable HTTP endpoint path | ## Deployment @@ -83,6 +89,21 @@ Use Volcengine AK/SK authentication. Obtain your credentials from the [Volcengin uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase ``` +### Run with an explicit transport + +```bash +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase --transport stdio +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase --transport sse --host 0.0.0.0 --port 8000 +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase --transport streamable-http --host 0.0.0.0 --port 8000 +``` + +### Dedicated network entrypoints + +```bash +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase-sse +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase-streamable +``` + ### MCP client config with local source ```json @@ -134,9 +155,10 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-s ```bash python3 -m 
mcp_server_supabase.server --port 8000 +python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 ``` -The package exposes both `mcp-server-supabase` and `supabase-aidap`. The examples above use `mcp-server-supabase`. +The package exposes `mcp-server-supabase`, `supabase-aidap`, `mcp-server-supabase-sse`, and `mcp-server-supabase-streamable`. The examples above use `mcp-server-supabase`. ## Usage Notes @@ -145,6 +167,9 @@ The package exposes both `mcp-server-supabase` and `supabase-aidap`. The example - `get_publishable_keys` resolves the default branch automatically when needed. - `reset_branch` accepts `migration_version`, but the current AIDAP API ignores that value and performs a branch reset only. - `deploy_edge_function` currently supports `native-node20/v1`, `native-python3.9/v1`, `native-python3.10/v1`, and `native-python3.12/v1`. +- `--transport sse` serves the MCP SSE endpoint at `MCP_SSE_PATH` and the message endpoint at `MCP_MESSAGE_PATH`. +- `--transport streamable-http` serves the MCP HTTP endpoint at `STREAMABLE_HTTP_PATH`. +- For remote deployments, `streamable-http` is usually the better default; `sse` remains available for clients that still require it. ## Compatible Clients @@ -152,7 +177,7 @@ The package exposes both `mcp-server-supabase` and `supabase-aidap`. 
The example - Claude Desktop - Cline - Trae -- Any MCP client that supports `stdio` +- Any MCP client that supports `stdio`, `sse`, or `streamable-http` ## License diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index 8079b28a..e8349ddd 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -73,7 +73,13 @@ | `READ_ONLY` | 否 | `false` | 设为 `true` 后会禁止所有写操作工具 | | `SUPABASE_WORKSPACE_SLUG` | 否 | `default` | Edge Functions API 使用的项目 slug | | `SUPABASE_ENDPOINT_SCHEME` | 否 | `http` | 生成 workspace URL 时使用的协议 | -| `PORT` | 否 | `8000` | 直接启动服务时监听的端口 | +| `MCP_SERVER_HOST` | 否 | `0.0.0.0` | `sse` 和 `streamable-http` 使用的监听地址 | +| `MCP_SERVER_PORT` | 否 | `8000` | 网络传输优先使用的端口变量 | +| `PORT` | 否 | `8000` | 兼容保留的端口变量 | +| `MCP_MOUNT_PATH` | 否 | `/` | HTTP 传输的基础挂载路径 | +| `MCP_SSE_PATH` | 否 | `/sse` | SSE 连接路径 | +| `MCP_MESSAGE_PATH` | 否 | `/messages/` | SSE 消息投递路径 | +| `STREAMABLE_HTTP_PATH` | 否 | `/mcp` | Streamable HTTP 路径 | ## 部署 @@ -83,6 +89,21 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase ``` +### 显式指定 transport 启动 + +```bash +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase --transport stdio +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase --transport sse --host 0.0.0.0 --port 8000 +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase --transport streamable-http --host 0.0.0.0 --port 8000 +``` + +### 独立网络启动入口 + +```bash +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase-sse +uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase-streamable +``` + ### 使用本地源码配置 MCP Client ```json @@ -134,9 +155,10 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-s ```bash python3 -m 
mcp_server_supabase.server --port 8000 +python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 ``` -这个包同时暴露了 `mcp-server-supabase` 和 `supabase-aidap` 两个入口,示例统一使用 `mcp-server-supabase`。 +这个包同时暴露了 `mcp-server-supabase`、`supabase-aidap`、`mcp-server-supabase-sse` 和 `mcp-server-supabase-streamable` 四个入口,示例统一使用 `mcp-server-supabase`。 ## 使用说明 @@ -145,6 +167,9 @@ python3 -m mcp_server_supabase.server --port 8000 - `get_publishable_keys` 在需要时会自动解析默认分支。 - `reset_branch` 虽然接收 `migration_version` 参数,但当前 AIDAP API 会忽略这个值,只执行分支重置。 - `deploy_edge_function` 当前支持 `native-node20/v1`、`native-python3.9/v1`、`native-python3.10/v1`、`native-python3.12/v1`。 +- `--transport sse` 会在 `MCP_SSE_PATH` 暴露 SSE 连接地址,并在 `MCP_MESSAGE_PATH` 暴露消息投递地址。 +- `--transport streamable-http` 会在 `STREAMABLE_HTTP_PATH` 暴露 MCP HTTP 地址。 +- 远程部署通常更推荐 `streamable-http`,但为了兼容仍保留 `sse`。 ## 可适配客户端 @@ -152,7 +177,7 @@ python3 -m mcp_server_supabase.server --port 8000 - Claude Desktop - Cline - Trae -- 所有支持 `stdio` 的 MCP Client +- 所有支持 `stdio`、`sse` 或 `streamable-http` 的 MCP Client ## License diff --git a/server/mcp_server_supabase/pyproject.toml b/server/mcp_server_supabase/pyproject.toml index b1348b7c..0590f69d 100644 --- a/server/mcp_server_supabase/pyproject.toml +++ b/server/mcp_server_supabase/pyproject.toml @@ -29,6 +29,8 @@ legacy = [ [project.scripts] mcp-server-supabase = "mcp_server_supabase.server:main" supabase-aidap = "mcp_server_supabase.server:main" +mcp-server-supabase-sse = "mcp_server_supabase.sse:main" +mcp-server-supabase-streamable = "mcp_server_supabase.streamable_http:main" [build-system] requires = ["hatchling"] diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index 554b2b82..ceabccb3 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -14,17 +14,78 @@ format="%(asctime)s - %(name)s - 
%(levelname)s - %(message)s" ) -default_workspace_id = os.getenv("DEFAULT_WORKSPACE_ID") +DEFAULT_HOST = "0.0.0.0" +DEFAULT_PORT = 8000 +DEFAULT_MOUNT_PATH = "/" +DEFAULT_SSE_PATH = "/sse" +DEFAULT_MESSAGE_PATH = "/messages/" +DEFAULT_STREAMABLE_HTTP_PATH = "/mcp" + + +def _resolve_port(port: int | None = None) -> int: + if port is not None: + return port + return int(os.getenv("MCP_SERVER_PORT", os.getenv("PORT", str(DEFAULT_PORT)))) + + +def _resolve_host(host: str | None = None) -> str: + if host is not None: + return host + return os.getenv("MCP_SERVER_HOST", DEFAULT_HOST) + + +def _resolve_default_workspace_id(default_target_id: str | None = None) -> str | None: + if default_target_id is not None: + return default_target_id + return os.getenv("DEFAULT_WORKSPACE_ID") + + +def _resolve_mount_path(mount_path: str | None = None) -> str: + if mount_path is not None: + return mount_path + return os.getenv("MCP_MOUNT_PATH", DEFAULT_MOUNT_PATH) + + +def _resolve_sse_path(sse_path: str | None = None) -> str: + if sse_path is not None: + return sse_path + return os.getenv("MCP_SSE_PATH", DEFAULT_SSE_PATH) + + +def _resolve_message_path(message_path: str | None = None) -> str: + if message_path is not None: + return message_path + return os.getenv("MCP_MESSAGE_PATH", DEFAULT_MESSAGE_PATH) + + +def _resolve_streamable_http_path(streamable_http_path: str | None = None) -> str: + if streamable_http_path is not None: + return streamable_http_path + return os.getenv("STREAMABLE_HTTP_PATH", DEFAULT_STREAMABLE_HTTP_PATH) def create_mcp( port: int | None = None, + host: str | None = None, default_target_id: str | None = None, + mount_path: str | None = None, + sse_path: str | None = None, + message_path: str | None = None, + streamable_http_path: str | None = None, ) -> FastMCP: - resolved_port = port if port is not None else int(os.getenv("PORT", "8000")) - resolved_default_target_id = default_target_id if default_target_id is not None else default_workspace_id + resolved_port 
= _resolve_port(port) + resolved_host = _resolve_host(host) + resolved_default_target_id = _resolve_default_workspace_id(default_target_id) runtime = create_runtime(resolved_default_target_id) - mcp = FastMCP("Supabase MCP Server (AIDAP)", port=resolved_port) + mcp = FastMCP( + "Supabase MCP Server (AIDAP)", + host=resolved_host, + port=resolved_port, + mount_path=_resolve_mount_path(mount_path), + sse_path=_resolve_sse_path(sse_path), + message_path=_resolve_message_path(message_path), + streamable_http_path=_resolve_streamable_http_path(streamable_http_path), + ) register_tools(mcp, runtime) return mcp @@ -32,17 +93,55 @@ def create_mcp( mcp = create_mcp() +def run_server( + transport: str = "stdio", + port: int | None = None, + host: str | None = None, + default_target_id: str | None = None, +) -> None: + create_mcp( + port=port, + host=host, + default_target_id=default_target_id, + ).run(transport=transport) + + def main(): parser = argparse.ArgumentParser(description="Supabase MCP Server") - parser.add_argument("--port", type=int, default=8000, help="Port to run the server on") + parser.add_argument( + "--transport", + "-t", + choices=["sse", "stdio", "streamable-http"], + default="stdio", + help="Transport protocol to use", + ) + parser.add_argument("--host", type=str, default=None, help="Host to bind for network transports") + parser.add_argument("--port", type=int, default=None, help="Port to run the server on") args = parser.parse_args() - logger.info(f"Starting Supabase MCP Server on port {args.port}") - logger.info(f"Read-only mode: {READ_ONLY}") - if default_workspace_id: - logger.info(f"Default workspace ID: {default_workspace_id}") + resolved_host = _resolve_host(args.host) + resolved_port = _resolve_port(args.port) + resolved_default_workspace_id = _resolve_default_workspace_id() + + logger.info("Starting Supabase MCP Server with %s transport", args.transport) + logger.info("Read-only mode: %s", READ_ONLY) + if resolved_default_workspace_id: + 
logger.info("Default workspace ID: %s", resolved_default_workspace_id) + if args.transport != "stdio": + logger.info( + "Server binding: host=%s port=%s sse_path=%s message_path=%s streamable_http_path=%s", + resolved_host, + resolved_port, + _resolve_sse_path(), + _resolve_message_path(), + _resolve_streamable_http_path(), + ) - create_mcp(port=args.port).run() + run_server( + transport=args.transport, + port=args.port, + host=args.host, + ) if __name__ == "__main__": From 2e6ec901ce0431b959355f2f76a338b7eac7e5f0 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Tue, 10 Mar 2026 10:37:18 +0800 Subject: [PATCH 23/32] =?UTF-8?q?feat:=E6=94=AF=E6=8C=81sse=E5=92=8Cstream?= =?UTF-8?q?able?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../mcp_server_supabase/src/mcp_server_supabase/sse.py | 9 +++++++++ .../src/mcp_server_supabase/streamable_http.py | 9 +++++++++ 2 files changed, 18 insertions(+) create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/sse.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/streamable_http.py diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/sse.py b/server/mcp_server_supabase/src/mcp_server_supabase/sse.py new file mode 100644 index 00000000..554e9ad6 --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/sse.py @@ -0,0 +1,9 @@ +from .server import run_server + + +def main() -> None: + run_server(transport="sse") + + +if __name__ == "__main__": + main() diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/streamable_http.py b/server/mcp_server_supabase/src/mcp_server_supabase/streamable_http.py new file mode 100644 index 00000000..2b7d8ff4 --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/streamable_http.py @@ -0,0 +1,9 @@ +from .server import run_server + + +def main() -> None: + run_server(transport="streamable-http") + + +if __name__ == "__main__": + main() From 
e8f9b6f08629f7968249c9491d45eb017d2d3bf6 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Tue, 10 Mar 2026 14:21:47 +0800 Subject: [PATCH 24/32] =?UTF-8?q?feat:=E6=94=AF=E6=8C=81sse=E5=92=8Cstream?= =?UTF-8?q?able?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 54 ++++++++---- server/mcp_server_supabase/README_zh.md | 55 +++++++++---- .../src/mcp_server_supabase/runtime.py | 16 ++-- .../src/mcp_server_supabase/server.py | 82 +++++++++++++++---- .../src/mcp_server_supabase/tools/base.py | 26 ++---- .../tools/workspace_tools.py | 5 +- .../src/mcp_server_supabase/utils/__init__.py | 3 +- .../src/mcp_server_supabase/utils/targets.py | 11 +-- 8 files changed, 165 insertions(+), 87 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index 01e99893..7bd1dd23 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -13,7 +13,7 @@ English | [简体中文](README_zh.md) ## Tools -### Workspace and Branch +### `account` | Tool | Description | | ---- | ---- | @@ -22,14 +22,11 @@ English | [简体中文](README_zh.md) | `create_workspace` | Create a new Supabase workspace | | `pause_workspace` | Pause a workspace | | `restore_workspace` | Resume a paused workspace | -| `get_workspace_url` | Get the API endpoint for a workspace or branch | -| `get_publishable_keys` | Get publishable, anon, and service role keys | -| `list_branches` | List branches under a workspace | -| `create_branch` | Create a development branch | -| `delete_branch` | Delete a development branch | -| `reset_branch` | Reset a branch to its baseline state | +### `docs` -### Database +No tools are currently exposed. 
+ +### `database` | Tool | Description | | ---- | ---- | @@ -38,9 +35,20 @@ English | [简体中文](README_zh.md) | `list_migrations` | List records from `supabase_migrations.schema_migrations` | | `list_extensions` | List installed PostgreSQL extensions | | `apply_migration` | Run migration SQL and record it in `supabase_migrations.schema_migrations` | + +### `debugging` + +No tools are currently exposed. + +### `development` + +| Tool | Description | +| ---- | ---- | +| `get_workspace_url` | Get the API endpoint for a workspace or branch | +| `get_publishable_keys` | Get publishable, anon, and service role keys | | `generate_typescript_types` | Generate TypeScript definitions from schema metadata | -### Edge Functions +### `functions` | Tool | Description | | ---- | ---- | @@ -49,7 +57,16 @@ English | [简体中文](README_zh.md) | `deploy_edge_function` | Create or update an Edge Function | | `delete_edge_function` | Delete an Edge Function | -### Storage +### `branching` + +| Tool | Description | +| ---- | ---- | +| `list_branches` | List branches under a workspace | +| `create_branch` | Create a development branch | +| `delete_branch` | Delete a development branch | +| `reset_branch` | Reset a branch to its baseline state | + +### `storage` | Tool | Description | | ---- | ---- | @@ -69,7 +86,10 @@ Use Volcengine AK/SK authentication. Obtain your credentials from the [Volcengin | `VOLCENGINE_ACCESS_KEY` | Yes | - | Volcengine access key | | `VOLCENGINE_SECRET_KEY` | Yes | - | Volcengine secret key | | `VOLCENGINE_REGION` | No | `cn-beijing` | Region used for the AIDAP API | -| `DEFAULT_WORKSPACE_ID` | No | - | Default target used when `workspace_id` is omitted | +| `WORKSPACE_REF` | No | - | Connection-level hard scope. When set, `account` tools are hidden and workspace-scoped calls are forced to this target | +| `FEATURES` | No | `account,database,debugging,development,docs,functions,branching` | Official feature groups. 
`storage` is disabled by default | +| `ENABLED_TOOLS` | No | - | Comma-separated allowlist applied after `features` filtering | +| `DISABLED_TOOLS` | No | - | Comma-separated denylist that overrides `ENABLED_TOOLS` | | `READ_ONLY` | No | `false` | Set to `true` to block all mutating tools | | `SUPABASE_WORKSPACE_SLUG` | No | `default` | Project slug used by Edge Functions APIs | | `SUPABASE_ENDPOINT_SCHEME` | No | `http` | Endpoint scheme used when building workspace URLs | @@ -121,7 +141,8 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-s "VOLCENGINE_ACCESS_KEY": "", "VOLCENGINE_SECRET_KEY": "", "VOLCENGINE_REGION": "cn-beijing", - "DEFAULT_WORKSPACE_ID": "ws-xxxxxxxx" + "WORKSPACE_REF": "ws-xxxxxxxx", + "FEATURES": "database,functions" } } } @@ -144,7 +165,8 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-s "VOLCENGINE_ACCESS_KEY": "", "VOLCENGINE_SECRET_KEY": "", "VOLCENGINE_REGION": "cn-beijing", - "DEFAULT_WORKSPACE_ID": "ws-xxxxxxxx" + "WORKSPACE_REF": "ws-xxxxxxxx", + "FEATURES": "database,functions" } } } @@ -162,7 +184,11 @@ The package exposes `mcp-server-supabase`, `supabase-aidap`, `mcp-server-supabas ## Usage Notes -- If `workspace_id` is omitted, the server falls back to `DEFAULT_WORKSPACE_ID` when configured. +- `WORKSPACE_REF` applies a hard workspace scope to the connection and removes `workspace_id` from visible tool schemas. +- When `WORKSPACE_REF` is active, `account` tools are hidden and any explicit `workspace_id` outside the scope is rejected. +- `FEATURES` accepts only the official groups: `account`, `docs`, `database`, `debugging`, `development`, `functions`, `storage`, and `branching`. +- If `FEATURES` is not set, the default enabled groups are `account`, `database`, `debugging`, `development`, `docs`, `functions`, and `branching`. `storage` stays disabled by default. +- `ENABLED_TOOLS` and `DISABLED_TOOLS` narrow the tool set after feature filtering. 
`DISABLED_TOOLS` takes precedence. - If a branch ID such as `br-xxxx` is provided, the server resolves the corresponding workspace automatically. - `get_publishable_keys` resolves the default branch automatically when needed. - `reset_branch` accepts `migration_version`, but the current AIDAP API ignores that value and performs a branch reset only. diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index e8349ddd..baeec262 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -13,7 +13,7 @@ ## 工具列表 -### 工作区与分支 +### `account` | 工具 | 说明 | | ---- | ---- | @@ -22,14 +22,12 @@ | `create_workspace` | 创建新的 Supabase workspace | | `pause_workspace` | 暂停 workspace | | `restore_workspace` | 恢复已暂停的 workspace | -| `get_workspace_url` | 获取 workspace 或 branch 的 API 地址 | -| `get_publishable_keys` | 获取 publishable、anon、service_role 等密钥 | -| `list_branches` | 列出 workspace 下的分支 | -| `create_branch` | 创建开发分支 | -| `delete_branch` | 删除开发分支 | -| `reset_branch` | 将分支重置到初始状态 | -### 数据库 +### `docs` + +当前没有暴露工具。 + +### `database` | 工具 | 说明 | | ---- | ---- | @@ -38,9 +36,20 @@ | `list_migrations` | 查询 `supabase_migrations.schema_migrations` 中的迁移记录 | | `list_extensions` | 列出已安装的 PostgreSQL 扩展 | | `apply_migration` | 执行迁移 SQL,并写入 `supabase_migrations.schema_migrations` | + +### `debugging` + +当前没有暴露工具。 + +### `development` + +| 工具 | 说明 | +| ---- | ---- | +| `get_workspace_url` | 获取 workspace 或 branch 的 API 地址 | +| `get_publishable_keys` | 获取 publishable、anon、service_role 等密钥 | | `generate_typescript_types` | 根据 schema 元数据生成 TypeScript 类型定义 | -### Edge Functions +### `functions` | 工具 | 说明 | | ---- | ---- | @@ -49,7 +58,16 @@ | `deploy_edge_function` | 创建或更新 Edge Function | | `delete_edge_function` | 删除 Edge Function | -### Storage +### `branching` + +| 工具 | 说明 | +| ---- | ---- | +| `list_branches` | 列出 workspace 下的分支 | +| `create_branch` | 创建开发分支 | +| `delete_branch` | 删除开发分支 | +| `reset_branch` | 
将分支重置到初始状态 | + +### `storage` | 工具 | 说明 | | ---- | ---- | @@ -69,7 +87,10 @@ | `VOLCENGINE_ACCESS_KEY` | 是 | - | 火山引擎 Access Key | | `VOLCENGINE_SECRET_KEY` | 是 | - | 火山引擎 Secret Key | | `VOLCENGINE_REGION` | 否 | `cn-beijing` | AIDAP API 所在地域 | -| `DEFAULT_WORKSPACE_ID` | 否 | - | 未传 `workspace_id` 时使用的默认目标 | +| `WORKSPACE_REF` | 否 | - | 连接级 workspace scope,设置后会隐藏 `account` 组工具,并强制所有 workspace-scoped 调用只能访问这个目标 | +| `FEATURES` | 否 | `account,database,debugging,development,docs,functions,branching` | 官方 feature groups,`storage` 默认关闭 | +| `ENABLED_TOOLS` | 否 | - | 逗号分隔的工具白名单,作用在 `features` 过滤之后 | +| `DISABLED_TOOLS` | 否 | - | 逗号分隔的工具黑名单,优先级高于 `ENABLED_TOOLS` | | `READ_ONLY` | 否 | `false` | 设为 `true` 后会禁止所有写操作工具 | | `SUPABASE_WORKSPACE_SLUG` | 否 | `default` | Edge Functions API 使用的项目 slug | | `SUPABASE_ENDPOINT_SCHEME` | 否 | `http` | 生成 workspace URL 时使用的协议 | @@ -121,7 +142,8 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-s "VOLCENGINE_ACCESS_KEY": "", "VOLCENGINE_SECRET_KEY": "", "VOLCENGINE_REGION": "cn-beijing", - "DEFAULT_WORKSPACE_ID": "ws-xxxxxxxx" + "WORKSPACE_REF": "ws-xxxxxxxx", + "FEATURES": "database,functions" } } } @@ -144,7 +166,8 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-s "VOLCENGINE_ACCESS_KEY": "", "VOLCENGINE_SECRET_KEY": "", "VOLCENGINE_REGION": "cn-beijing", - "DEFAULT_WORKSPACE_ID": "ws-xxxxxxxx" + "WORKSPACE_REF": "ws-xxxxxxxx", + "FEATURES": "database,functions" } } } @@ -162,7 +185,11 @@ python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 ## 使用说明 -- 如果没有显式传入 `workspace_id`,且配置了 `DEFAULT_WORKSPACE_ID`,服务会自动使用这个默认目标。 +- `WORKSPACE_REF` 会把连接 hard-scope 到单个目标,并在 tool schema 中移除 `workspace_id`。 +- `WORKSPACE_REF` 生效时,`account` 组工具不会暴露,且显式传入其他 `workspace_id` 会被拒绝。 +- `FEATURES` 只接受官方 8 个分组:`account`、`docs`、`database`、`debugging`、`development`、`functions`、`storage`、`branching`。 +- 如果没有设置 `FEATURES`,默认启用 
`account`、`database`、`debugging`、`development`、`docs`、`functions`、`branching`,`storage` 默认关闭。 +- `ENABLED_TOOLS` 和 `DISABLED_TOOLS` 会在 feature 过滤之后继续收窄工具集,且 `DISABLED_TOOLS` 优先。 - 如果传入的是 `br-xxxx` 这样的 branch ID,服务会自动解析所属 workspace。 - `get_publishable_keys` 在需要时会自动解析默认分支。 - `reset_branch` 虽然接收 `migration_version` 参数,但当前 AIDAP API 会忽略这个值,只执行分支重置。 diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py b/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py index d4f103c4..86981b86 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py @@ -1,5 +1,4 @@ from dataclasses import dataclass -from typing import Optional from .platform import AidapClient from .tools import DatabaseTools, EdgeFunctionTools, StorageTools, WorkspaceTools @@ -8,23 +7,18 @@ @dataclass(slots=True) class SupabaseRuntime: aidap_client: AidapClient - default_workspace_id: Optional[str] edge_tools: EdgeFunctionTools storage_tools: StorageTools database_tools: DatabaseTools workspace_tools: WorkspaceTools -def create_runtime( - default_workspace_id: Optional[str] = None, - aidap_client: Optional[AidapClient] = None, -) -> SupabaseRuntime: +def create_runtime(aidap_client: AidapClient | None = None) -> SupabaseRuntime: client = aidap_client or AidapClient() return SupabaseRuntime( aidap_client=client, - default_workspace_id=default_workspace_id, - edge_tools=EdgeFunctionTools(client, default_workspace_id), - storage_tools=StorageTools(client, default_workspace_id), - database_tools=DatabaseTools(client, default_workspace_id), - workspace_tools=WorkspaceTools(client, default_workspace_id), + edge_tools=EdgeFunctionTools(client), + storage_tools=StorageTools(client), + database_tools=DatabaseTools(client), + workspace_tools=WorkspaceTools(client), ) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index 
ceabccb3..b2fb15b2 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -2,11 +2,11 @@ import logging import os -from mcp.server.fastmcp import FastMCP - from .config import READ_ONLY from .runtime import create_runtime from .tool_registry import register_tools +from .access_policy import build_partial_access_policy +from .scoped_mcp import ScopedFastMCP logger = logging.getLogger(__name__) logging.basicConfig( @@ -34,10 +34,28 @@ def _resolve_host(host: str | None = None) -> str: return os.getenv("MCP_SERVER_HOST", DEFAULT_HOST) -def _resolve_default_workspace_id(default_target_id: str | None = None) -> str | None: - if default_target_id is not None: - return default_target_id - return os.getenv("DEFAULT_WORKSPACE_ID") +def _resolve_workspace_ref(workspace_ref: str | None = None) -> str | None: + if workspace_ref is not None: + return workspace_ref + return os.getenv("WORKSPACE_REF") + + +def _resolve_features(features: str | None = None) -> str | None: + if features is not None: + return features + return os.getenv("FEATURES") + + +def _resolve_enabled_tools(enabled_tools: str | None = None) -> str | None: + if enabled_tools is not None: + return enabled_tools + return os.getenv("ENABLED_TOOLS") + + +def _resolve_disabled_tools(disabled_tools: str | None = None) -> str | None: + if disabled_tools is not None: + return disabled_tools + return os.getenv("DISABLED_TOOLS") def _resolve_mount_path(mount_path: str | None = None) -> str: @@ -67,18 +85,27 @@ def _resolve_streamable_http_path(streamable_http_path: str | None = None) -> st def create_mcp( port: int | None = None, host: str | None = None, - default_target_id: str | None = None, + workspace_ref: str | None = None, + features: str | None = None, + enabled_tools: str | None = None, + disabled_tools: str | None = None, mount_path: str | None = None, sse_path: str | None = None, message_path: str | None = None, 
streamable_http_path: str | None = None, -) -> FastMCP: +) -> ScopedFastMCP: resolved_port = _resolve_port(port) resolved_host = _resolve_host(host) - resolved_default_target_id = _resolve_default_workspace_id(default_target_id) - runtime = create_runtime(resolved_default_target_id) - mcp = FastMCP( + access_policy = build_partial_access_policy( + workspace_ref=_resolve_workspace_ref(workspace_ref), + features=_resolve_features(features), + enabled_tools=_resolve_enabled_tools(enabled_tools), + disabled_tools=_resolve_disabled_tools(disabled_tools), + ) + runtime = create_runtime() + mcp = ScopedFastMCP( "Supabase MCP Server (AIDAP)", + access_policy=access_policy, host=resolved_host, port=resolved_port, mount_path=_resolve_mount_path(mount_path), @@ -97,12 +124,18 @@ def run_server( transport: str = "stdio", port: int | None = None, host: str | None = None, - default_target_id: str | None = None, + workspace_ref: str | None = None, + features: str | None = None, + enabled_tools: str | None = None, + disabled_tools: str | None = None, ) -> None: create_mcp( port=port, host=host, - default_target_id=default_target_id, + workspace_ref=workspace_ref, + features=features, + enabled_tools=enabled_tools, + disabled_tools=disabled_tools, ).run(transport=transport) @@ -117,16 +150,29 @@ def main(): ) parser.add_argument("--host", type=str, default=None, help="Host to bind for network transports") parser.add_argument("--port", type=int, default=None, help="Port to run the server on") + parser.add_argument("--workspace-ref", type=str, default=None, help="Hard-scope the connection to a single workspace") + parser.add_argument("--features", type=str, default=None, help="Comma-separated official feature groups") + parser.add_argument("--enabled-tools", type=str, default=None, help="Comma-separated whitelist of tool names") + parser.add_argument("--disabled-tools", type=str, default=None, help="Comma-separated blacklist of tool names") args = parser.parse_args() resolved_host = 
_resolve_host(args.host) resolved_port = _resolve_port(args.port) - resolved_default_workspace_id = _resolve_default_workspace_id() + resolved_workspace_ref = _resolve_workspace_ref(args.workspace_ref) + resolved_features = _resolve_features(args.features) + resolved_enabled_tools = _resolve_enabled_tools(args.enabled_tools) + resolved_disabled_tools = _resolve_disabled_tools(args.disabled_tools) logger.info("Starting Supabase MCP Server with %s transport", args.transport) logger.info("Read-only mode: %s", READ_ONLY) - if resolved_default_workspace_id: - logger.info("Default workspace ID: %s", resolved_default_workspace_id) + if resolved_workspace_ref: + logger.info("Workspace scope: %s", resolved_workspace_ref) + if resolved_features: + logger.info("Feature groups: %s", resolved_features) + if resolved_enabled_tools: + logger.info("Enabled tools: %s", resolved_enabled_tools) + if resolved_disabled_tools: + logger.info("Disabled tools: %s", resolved_disabled_tools) if args.transport != "stdio": logger.info( "Server binding: host=%s port=%s sse_path=%s message_path=%s streamable_http_path=%s", @@ -141,6 +187,10 @@ def main(): transport=args.transport, port=args.port, host=args.host, + workspace_ref=args.workspace_ref, + features=args.features, + enabled_tools=args.enabled_tools, + disabled_tools=args.disabled_tools, ) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py index 677e5fc2..0360c0dc 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py @@ -1,37 +1,25 @@ from typing import Optional from ..platform import AidapClient, SupabaseClient -from ..utils import resolve_target, select_target_id +from ..utils import resolve_target class BaseTools: - """Base class for all tool classes""" - - def __init__(self, aidap_client: AidapClient, workspace_id: Optional[str] = None): + def 
__init__(self, aidap_client: AidapClient): self.aidap = aidap_client - self.default_workspace_id = workspace_id def _get_workspace_id(self, workspace_id: Optional[str]) -> str: - """Get workspace ID from parameter or default""" - result = select_target_id(workspace_id, self.default_workspace_id) - if not result: - raise ValueError( - "workspace_id is required: not provided as parameter and no default workspace_id configured. " - "Please provide workspace_id or set DEFAULT_WORKSPACE_ID environment variable." - ) - return result + if not workspace_id: + raise ValueError("workspace_id is required") + return workspace_id async def _resolve_target(self, workspace_id: Optional[str]) -> tuple[str, Optional[str]]: target = self._get_workspace_id(workspace_id) - resolved_workspace_id, branch_id = await resolve_target(self.aidap, target, None) + resolved_workspace_id, branch_id = await resolve_target(self.aidap, target) if not resolved_workspace_id: - raise ValueError( - "workspace_id is required: not provided as parameter and no default workspace_id configured. " - "Please provide workspace_id or set DEFAULT_WORKSPACE_ID environment variable." 
- ) + raise ValueError("workspace_id is required") return resolved_workspace_id, branch_id async def _get_client(self, workspace_id: str, branch_id: Optional[str] = None) -> SupabaseClient: - """Get Supabase client for workspace""" import logging logger = logging.getLogger(__name__) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index e2630ed3..6d9b9028 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -9,9 +9,8 @@ class WorkspaceTools: - def __init__(self, aidap_client, default_workspace_id: Optional[str] = None): + def __init__(self, aidap_client): self.aidap_client = aidap_client - self.default_workspace_id = default_workspace_id def _to_json(self, payload: dict) -> str: return to_json(payload) @@ -23,7 +22,7 @@ def _pick(self, source: Any, *field_names: str) -> Any: return pick_value(source, *field_names) async def _resolve_target(self, target_id: Optional[str]) -> tuple[Optional[str], Optional[str]]: - return await resolve_target(self.aidap_client, target_id, self.default_workspace_id) + return await resolve_target(self.aidap_client, target_id) def _workspace_view(self, source: Any) -> dict: payload = { diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py b/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py index e43d67e1..449a83a0 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py @@ -1,6 +1,6 @@ from .common import compact_dict, pick_value, to_json from .decorators import format_error, handle_errors, read_only_check -from .targets import resolve_target, select_target_id +from .targets import resolve_target __all__ = [ 'compact_dict', @@ -9,6 +9,5 @@ 'pick_value', 
'read_only_check', 'resolve_target', - 'select_target_id', 'to_json', ] diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py b/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py index a5f1126e..79b4f284 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py @@ -1,12 +1,7 @@ from typing import Optional -def select_target_id(target_id: Optional[str], default_target_id: Optional[str]) -> Optional[str]: - return target_id or default_target_id - - -async def resolve_target(aidap_client, target_id: Optional[str], default_target_id: Optional[str]) -> tuple[Optional[str], Optional[str]]: - resolved_id = select_target_id(target_id, default_target_id) - if not resolved_id: +async def resolve_target(aidap_client, target_id: Optional[str]) -> tuple[Optional[str], Optional[str]]: + if not target_id: return None, None - return await aidap_client.resolve_workspace_and_branch(resolved_id) + return await aidap_client.resolve_workspace_and_branch(target_id) From c318ab0b8910deca8cf2b3955fad7b552c69eab3 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Tue, 10 Mar 2026 14:21:52 +0800 Subject: [PATCH 25/32] =?UTF-8?q?feat:=E6=94=AF=E6=8C=81sse=E5=92=8Cstream?= =?UTF-8?q?able?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/mcp_server_supabase/access_policy.py | 253 ++++++++++++++++++ .../src/mcp_server_supabase/scoped_mcp.py | 75 ++++++ 2 files changed, 328 insertions(+) create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/scoped_mcp.py diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py b/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py new file mode 100644 index 00000000..01fae365 --- /dev/null +++ 
b/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py @@ -0,0 +1,253 @@ +import json +from dataclasses import dataclass +from typing import Any + + +OFFICIAL_FEATURE_GROUPS = ( + "account", + "docs", + "database", + "debugging", + "development", + "functions", + "storage", + "branching", +) +DEFAULT_FEATURE_GROUPS = frozenset({ + "account", + "database", + "debugging", + "development", + "docs", + "functions", + "branching", +}) + + +@dataclass(frozen=True) +class ToolPolicy: + feature: str + scoped: bool + + +TOOL_POLICIES = { + "list_workspaces": ToolPolicy("account", False), + "get_workspace": ToolPolicy("account", True), + "create_workspace": ToolPolicy("account", False), + "pause_workspace": ToolPolicy("account", True), + "restore_workspace": ToolPolicy("account", True), + "execute_sql": ToolPolicy("database", True), + "list_tables": ToolPolicy("database", True), + "list_migrations": ToolPolicy("database", True), + "list_extensions": ToolPolicy("database", True), + "apply_migration": ToolPolicy("database", True), + "get_workspace_url": ToolPolicy("development", True), + "get_publishable_keys": ToolPolicy("development", True), + "generate_typescript_types": ToolPolicy("development", True), + "list_edge_functions": ToolPolicy("functions", True), + "get_edge_function": ToolPolicy("functions", True), + "deploy_edge_function": ToolPolicy("functions", True), + "delete_edge_function": ToolPolicy("functions", True), + "list_storage_buckets": ToolPolicy("storage", True), + "create_storage_bucket": ToolPolicy("storage", True), + "delete_storage_bucket": ToolPolicy("storage", True), + "get_storage_config": ToolPolicy("storage", True), + "list_branches": ToolPolicy("branching", True), + "create_branch": ToolPolicy("branching", True), + "delete_branch": ToolPolicy("branching", True), + "reset_branch": ToolPolicy("branching", True), +} + +ALL_TOOL_NAMES = frozenset(TOOL_POLICIES.keys()) +FEATURE_TOOLS = { + feature: frozenset(name for name, policy in 
TOOL_POLICIES.items() if policy.feature == feature) + for feature in OFFICIAL_FEATURE_GROUPS +} +SCOPED_TOOL_NAMES = frozenset(name for name, policy in TOOL_POLICIES.items() if policy.scoped) + + +@dataclass(frozen=True) +class PartialAccessPolicy: + workspace_ref: str | None = None + features: frozenset[str] | None = None + enabled_tools: frozenset[str] | None = None + disabled_tools: frozenset[str] | None = None + + +@dataclass(frozen=True) +class ResolvedAccessPolicy: + workspace_ref: str | None + features: frozenset[str] + enabled_tools: frozenset[str] | None + disabled_tools: frozenset[str] + + +def _normalize_name(value: Any) -> str: + if not isinstance(value, str): + raise ValueError("Expected string value") + normalized = value.strip() + if not normalized: + raise ValueError("Value cannot be empty") + return normalized + + +def _expand_names(value: Any) -> list[str]: + if value is None: + return [] + if isinstance(value, str): + text = value.strip() + if not text: + return [] + if text.startswith("["): + parsed = json.loads(text) + if not isinstance(parsed, list): + raise ValueError("Expected a JSON array") + return [_normalize_name(item) for item in parsed] + return [_normalize_name(item) for item in text.split(",") if item.strip()] + if isinstance(value, (list, tuple, set, frozenset)): + return [_normalize_name(item) for item in value] + raise ValueError("Unsupported value type") + + +def _parse_name_set(value: Any) -> frozenset[str] | None: + names = _expand_names(value) + if not names: + return None + return frozenset(names) + + +def _parse_query_name_set(params: Any, name: str) -> frozenset[str] | None: + if params is None: + return None + values: list[str] = [] + if hasattr(params, "getlist"): + values = [value for value in params.getlist(name) if value is not None] + elif hasattr(params, "get"): + value = params.get(name) + if value is not None: + values = [value] + if not values: + return None + names: list[str] = [] + for value in values: + 
def _parse_workspace_ref(value: Any) -> str | None:
    """Normalize a raw ``workspace_ref`` value.

    Returns ``None`` for missing or blank input, otherwise the stripped
    string.

    Raises:
        ValueError: if the value is present but not a string.
    """
    if value is None:
        return None
    if not isinstance(value, str):
        raise ValueError("workspace_ref must be a string")
    stripped = value.strip()
    return stripped or None


def _validate_features(features: frozenset[str] | None) -> frozenset[str] | None:
    """Reject feature names outside ``OFFICIAL_FEATURE_GROUPS``.

    ``None`` (meaning "not specified") passes through unchanged.
    """
    if features is None:
        return None
    unknown = sorted(features - set(OFFICIAL_FEATURE_GROUPS))
    if unknown:
        raise ValueError(f"Unsupported features: {', '.join(unknown)}")
    return features


def _validate_tools(tools: frozenset[str] | None, field_name: str) -> frozenset[str] | None:
    """Reject tool names outside ``ALL_TOOL_NAMES``.

    ``field_name`` is only used in the error message so callers can tell
    which policy field carried the bad name.
    """
    if tools is None:
        return None
    unknown = sorted(tools - ALL_TOOL_NAMES)
    if unknown:
        raise ValueError(f"Unsupported {field_name}: {', '.join(unknown)}")
    return tools


def build_partial_access_policy(
    workspace_ref: Any = None,
    features: Any = None,
    enabled_tools: Any = None,
    disabled_tools: Any = None,
) -> PartialAccessPolicy:
    """Build a :class:`PartialAccessPolicy` from raw configuration values.

    Each field is parsed and validated individually; malformed input or
    unknown feature/tool names raise :class:`ValueError`.
    """
    return PartialAccessPolicy(
        workspace_ref=_parse_workspace_ref(workspace_ref),
        features=_validate_features(_parse_name_set(features)),
        enabled_tools=_validate_tools(_parse_name_set(enabled_tools), "enabled_tools"),
        disabled_tools=_validate_tools(_parse_name_set(disabled_tools), "disabled_tools"),
    )


def build_query_access_policy(params: Any) -> PartialAccessPolicy | None:
    """Build a request-scoped policy from HTTP query parameters.

    Returns ``None`` when ``params`` is ``None`` or carries none of the
    recognized policy keys, so callers can distinguish "no request policy"
    from an explicitly empty one.
    """
    if params is None:
        return None
    workspace_ref = None
    if hasattr(params, "get"):
        workspace_ref = _parse_workspace_ref(params.get("workspace_ref"))
    features = _validate_features(_parse_query_name_set(params, "features"))
    enabled_tools = _validate_tools(_parse_query_name_set(params, "enabled_tools"), "enabled_tools")
    disabled_tools = _validate_tools(_parse_query_name_set(params, "disabled_tools"), "disabled_tools")
    fields = (workspace_ref, features, enabled_tools, disabled_tools)
    if all(field is None for field in fields):
        return None
    return PartialAccessPolicy(
        workspace_ref=workspace_ref,
        features=features,
        enabled_tools=enabled_tools,
        disabled_tools=disabled_tools,
    )


def resolve_access_policy(
    server_policy: PartialAccessPolicy | None,
    request_policy: PartialAccessPolicy | None,
) -> ResolvedAccessPolicy:
    """Merge the server-level policy with a request-level policy.

    Merge rules:
      * ``workspace_ref``: the server value is a hard boundary — a request
        that names a different workspace is rejected.
      * ``features``: intersection when both sides specify one; otherwise
        whichever side specified it, falling back to
        ``DEFAULT_FEATURE_GROUPS``.
      * ``enabled_tools``: intersection when both sides specify one.
      * ``disabled_tools``: union of both sides.

    Raises:
        ValueError: when the request names a workspace outside the server
            scope.
    """
    server_policy = server_policy or PartialAccessPolicy()
    request_policy = request_policy or PartialAccessPolicy()

    server_scope = server_policy.workspace_ref
    request_scope = request_policy.workspace_ref
    if server_scope and request_scope and server_scope != request_scope:
        raise ValueError("workspace_ref does not match the server scope")
    workspace_ref = server_scope or request_scope

    if server_policy.features is None and request_policy.features is None:
        features = DEFAULT_FEATURE_GROUPS
    elif server_policy.features is None:
        features = request_policy.features
    elif request_policy.features is None:
        features = server_policy.features
    else:
        features = server_policy.features & request_policy.features

    if server_policy.enabled_tools is None:
        enabled_tools = request_policy.enabled_tools
    elif request_policy.enabled_tools is None:
        enabled_tools = server_policy.enabled_tools
    else:
        enabled_tools = server_policy.enabled_tools & request_policy.enabled_tools

    # Empty frozensets are falsy, so `or frozenset()` covers both None and
    # empty without changing the result.
    disabled_tools = (server_policy.disabled_tools or frozenset()) | (
        request_policy.disabled_tools or frozenset()
    )

    return ResolvedAccessPolicy(
        workspace_ref=workspace_ref,
        features=features,
        enabled_tools=enabled_tools,
        disabled_tools=disabled_tools,
    )


def resolve_allowed_tools(policy: ResolvedAccessPolicy) -> frozenset[str]:
    """Compute the final visible tool set for a resolved policy.

    Order of application: feature groups select the base set, a workspace
    scope removes ``account`` tools, the allowlist intersects, and the
    denylist is subtracted last.
    """
    allowed: frozenset[str] = frozenset()
    for feature in policy.features:
        allowed |= FEATURE_TOOLS[feature]
    if policy.workspace_ref:
        # Account-level management tools make no sense inside a single
        # workspace scope.
        allowed -= FEATURE_TOOLS["account"]
    if policy.enabled_tools is not None:
        allowed &= policy.enabled_tools
    allowed -= policy.disabled_tools
    return allowed
from mcp.server.fastmcp import FastMCP
from mcp.server.fastmcp.exceptions import ToolError
from mcp.types import Tool as MCPTool

from .access_policy import (
    PartialAccessPolicy,
    ResolvedAccessPolicy,
    SCOPED_TOOL_NAMES,
    build_query_access_policy,
    resolve_access_policy,
    resolve_allowed_tools,
    workspace_scope_schema,
)


class ScopedFastMCP(FastMCP):
    """FastMCP server that enforces a per-connection access policy.

    The server-level policy (typically built from environment
    configuration) is merged with any policy carried in the HTTP query
    string of the current request. The resolved policy filters which tools
    are listed and callable, rewrites workspace-scoped input schemas, and
    pins ``workspace_id`` arguments to the scoped workspace.
    """

    def __init__(self, *args, access_policy: PartialAccessPolicy | None = None, **kwargs):
        super().__init__(*args, **kwargs)
        # Server-wide defaults; merged with per-request policies on demand.
        self._access_policy = access_policy or PartialAccessPolicy()
        # Resolved policies cached per session, keyed by id(session).
        # NOTE(review): entries are never evicted, and id() values can be
        # reused after a session is garbage-collected, which could hand a
        # new session a stale cached policy — consider a WeakKeyDictionary
        # keyed by the session object instead. TODO confirm session objects
        # are weak-referenceable.
        self._session_policies: dict[int, ResolvedAccessPolicy] = {}

    def _resolve_current_policy(self) -> ResolvedAccessPolicy:
        """Resolve the access policy for the call currently being served."""
        ctx = self.get_context()
        req_ctx = getattr(ctx, "_request_context", None)
        if req_ctx is None:
            # No request context (e.g. stdio transport): apply the server
            # policy alone, without caching.
            return resolve_access_policy(self._access_policy, None)

        session_key = id(req_ctx.session)
        http_request = req_ctx.request
        if http_request is not None:
            query_policy = build_query_access_policy(http_request.query_params)
            if query_policy is not None:
                # Policy parameters present on this request: re-resolve and
                # refresh the session cache.
                merged = resolve_access_policy(self._access_policy, query_policy)
                self._session_policies[session_key] = merged
                return merged

        cached = self._session_policies.get(session_key)
        if cached is not None:
            return cached

        merged = resolve_access_policy(self._access_policy, None)
        self._session_policies[session_key] = merged
        return merged

    async def list_tools(self):
        """Return the tool list filtered and schema-scoped by the policy."""
        policy = self._resolve_current_policy()
        allowed = resolve_allowed_tools(policy)
        visible = []
        for tool in await super().list_tools():
            if tool.name not in allowed:
                continue
            schema = workspace_scope_schema(tool.name, tool.inputSchema, policy.workspace_ref)
            if schema is tool.inputSchema:
                # Identity check: no scoping was applied, reuse the tool
                # object untouched.
                visible.append(tool)
            else:
                data = tool.model_dump(exclude_none=False)
                data["inputSchema"] = schema
                visible.append(MCPTool(**data))
        return visible

    async def call_tool(self, name: str, arguments: dict[str, object]):
        """Invoke a tool after policy checks, injecting the workspace scope.

        Raises:
            ToolError: when the tool is hidden by the policy or the caller
                supplies a ``workspace_id`` outside the scoped workspace.
        """
        policy = self._resolve_current_policy()
        if name not in resolve_allowed_tools(policy):
            raise ToolError(f"Tool '{name}' is not available for the current connection")

        call_args = dict(arguments or {})
        if policy.workspace_ref and name in SCOPED_TOOL_NAMES:
            supplied = call_args.get("workspace_id")
            # Absent or blank workspace_id is filled in; anything else must
            # match the scoped workspace exactly.
            if supplied not in {None, "", policy.workspace_ref}:
                raise ToolError("workspace_id is outside the current workspace_ref scope")
            call_args["workspace_id"] = policy.workspace_ref

        return await super().call_tool(name, call_args)
.../src/mcp_server_supabase/access_policy.py | 110 +++++++++----- .../src/mcp_server_supabase/config.py | 26 ---- .../src/mcp_server_supabase/credentials.py | 139 ++++++++++++++++++ .../platform/aidap_client.py | 100 ++++++------- .../src/mcp_server_supabase/runtime.py | 8 +- .../src/mcp_server_supabase/server.py | 34 ++--- .../src/mcp_server_supabase/tool_registry.py | 14 +- .../src/mcp_server_supabase/tools/base.py | 30 ++-- .../tools/database_tools.py | 40 +++-- .../tools/edge_function_tools.py | 17 +-- .../tools/storage_tools.py | 18 +-- .../tools/workspace_tools.py | 44 +++--- .../src/mcp_server_supabase/utils/__init__.py | 4 +- .../src/mcp_server_supabase/utils/targets.py | 13 +- 17 files changed, 527 insertions(+), 284 deletions(-) create mode 100644 server/mcp_server_supabase/src/mcp_server_supabase/credentials.py diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index 7bd1dd23..44447a48 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -2,14 +2,14 @@ English | [简体中文](README_zh.md) -> Supabase MCP server for AIDAP workspaces. It exposes workspace, branch, database, Edge Functions, storage, and TypeScript type generation capabilities through MCP. +> MCP server for Volcengine Supabase workspaces. It exposes workspace, branch, database, Edge Functions, storage, and TypeScript type generation capabilities through MCP. 
| Item | Details | | ---- | ---- | | Version | v0.1.0 | -| Description | Supabase MCP server built on top of AIDAP workspaces | +| Description | MCP server built on top of Volcengine Supabase workspaces | | Category | Database / Developer Tools | -| Tags | Supabase, PostgreSQL, Edge Functions, Storage, AIDAP | +| Tags | Supabase, PostgreSQL, Edge Functions, Storage, Volcengine | ## Tools @@ -18,7 +18,7 @@ English | [简体中文](README_zh.md) | Tool | Description | | ---- | ---- | | `list_workspaces` | List all available Supabase workspaces in the current account | -| `get_workspace` | Get workspace details; branch IDs are also accepted | +| `get_workspace` | Get workspace details | | `create_workspace` | Create a new Supabase workspace | | `pause_workspace` | Pause a workspace | | `restore_workspace` | Resume a paused workspace | @@ -44,7 +44,7 @@ No tools are currently exposed. | Tool | Description | | ---- | ---- | -| `get_workspace_url` | Get the API endpoint for a workspace or branch | +| `get_workspace_url` | Get the API endpoint for a workspace | | `get_publishable_keys` | Get publishable, anon, and service role keys | | `generate_typescript_types` | Generate TypeScript definitions from schema metadata | @@ -52,7 +52,7 @@ No tools are currently exposed. | Tool | Description | | ---- | ---- | -| `list_edge_functions` | List Edge Functions in a workspace or branch | +| `list_edge_functions` | List Edge Functions in a workspace | | `get_edge_function` | Get the source code and configuration of an Edge Function | | `deploy_edge_function` | Create or update an Edge Function | | `delete_edge_function` | Delete an Edge Function | @@ -77,20 +77,26 @@ No tools are currently exposed. ## Authentication -Use Volcengine AK/SK authentication. Obtain your credentials from the [Volcengine API Access Key console](https://console.volcengine.com/iam/keymanage/). +This server supports both local static credentials and cloud-deployment credentials. 
+ +- Local deployment: use `VOLCENGINE_ACCESS_KEY`, `VOLCENGINE_SECRET_KEY`, and optional `VOLCENGINE_SESSION_TOKEN` +- Cloud deployment: pass a base64-encoded STS JSON payload in the `authorization` header, or expose the same value through the `authorization` environment variable +- VeFaaS deployment: if no explicit credentials are provided, the server can also read `/var/run/secrets/iam/credential` + +Static AK/SK can be obtained from the [Volcengine API Access Key console](https://console.volcengine.com/iam/keymanage/). ## Environment Variables | Name | Required | Default | Description | | ---- | ---- | ---- | ---- | -| `VOLCENGINE_ACCESS_KEY` | Yes | - | Volcengine access key | -| `VOLCENGINE_SECRET_KEY` | Yes | - | Volcengine secret key | -| `VOLCENGINE_REGION` | No | `cn-beijing` | Region used for the AIDAP API | +| `VOLCENGINE_ACCESS_KEY` | No | - | Volcengine access key for local static authentication | +| `VOLCENGINE_SECRET_KEY` | No | - | Volcengine secret key for local static authentication | +| `VOLCENGINE_SESSION_TOKEN` | No | - | Optional session token used with temporary local credentials | +| `VOLCENGINE_REGION` | No | `cn-beijing` | Region used for the Volcengine API | | `WORKSPACE_REF` | No | - | Connection-level hard scope. When set, `account` tools are hidden and workspace-scoped calls are forced to this target | | `FEATURES` | No | `account,database,debugging,development,docs,functions,branching` | Official feature groups. 
`storage` is disabled by default | -| `ENABLED_TOOLS` | No | - | Comma-separated allowlist applied after `features` filtering | -| `DISABLED_TOOLS` | No | - | Comma-separated denylist that overrides `ENABLED_TOOLS` | -| `READ_ONLY` | No | `false` | Set to `true` to block all mutating tools | +| `DISABLED_TOOLS` | No | - | Comma-separated denylist applied after all other policy filters | +| `READ_ONLY` | No | `false` | Server-level default for connection `read_only`; when enabled, mutating tools are hidden | | `SUPABASE_WORKSPACE_SLUG` | No | `default` | Project slug used by Edge Functions APIs | | `SUPABASE_ENDPOINT_SCHEME` | No | `http` | Endpoint scheme used when building workspace URLs | | `MCP_SERVER_HOST` | No | `0.0.0.0` | Host used by `sse` and `streamable-http` transports | @@ -124,7 +130,7 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-s uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase-streamable ``` -### MCP client config with local source +### AI tool integration with local source ```json { @@ -149,7 +155,7 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-s } ``` -### MCP client config with `uvx` +### AI tool integration with `uvx` ```json { @@ -180,7 +186,21 @@ python3 -m mcp_server_supabase.server --port 8000 python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 ``` -The package exposes `mcp-server-supabase`, `supabase-aidap`, `mcp-server-supabase-sse`, and `mcp-server-supabase-streamable`. The examples above use `mcp-server-supabase`. +### Cloud deployment credential format + +When the server runs behind a remote MCP gateway or another agent platform, you can provide STS credentials through the `authorization` header. 
The value should be a base64-encoded JSON object such as: + +```json +{ + "AccessKeyId": "", + "SecretAccessKey": "", + "SessionToken": "", + "CurrentTime": "2026-03-10T10:00:00+08:00", + "ExpiredTime": "2026-03-10T12:00:00+08:00" +} +``` + +The package exposes `mcp-server-supabase`, the compatibility alias `supabase-aidap`, `mcp-server-supabase-sse`, and `mcp-server-supabase-streamable`. The examples above use `mcp-server-supabase`. ## Usage Notes @@ -188,22 +208,58 @@ The package exposes `mcp-server-supabase`, `supabase-aidap`, `mcp-server-supabas - When `WORKSPACE_REF` is active, `account` tools are hidden and any explicit `workspace_id` outside the scope is rejected. - `FEATURES` accepts only the official groups: `account`, `docs`, `database`, `debugging`, `development`, `functions`, `storage`, and `branching`. - If `FEATURES` is not set, the default enabled groups are `account`, `database`, `debugging`, `development`, `docs`, `functions`, and `branching`. `storage` stays disabled by default. -- `ENABLED_TOOLS` and `DISABLED_TOOLS` narrow the tool set after feature filtering. `DISABLED_TOOLS` takes precedence. -- If a branch ID such as `br-xxxx` is provided, the server resolves the corresponding workspace automatically. +- `read_only=true` can be supplied as an HTTP query parameter to hide all mutating tools for that connection. `READ_ONLY=true` applies the same policy as a server default. +- `DISABLED_TOOLS` takes tool names such as `execute_sql,deploy_edge_function` and removes them after the rest of the policy has been resolved. +- Credential precedence is: static env AK/SK, request `authorization`, env `authorization`, then VeFaaS IAM credentials. +- Request-scoped STS credentials disable workspace metadata cache reuse to avoid cross-connection cache leakage. +- `workspace_id` and `workspace_ref` accept workspace IDs only. Branch IDs such as `br-xxxx` are rejected. - `get_publishable_keys` resolves the default branch automatically when needed. 
-- `reset_branch` accepts `migration_version`, but the current AIDAP API ignores that value and performs a branch reset only. +- `reset_branch` accepts `migration_version`, but the current Volcengine API ignores that value and performs a branch reset only. - `deploy_edge_function` currently supports `native-node20/v1`, `native-python3.9/v1`, `native-python3.10/v1`, and `native-python3.12/v1`. - `--transport sse` serves the MCP SSE endpoint at `MCP_SSE_PATH` and the message endpoint at `MCP_MESSAGE_PATH`. - `--transport streamable-http` serves the MCP HTTP endpoint at `STREAMABLE_HTTP_PATH`. - For remote deployments, `streamable-http` is usually the better default; `sse` remains available for clients that still require it. -## Compatible Clients +## Policy Precedence + +### Tool filtering order within one connection + +1. `features` selects the base tool set +2. `workspace_ref` removes `account` tools and scopes the connection to one workspace +3. `read_only` removes all mutating tools +4. `disabled_tools` removes specific tool names last + +### Server defaults vs connection-scoped options + +1. `workspace_ref` +The server setting is a hard boundary. A connection that sends a different `workspace_ref` is rejected. If the server does not set one, the connection may choose its own. +2. `features` +If both the server and the connection set `features`, the effective set is the intersection. A connection cannot widen the server-allowed feature range. +3. `read_only` +If either the server or the connection sets `read_only=true`, the effective result is `true`. +4. `disabled_tools` +Server-side and connection-side deny lists are unioned. If either side disables a tool, it stays unavailable. + +## Integration Modes + +### AI tools + +This server works with Cursor, Claude Desktop, Cline, Trae, and any other MCP client that supports `stdio`, `sse`, or `streamable-http`. 
+ +- Local integrations usually use `stdio` +- Configure `command`, `args`, and `env` in the client +- Local source mode usually injects static AK/SK through `env` +- The two `mcpServers` JSON examples above follow this pattern + +### Custom AI agents + +If your agent runtime can spawn a local MCP process, you can keep using `stdio`. If your agent runs on a server, in containers, or in a multi-instance environment, `streamable-http` or `sse` is usually the better integration path. -- Cursor -- Claude Desktop -- Cline -- Trae -- Any MCP client that supports `stdio`, `sse`, or `streamable-http` +- `stdio`: have the agent spawn `mcp-server-supabase` as a child process +- `streamable-http`: connect to `http://:/mcp` +- `sse`: connect to `http://:/sse` and post messages to `http://:/messages/` +- Remote or cloud deployments can forward STS credentials with the `authorization` header instead of baking long-lived AK/SK into the server environment +- Connection-scoped options can be passed through HTTP query parameters, including `workspace_ref`, `features`, `read_only`, and `disabled_tools` ## License diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index baeec262..429c66e7 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -2,14 +2,14 @@ [English](README.md) | 简体中文 -> 面向 AIDAP workspace 的 Supabase MCP Server,通过 MCP 暴露工作区、分支、数据库、Edge Functions、Storage 和 TypeScript 类型生成能力。 +> 面向火山引擎 Supabase 的 MCP Server,通过 MCP 暴露工作区、分支、数据库、Edge Functions、Storage 和 TypeScript 类型生成能力。 | 项目 | 详情 | | ---- | ---- | | 版本 | v0.1.0 | -| 描述 | 基于 AIDAP workspace 的 Supabase MCP Server | +| 描述 | 基于火山引擎 Supabase workspace 的 MCP Server | | 分类 | 数据库 / 开发工具 | -| 标签 | Supabase, PostgreSQL, Edge Functions, Storage, AIDAP | +| 标签 | Supabase, PostgreSQL, Edge Functions, Storage, Volcengine | ## 工具列表 @@ -18,7 +18,7 @@ | 工具 | 说明 | | ---- | ---- | | `list_workspaces` | 列出当前账号下可访问的 Supabase workspace | -| 
`get_workspace` | 查询 workspace 详情,也支持直接传 branch ID | +| `get_workspace` | 查询 workspace 详情 | | `create_workspace` | 创建新的 Supabase workspace | | `pause_workspace` | 暂停 workspace | | `restore_workspace` | 恢复已暂停的 workspace | @@ -45,7 +45,7 @@ | 工具 | 说明 | | ---- | ---- | -| `get_workspace_url` | 获取 workspace 或 branch 的 API 地址 | +| `get_workspace_url` | 获取 workspace 的 API 地址 | | `get_publishable_keys` | 获取 publishable、anon、service_role 等密钥 | | `generate_typescript_types` | 根据 schema 元数据生成 TypeScript 类型定义 | @@ -53,7 +53,7 @@ | 工具 | 说明 | | ---- | ---- | -| `list_edge_functions` | 列出 workspace 或 branch 下的 Edge Functions | +| `list_edge_functions` | 列出 workspace 下的 Edge Functions | | `get_edge_function` | 获取 Edge Function 的代码和配置 | | `deploy_edge_function` | 创建或更新 Edge Function | | `delete_edge_function` | 删除 Edge Function | @@ -78,20 +78,26 @@ ## 鉴权方式 -使用火山引擎 AK/SK 鉴权。可在[火山引擎 API 访问密钥控制台](https://console.volcengine.com/iam/keymanage/)获取凭证。 +同时支持本地静态凭证和云部署动态凭证。 + +- 本地部署:使用 `VOLCENGINE_ACCESS_KEY`、`VOLCENGINE_SECRET_KEY` 和可选的 `VOLCENGINE_SESSION_TOKEN` +- 云部署:通过 `authorization` header 传入 base64 编码后的 STS JSON,也可以通过 `authorization` 环境变量传入同样的内容 +- VeFaaS 部署:如果没有显式凭证,服务也会尝试读取 `/var/run/secrets/iam/credential` + +静态 AK/SK 可在[火山引擎 API 访问密钥控制台](https://console.volcengine.com/iam/keymanage/)获取。 ## 环境变量 | 变量名 | 必需 | 默认值 | 说明 | | ---- | ---- | ---- | ---- | -| `VOLCENGINE_ACCESS_KEY` | 是 | - | 火山引擎 Access Key | -| `VOLCENGINE_SECRET_KEY` | 是 | - | 火山引擎 Secret Key | -| `VOLCENGINE_REGION` | 否 | `cn-beijing` | AIDAP API 所在地域 | +| `VOLCENGINE_ACCESS_KEY` | 否 | - | 本地静态鉴权使用的火山引擎 Access Key | +| `VOLCENGINE_SECRET_KEY` | 否 | - | 本地静态鉴权使用的火山引擎 Secret Key | +| `VOLCENGINE_SESSION_TOKEN` | 否 | - | 临时本地凭证使用的 Session Token | +| `VOLCENGINE_REGION` | 否 | `cn-beijing` | 火山引擎 API 所在地域 | | `WORKSPACE_REF` | 否 | - | 连接级 workspace scope,设置后会隐藏 `account` 组工具,并强制所有 workspace-scoped 调用只能访问这个目标 | | `FEATURES` | 否 | `account,database,debugging,development,docs,functions,branching` | 官方 feature 
groups,`storage` 默认关闭 | -| `ENABLED_TOOLS` | 否 | - | 逗号分隔的工具白名单,作用在 `features` 过滤之后 | -| `DISABLED_TOOLS` | 否 | - | 逗号分隔的工具黑名单,优先级高于 `ENABLED_TOOLS` | -| `READ_ONLY` | 否 | `false` | 设为 `true` 后会禁止所有写操作工具 | +| `DISABLED_TOOLS` | 否 | - | 逗号分隔的工具黑名单,在其他策略之后做最终剔除 | +| `READ_ONLY` | 否 | `false` | 连接级 `read_only` 的服务端默认值;启用后会隐藏所有写工具 | | `SUPABASE_WORKSPACE_SLUG` | 否 | `default` | Edge Functions API 使用的项目 slug | | `SUPABASE_ENDPOINT_SCHEME` | 否 | `http` | 生成 workspace URL 时使用的协议 | | `MCP_SERVER_HOST` | 否 | `0.0.0.0` | `sse` 和 `streamable-http` 使用的监听地址 | @@ -125,7 +131,7 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-s uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-server-supabase-streamable ``` -### 使用本地源码配置 MCP Client +### AI 工具使用本地源码接入 ```json { @@ -150,7 +156,7 @@ uv --directory /ABSOLUTE/PATH/TO/mcp-server/server/mcp_server_supabase run mcp-s } ``` -### 使用 `uvx` 配置 MCP Client +### AI 工具使用 `uvx` 接入 ```json { @@ -181,7 +187,21 @@ python3 -m mcp_server_supabase.server --port 8000 python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 ``` -这个包同时暴露了 `mcp-server-supabase`、`supabase-aidap`、`mcp-server-supabase-sse` 和 `mcp-server-supabase-streamable` 四个入口,示例统一使用 `mcp-server-supabase`。 +### 云部署凭证格式 + +如果服务部署在远程 MCP 网关、Agent 平台或其他服务端环境中,可以通过 `authorization` header 传入 STS 凭证。header 对应的值需要是下面这类 JSON 的 base64 编码结果: + +```json +{ + "AccessKeyId": "", + "SecretAccessKey": "", + "SessionToken": "", + "CurrentTime": "2026-03-10T10:00:00+08:00", + "ExpiredTime": "2026-03-10T12:00:00+08:00" +} +``` + +这个包同时暴露了 `mcp-server-supabase`、兼容别名 `supabase-aidap`、`mcp-server-supabase-sse` 和 `mcp-server-supabase-streamable` 四个入口,示例统一使用 `mcp-server-supabase`。 ## 使用说明 @@ -189,22 +209,58 @@ python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 - `WORKSPACE_REF` 生效时,`account` 组工具不会暴露,且显式传入其他 `workspace_id` 会被拒绝。 - `FEATURES` 只接受官方 8 
个分组:`account`、`docs`、`database`、`debugging`、`development`、`functions`、`storage`、`branching`。 - 如果没有设置 `FEATURES`,默认启用 `account`、`database`、`debugging`、`development`、`docs`、`functions`、`branching`,`storage` 默认关闭。 -- `ENABLED_TOOLS` 和 `DISABLED_TOOLS` 会在 feature 过滤之后继续收窄工具集,且 `DISABLED_TOOLS` 优先。 -- 如果传入的是 `br-xxxx` 这样的 branch ID,服务会自动解析所属 workspace。 +- 可以通过 HTTP query 参数 `read_only=true` 把当前连接切到只读模式,并隐藏所有写工具。`READ_ONLY=true` 会把这条策略作为服务端默认值。 +- `DISABLED_TOOLS` 填工具名,例如 `execute_sql,deploy_edge_function`,会在其他策略计算完成后做最终剔除。 +- 凭证优先级是:静态环境变量 AK/SK、请求 `authorization`、环境变量 `authorization`、VeFaaS IAM 凭证。 +- 当凭证来自单次请求的 STS header 时,workspace 元数据相关缓存会自动停用,避免跨连接复用缓存。 +- `workspace_id` 和 `workspace_ref` 只接受 workspace ID,`br-xxxx` 这样的 branch ID 会被直接拒绝。 - `get_publishable_keys` 在需要时会自动解析默认分支。 -- `reset_branch` 虽然接收 `migration_version` 参数,但当前 AIDAP API 会忽略这个值,只执行分支重置。 +- `reset_branch` 虽然接收 `migration_version` 参数,但当前火山引擎 API 会忽略这个值,只执行分支重置。 - `deploy_edge_function` 当前支持 `native-node20/v1`、`native-python3.9/v1`、`native-python3.10/v1`、`native-python3.12/v1`。 - `--transport sse` 会在 `MCP_SSE_PATH` 暴露 SSE 连接地址,并在 `MCP_MESSAGE_PATH` 暴露消息投递地址。 - `--transport streamable-http` 会在 `STREAMABLE_HTTP_PATH` 暴露 MCP HTTP 地址。 - 远程部署通常更推荐 `streamable-http`,但为了兼容仍保留 `sse`。 -## 可适配客户端 +## 配置优先级 + +### 单条连接内的工具过滤顺序 + +1. `features` 先决定基础工具集合 +2. `workspace_ref` 再移除 `account` 工具,并把连接限制到单个 workspace +3. `read_only` 再移除所有写工具 +4. `disabled_tools` 最后按工具名做剔除 + +### 服务端默认值和连接参数的合并规则 + +1. `workspace_ref` +服务端配置的是硬边界。连接如果传了不同的 `workspace_ref` 会被拒绝;如果服务端没配,连接可以自行指定。 +2. `features` +如果服务端和连接都配置了,实际生效的是两者交集;连接不能扩大服务端允许的 feature 范围。 +3. `read_only` +只要服务端或连接任意一侧是 `true`,最终就是 `true`。 +4. 
`disabled_tools` +服务端和连接两边会取并集,任意一侧禁掉的工具最终都不可用。 + +## 接入方式 + +### AI 工具 + +适用于 Cursor、Claude Desktop、Cline、Trae 等带 MCP 配置界面的 AI 工具,也适用于其他支持 `stdio`、`sse` 或 `streamable-http` 的 MCP Client。 + +- 本地集成通常使用 `stdio` +- 直接在客户端配置 `command`、`args` 和 `env` +- 本地源码接入通常通过 `env` 传静态 AK/SK +- 上面的两个 `mcpServers` JSON 示例就是这类接入方式 + +### 自研 AI Agent + +如果你的 Agent Runtime 可以直接拉起本地 MCP 进程,可以继续使用 `stdio`。如果你的 Agent 部署在服务端、容器或多实例环境,更推荐用 `streamable-http` 或 `sse` 暴露远程地址再接入。 -- Cursor -- Claude Desktop -- Cline -- Trae -- 所有支持 `stdio`、`sse` 或 `streamable-http` 的 MCP Client +- `stdio`:Agent 进程直接拉起 `mcp-server-supabase` +- `streamable-http`:连接 `http://:/mcp` +- `sse`:连接 `http://:/sse`,并向 `http://:/messages/` 投递消息 +- 远程或云部署场景可以通过 `authorization` header 透传 STS 凭证,而不是把长期 AK/SK 固化在服务环境变量里 +- 连接级参数可以通过 HTTP query 传入,例如 `workspace_ref`、`features`、`read_only`、`disabled_tools` ## License diff --git a/server/mcp_server_supabase/pyproject.toml b/server/mcp_server_supabase/pyproject.toml index 0590f69d..f1c92777 100644 --- a/server/mcp_server_supabase/pyproject.toml +++ b/server/mcp_server_supabase/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "mcp-server-supabase" version = "0.1.0" -description = "MCP server for Supabase/AIDAP" +description = "MCP server for Volcengine Supabase" readme = "README.md" requires-python = ">=3.10" license = { text = "Apache-2.0" } diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py b/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py index 01fae365..9f134921 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py @@ -28,34 +28,35 @@ class ToolPolicy: feature: str scoped: bool + mutating: bool TOOL_POLICIES = { - "list_workspaces": ToolPolicy("account", False), - "get_workspace": ToolPolicy("account", True), - "create_workspace": ToolPolicy("account", False), - "pause_workspace": ToolPolicy("account", True), - 
"restore_workspace": ToolPolicy("account", True), - "execute_sql": ToolPolicy("database", True), - "list_tables": ToolPolicy("database", True), - "list_migrations": ToolPolicy("database", True), - "list_extensions": ToolPolicy("database", True), - "apply_migration": ToolPolicy("database", True), - "get_workspace_url": ToolPolicy("development", True), - "get_publishable_keys": ToolPolicy("development", True), - "generate_typescript_types": ToolPolicy("development", True), - "list_edge_functions": ToolPolicy("functions", True), - "get_edge_function": ToolPolicy("functions", True), - "deploy_edge_function": ToolPolicy("functions", True), - "delete_edge_function": ToolPolicy("functions", True), - "list_storage_buckets": ToolPolicy("storage", True), - "create_storage_bucket": ToolPolicy("storage", True), - "delete_storage_bucket": ToolPolicy("storage", True), - "get_storage_config": ToolPolicy("storage", True), - "list_branches": ToolPolicy("branching", True), - "create_branch": ToolPolicy("branching", True), - "delete_branch": ToolPolicy("branching", True), - "reset_branch": ToolPolicy("branching", True), + "list_workspaces": ToolPolicy("account", False, False), + "get_workspace": ToolPolicy("account", True, False), + "create_workspace": ToolPolicy("account", False, True), + "pause_workspace": ToolPolicy("account", True, True), + "restore_workspace": ToolPolicy("account", True, True), + "execute_sql": ToolPolicy("database", True, True), + "list_tables": ToolPolicy("database", True, False), + "list_migrations": ToolPolicy("database", True, False), + "list_extensions": ToolPolicy("database", True, False), + "apply_migration": ToolPolicy("database", True, True), + "get_workspace_url": ToolPolicy("development", True, False), + "get_publishable_keys": ToolPolicy("development", True, False), + "generate_typescript_types": ToolPolicy("development", True, False), + "list_edge_functions": ToolPolicy("functions", True, False), + "get_edge_function": ToolPolicy("functions", True, 
False), + "deploy_edge_function": ToolPolicy("functions", True, True), + "delete_edge_function": ToolPolicy("functions", True, True), + "list_storage_buckets": ToolPolicy("storage", True, False), + "create_storage_bucket": ToolPolicy("storage", True, True), + "delete_storage_bucket": ToolPolicy("storage", True, True), + "get_storage_config": ToolPolicy("storage", True, False), + "list_branches": ToolPolicy("branching", True, False), + "create_branch": ToolPolicy("branching", True, True), + "delete_branch": ToolPolicy("branching", True, True), + "reset_branch": ToolPolicy("branching", True, True), } ALL_TOOL_NAMES = frozenset(TOOL_POLICIES.keys()) @@ -64,13 +65,14 @@ class ToolPolicy: for feature in OFFICIAL_FEATURE_GROUPS } SCOPED_TOOL_NAMES = frozenset(name for name, policy in TOOL_POLICIES.items() if policy.scoped) +MUTATING_TOOL_NAMES = frozenset(name for name, policy in TOOL_POLICIES.items() if policy.mutating) @dataclass(frozen=True) class PartialAccessPolicy: workspace_ref: str | None = None features: frozenset[str] | None = None - enabled_tools: frozenset[str] | None = None + read_only: bool | None = None disabled_tools: frozenset[str] | None = None @@ -78,7 +80,7 @@ class PartialAccessPolicy: class ResolvedAccessPolicy: workspace_ref: str | None features: frozenset[str] - enabled_tools: frozenset[str] | None + read_only: bool disabled_tools: frozenset[str] @@ -144,9 +146,43 @@ def _parse_workspace_ref(value: Any) -> str | None: normalized = value.strip() if not normalized: return None + if normalized.startswith("br-"): + raise ValueError("workspace_ref must be a workspace ID; branch IDs are not supported") return normalized +def _parse_read_only(value: Any) -> bool | None: + if value is None: + return None + if isinstance(value, bool): + return value + if not isinstance(value, str): + raise ValueError("read_only must be a boolean") + normalized = value.strip().lower() + if not normalized: + return None + if normalized in {"1", "true", "yes", "on"}: + return 
True + if normalized in {"0", "false", "no", "off"}: + return False + raise ValueError("read_only must be true or false") + + +def _parse_query_read_only(params: Any) -> bool | None: + if params is None: + return None + values: list[str] = [] + if hasattr(params, "getlist"): + values = [value for value in params.getlist("read_only") if value is not None] + elif hasattr(params, "get"): + value = params.get("read_only") + if value is not None: + values = [value] + if not values: + return None + return _parse_read_only(values[-1]) + + def _validate_features(features: frozenset[str] | None) -> frozenset[str] | None: if features is None: return None @@ -168,13 +204,13 @@ def _validate_tools(tools: frozenset[str] | None, field_name: str) -> frozenset[ def build_partial_access_policy( workspace_ref: Any = None, features: Any = None, - enabled_tools: Any = None, + read_only: Any = None, disabled_tools: Any = None, ) -> PartialAccessPolicy: return PartialAccessPolicy( workspace_ref=_parse_workspace_ref(workspace_ref), features=_validate_features(_parse_name_set(features)), - enabled_tools=_validate_tools(_parse_name_set(enabled_tools), "enabled_tools"), + read_only=_parse_read_only(read_only), disabled_tools=_validate_tools(_parse_name_set(disabled_tools), "disabled_tools"), ) @@ -184,14 +220,14 @@ def build_query_access_policy(params: Any) -> PartialAccessPolicy | None: return None workspace_ref = _parse_workspace_ref(params.get("workspace_ref")) if hasattr(params, "get") else None features = _validate_features(_parse_query_name_set(params, "features")) - enabled_tools = _validate_tools(_parse_query_name_set(params, "enabled_tools"), "enabled_tools") + read_only = _parse_query_read_only(params) disabled_tools = _validate_tools(_parse_query_name_set(params, "disabled_tools"), "disabled_tools") - if workspace_ref is None and features is None and enabled_tools is None and disabled_tools is None: + if workspace_ref is None and features is None and read_only is None and 
disabled_tools is None: return None return PartialAccessPolicy( workspace_ref=workspace_ref, features=features, - enabled_tools=enabled_tools, + read_only=read_only, disabled_tools=disabled_tools, ) @@ -213,9 +249,7 @@ def resolve_access_policy( if request_policy.features is not None: features = request_policy.features if server_policy.features is None else features & request_policy.features - enabled_tools = server_policy.enabled_tools - if request_policy.enabled_tools is not None: - enabled_tools = request_policy.enabled_tools if enabled_tools is None else enabled_tools & request_policy.enabled_tools + read_only = bool(server_policy.read_only) or bool(request_policy.read_only) disabled_tools = frozenset() if server_policy.disabled_tools: @@ -226,7 +260,7 @@ def resolve_access_policy( return ResolvedAccessPolicy( workspace_ref=workspace_ref, features=features, - enabled_tools=enabled_tools, + read_only=read_only, disabled_tools=disabled_tools, ) @@ -235,8 +269,8 @@ def resolve_allowed_tools(policy: ResolvedAccessPolicy) -> frozenset[str]: allowed = frozenset().union(*(FEATURE_TOOLS[feature] for feature in policy.features)) if policy.workspace_ref: allowed -= FEATURE_TOOLS["account"] - if policy.enabled_tools is not None: - allowed &= policy.enabled_tools + if policy.read_only: + allowed -= MUTATING_TOOL_NAMES allowed -= policy.disabled_tools return allowed diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/config.py b/server/mcp_server_supabase/src/mcp_server_supabase/config.py index c9520ea3..8f0f07a0 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/config.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/config.py @@ -4,21 +4,11 @@ logger = logging.getLogger(__name__) READ_ONLY = os.getenv("READ_ONLY", "false").lower() == "true" - -VOLCENGINE_ACCESS_KEY = os.getenv("VOLCENGINE_ACCESS_KEY") -VOLCENGINE_SECRET_KEY = os.getenv("VOLCENGINE_SECRET_KEY") VOLCENGINE_REGION = os.getenv("VOLCENGINE_REGION", "cn-beijing") -# 验证必需的环境变量 
-if not VOLCENGINE_ACCESS_KEY: - logger.warning("VOLCENGINE_ACCESS_KEY not set") -if not VOLCENGINE_SECRET_KEY: - logger.warning("VOLCENGINE_SECRET_KEY not set") - _default_branch_cache = {} _endpoint_cache = {} _api_key_cache = {} -_branch_workspace_cache = {} def get_branch_cache(): @@ -33,10 +23,6 @@ def get_api_key_cache(): return _api_key_cache -def get_branch_workspace_cache(): - return _branch_workspace_cache - - def clear_branch_cache(workspace_id: str = None): if workspace_id: _default_branch_cache.pop(workspace_id, None) @@ -69,19 +55,7 @@ def clear_api_key_cache(workspace_id: str = None, branch_id: str = None): _api_key_cache.clear() -def clear_branch_workspace_cache(workspace_id: str = None, branch_id: str = None): - if branch_id: - _branch_workspace_cache.pop(branch_id, None) - elif workspace_id: - branch_ids = [key for key, value in _branch_workspace_cache.items() if value == workspace_id] - for key in branch_ids: - _branch_workspace_cache.pop(key, None) - else: - _branch_workspace_cache.clear() - - def clear_all_caches(workspace_id: str = None, branch_id: str = None): clear_branch_cache(workspace_id) clear_endpoint_cache(workspace_id, branch_id) clear_api_key_cache(workspace_id, branch_id) - clear_branch_workspace_cache(workspace_id, branch_id) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/credentials.py b/server/mcp_server_supabase/src/mcp_server_supabase/credentials.py new file mode 100644 index 00000000..e3b0c58a --- /dev/null +++ b/server/mcp_server_supabase/src/mcp_server_supabase/credentials.py @@ -0,0 +1,139 @@ +import base64 +import json +import os +from dataclasses import dataclass +from datetime import datetime +from pathlib import Path +from typing import Any, Callable + + +VEFAAS_IAM_CREDENTIAL_PATH = "/var/run/secrets/iam/credential" +AUTHORIZATION_ENV_NAMES = ("authorization", "AUTHORIZATION") +STATIC_ACCESS_KEY_ENV_NAMES = ("VOLCENGINE_ACCESS_KEY", "VOLC_ACCESSKEY") +STATIC_SECRET_KEY_ENV_NAMES = 
("VOLCENGINE_SECRET_KEY", "VOLC_SECRETKEY") +STATIC_SESSION_TOKEN_ENV_NAMES = ("VOLCENGINE_SESSION_TOKEN",) + + +@dataclass(frozen=True, slots=True) +class VolcengineCredentials: + access_key: str + secret_key: str + session_token: str + source: str + cacheable: bool + + +def _get_env_value(*names: str) -> str: + for name in names: + value = os.getenv(name) + if value: + return value + return "" + + +def _normalize_iso8601(value: str) -> str: + return value.replace("Z", "+00:00") if value.endswith("Z") else value + + +def _validate_sts_time_window(payload: dict[str, Any]) -> None: + current_time = payload.get("CurrentTime") + expired_time = payload.get("ExpiredTime") + if not current_time or not expired_time: + return + current_dt = datetime.fromisoformat(_normalize_iso8601(str(current_time))) + expired_dt = datetime.fromisoformat(_normalize_iso8601(str(expired_time))) + if current_dt > expired_dt: + raise ValueError("STS token is expired") + + +def _parse_authorization_payload(raw_value: str, source: str, cacheable: bool) -> VolcengineCredentials: + token = raw_value.split(" ", 1)[1] if " " in raw_value else raw_value + decoded_bytes = base64.b64decode(token) + payload = json.loads(decoded_bytes.decode("utf-8")) + _validate_sts_time_window(payload) + access_key = str(payload.get("AccessKeyId") or "").strip() + secret_key = str(payload.get("SecretAccessKey") or "").strip() + session_token = str(payload.get("SessionToken") or "").strip() + if not access_key or not secret_key: + raise ValueError("AccessKeyId or SecretAccessKey missing in authorization payload") + return VolcengineCredentials( + access_key=access_key, + secret_key=secret_key, + session_token=session_token, + source=source, + cacheable=cacheable, + ) + + +def _get_request_authorization(context_getter: Callable[[], Any] | None) -> str: + if context_getter is None: + return "" + try: + context = context_getter() + except Exception: + return "" + request_context = getattr(context, "request_context", None) 
+ if request_context is None: + request_context = getattr(context, "_request_context", None) + request = getattr(request_context, "request", None) + if request is None: + return "" + return str(request.headers.get("authorization") or "").strip() + + +def _get_vefaas_iam_credentials() -> VolcengineCredentials | None: + path = Path(VEFAAS_IAM_CREDENTIAL_PATH) + if not path.exists(): + return None + payload = json.loads(path.read_text()) + access_key = str(payload.get("access_key_id") or "").strip() + secret_key = str(payload.get("secret_access_key") or "").strip() + session_token = str(payload.get("session_token") or "").strip() + if not access_key or not secret_key: + return None + return VolcengineCredentials( + access_key=access_key, + secret_key=secret_key, + session_token=session_token, + source="vefaas_iam", + cacheable=True, + ) + + +def resolve_volcengine_credentials(context_getter: Callable[[], Any] | None = None) -> VolcengineCredentials: + static_access_key = _get_env_value(*STATIC_ACCESS_KEY_ENV_NAMES) + static_secret_key = _get_env_value(*STATIC_SECRET_KEY_ENV_NAMES) + static_session_token = _get_env_value(*STATIC_SESSION_TOKEN_ENV_NAMES) + if static_access_key and static_secret_key: + return VolcengineCredentials( + access_key=static_access_key, + secret_key=static_secret_key, + session_token=static_session_token, + source="env", + cacheable=True, + ) + + request_authorization = _get_request_authorization(context_getter) + if request_authorization: + return _parse_authorization_payload( + request_authorization, + source="request_authorization", + cacheable=False, + ) + + env_authorization = _get_env_value(*AUTHORIZATION_ENV_NAMES) + if env_authorization: + return _parse_authorization_payload( + env_authorization, + source="env_authorization", + cacheable=True, + ) + + vefaas_credentials = _get_vefaas_iam_credentials() + if vefaas_credentials is not None: + return vefaas_credentials + + raise ValueError( + "Volcengine credentials are not configured. 
" + "Set VOLCENGINE_ACCESS_KEY/VOLCENGINE_SECRET_KEY, provide authorization, or mount VeFaaS IAM credentials." + ) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py index de038c82..4a811b63 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py @@ -2,17 +2,16 @@ import asyncio import os import random +from collections.abc import Callable from typing import Any, Optional from ..config import ( - VOLCENGINE_ACCESS_KEY, - VOLCENGINE_SECRET_KEY, VOLCENGINE_REGION, get_branch_cache, - get_branch_workspace_cache, get_endpoint_cache, get_api_key_cache, clear_all_caches, ) +from ..credentials import resolve_volcengine_credentials from ..utils import pick_value logger = logging.getLogger(__name__) @@ -43,14 +42,31 @@ class AidapClient: - def __init__(self) -> None: + def __init__(self, context_getter: Callable[[], Any] | None = None) -> None: + self._context_getter = context_getter + + def _get_credentials(self): + return resolve_volcengine_credentials(self._context_getter) + + def _should_use_cache(self, use_cache: bool) -> bool: + if not use_cache: + return False + return self._get_credentials().cacheable + + def _create_client(self) -> AIDAPApi: + credentials = self._get_credentials() configuration = volcenginesdkcore.Configuration() - configuration.ak = VOLCENGINE_ACCESS_KEY - configuration.sk = VOLCENGINE_SECRET_KEY + configuration.ak = credentials.access_key + configuration.sk = credentials.secret_key configuration.region = VOLCENGINE_REGION - + if credentials.session_token: + configuration.session_token = credentials.session_token api_client = volcenginesdkcore.ApiClient(configuration) - self.client = AIDAPApi(api_client) + return AIDAPApi(api_client) + + @property + def client(self) -> AIDAPApi: + return self._create_client() def 
_branch_error_code(self, error_text: str) -> str: if "OperationDenied_BranchNotReady" in error_text: @@ -64,21 +80,6 @@ def _branch_error_code(self, error_text: str) -> str: def _pick_value(self, source: Any, *field_names: str) -> Any: return pick_value(source, *field_names) - def _looks_like_branch_id(self, value: Optional[str]) -> bool: - return bool(value and value.strip().startswith("br-")) - - def _cache_branch_workspace(self, workspace_id: Optional[str], branch_id: Optional[str]) -> None: - if workspace_id and branch_id: - get_branch_workspace_cache()[branch_id] = workspace_id - - def _workspace_ids_from_response(self, response: Any) -> list[str]: - workspace_ids = [] - for workspace in list(getattr(response, "workspaces", []) or []): - workspace_id = self._pick_value(workspace, "workspace_id") - if workspace_id: - workspace_ids.append(workspace_id) - return workspace_ids - def _branch_payload(self, branch: Any, fallback_name: Optional[str] = None) -> dict: parent_branch = self._pick_value(branch, "parent_branch") parent_id = self._pick_value(parent_branch, "branch_id", "parent_id") @@ -94,9 +95,7 @@ def _branch_payload(self, branch: Any, fallback_name: Optional[str] = None) -> d "created_at": self._pick_value(branch, "create_time", "created_at"), "updated_at": self._pick_value(branch, "update_time", "updated_at"), } - result = {key: value for key, value in payload.items() if value is not None} - self._cache_branch_workspace(result.get("workspace_id"), result.get("branch_id")) - return result + return {key: value for key, value in payload.items() if value is not None} def _describe_supabase_workspaces_response(self): request = DescribeWorkspacesRequest() @@ -120,27 +119,6 @@ async def _find_branch( await self._sleep_backoff(attempt, base_seconds=0.5, max_seconds=3.0) return None - async def _find_workspace_id_for_branch(self, branch_id: str) -> Optional[str]: - cached_workspace_id = get_branch_workspace_cache().get(branch_id) - if cached_workspace_id: - 
return cached_workspace_id - response = self._describe_supabase_workspaces_response() - for workspace_id in self._workspace_ids_from_response(response): - branch = await self._find_branch(workspace_id, branch_id=branch_id, max_attempts=1) - if branch: - self._cache_branch_workspace(workspace_id, branch_id) - return workspace_id - return None - - async def resolve_workspace_and_branch(self, workspace_or_branch_id: str) -> tuple[str, Optional[str]]: - normalized_id = workspace_or_branch_id.strip() - if not self._looks_like_branch_id(normalized_id): - return normalized_id, None - workspace_id = await self._find_workspace_id_for_branch(normalized_id) - if not workspace_id: - raise ValueError(f"Could not resolve workspace for branch {normalized_id}") - return workspace_id, normalized_id - async def get_branch(self, workspace_id: str, branch_id: str) -> Optional[dict]: return await self._find_branch(workspace_id, branch_id=branch_id, max_attempts=1) @@ -156,7 +134,8 @@ async def _sleep_backoff( async def get_default_branch_id(self, workspace_id: str, use_cache: bool = True) -> Optional[str]: cache = get_branch_cache() - if use_cache and workspace_id in cache: + cache_enabled = self._should_use_cache(use_cache) + if cache_enabled and workspace_id in cache: return cache[workspace_id] try: @@ -167,14 +146,14 @@ async def get_default_branch_id(self, workspace_id: str, use_cache: bool = True) for branch in response.branches: if getattr(branch, 'default', False): branch_id = branch.branch_id - cache[workspace_id] = branch_id - self._cache_branch_workspace(workspace_id, branch_id) + if cache_enabled: + cache[workspace_id] = branch_id return branch_id first_branch = response.branches[0] branch_id = first_branch.branch_id - cache[workspace_id] = branch_id - self._cache_branch_workspace(workspace_id, branch_id) + if cache_enabled: + cache[workspace_id] = branch_id return branch_id return None @@ -324,11 +303,11 @@ async def delete_branch(self, workspace_id: str, branch_id: str) -> 
dict: } async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, use_cache: bool = True) -> Optional[str]: - # 检查缓存 cache_key = f"{workspace_id}:{branch_id}" if branch_id else workspace_id endpoint_cache = get_endpoint_cache() + cache_enabled = self._should_use_cache(use_cache) - if use_cache and cache_key in endpoint_cache: + if cache_enabled and cache_key in endpoint_cache: return endpoint_cache[cache_key] if not branch_id: @@ -357,7 +336,8 @@ async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, result = f"https://{domain}" else: result = f"http://{domain}:80" - endpoint_cache[cache_key] = result + if cache_enabled: + endpoint_cache[cache_key] = result return result if domains: @@ -365,7 +345,8 @@ async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, result = f"https://{domains[0]}" else: result = f"http://{domains[0]}:80" - endpoint_cache[cache_key] = result + if cache_enabled: + endpoint_cache[cache_key] = result return result return None @@ -406,11 +387,11 @@ async def reset_branch(self, workspace_id: str, branch_id: str) -> dict: async def get_api_key(self, workspace_id: str, key_type: str = "service_role", branch_id: Optional[str] = None, use_cache: bool = True) -> Optional[str]: - # 检查缓存 cache_key = f"{workspace_id}:{key_type}:{branch_id}" if branch_id else f"{workspace_id}:{key_type}" api_key_cache = get_api_key_cache() + cache_enabled = self._should_use_cache(use_cache) - if use_cache and cache_key in api_key_cache: + if cache_enabled and cache_key in api_key_cache: return api_key_cache[cache_key] if not branch_id: @@ -436,7 +417,8 @@ async def get_api_key(self, workspace_id: str, key_type: str = "service_role", if hasattr(key, 'type') and key.type == target_type: result = key.key if hasattr(key, 'key') else None if result: - api_key_cache[cache_key] = result + if cache_enabled: + api_key_cache[cache_key] = result return result return None diff --git 
a/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py b/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py index 86981b86..ed414bab 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/runtime.py @@ -1,4 +1,5 @@ from dataclasses import dataclass +from typing import Any, Callable from .platform import AidapClient from .tools import DatabaseTools, EdgeFunctionTools, StorageTools, WorkspaceTools @@ -13,8 +14,11 @@ class SupabaseRuntime: workspace_tools: WorkspaceTools -def create_runtime(aidap_client: AidapClient | None = None) -> SupabaseRuntime: - client = aidap_client or AidapClient() +def create_runtime( + aidap_client: AidapClient | None = None, + context_getter: Callable[[], Any] | None = None, +) -> SupabaseRuntime: + client = aidap_client or AidapClient(context_getter=context_getter) return SupabaseRuntime( aidap_client=client, edge_tools=EdgeFunctionTools(client), diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index b2fb15b2..48586081 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -2,7 +2,6 @@ import logging import os -from .config import READ_ONLY from .runtime import create_runtime from .tool_registry import register_tools from .access_policy import build_partial_access_policy @@ -46,10 +45,10 @@ def _resolve_features(features: str | None = None) -> str | None: return os.getenv("FEATURES") -def _resolve_enabled_tools(enabled_tools: str | None = None) -> str | None: - if enabled_tools is not None: - return enabled_tools - return os.getenv("ENABLED_TOOLS") +def _resolve_read_only(read_only: str | bool | None = None) -> str | bool | None: + if read_only is not None: + return read_only + return os.getenv("READ_ONLY") def _resolve_disabled_tools(disabled_tools: str | None = None) 
-> str | None: @@ -87,7 +86,7 @@ def create_mcp( host: str | None = None, workspace_ref: str | None = None, features: str | None = None, - enabled_tools: str | None = None, + read_only: str | bool | None = None, disabled_tools: str | None = None, mount_path: str | None = None, sse_path: str | None = None, @@ -99,12 +98,11 @@ def create_mcp( access_policy = build_partial_access_policy( workspace_ref=_resolve_workspace_ref(workspace_ref), features=_resolve_features(features), - enabled_tools=_resolve_enabled_tools(enabled_tools), + read_only=_resolve_read_only(read_only), disabled_tools=_resolve_disabled_tools(disabled_tools), ) - runtime = create_runtime() mcp = ScopedFastMCP( - "Supabase MCP Server (AIDAP)", + "Supabase MCP Server (Volcengine)", access_policy=access_policy, host=resolved_host, port=resolved_port, @@ -113,6 +111,7 @@ def create_mcp( message_path=_resolve_message_path(message_path), streamable_http_path=_resolve_streamable_http_path(streamable_http_path), ) + runtime = create_runtime(context_getter=mcp.get_context) register_tools(mcp, runtime) return mcp @@ -126,7 +125,7 @@ def run_server( host: str | None = None, workspace_ref: str | None = None, features: str | None = None, - enabled_tools: str | None = None, + read_only: str | bool | None = None, disabled_tools: str | None = None, ) -> None: create_mcp( @@ -134,7 +133,7 @@ def run_server( host=host, workspace_ref=workspace_ref, features=features, - enabled_tools=enabled_tools, + read_only=read_only, disabled_tools=disabled_tools, ).run(transport=transport) @@ -152,7 +151,7 @@ def main(): parser.add_argument("--port", type=int, default=None, help="Port to run the server on") parser.add_argument("--workspace-ref", type=str, default=None, help="Hard-scope the connection to a single workspace") parser.add_argument("--features", type=str, default=None, help="Comma-separated official feature groups") - parser.add_argument("--enabled-tools", type=str, default=None, help="Comma-separated whitelist of tool 
names") + parser.add_argument("--read-only", nargs="?", const="true", default=None, help="Hide all mutating tools for this connection") parser.add_argument("--disabled-tools", type=str, default=None, help="Comma-separated blacklist of tool names") args = parser.parse_args() @@ -160,17 +159,18 @@ def main(): resolved_port = _resolve_port(args.port) resolved_workspace_ref = _resolve_workspace_ref(args.workspace_ref) resolved_features = _resolve_features(args.features) - resolved_enabled_tools = _resolve_enabled_tools(args.enabled_tools) + resolved_read_only = _resolve_read_only(args.read_only) resolved_disabled_tools = _resolve_disabled_tools(args.disabled_tools) + resolved_read_only_value = build_partial_access_policy(read_only=resolved_read_only).read_only logger.info("Starting Supabase MCP Server with %s transport", args.transport) - logger.info("Read-only mode: %s", READ_ONLY) + logger.info("Read-only mode: %s", bool(resolved_read_only_value)) if resolved_workspace_ref: logger.info("Workspace scope: %s", resolved_workspace_ref) if resolved_features: logger.info("Feature groups: %s", resolved_features) - if resolved_enabled_tools: - logger.info("Enabled tools: %s", resolved_enabled_tools) + if resolved_read_only_value is not None: + logger.info("Connection read_only: %s", resolved_read_only_value) if resolved_disabled_tools: logger.info("Disabled tools: %s", resolved_disabled_tools) if args.transport != "stdio": @@ -189,7 +189,7 @@ def main(): host=args.host, workspace_ref=args.workspace_ref, features=args.features, - enabled_tools=args.enabled_tools, + read_only=args.read_only, disabled_tools=args.disabled_tools, ) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py index 6df7b161..9934cda4 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py @@ -15,7 +15,7 @@ def 
_register_edge_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: @mcp.tool() async def list_edge_functions(workspace_id: str = None) -> str: - """Lists all Edge Functions in a workspace or branch.""" + """Lists all Edge Functions in a workspace.""" return await edge_tools.list_edge_functions(workspace_id) @mcp.tool() @@ -40,7 +40,7 @@ async def deploy_edge_function( verify_jwt: Whether to verify JWT tokens runtime: Runtime environment import_map: Optional import map JSON for dependencies - workspace_id: The workspace ID or branch ID + workspace_id: The workspace ID """ return await edge_tools.deploy_edge_function( function_name, @@ -62,7 +62,7 @@ def _register_storage_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: @mcp.tool() async def list_storage_buckets(workspace_id: str = None) -> str: - """Lists all storage buckets in a workspace or branch.""" + """Lists all storage buckets in a workspace.""" return await storage_tools.list_storage_buckets(workspace_id) @mcp.tool() @@ -89,7 +89,7 @@ async def delete_storage_bucket(bucket_name: str, workspace_id: str = None) -> s @mcp.tool() async def get_storage_config(workspace_id: str = None) -> str: - """Gets the storage configuration for a workspace or branch.""" + """Gets the storage configuration for a workspace.""" return await storage_tools.get_storage_config(workspace_id) @@ -139,7 +139,7 @@ async def list_workspaces() -> str: @mcp.tool() async def get_workspace(workspace_id: str) -> str: - """Gets details for a specific workspace or branch target.""" + """Gets details for a specific workspace.""" return await workspace_tools.get_workspace(workspace_id) @mcp.tool() @@ -163,12 +163,12 @@ async def restore_workspace(workspace_id: str = None) -> str: @mcp.tool() async def get_workspace_url(workspace_id: str = None) -> str: - """Gets API endpoint URL for a workspace or branch.""" + """Gets API endpoint URL for a workspace.""" return await workspace_tools.get_workspace_url(workspace_id) @mcp.tool() async def 
get_publishable_keys(workspace_id: str = None, reveal: bool = False) -> str: - """Gets API keys for a workspace or branch.""" + """Gets API keys for a workspace.""" return await workspace_tools.get_publishable_keys(workspace_id, reveal) @mcp.tool() diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py index 0360c0dc..2d6c0ed8 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py @@ -1,38 +1,30 @@ from typing import Optional from ..platform import AidapClient, SupabaseClient -from ..utils import resolve_target +from ..utils import resolve_workspace_id class BaseTools: def __init__(self, aidap_client: AidapClient): self.aidap = aidap_client - def _get_workspace_id(self, workspace_id: Optional[str]) -> str: - if not workspace_id: - raise ValueError("workspace_id is required") - return workspace_id - - async def _resolve_target(self, workspace_id: Optional[str]) -> tuple[str, Optional[str]]: - target = self._get_workspace_id(workspace_id) - resolved_workspace_id, branch_id = await resolve_target(self.aidap, target) + def _resolve_workspace_id(self, workspace_id: Optional[str]) -> str: + resolved_workspace_id = resolve_workspace_id(workspace_id) if not resolved_workspace_id: raise ValueError("workspace_id is required") - return resolved_workspace_id, branch_id + return resolved_workspace_id - async def _get_client(self, workspace_id: str, branch_id: Optional[str] = None) -> SupabaseClient: + async def _get_client(self, workspace_id: str) -> SupabaseClient: import logging logger = logging.getLogger(__name__) - endpoint = await self.aidap.get_endpoint(workspace_id, branch_id=branch_id) - logger.info(f"[DEBUG] Got endpoint for {workspace_id} branch={branch_id}: {endpoint}") + endpoint = await self.aidap.get_endpoint(workspace_id) + logger.info(f"[DEBUG] Got endpoint for {workspace_id}: 
{endpoint}") if not endpoint: - target = branch_id or workspace_id - raise ValueError(f"Could not get endpoint for target {target}") + raise ValueError(f"Could not get endpoint for workspace {workspace_id}") - api_key = await self.aidap.get_api_key(workspace_id, "service_role", branch_id=branch_id) - logger.info(f"[DEBUG] Got API key for {workspace_id} branch={branch_id}: {'yes' if api_key else 'no'}") + api_key = await self.aidap.get_api_key(workspace_id, "service_role") + logger.info(f"[DEBUG] Got API key for {workspace_id}: {'yes' if api_key else 'no'}") if not api_key: - target = branch_id or workspace_id - raise ValueError(f"Could not get API key for target {target}") + raise ValueError(f"Could not get API key for workspace {workspace_id}") return SupabaseClient(endpoint, api_key) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py index f6ba117e..75d00191 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py @@ -13,13 +13,13 @@ async def _execute_sql_raw(self, query: str, workspace_id: Optional[str] = None) if not query or not query.strip(): raise ValueError("SQL query cannot be empty") - ws_id, branch_id = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) logger.info( "Executing SQL query", - extra={"workspace_id": ws_id, "branch_id": branch_id, "query_length": len(query)} + extra={"workspace_id": ws_id, "query_length": len(query)} ) - client = await self._get_client(ws_id, branch_id) + client = await self._get_client(ws_id) result = await client.call_api("/pg/query", method="POST", json_data={"query": query}) if isinstance(result, dict) and isinstance(result.get("data"), list): @@ -58,18 +58,28 @@ async def list_tables(self, schemas: List[str] = None, workspace_id: Optional[st @handle_errors 
async def list_migrations(self, workspace_id: Optional[str] = None) -> List[dict]: - query = """ - CREATE SCHEMA IF NOT EXISTS supabase_migrations; - CREATE TABLE IF NOT EXISTS supabase_migrations.schema_migrations ( - version text PRIMARY KEY, - name text NOT NULL, - inserted_at timestamptz NOT NULL DEFAULT now() - ); - SELECT version, name - FROM supabase_migrations.schema_migrations - ORDER BY version DESC - """ - return await self._execute_sql_raw(query, workspace_id) + existence_rows = await self._execute_sql_raw( + """ + SELECT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_schema = 'supabase_migrations' + AND table_name = 'schema_migrations' + ) AS exists + """, + workspace_id, + ) + exists = bool(existence_rows and existence_rows[0].get("exists")) + if not exists: + return [] + return await self._execute_sql_raw( + """ + SELECT version, name + FROM supabase_migrations.schema_migrations + ORDER BY version DESC + """, + workspace_id, + ) @handle_errors async def list_extensions(self, workspace_id: Optional[str] = None) -> List[dict]: diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py index b274c15c..0d8a118f 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py @@ -131,10 +131,10 @@ def _extract_error_text(self, payload: object) -> str: @handle_errors async def list_edge_functions(self, workspace_id: Optional[str] = None) -> List[EdgeFunction]: - ws_id, branch_id = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) logger.info(f"Listing edge functions for workspace {ws_id}") - client = await self._get_client(ws_id, branch_id) + client = await self._get_client(ws_id) result = await client.call_api(f"/v1/projects/{WORKSPACE_SLUG}/functions") functions = 
[EdgeFunction(**func) for func in result] @@ -144,10 +144,10 @@ async def list_edge_functions(self, workspace_id: Optional[str] = None) -> List[ @handle_errors async def get_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> dict: self._validate_function_name(function_name) - ws_id, branch_id = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) logger.info(f"Getting edge function '{function_name}' from workspace {ws_id}") - client = await self._get_client(ws_id, branch_id) + client = await self._get_client(ws_id) encoded_name = quote(function_name, safe="") try: result = await client.call_api(f"/v1/projects/{WORKSPACE_SLUG}/functions/{encoded_name}") @@ -202,7 +202,7 @@ async def deploy_edge_function( self._validate_code_size(source_code) self._validate_runtime_compatibility(runtime, source_code) - ws_id, branch_id = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) entrypoint = self._get_entrypoint(runtime) logger.info( @@ -210,7 +210,6 @@ async def deploy_edge_function( extra={ "function_name": function_name, "workspace_id": ws_id, - "branch_id": branch_id, "runtime": runtime, "verify_jwt": verify_jwt, "entrypoint": entrypoint, @@ -218,7 +217,7 @@ async def deploy_edge_function( } ) - client = await self._get_client(ws_id, branch_id) + client = await self._get_client(ws_id) encoded_name = quote(function_name, safe="") @@ -256,10 +255,10 @@ async def deploy_edge_function( @read_only_check async def delete_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> dict: self._validate_function_name(function_name) - ws_id, branch_id = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) logger.info(f"Deleting edge function '{function_name}' from workspace {ws_id}") - client = await self._get_client(ws_id, branch_id) + client = await self._get_client(ws_id) encoded_name = quote(function_name, 
safe="") await client.call_api(f"/v1/projects/{WORKSPACE_SLUG}/functions/{encoded_name}", method="DELETE") diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py index 18b6f10e..70288b8e 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py @@ -35,10 +35,10 @@ def _normalize_allowed_mime_types(self, allowed_mime_types: Optional[str | list[ @handle_errors async def list_storage_buckets(self, workspace_id: Optional[str] = None) -> List[dict]: - ws_id, branch_id = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) logger.info(f"Listing storage buckets for workspace {ws_id}") - client = await self._get_client(ws_id, branch_id) + client = await self._get_client(ws_id) result = await client.call_api("/storage/v1/bucket") logger.info(f"Found {len(result)} storage buckets") @@ -57,13 +57,13 @@ async def create_storage_bucket( if not bucket_name or not bucket_name.strip(): raise ValueError("Bucket name cannot be empty") - ws_id, branch_id = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) logger.info( f"Creating storage bucket '{bucket_name}'", - extra={"workspace_id": ws_id, "branch_id": branch_id, "public": public} + extra={"workspace_id": ws_id, "public": public} ) - client = await self._get_client(ws_id, branch_id) + client = await self._get_client(ws_id) data = { "name": bucket_name, @@ -82,8 +82,8 @@ async def create_storage_bucket( async def delete_storage_bucket(self, bucket_name: str, workspace_id: Optional[str] = None) -> dict: if not bucket_name or not bucket_name.strip(): raise ValueError("Bucket name cannot be empty") - ws_id, branch_id = await self._resolve_target(workspace_id) - client = await self._get_client(ws_id, branch_id) + ws_id = 
self._resolve_workspace_id(workspace_id) + client = await self._get_client(ws_id) response = await client.call_api(f"/storage/v1/bucket/{bucket_name}", method="DELETE") if isinstance(response, dict) and "error" in response: raise ValueError(response["error"]) @@ -91,7 +91,7 @@ async def delete_storage_bucket(self, bucket_name: str, workspace_id: Optional[s @handle_errors async def get_storage_config(self, workspace_id: Optional[str] = None) -> StorageConfig: - ws_id, branch_id = await self._resolve_target(workspace_id) - client = await self._get_client(ws_id, branch_id) + ws_id = self._resolve_workspace_id(workspace_id) + client = await self._get_client(ws_id) result = await client.call_api("/storage/v1/config") return StorageConfig(**result) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index 6d9b9028..1ae5b4ad 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -3,7 +3,7 @@ import logging from typing import Any, Optional -from ..utils import compact_dict, pick_value, read_only_check, resolve_target, to_json +from ..utils import compact_dict, pick_value, read_only_check, resolve_workspace_id, to_json logger = logging.getLogger(__name__) @@ -21,8 +21,8 @@ def _compact(self, payload: dict) -> dict: def _pick(self, source: Any, *field_names: str) -> Any: return pick_value(source, *field_names) - async def _resolve_target(self, target_id: Optional[str]) -> tuple[Optional[str], Optional[str]]: - return await resolve_target(self.aidap_client, target_id) + def _resolve_workspace_id(self, workspace_id: Optional[str]) -> Optional[str]: + return resolve_workspace_id(workspace_id) def _workspace_view(self, source: Any) -> dict: payload = { @@ -113,7 +113,7 @@ async def list_workspaces(self) -> str: async def get_workspace(self, 
workspace_id: str) -> str: try: - ws_id, branch_id = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return self._to_json({ "success": False, @@ -126,10 +126,6 @@ async def get_workspace(self, workspace_id: str) -> str: "error": "Workspace not found", }) workspace_info = self._workspace_view(workspace_source) - if branch_id: - branch = await self.aidap_client.get_branch(ws_id, branch_id) - if branch: - workspace_info.update(self._branch_view(branch, workspace_info)) return self._to_json({ "success": True, "workspace": workspace_info, @@ -170,7 +166,7 @@ async def create_workspace( @read_only_check async def restore_workspace(self, workspace_id: Optional[str] = None) -> str: - ws_id, _ = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return self._to_json({"success": False, "error": "workspace_id is required"}) result = await self.aidap_client.start_workspace(ws_id) @@ -178,7 +174,7 @@ async def restore_workspace(self, workspace_id: Optional[str] = None) -> str: @read_only_check async def pause_workspace(self, workspace_id: Optional[str] = None) -> str: - ws_id, _ = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return self._to_json({"success": False, "error": "workspace_id is required"}) result = await self.aidap_client.stop_workspace(ws_id) @@ -190,7 +186,7 @@ async def create_branch( name: str = "develop", workspace_id: Optional[str] = None, ) -> str: - ws_id, _ = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return self._to_json({"success": False, "error": "workspace_id is required"}) @@ -210,7 +206,7 @@ async def create_branch( return self._to_json(result) async def list_branches(self, workspace_id: Optional[str] = None) -> str: - ws_id, _ = await self._resolve_target(workspace_id) + ws_id = 
self._resolve_workspace_id(workspace_id) if not ws_id: return self._to_json({"success": False, "error": "workspace_id is required"}) try: @@ -225,7 +221,7 @@ async def list_branches(self, workspace_id: Optional[str] = None) -> str: @read_only_check async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None) -> str: - ws_id, _ = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return self._to_json({ "success": False, @@ -307,16 +303,15 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None }) async def get_workspace_url(self, workspace_id: Optional[str] = None) -> str: - ws_id, branch_id = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return self._to_json({"success": False, "error": "workspace_id is required"}) - endpoint = await self.aidap_client.get_endpoint(ws_id, branch_id=branch_id) + endpoint = await self.aidap_client.get_endpoint(ws_id) if not endpoint: - target_id = branch_id or ws_id return self._to_json({ "success": False, - "error": f"Could not get endpoint for workspace {target_id}", + "error": f"Could not get endpoint for workspace {ws_id}", }) payload = { @@ -325,13 +320,10 @@ async def get_workspace_url(self, workspace_id: Optional[str] = None) -> str: "workspace_url": endpoint, "api_url": endpoint, } - if branch_id: - payload["branch_id"] = branch_id - payload["target_type"] = "branch" return self._to_json(payload) - async def _get_api_keys_payload(self, workspace_id: str, branch_id: Optional[str] = None, reveal: bool = False) -> dict: - resolved_branch_id = branch_id or await self.aidap_client.get_default_branch_id(workspace_id) + async def _get_api_keys_payload(self, workspace_id: str, reveal: bool = False) -> dict: + resolved_branch_id = await self.aidap_client.get_default_branch_id(workspace_id) if not resolved_branch_id: raise RuntimeError(f"Could not resolve default branch for 
workspace {workspace_id}") keys = await self.aidap_client.get_api_keys(workspace_id, branch_id=resolved_branch_id) @@ -366,12 +358,12 @@ async def _get_api_keys_payload(self, workspace_id: str, branch_id: Optional[str return payload async def get_publishable_keys(self, workspace_id: Optional[str] = None, reveal: bool = False) -> str: - ws_id, branch_id = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return self._to_json({"success": False, "error": "workspace_id is required"}) try: - payload = await self._get_api_keys_payload(ws_id, branch_id=branch_id, reveal=reveal) + payload = await self._get_api_keys_payload(ws_id, reveal=reveal) return self._to_json(payload) except Exception as e: logger.error(f"Error getting publishable keys: {e}") @@ -384,7 +376,7 @@ async def reset_branch( migration_version: Optional[str] = None, workspace_id: Optional[str] = None, ) -> str: - ws_id, _ = await self._resolve_target(workspace_id) + ws_id = self._resolve_workspace_id(workspace_id) if not ws_id: return self._to_json({ "success": False, @@ -399,7 +391,7 @@ async def reset_branch( result.setdefault("workspace_id", ws_id) result.setdefault("branch_id", branch_id) if migration_version: - result["warning"] = "migration_version is ignored because current AIDAP reset_branch API does not support version-targeted reset" + result["warning"] = "migration_version is ignored because the current Volcengine reset_branch API does not support version-targeted reset" return self._to_json(result) except Exception as e: logger.error(f"Error resetting branch: {e}") diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py b/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py index 449a83a0..2f342a03 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py @@ -1,6 +1,6 @@ from .common import 
compact_dict, pick_value, to_json from .decorators import format_error, handle_errors, read_only_check -from .targets import resolve_target +from .targets import resolve_workspace_id __all__ = [ 'compact_dict', @@ -8,6 +8,6 @@ 'handle_errors', 'pick_value', 'read_only_check', - 'resolve_target', + 'resolve_workspace_id', 'to_json', ] diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py b/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py index 79b4f284..150fb538 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/utils/targets.py @@ -1,7 +1,12 @@ from typing import Optional -async def resolve_target(aidap_client, target_id: Optional[str]) -> tuple[Optional[str], Optional[str]]: - if not target_id: - return None, None - return await aidap_client.resolve_workspace_and_branch(target_id) +def resolve_workspace_id(workspace_id: Optional[str]) -> Optional[str]: + if not workspace_id: + return None + normalized_id = workspace_id.strip() + if not normalized_id: + return None + if normalized_id.startswith("br-"): + raise ValueError("workspace_id must be a workspace ID; branch IDs are not supported") + return normalized_id From 325b168f4c6222a45efe484c988be5f46857dec7 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Tue, 10 Mar 2026 16:14:59 +0800 Subject: [PATCH 27/32] =?UTF-8?q?feat:=E6=94=AF=E6=8C=81sse=E5=92=8Cstream?= =?UTF-8?q?able?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 25 ++--- server/mcp_server_supabase/README_zh.md | 25 ++--- .../src/mcp_server_supabase/access_policy.py | 106 ++++-------------- .../src/mcp_server_supabase/scoped_mcp.py | 25 +---- 4 files changed, 35 insertions(+), 146 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index 44447a48..79fe8916 100644 --- 
a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -93,10 +93,10 @@ Static AK/SK can be obtained from the [Volcengine API Access Key console](https: | `VOLCENGINE_SECRET_KEY` | No | - | Volcengine secret key for local static authentication | | `VOLCENGINE_SESSION_TOKEN` | No | - | Optional session token used with temporary local credentials | | `VOLCENGINE_REGION` | No | `cn-beijing` | Region used for the Volcengine API | -| `WORKSPACE_REF` | No | - | Connection-level hard scope. When set, `account` tools are hidden and workspace-scoped calls are forced to this target | +| `WORKSPACE_REF` | No | - | Startup-level hard scope. When set, `account` tools are hidden and workspace-scoped calls are forced to this target | | `FEATURES` | No | `account,database,debugging,development,docs,functions,branching` | Official feature groups. `storage` is disabled by default | | `DISABLED_TOOLS` | No | - | Comma-separated denylist applied after all other policy filters | -| `READ_ONLY` | No | `false` | Server-level default for connection `read_only`; when enabled, mutating tools are hidden | +| `READ_ONLY` | No | `false` | Startup-level read-only switch; when enabled, mutating tools are hidden | | `SUPABASE_WORKSPACE_SLUG` | No | `default` | Project slug used by Edge Functions APIs | | `SUPABASE_ENDPOINT_SCHEME` | No | `http` | Endpoint scheme used when building workspace URLs | | `MCP_SERVER_HOST` | No | `0.0.0.0` | Host used by `sse` and `streamable-http` transports | @@ -204,11 +204,11 @@ The package exposes `mcp-server-supabase`, the compatibility alias `supabase-aid ## Usage Notes -- `WORKSPACE_REF` applies a hard workspace scope to the connection and removes `workspace_id` from visible tool schemas. +- `WORKSPACE_REF` applies a hard workspace scope for the server instance and removes `workspace_id` from visible tool schemas. 
- When `WORKSPACE_REF` is active, `account` tools are hidden and any explicit `workspace_id` outside the scope is rejected. - `FEATURES` accepts only the official groups: `account`, `docs`, `database`, `debugging`, `development`, `functions`, `storage`, and `branching`. - If `FEATURES` is not set, the default enabled groups are `account`, `database`, `debugging`, `development`, `docs`, `functions`, and `branching`. `storage` stays disabled by default. -- `read_only=true` can be supplied as an HTTP query parameter to hide all mutating tools for that connection. `READ_ONLY=true` applies the same policy as a server default. +- `READ_ONLY=true` hides all mutating tools for the server instance. - `DISABLED_TOOLS` takes tool names such as `execute_sql,deploy_edge_function` and removes them after the rest of the policy has been resolved. - Credential precedence is: static env AK/SK, request `authorization`, env `authorization`, then VeFaaS IAM credentials. - Request-scoped STS credentials disable workspace metadata cache reuse to avoid cross-connection cache leakage. @@ -222,24 +222,13 @@ The package exposes `mcp-server-supabase`, the compatibility alias `supabase-aid ## Policy Precedence -### Tool filtering order within one connection +### Tool filtering order at startup 1. `features` selects the base tool set -2. `workspace_ref` removes `account` tools and scopes the connection to one workspace +2. `workspace_ref` removes `account` tools and scopes the server to one workspace 3. `read_only` removes all mutating tools 4. `disabled_tools` removes specific tool names last -### Server defaults vs connection-scoped options - -1. `workspace_ref` -The server setting is a hard boundary. A connection that sends a different `workspace_ref` is rejected. If the server does not set one, the connection may choose its own. -2. `features` -If both the server and the connection set `features`, the effective set is the intersection. 
A connection cannot widen the server-allowed feature range. -3. `read_only` -If either the server or the connection sets `read_only=true`, the effective result is `true`. -4. `disabled_tools` -Server-side and connection-side deny lists are unioned. If either side disables a tool, it stays unavailable. - ## Integration Modes ### AI tools @@ -259,7 +248,7 @@ If your agent runtime can spawn a local MCP process, you can keep using `stdio`. - `streamable-http`: connect to `http://:/mcp` - `sse`: connect to `http://:/sse` and post messages to `http://:/messages/` - Remote or cloud deployments can forward STS credentials with the `authorization` header instead of baking long-lived AK/SK into the server environment -- Connection-scoped options can be passed through HTTP query parameters, including `workspace_ref`, `features`, `read_only`, and `disabled_tools` +- Tool visibility and workspace scope are fixed when the server starts through env vars or CLI flags ## License diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index 429c66e7..f841dab7 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -94,10 +94,10 @@ | `VOLCENGINE_SECRET_KEY` | 否 | - | 本地静态鉴权使用的火山引擎 Secret Key | | `VOLCENGINE_SESSION_TOKEN` | 否 | - | 临时本地凭证使用的 Session Token | | `VOLCENGINE_REGION` | 否 | `cn-beijing` | 火山引擎 API 所在地域 | -| `WORKSPACE_REF` | 否 | - | 连接级 workspace scope,设置后会隐藏 `account` 组工具,并强制所有 workspace-scoped 调用只能访问这个目标 | +| `WORKSPACE_REF` | 否 | - | 服务启动级 workspace scope,设置后会隐藏 `account` 组工具,并强制所有 workspace-scoped 调用只能访问这个目标 | | `FEATURES` | 否 | `account,database,debugging,development,docs,functions,branching` | 官方 feature groups,`storage` 默认关闭 | | `DISABLED_TOOLS` | 否 | - | 逗号分隔的工具黑名单,在其他策略之后做最终剔除 | -| `READ_ONLY` | 否 | `false` | 连接级 `read_only` 的服务端默认值;启用后会隐藏所有写工具 | +| `READ_ONLY` | 否 | `false` | 服务启动级只读开关;启用后会隐藏所有写工具 | | `SUPABASE_WORKSPACE_SLUG` | 否 | `default` | Edge Functions API 使用的项目 slug | | 
`SUPABASE_ENDPOINT_SCHEME` | 否 | `http` | 生成 workspace URL 时使用的协议 | | `MCP_SERVER_HOST` | 否 | `0.0.0.0` | `sse` 和 `streamable-http` 使用的监听地址 | @@ -205,11 +205,11 @@ python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 ## 使用说明 -- `WORKSPACE_REF` 会把连接 hard-scope 到单个目标,并在 tool schema 中移除 `workspace_id`。 +- `WORKSPACE_REF` 会把服务实例 hard-scope 到单个目标,并在 tool schema 中移除 `workspace_id`。 - `WORKSPACE_REF` 生效时,`account` 组工具不会暴露,且显式传入其他 `workspace_id` 会被拒绝。 - `FEATURES` 只接受官方 8 个分组:`account`、`docs`、`database`、`debugging`、`development`、`functions`、`storage`、`branching`。 - 如果没有设置 `FEATURES`,默认启用 `account`、`database`、`debugging`、`development`、`docs`、`functions`、`branching`,`storage` 默认关闭。 -- 可以通过 HTTP query 参数 `read_only=true` 把当前连接切到只读模式,并隐藏所有写工具。`READ_ONLY=true` 会把这条策略作为服务端默认值。 +- `READ_ONLY=true` 会让整个服务实例进入只读模式,并隐藏所有写工具。 - `DISABLED_TOOLS` 填工具名,例如 `execute_sql,deploy_edge_function`,会在其他策略计算完成后做最终剔除。 - 凭证优先级是:静态环境变量 AK/SK、请求 `authorization`、环境变量 `authorization`、VeFaaS IAM 凭证。 - 当凭证来自单次请求的 STS header 时,workspace 元数据相关缓存会自动停用,避免跨连接复用缓存。 @@ -223,24 +223,13 @@ python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 ## 配置优先级 -### 单条连接内的工具过滤顺序 +### 启动时的工具过滤顺序 1. `features` 先决定基础工具集合 -2. `workspace_ref` 再移除 `account` 工具,并把连接限制到单个 workspace +2. `workspace_ref` 再移除 `account` 工具,并把服务限制到单个 workspace 3. `read_only` 再移除所有写工具 4. `disabled_tools` 最后按工具名做剔除 -### 服务端默认值和连接参数的合并规则 - -1. `workspace_ref` -服务端配置的是硬边界。连接如果传了不同的 `workspace_ref` 会被拒绝;如果服务端没配,连接可以自行指定。 -2. `features` -如果服务端和连接都配置了,实际生效的是两者交集;连接不能扩大服务端允许的 feature 范围。 -3. `read_only` -只要服务端或连接任意一侧是 `true`,最终就是 `true`。 -4. 
`disabled_tools` -服务端和连接两边会取并集,任意一侧禁掉的工具最终都不可用。 - ## 接入方式 ### AI 工具 @@ -260,7 +249,7 @@ python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 - `streamable-http`:连接 `http://:/mcp` - `sse`:连接 `http://:/sse`,并向 `http://:/messages/` 投递消息 - 远程或云部署场景可以通过 `authorization` header 透传 STS 凭证,而不是把长期 AK/SK 固化在服务环境变量里 -- 连接级参数可以通过 HTTP query 传入,例如 `workspace_ref`、`features`、`read_only`、`disabled_tools` +- 工具可见性和 workspace scope 在服务启动时通过环境变量或 CLI 参数固定下来 ## License diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py b/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py index 9f134921..956a1581 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py @@ -118,26 +118,6 @@ def _parse_name_set(value: Any) -> frozenset[str] | None: return frozenset(names) -def _parse_query_name_set(params: Any, name: str) -> frozenset[str] | None: - if params is None: - return None - values: list[str] = [] - if hasattr(params, "getlist"): - values = [value for value in params.getlist(name) if value is not None] - elif hasattr(params, "get"): - value = params.get(name) - if value is not None: - values = [value] - if not values: - return None - names: list[str] = [] - for value in values: - names.extend(_expand_names(value)) - if not names: - return frozenset() - return frozenset(names) - - def _parse_workspace_ref(value: Any) -> str | None: if value is None: return None @@ -168,21 +148,6 @@ def _parse_read_only(value: Any) -> bool | None: raise ValueError("read_only must be true or false") -def _parse_query_read_only(params: Any) -> bool | None: - if params is None: - return None - values: list[str] = [] - if hasattr(params, "getlist"): - values = [value for value in params.getlist("read_only") if value is not None] - elif hasattr(params, "get"): - value = params.get("read_only") - if value is not None: - values = [value] - if not 
values: - return None - return _parse_read_only(values[-1]) - - def _validate_features(features: frozenset[str] | None) -> frozenset[str] | None: if features is None: return None @@ -215,53 +180,13 @@ def build_partial_access_policy( ) -def build_query_access_policy(params: Any) -> PartialAccessPolicy | None: - if params is None: - return None - workspace_ref = _parse_workspace_ref(params.get("workspace_ref")) if hasattr(params, "get") else None - features = _validate_features(_parse_query_name_set(params, "features")) - read_only = _parse_query_read_only(params) - disabled_tools = _validate_tools(_parse_query_name_set(params, "disabled_tools"), "disabled_tools") - if workspace_ref is None and features is None and read_only is None and disabled_tools is None: - return None - return PartialAccessPolicy( - workspace_ref=workspace_ref, - features=features, - read_only=read_only, - disabled_tools=disabled_tools, - ) - - -def resolve_access_policy( - server_policy: PartialAccessPolicy | None, - request_policy: PartialAccessPolicy | None, -) -> ResolvedAccessPolicy: - server_policy = server_policy or PartialAccessPolicy() - request_policy = request_policy or PartialAccessPolicy() - - if server_policy.workspace_ref and request_policy.workspace_ref and server_policy.workspace_ref != request_policy.workspace_ref: - raise ValueError("workspace_ref does not match the server scope") - workspace_ref = server_policy.workspace_ref or request_policy.workspace_ref - - features = DEFAULT_FEATURE_GROUPS - if server_policy.features is not None: - features = server_policy.features - if request_policy.features is not None: - features = request_policy.features if server_policy.features is None else features & request_policy.features - - read_only = bool(server_policy.read_only) or bool(request_policy.read_only) - - disabled_tools = frozenset() - if server_policy.disabled_tools: - disabled_tools |= server_policy.disabled_tools - if request_policy.disabled_tools: - disabled_tools |= 
request_policy.disabled_tools - +def resolve_access_policy(policy: PartialAccessPolicy | None) -> ResolvedAccessPolicy: + policy = policy or PartialAccessPolicy() return ResolvedAccessPolicy( - workspace_ref=workspace_ref, - features=features, - read_only=read_only, - disabled_tools=disabled_tools, + workspace_ref=policy.workspace_ref, + features=policy.features or DEFAULT_FEATURE_GROUPS, + read_only=bool(policy.read_only), + disabled_tools=policy.disabled_tools or frozenset(), ) @@ -276,12 +201,21 @@ def resolve_allowed_tools(policy: ResolvedAccessPolicy) -> frozenset[str]: def workspace_scope_schema(tool_name: str, input_schema: dict[str, Any], workspace_ref: str | None) -> dict[str, Any]: - if not workspace_ref or tool_name not in SCOPED_TOOL_NAMES: + if tool_name not in SCOPED_TOOL_NAMES: return input_schema - properties = dict(input_schema.get("properties", {})) - properties.pop("workspace_id", None) result = dict(input_schema) + properties = dict(input_schema.get("properties", {})) result["properties"] = properties - if "required" in result: - result["required"] = [name for name in result.get("required", []) if name != "workspace_id"] + required = [name for name in result.get("required", []) if name != "workspace_id"] + if workspace_ref: + properties.pop("workspace_id", None) + if required: + result["required"] = required + elif "required" in result: + result.pop("required", None) + return result + if "workspace_id" in properties and "workspace_id" not in required: + required.append("workspace_id") + if required: + result["required"] = required return result diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/scoped_mcp.py b/server/mcp_server_supabase/src/mcp_server_supabase/scoped_mcp.py index a103bd75..08df2b0b 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/scoped_mcp.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/scoped_mcp.py @@ -4,9 +4,7 @@ from .access_policy import ( PartialAccessPolicy, - ResolvedAccessPolicy, 
SCOPED_TOOL_NAMES, - build_query_access_policy, resolve_access_policy, resolve_allowed_tools, workspace_scope_schema, @@ -17,30 +15,9 @@ class ScopedFastMCP(FastMCP): def __init__(self, *args, access_policy: PartialAccessPolicy | None = None, **kwargs): super().__init__(*args, **kwargs) self._access_policy = access_policy or PartialAccessPolicy() - self._session_policies: dict[int, ResolvedAccessPolicy] = {} def _resolve_current_policy(self): - context = self.get_context() - request_context = getattr(context, "_request_context", None) - if request_context is None: - return resolve_access_policy(self._access_policy, None) - - session_key = id(request_context.session) - request = request_context.request - if request is not None: - request_policy = build_query_access_policy(request.query_params) - if request_policy is not None: - resolved_policy = resolve_access_policy(self._access_policy, request_policy) - self._session_policies[session_key] = resolved_policy - return resolved_policy - - cached_policy = self._session_policies.get(session_key) - if cached_policy is not None: - return cached_policy - - resolved_policy = resolve_access_policy(self._access_policy, None) - self._session_policies[session_key] = resolved_policy - return resolved_policy + return resolve_access_policy(self._access_policy) async def list_tools(self): policy = self._resolve_current_policy() From 0a2793dbe3890cc5625bbff4bd0de22104c73e2c Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Tue, 10 Mar 2026 20:39:32 +0800 Subject: [PATCH 28/32] =?UTF-8?q?feat:=E6=94=AF=E6=8C=81sse=E5=92=8Cstream?= =?UTF-8?q?able?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 7 +- server/mcp_server_supabase/README_zh.md | 7 +- server/mcp_server_supabase/pyproject.toml | 1 - .../src/mcp_server_supabase/access_policy.py | 85 ++----- .../src/mcp_server_supabase/config.py | 58 ----- .../src/mcp_server_supabase/credentials.py | 22 +- 
.../platform/aidap_client.py | 88 ++----- .../platform/supabase_client.py | 44 ++-- .../src/mcp_server_supabase/scoped_mcp.py | 27 +-- .../src/mcp_server_supabase/server.py | 177 ++++++-------- .../src/mcp_server_supabase/tool_registry.py | 225 +++++++++++++++--- .../src/mcp_server_supabase/tools/base.py | 5 - .../tools/database_tools.py | 33 +-- .../tools/edge_function_tools.py | 44 ++-- .../tools/storage_tools.py | 8 +- .../tools/workspace_tools.py | 219 ++++++++--------- .../src/mcp_server_supabase/utils/__init__.py | 4 +- .../mcp_server_supabase/utils/decorators.py | 17 +- 18 files changed, 461 insertions(+), 610 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index 79fe8916..e2391599 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -64,7 +64,7 @@ No tools are currently exposed. | `list_branches` | List branches under a workspace | | `create_branch` | Create a development branch | | `delete_branch` | Delete a development branch | -| `reset_branch` | Reset a branch to its baseline state | +| `restore_branch` | Restore a branch to its baseline state | ### `storage` @@ -200,7 +200,7 @@ When the server runs behind a remote MCP gateway or another agent platform, you } ``` -The package exposes `mcp-server-supabase`, the compatibility alias `supabase-aidap`, `mcp-server-supabase-sse`, and `mcp-server-supabase-streamable`. The examples above use `mcp-server-supabase`. +The package exposes `mcp-server-supabase`, `mcp-server-supabase-sse`, and `mcp-server-supabase-streamable`. The examples above use `mcp-server-supabase`. ## Usage Notes @@ -211,10 +211,9 @@ The package exposes `mcp-server-supabase`, the compatibility alias `supabase-aid - `READ_ONLY=true` hides all mutating tools for the server instance. - `DISABLED_TOOLS` takes tool names such as `execute_sql,deploy_edge_function` and removes them after the rest of the policy has been resolved. 
- Credential precedence is: static env AK/SK, request `authorization`, env `authorization`, then VeFaaS IAM credentials. -- Request-scoped STS credentials disable workspace metadata cache reuse to avoid cross-connection cache leakage. - `workspace_id` and `workspace_ref` accept workspace IDs only. Branch IDs such as `br-xxxx` are rejected. - `get_publishable_keys` resolves the default branch automatically when needed. -- `reset_branch` accepts `migration_version`, but the current Volcengine API ignores that value and performs a branch reset only. +- `restore_branch` does not support `migration_version`; it maps to the current Volcengine `BranchRestore` capability. - `deploy_edge_function` currently supports `native-node20/v1`, `native-python3.9/v1`, `native-python3.10/v1`, and `native-python3.12/v1`. - `--transport sse` serves the MCP SSE endpoint at `MCP_SSE_PATH` and the message endpoint at `MCP_MESSAGE_PATH`. - `--transport streamable-http` serves the MCP HTTP endpoint at `STREAMABLE_HTTP_PATH`. 
diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index f841dab7..c6726bbf 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -65,7 +65,7 @@ | `list_branches` | 列出 workspace 下的分支 | | `create_branch` | 创建开发分支 | | `delete_branch` | 删除开发分支 | -| `reset_branch` | 将分支重置到初始状态 | +| `restore_branch` | 将分支恢复到初始状态 | ### `storage` @@ -201,7 +201,7 @@ python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 } ``` -这个包同时暴露了 `mcp-server-supabase`、兼容别名 `supabase-aidap`、`mcp-server-supabase-sse` 和 `mcp-server-supabase-streamable` 四个入口,示例统一使用 `mcp-server-supabase`。 +这个包同时暴露了 `mcp-server-supabase`、`mcp-server-supabase-sse` 和 `mcp-server-supabase-streamable` 三个入口,示例统一使用 `mcp-server-supabase`。 ## 使用说明 @@ -212,10 +212,9 @@ python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 - `READ_ONLY=true` 会让整个服务实例进入只读模式,并隐藏所有写工具。 - `DISABLED_TOOLS` 填工具名,例如 `execute_sql,deploy_edge_function`,会在其他策略计算完成后做最终剔除。 - 凭证优先级是:静态环境变量 AK/SK、请求 `authorization`、环境变量 `authorization`、VeFaaS IAM 凭证。 -- 当凭证来自单次请求的 STS header 时,workspace 元数据相关缓存会自动停用,避免跨连接复用缓存。 - `workspace_id` 和 `workspace_ref` 只接受 workspace ID,`br-xxxx` 这样的 branch ID 会被直接拒绝。 - `get_publishable_keys` 在需要时会自动解析默认分支。 -- `reset_branch` 虽然接收 `migration_version` 参数,但当前火山引擎 API 会忽略这个值,只执行分支重置。 +- `restore_branch` 不支持 `migration_version`,当前实际映射到火山引擎的 `BranchRestore` 能力。 - `deploy_edge_function` 当前支持 `native-node20/v1`、`native-python3.9/v1`、`native-python3.10/v1`、`native-python3.12/v1`。 - `--transport sse` 会在 `MCP_SSE_PATH` 暴露 SSE 连接地址,并在 `MCP_MESSAGE_PATH` 暴露消息投递地址。 - `--transport streamable-http` 会在 `STREAMABLE_HTTP_PATH` 暴露 MCP HTTP 地址。 diff --git a/server/mcp_server_supabase/pyproject.toml b/server/mcp_server_supabase/pyproject.toml index f1c92777..746a3170 100644 --- a/server/mcp_server_supabase/pyproject.toml +++ b/server/mcp_server_supabase/pyproject.toml @@ -28,7 +28,6 @@ legacy = [ [project.scripts] 
mcp-server-supabase = "mcp_server_supabase.server:main" -supabase-aidap = "mcp_server_supabase.server:main" mcp-server-supabase-sse = "mcp_server_supabase.sse:main" mcp-server-supabase-streamable = "mcp_server_supabase.streamable_http:main" diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py b/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py index 956a1581..978faa16 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/access_policy.py @@ -2,6 +2,7 @@ from dataclasses import dataclass from typing import Any +from .tool_registry import TOOL_DEFINITIONS OFFICIAL_FEATURE_GROUPS = ( "account", @@ -23,65 +24,21 @@ "branching", }) - -@dataclass(frozen=True) -class ToolPolicy: - feature: str - scoped: bool - mutating: bool - - -TOOL_POLICIES = { - "list_workspaces": ToolPolicy("account", False, False), - "get_workspace": ToolPolicy("account", True, False), - "create_workspace": ToolPolicy("account", False, True), - "pause_workspace": ToolPolicy("account", True, True), - "restore_workspace": ToolPolicy("account", True, True), - "execute_sql": ToolPolicy("database", True, True), - "list_tables": ToolPolicy("database", True, False), - "list_migrations": ToolPolicy("database", True, False), - "list_extensions": ToolPolicy("database", True, False), - "apply_migration": ToolPolicy("database", True, True), - "get_workspace_url": ToolPolicy("development", True, False), - "get_publishable_keys": ToolPolicy("development", True, False), - "generate_typescript_types": ToolPolicy("development", True, False), - "list_edge_functions": ToolPolicy("functions", True, False), - "get_edge_function": ToolPolicy("functions", True, False), - "deploy_edge_function": ToolPolicy("functions", True, True), - "delete_edge_function": ToolPolicy("functions", True, True), - "list_storage_buckets": ToolPolicy("storage", True, False), - "create_storage_bucket": 
ToolPolicy("storage", True, True), - "delete_storage_bucket": ToolPolicy("storage", True, True), - "get_storage_config": ToolPolicy("storage", True, False), - "list_branches": ToolPolicy("branching", True, False), - "create_branch": ToolPolicy("branching", True, True), - "delete_branch": ToolPolicy("branching", True, True), - "reset_branch": ToolPolicy("branching", True, True), -} - -ALL_TOOL_NAMES = frozenset(TOOL_POLICIES.keys()) +ALL_TOOL_NAMES = frozenset(tool.name for tool in TOOL_DEFINITIONS) FEATURE_TOOLS = { - feature: frozenset(name for name, policy in TOOL_POLICIES.items() if policy.feature == feature) + feature: frozenset(tool.name for tool in TOOL_DEFINITIONS if tool.feature == feature) for feature in OFFICIAL_FEATURE_GROUPS } -SCOPED_TOOL_NAMES = frozenset(name for name, policy in TOOL_POLICIES.items() if policy.scoped) -MUTATING_TOOL_NAMES = frozenset(name for name, policy in TOOL_POLICIES.items() if policy.mutating) +SCOPED_TOOL_NAMES = frozenset(tool.name for tool in TOOL_DEFINITIONS if tool.scoped) +MUTATING_TOOL_NAMES = frozenset(tool.name for tool in TOOL_DEFINITIONS if tool.mutating) @dataclass(frozen=True) -class PartialAccessPolicy: +class AccessPolicy: workspace_ref: str | None = None - features: frozenset[str] | None = None - read_only: bool | None = None - disabled_tools: frozenset[str] | None = None - - -@dataclass(frozen=True) -class ResolvedAccessPolicy: - workspace_ref: str | None - features: frozenset[str] - read_only: bool - disabled_tools: frozenset[str] + features: frozenset[str] = DEFAULT_FEATURE_GROUPS + read_only: bool = False + disabled_tools: frozenset[str] = frozenset() def _normalize_name(value: Any) -> str: @@ -166,31 +123,21 @@ def _validate_tools(tools: frozenset[str] | None, field_name: str) -> frozenset[ return tools -def build_partial_access_policy( +def build_access_policy( workspace_ref: Any = None, features: Any = None, read_only: Any = None, disabled_tools: Any = None, -) -> PartialAccessPolicy: - return 
PartialAccessPolicy( +) -> AccessPolicy: + return AccessPolicy( workspace_ref=_parse_workspace_ref(workspace_ref), - features=_validate_features(_parse_name_set(features)), - read_only=_parse_read_only(read_only), - disabled_tools=_validate_tools(_parse_name_set(disabled_tools), "disabled_tools"), - ) - - -def resolve_access_policy(policy: PartialAccessPolicy | None) -> ResolvedAccessPolicy: - policy = policy or PartialAccessPolicy() - return ResolvedAccessPolicy( - workspace_ref=policy.workspace_ref, - features=policy.features or DEFAULT_FEATURE_GROUPS, - read_only=bool(policy.read_only), - disabled_tools=policy.disabled_tools or frozenset(), + features=_validate_features(_parse_name_set(features)) or DEFAULT_FEATURE_GROUPS, + read_only=bool(_parse_read_only(read_only)), + disabled_tools=_validate_tools(_parse_name_set(disabled_tools), "disabled_tools") or frozenset(), ) -def resolve_allowed_tools(policy: ResolvedAccessPolicy) -> frozenset[str]: +def resolve_allowed_tools(policy: AccessPolicy) -> frozenset[str]: allowed = frozenset().union(*(FEATURE_TOOLS[feature] for feature in policy.features)) if policy.workspace_ref: allowed -= FEATURE_TOOLS["account"] diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/config.py b/server/mcp_server_supabase/src/mcp_server_supabase/config.py index 8f0f07a0..c7f3532c 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/config.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/config.py @@ -1,61 +1,3 @@ import os -import logging -logger = logging.getLogger(__name__) - -READ_ONLY = os.getenv("READ_ONLY", "false").lower() == "true" VOLCENGINE_REGION = os.getenv("VOLCENGINE_REGION", "cn-beijing") - -_default_branch_cache = {} -_endpoint_cache = {} -_api_key_cache = {} - - -def get_branch_cache(): - return _default_branch_cache - - -def get_endpoint_cache(): - return _endpoint_cache - - -def get_api_key_cache(): - return _api_key_cache - - -def clear_branch_cache(workspace_id: str = None): - if 
workspace_id: - _default_branch_cache.pop(workspace_id, None) - else: - _default_branch_cache.clear() - - -def clear_endpoint_cache(workspace_id: str = None, branch_id: str = None): - if workspace_id and branch_id: - _endpoint_cache.pop(f"{workspace_id}:{branch_id}", None) - elif workspace_id: - _endpoint_cache.pop(workspace_id, None) - keys_to_delete = [key for key in _endpoint_cache if key.startswith(f"{workspace_id}:")] - for key in keys_to_delete: - _endpoint_cache.pop(key, None) - else: - _endpoint_cache.clear() - - -def clear_api_key_cache(workspace_id: str = None, branch_id: str = None): - if workspace_id and branch_id: - keys_to_delete = [key for key in _api_key_cache if key.startswith(f"{workspace_id}:") and key.endswith(f":{branch_id}")] - for key in keys_to_delete: - _api_key_cache.pop(key, None) - elif workspace_id: - keys_to_delete = [key for key in _api_key_cache if key == workspace_id or key.startswith(f"{workspace_id}:")] - for key in keys_to_delete: - _api_key_cache.pop(key, None) - else: - _api_key_cache.clear() - - -def clear_all_caches(workspace_id: str = None, branch_id: str = None): - clear_branch_cache(workspace_id) - clear_endpoint_cache(workspace_id, branch_id) - clear_api_key_cache(workspace_id, branch_id) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/credentials.py b/server/mcp_server_supabase/src/mcp_server_supabase/credentials.py index e3b0c58a..17473e43 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/credentials.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/credentials.py @@ -19,8 +19,6 @@ class VolcengineCredentials: access_key: str secret_key: str session_token: str - source: str - cacheable: bool def _get_env_value(*names: str) -> str: @@ -46,7 +44,7 @@ def _validate_sts_time_window(payload: dict[str, Any]) -> None: raise ValueError("STS token is expired") -def _parse_authorization_payload(raw_value: str, source: str, cacheable: bool) -> VolcengineCredentials: +def 
_parse_authorization_payload(raw_value: str) -> VolcengineCredentials: token = raw_value.split(" ", 1)[1] if " " in raw_value else raw_value decoded_bytes = base64.b64decode(token) payload = json.loads(decoded_bytes.decode("utf-8")) @@ -60,8 +58,6 @@ def _parse_authorization_payload(raw_value: str, source: str, cacheable: bool) - access_key=access_key, secret_key=secret_key, session_token=session_token, - source=source, - cacheable=cacheable, ) @@ -95,8 +91,6 @@ def _get_vefaas_iam_credentials() -> VolcengineCredentials | None: access_key=access_key, secret_key=secret_key, session_token=session_token, - source="vefaas_iam", - cacheable=True, ) @@ -109,25 +103,15 @@ def resolve_volcengine_credentials(context_getter: Callable[[], Any] | None = No access_key=static_access_key, secret_key=static_secret_key, session_token=static_session_token, - source="env", - cacheable=True, ) request_authorization = _get_request_authorization(context_getter) if request_authorization: - return _parse_authorization_payload( - request_authorization, - source="request_authorization", - cacheable=False, - ) + return _parse_authorization_payload(request_authorization) env_authorization = _get_env_value(*AUTHORIZATION_ENV_NAMES) if env_authorization: - return _parse_authorization_payload( - env_authorization, - source="env_authorization", - cacheable=True, - ) + return _parse_authorization_payload(env_authorization) vefaas_credentials = _get_vefaas_iam_credentials() if vefaas_credentials is not None: diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py index 4a811b63..2cc1b7a0 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py @@ -4,13 +4,7 @@ import random from collections.abc import Callable from typing import Any, Optional -from ..config import ( - 
VOLCENGINE_REGION, - get_branch_cache, - get_endpoint_cache, - get_api_key_cache, - clear_all_caches, -) +from ..config import VOLCENGINE_REGION from ..credentials import resolve_volcengine_credentials from ..utils import pick_value @@ -22,10 +16,9 @@ from volcenginesdkaidap import AIDAPApi from volcenginesdkaidap.models import ( DescribeBranchesRequest, - DescribeWorkspacesRequest, DescribeWorkspaceEndpointRequest, DescribeAPIKeysRequest, - ResetBranchRequest, + BranchRestoreRequest, CreateBranchRequest, DeleteBranchRequest, BranchSettingsForCreateBranchInput, @@ -48,11 +41,6 @@ def __init__(self, context_getter: Callable[[], Any] | None = None) -> None: def _get_credentials(self): return resolve_volcengine_credentials(self._context_getter) - def _should_use_cache(self, use_cache: bool) -> bool: - if not use_cache: - return False - return self._get_credentials().cacheable - def _create_client(self) -> AIDAPApi: credentials = self._get_credentials() configuration = volcenginesdkcore.Configuration() @@ -97,10 +85,6 @@ def _branch_payload(self, branch: Any, fallback_name: Optional[str] = None) -> d } return {key: value for key, value in payload.items() if value is not None} - def _describe_supabase_workspaces_response(self): - request = DescribeWorkspacesRequest() - return self.client.describe_workspaces(request) - async def _find_branch( self, workspace_id: str, @@ -132,12 +116,7 @@ async def _sleep_backoff( jitter = random.uniform(0.0, delay * 0.2) await asyncio.sleep(delay + jitter) - async def get_default_branch_id(self, workspace_id: str, use_cache: bool = True) -> Optional[str]: - cache = get_branch_cache() - cache_enabled = self._should_use_cache(use_cache) - if cache_enabled and workspace_id in cache: - return cache[workspace_id] - + async def get_default_branch_id(self, workspace_id: str) -> Optional[str]: try: request = DescribeBranchesRequest(workspace_id=workspace_id) response = self.client.describe_branches(request) @@ -145,16 +124,10 @@ async def 
get_default_branch_id(self, workspace_id: str, use_cache: bool = True) if hasattr(response, 'branches') and response.branches: for branch in response.branches: if getattr(branch, 'default', False): - branch_id = branch.branch_id - if cache_enabled: - cache[workspace_id] = branch_id - return branch_id + return branch.branch_id first_branch = response.branches[0] - branch_id = first_branch.branch_id - if cache_enabled: - cache[workspace_id] = branch_id - return branch_id + return first_branch.branch_id return None except Exception as e: @@ -279,7 +252,6 @@ async def delete_branch(self, workspace_id: str, branch_id: str) -> dict: branch_id=branch_id, ) self.client.delete_branch(request) - clear_all_caches(workspace_id, branch_id) return {"success": True} except Exception as e: error_text = str(e) @@ -302,14 +274,7 @@ async def delete_branch(self, workspace_id: str, branch_id: str) -> dict: "retriable": True, } - async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, use_cache: bool = True) -> Optional[str]: - cache_key = f"{workspace_id}:{branch_id}" if branch_id else workspace_id - endpoint_cache = get_endpoint_cache() - cache_enabled = self._should_use_cache(use_cache) - - if cache_enabled and cache_key in endpoint_cache: - return endpoint_cache[cache_key] - + async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None) -> Optional[str]: if not branch_id: branch_id = await self.get_default_branch_id(workspace_id) if not branch_id: @@ -332,37 +297,25 @@ async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None, for domain in domains: if 'volces.com' in domain and 'ivolces.com' not in domain: - if ENDPOINT_SCHEME == "https": - result = f"https://{domain}" - else: - result = f"http://{domain}:80" - if cache_enabled: - endpoint_cache[cache_key] = result - return result + return f"https://{domain}" if ENDPOINT_SCHEME == "https" else f"http://{domain}:80" if domains: - if ENDPOINT_SCHEME == "https": - 
result = f"https://{domains[0]}" - else: - result = f"http://{domains[0]}:80" - if cache_enabled: - endpoint_cache[cache_key] = result - return result + return f"https://{domains[0]}" if ENDPOINT_SCHEME == "https" else f"http://{domains[0]}:80" return None except Exception as e: logger.error(f"Error getting endpoint: {e}") return None - async def reset_branch(self, workspace_id: str, branch_id: str) -> dict: + async def restore_branch(self, workspace_id: str, branch_id: str) -> dict: max_attempts = 8 for attempt in range(1, max_attempts + 1): try: - request = ResetBranchRequest( + request = BranchRestoreRequest( workspace_id=workspace_id, branch_id=branch_id, ) - self.client.reset_branch(request) + self.client.branch_restore(request) return {"success": True} except Exception as e: error_text = str(e) @@ -371,7 +324,7 @@ async def reset_branch(self, workspace_id: str, branch_id: str) -> dict: if retriable and attempt < max_attempts: await self._sleep_backoff(attempt) continue - logger.error(f"Error resetting branch: {e}") + logger.error(f"Error restoring branch: {e}") return { "success": False, "error": error_text, @@ -380,20 +333,13 @@ async def reset_branch(self, workspace_id: str, branch_id: str) -> dict: } return { "success": False, - "error": "reset_branch failed after retries", + "error": "restore_branch failed after retries", "code": "OperationDenied_BranchNotReady", "retriable": True, } async def get_api_key(self, workspace_id: str, key_type: str = "service_role", - branch_id: Optional[str] = None, use_cache: bool = True) -> Optional[str]: - cache_key = f"{workspace_id}:{key_type}:{branch_id}" if branch_id else f"{workspace_id}:{key_type}" - api_key_cache = get_api_key_cache() - cache_enabled = self._should_use_cache(use_cache) - - if cache_enabled and cache_key in api_key_cache: - return api_key_cache[cache_key] - + branch_id: Optional[str] = None) -> Optional[str]: if not branch_id: branch_id = await self.get_default_branch_id(workspace_id) if not 
branch_id: @@ -415,11 +361,7 @@ async def get_api_key(self, workspace_id: str, key_type: str = "service_role", for key in response.api_keys: if hasattr(key, 'type') and key.type == target_type: - result = key.key if hasattr(key, 'key') else None - if result: - if cache_enabled: - api_key_cache[cache_key] = result - return result + return key.key if hasattr(key, 'key') else None return None except Exception as e: diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py index f28387a5..32e25988 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py @@ -2,7 +2,7 @@ import httpx import logging import json -from typing import Optional, Dict, Any +from typing import Dict, Any logger = logging.getLogger(__name__) @@ -30,21 +30,6 @@ class SupabaseClient: def __init__(self, endpoint: str, api_key: str): self.endpoint = endpoint self.api_key = api_key - self._client: Optional[httpx.AsyncClient] = None - - async def _get_client(self) -> httpx.AsyncClient: - """Get or create HTTP client with connection pooling""" - if self._client is None or self._client.is_closed: - self._client = httpx.AsyncClient( - timeout=30.0, - limits=httpx.Limits(max_keepalive_connections=5, max_connections=10) - ) - return self._client - - async def close(self): - """Close HTTP client""" - if self._client and not self._client.is_closed: - await self._client.aclose() async def call_api( self, @@ -57,7 +42,7 @@ async def call_api( timeout: float = 30.0 ) -> Any: url = f"{self.endpoint}{path}" - logger.info(f"[DEBUG] Calling API: method={method}, url={url}, path={path}") + logger.debug("Calling API method=%s url=%s path=%s", method, url, path) default_headers = { "apikey": self.api_key, @@ -66,19 +51,22 @@ async def call_api( if headers: default_headers.update(headers) - 
client = await self._get_client() for attempt in range(3): try: - if content: - response = await client.request( - method, url, content=content, headers=default_headers, - params=params, timeout=timeout - ) - else: - response = await client.request( - method, url, json=json_data, headers=default_headers, - params=params, timeout=timeout - ) + async with httpx.AsyncClient( + timeout=timeout, + limits=httpx.Limits(max_keepalive_connections=5, max_connections=10), + ) as client: + if content: + response = await client.request( + method, url, content=content, headers=default_headers, + params=params, timeout=timeout + ) + else: + response = await client.request( + method, url, json=json_data, headers=default_headers, + params=params, timeout=timeout + ) response.raise_for_status() if response.status_code == 204 or not response.content: diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/scoped_mcp.py b/server/mcp_server_supabase/src/mcp_server_supabase/scoped_mcp.py index 08df2b0b..f15b165a 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/scoped_mcp.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/scoped_mcp.py @@ -3,31 +3,26 @@ from mcp.types import Tool as MCPTool from .access_policy import ( - PartialAccessPolicy, + AccessPolicy, SCOPED_TOOL_NAMES, - resolve_access_policy, resolve_allowed_tools, workspace_scope_schema, ) class ScopedFastMCP(FastMCP): - def __init__(self, *args, access_policy: PartialAccessPolicy | None = None, **kwargs): + def __init__(self, *args, access_policy: AccessPolicy | None = None, **kwargs): super().__init__(*args, **kwargs) - self._access_policy = access_policy or PartialAccessPolicy() - - def _resolve_current_policy(self): - return resolve_access_policy(self._access_policy) + self._access_policy = access_policy or AccessPolicy() + self._allowed_tools = resolve_allowed_tools(self._access_policy) async def list_tools(self): - policy = self._resolve_current_policy() - allowed_tools = 
resolve_allowed_tools(policy) tools = await super().list_tools() visible_tools = [] for tool in tools: - if tool.name not in allowed_tools: + if tool.name not in self._allowed_tools: continue - scoped_schema = workspace_scope_schema(tool.name, tool.inputSchema, policy.workspace_ref) + scoped_schema = workspace_scope_schema(tool.name, tool.inputSchema, self._access_policy.workspace_ref) if scoped_schema is tool.inputSchema: visible_tools.append(tool) continue @@ -37,16 +32,14 @@ async def list_tools(self): return visible_tools async def call_tool(self, name: str, arguments: dict[str, object]): - policy = self._resolve_current_policy() - allowed_tools = resolve_allowed_tools(policy) - if name not in allowed_tools: + if name not in self._allowed_tools: raise ToolError(f"Tool '{name}' is not available for the current connection") effective_arguments = dict(arguments or {}) - if policy.workspace_ref and name in SCOPED_TOOL_NAMES: + if self._access_policy.workspace_ref and name in SCOPED_TOOL_NAMES: provided_workspace_id = effective_arguments.get("workspace_id") - if provided_workspace_id not in {None, "", policy.workspace_ref}: + if provided_workspace_id not in {None, "", self._access_policy.workspace_ref}: raise ToolError("workspace_id is outside the current workspace_ref scope") - effective_arguments["workspace_id"] = policy.workspace_ref + effective_arguments["workspace_id"] = self._access_policy.workspace_ref return await super().call_tool(name, effective_arguments) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/server.py b/server/mcp_server_supabase/src/mcp_server_supabase/server.py index 48586081..38422bc5 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/server.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/server.py @@ -1,10 +1,11 @@ import argparse import logging import os +from dataclasses import dataclass from .runtime import create_runtime from .tool_registry import register_tools -from .access_policy import 
build_partial_access_policy +from .access_policy import AccessPolicy, build_access_policy from .scoped_mcp import ScopedFastMCP logger = logging.getLogger(__name__) @@ -21,67 +22,32 @@ DEFAULT_STREAMABLE_HTTP_PATH = "/mcp" -def _resolve_port(port: int | None = None) -> int: - if port is not None: - return port - return int(os.getenv("MCP_SERVER_PORT", os.getenv("PORT", str(DEFAULT_PORT)))) +@dataclass(frozen=True, slots=True) +class ServerConfig: + host: str + port: int + access_policy: AccessPolicy + mount_path: str + sse_path: str + message_path: str + streamable_http_path: str -def _resolve_host(host: str | None = None) -> str: - if host is not None: - return host - return os.getenv("MCP_SERVER_HOST", DEFAULT_HOST) +def _resolve_string(value: str | None, env_name: str, default: str | None = None) -> str | None: + if value is not None: + return value + if default is None: + return os.getenv(env_name) + return os.getenv(env_name, default) -def _resolve_workspace_ref(workspace_ref: str | None = None) -> str | None: - if workspace_ref is not None: - return workspace_ref - return os.getenv("WORKSPACE_REF") - - -def _resolve_features(features: str | None = None) -> str | None: - if features is not None: - return features - return os.getenv("FEATURES") - - -def _resolve_read_only(read_only: str | bool | None = None) -> str | bool | None: +def _resolve_read_only(read_only: str | bool | None) -> str | bool | None: if read_only is not None: return read_only return os.getenv("READ_ONLY") -def _resolve_disabled_tools(disabled_tools: str | None = None) -> str | None: - if disabled_tools is not None: - return disabled_tools - return os.getenv("DISABLED_TOOLS") - - -def _resolve_mount_path(mount_path: str | None = None) -> str: - if mount_path is not None: - return mount_path - return os.getenv("MCP_MOUNT_PATH", DEFAULT_MOUNT_PATH) - - -def _resolve_sse_path(sse_path: str | None = None) -> str: - if sse_path is not None: - return sse_path - return os.getenv("MCP_SSE_PATH", 
DEFAULT_SSE_PATH) - - -def _resolve_message_path(message_path: str | None = None) -> str: - if message_path is not None: - return message_path - return os.getenv("MCP_MESSAGE_PATH", DEFAULT_MESSAGE_PATH) - - -def _resolve_streamable_http_path(streamable_http_path: str | None = None) -> str: - if streamable_http_path is not None: - return streamable_http_path - return os.getenv("STREAMABLE_HTTP_PATH", DEFAULT_STREAMABLE_HTTP_PATH) - - -def create_mcp( +def build_server_config( port: int | None = None, host: str | None = None, workspace_ref: str | None = None, @@ -92,33 +58,41 @@ def create_mcp( sse_path: str | None = None, message_path: str | None = None, streamable_http_path: str | None = None, -) -> ScopedFastMCP: - resolved_port = _resolve_port(port) - resolved_host = _resolve_host(host) - access_policy = build_partial_access_policy( - workspace_ref=_resolve_workspace_ref(workspace_ref), - features=_resolve_features(features), - read_only=_resolve_read_only(read_only), - disabled_tools=_resolve_disabled_tools(disabled_tools), +) -> ServerConfig: + resolved_port = port if port is not None else int(os.getenv("MCP_SERVER_PORT", os.getenv("PORT", str(DEFAULT_PORT)))) + resolved_host = _resolve_string(host, "MCP_SERVER_HOST", DEFAULT_HOST) or DEFAULT_HOST + return ServerConfig( + host=resolved_host, + port=resolved_port, + access_policy=build_access_policy( + workspace_ref=_resolve_string(workspace_ref, "WORKSPACE_REF"), + features=_resolve_string(features, "FEATURES"), + read_only=_resolve_read_only(read_only), + disabled_tools=_resolve_string(disabled_tools, "DISABLED_TOOLS"), + ), + mount_path=_resolve_string(mount_path, "MCP_MOUNT_PATH", DEFAULT_MOUNT_PATH) or DEFAULT_MOUNT_PATH, + sse_path=_resolve_string(sse_path, "MCP_SSE_PATH", DEFAULT_SSE_PATH) or DEFAULT_SSE_PATH, + message_path=_resolve_string(message_path, "MCP_MESSAGE_PATH", DEFAULT_MESSAGE_PATH) or DEFAULT_MESSAGE_PATH, + streamable_http_path=_resolve_string(streamable_http_path, "STREAMABLE_HTTP_PATH", 
DEFAULT_STREAMABLE_HTTP_PATH) or DEFAULT_STREAMABLE_HTTP_PATH, ) + + +def create_mcp(config: ServerConfig) -> ScopedFastMCP: mcp = ScopedFastMCP( "Supabase MCP Server (Volcengine)", - access_policy=access_policy, - host=resolved_host, - port=resolved_port, - mount_path=_resolve_mount_path(mount_path), - sse_path=_resolve_sse_path(sse_path), - message_path=_resolve_message_path(message_path), - streamable_http_path=_resolve_streamable_http_path(streamable_http_path), + access_policy=config.access_policy, + host=config.host, + port=config.port, + mount_path=config.mount_path, + sse_path=config.sse_path, + message_path=config.message_path, + streamable_http_path=config.streamable_http_path, ) runtime = create_runtime(context_getter=mcp.get_context) register_tools(mcp, runtime) return mcp -mcp = create_mcp() - - def run_server( transport: str = "stdio", port: int | None = None, @@ -128,17 +102,18 @@ def run_server( read_only: str | bool | None = None, disabled_tools: str | None = None, ) -> None: - create_mcp( + config = build_server_config( port=port, host=host, workspace_ref=workspace_ref, features=features, read_only=read_only, disabled_tools=disabled_tools, - ).run(transport=transport) + ) + create_mcp(config).run(transport=transport) -def main(): +def main() -> None: parser = argparse.ArgumentParser(description="Supabase MCP Server") parser.add_argument( "--transport", @@ -149,42 +124,13 @@ def main(): ) parser.add_argument("--host", type=str, default=None, help="Host to bind for network transports") parser.add_argument("--port", type=int, default=None, help="Port to run the server on") - parser.add_argument("--workspace-ref", type=str, default=None, help="Hard-scope the connection to a single workspace") + parser.add_argument("--workspace-ref", type=str, default=None, help="Hard-scope the server to a single workspace") parser.add_argument("--features", type=str, default=None, help="Comma-separated official feature groups") - parser.add_argument("--read-only", 
nargs="?", const="true", default=None, help="Hide all mutating tools for this connection") + parser.add_argument("--read-only", nargs="?", const="true", default=None, help="Hide all mutating tools for the server") parser.add_argument("--disabled-tools", type=str, default=None, help="Comma-separated blacklist of tool names") args = parser.parse_args() - resolved_host = _resolve_host(args.host) - resolved_port = _resolve_port(args.port) - resolved_workspace_ref = _resolve_workspace_ref(args.workspace_ref) - resolved_features = _resolve_features(args.features) - resolved_read_only = _resolve_read_only(args.read_only) - resolved_disabled_tools = _resolve_disabled_tools(args.disabled_tools) - resolved_read_only_value = build_partial_access_policy(read_only=resolved_read_only).read_only - - logger.info("Starting Supabase MCP Server with %s transport", args.transport) - logger.info("Read-only mode: %s", bool(resolved_read_only_value)) - if resolved_workspace_ref: - logger.info("Workspace scope: %s", resolved_workspace_ref) - if resolved_features: - logger.info("Feature groups: %s", resolved_features) - if resolved_read_only_value is not None: - logger.info("Connection read_only: %s", resolved_read_only_value) - if resolved_disabled_tools: - logger.info("Disabled tools: %s", resolved_disabled_tools) - if args.transport != "stdio": - logger.info( - "Server binding: host=%s port=%s sse_path=%s message_path=%s streamable_http_path=%s", - resolved_host, - resolved_port, - _resolve_sse_path(), - _resolve_message_path(), - _resolve_streamable_http_path(), - ) - - run_server( - transport=args.transport, + config = build_server_config( port=args.port, host=args.host, workspace_ref=args.workspace_ref, @@ -193,6 +139,25 @@ def main(): disabled_tools=args.disabled_tools, ) + logger.info("Starting Supabase MCP Server with %s transport", args.transport) + logger.info("Read-only mode: %s", config.access_policy.read_only) + if config.access_policy.workspace_ref: + logger.info("Workspace 
scope: %s", config.access_policy.workspace_ref) + logger.info("Feature groups: %s", ",".join(sorted(config.access_policy.features))) + if config.access_policy.disabled_tools: + logger.info("Disabled tools: %s", ",".join(sorted(config.access_policy.disabled_tools))) + if args.transport != "stdio": + logger.info( + "Server binding: host=%s port=%s sse_path=%s message_path=%s streamable_http_path=%s", + config.host, + config.port, + config.sse_path, + config.message_path, + config.streamable_http_path, + ) + + create_mcp(config).run(transport=args.transport) + if __name__ == "__main__": main() diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py index 9934cda4..0c34081e 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py @@ -1,29 +1,46 @@ +from dataclasses import dataclass +from typing import Awaitable, Callable + from mcp.server.fastmcp import FastMCP from .runtime import SupabaseRuntime -def register_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: - _register_edge_tools(mcp, runtime) - _register_storage_tools(mcp, runtime) - _register_database_tools(mcp, runtime) - _register_workspace_tools(mcp, runtime) +ToolBuilder = Callable[[SupabaseRuntime], Callable[..., Awaitable[str]]] + +@dataclass(frozen=True) +class ToolDefinition: + name: str + feature: str + scoped: bool + mutating: bool + build: ToolBuilder -def _register_edge_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: + +def _build_list_edge_functions(runtime: SupabaseRuntime): edge_tools = runtime.edge_tools - @mcp.tool() async def list_edge_functions(workspace_id: str = None) -> str: """Lists all Edge Functions in a workspace.""" return await edge_tools.list_edge_functions(workspace_id) - @mcp.tool() + return list_edge_functions + + +def _build_get_edge_function(runtime: SupabaseRuntime): + 
edge_tools = runtime.edge_tools + async def get_edge_function(function_name: str, workspace_id: str = None) -> str: """Retrieves the source code and configuration for an Edge Function.""" return await edge_tools.get_edge_function(function_name, workspace_id) - @mcp.tool() + return get_edge_function + + +def _build_deploy_edge_function(runtime: SupabaseRuntime): + edge_tools = runtime.edge_tools + async def deploy_edge_function( function_name: str, source_code: str, @@ -51,21 +68,32 @@ async def deploy_edge_function( workspace_id, ) - @mcp.tool() + return deploy_edge_function + + +def _build_delete_edge_function(runtime: SupabaseRuntime): + edge_tools = runtime.edge_tools + async def delete_edge_function(function_name: str, workspace_id: str = None) -> str: """Deletes an Edge Function.""" return await edge_tools.delete_edge_function(function_name, workspace_id) + return delete_edge_function + -def _register_storage_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: +def _build_list_storage_buckets(runtime: SupabaseRuntime): storage_tools = runtime.storage_tools - @mcp.tool() async def list_storage_buckets(workspace_id: str = None) -> str: """Lists all storage buckets in a workspace.""" return await storage_tools.list_storage_buckets(workspace_id) - @mcp.tool() + return list_storage_buckets + + +def _build_create_storage_bucket(runtime: SupabaseRuntime): + storage_tools = runtime.storage_tools + async def create_storage_bucket( bucket_name: str, public: bool = False, @@ -82,67 +110,114 @@ async def create_storage_bucket( workspace_id, ) - @mcp.tool() + return create_storage_bucket + + +def _build_delete_storage_bucket(runtime: SupabaseRuntime): + storage_tools = runtime.storage_tools + async def delete_storage_bucket(bucket_name: str, workspace_id: str = None) -> str: """Deletes a storage bucket.""" return await storage_tools.delete_storage_bucket(bucket_name, workspace_id) - @mcp.tool() + return delete_storage_bucket + + +def _build_get_storage_config(runtime: 
SupabaseRuntime): + storage_tools = runtime.storage_tools + async def get_storage_config(workspace_id: str = None) -> str: """Gets the storage configuration for a workspace.""" return await storage_tools.get_storage_config(workspace_id) + return get_storage_config + -def _register_database_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: +def _build_execute_sql(runtime: SupabaseRuntime): database_tools = runtime.database_tools - @mcp.tool() async def execute_sql(query: str, workspace_id: str = None) -> str: """Executes raw SQL in the Postgres database.""" return await database_tools.execute_sql(query, workspace_id) - @mcp.tool() + return execute_sql + + +def _build_list_tables(runtime: SupabaseRuntime): + database_tools = runtime.database_tools + async def list_tables(schemas: str = "public", workspace_id: str = None) -> str: """Lists all tables in one or more schemas.""" schema_list = [schema.strip() for schema in schemas.split(",")] return await database_tools.list_tables(schema_list, workspace_id) - @mcp.tool() + return list_tables + + +def _build_list_migrations(runtime: SupabaseRuntime): + database_tools = runtime.database_tools + async def list_migrations(workspace_id: str = None) -> str: """Lists all migrations in the database.""" return await database_tools.list_migrations(workspace_id) - @mcp.tool() + return list_migrations + + +def _build_list_extensions(runtime: SupabaseRuntime): + database_tools = runtime.database_tools + async def list_extensions(workspace_id: str = None) -> str: """Lists all PostgreSQL extensions in the database.""" return await database_tools.list_extensions(workspace_id) - @mcp.tool() + return list_extensions + + +def _build_apply_migration(runtime: SupabaseRuntime): + database_tools = runtime.database_tools + async def apply_migration(name: str, query: str, workspace_id: str = None) -> str: """Applies a migration to the database.""" return await database_tools.apply_migration(name, query, workspace_id) - @mcp.tool() + return 
apply_migration + + +def _build_generate_typescript_types(runtime: SupabaseRuntime): + database_tools = runtime.database_tools + async def generate_typescript_types(schemas: str = "public", workspace_id: str = None) -> str: """Generates TypeScript definitions from database schema.""" schema_list = [schema.strip() for schema in schemas.split(",") if schema.strip()] return await database_tools.generate_typescript_types(schema_list, workspace_id) + return generate_typescript_types -def _register_workspace_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: + +def _build_list_workspaces(runtime: SupabaseRuntime): workspace_tools = runtime.workspace_tools - @mcp.tool() async def list_workspaces() -> str: """Lists all available workspaces.""" return await workspace_tools.list_workspaces() - @mcp.tool() + return list_workspaces + + +def _build_get_workspace(runtime: SupabaseRuntime): + workspace_tools = runtime.workspace_tools + async def get_workspace(workspace_id: str) -> str: """Gets details for a specific workspace.""" return await workspace_tools.get_workspace(workspace_id) - @mcp.tool() + return get_workspace + + +def _build_create_workspace(runtime: SupabaseRuntime): + workspace_tools = runtime.workspace_tools + async def create_workspace( workspace_name: str, engine_version: str = "Supabase_1_24", @@ -151,42 +226,118 @@ async def create_workspace( """Creates a new workspace.""" return await workspace_tools.create_workspace(workspace_name, engine_version, engine_type) - @mcp.tool() + return create_workspace + + +def _build_pause_workspace(runtime: SupabaseRuntime): + workspace_tools = runtime.workspace_tools + async def pause_workspace(workspace_id: str = None) -> str: """Pauses a workspace.""" return await workspace_tools.pause_workspace(workspace_id) - @mcp.tool() + return pause_workspace + + +def _build_restore_workspace(runtime: SupabaseRuntime): + workspace_tools = runtime.workspace_tools + async def restore_workspace(workspace_id: str = None) -> str: 
"""Restores a workspace.""" return await workspace_tools.restore_workspace(workspace_id) - @mcp.tool() + return restore_workspace + + +def _build_get_workspace_url(runtime: SupabaseRuntime): + workspace_tools = runtime.workspace_tools + async def get_workspace_url(workspace_id: str = None) -> str: """Gets API endpoint URL for a workspace.""" return await workspace_tools.get_workspace_url(workspace_id) - @mcp.tool() + return get_workspace_url + + +def _build_get_publishable_keys(runtime: SupabaseRuntime): + workspace_tools = runtime.workspace_tools + async def get_publishable_keys(workspace_id: str = None, reveal: bool = False) -> str: """Gets API keys for a workspace.""" return await workspace_tools.get_publishable_keys(workspace_id, reveal) - @mcp.tool() + return get_publishable_keys + + +def _build_list_branches(runtime: SupabaseRuntime): + workspace_tools = runtime.workspace_tools + async def list_branches(workspace_id: str = None) -> str: """Lists all development branches of a workspace.""" return await workspace_tools.list_branches(workspace_id) - @mcp.tool() + return list_branches + + +def _build_create_branch(runtime: SupabaseRuntime): + workspace_tools = runtime.workspace_tools + async def create_branch(name: str = "develop", workspace_id: str = None) -> str: """Creates a development branch.""" return await workspace_tools.create_branch(name, workspace_id) - @mcp.tool() + return create_branch + + +def _build_delete_branch(runtime: SupabaseRuntime): + workspace_tools = runtime.workspace_tools + async def delete_branch(branch_id: str, workspace_id: str = None) -> str: """Deletes a development branch.""" return await workspace_tools.delete_branch(branch_id, workspace_id) - @mcp.tool() - async def reset_branch(branch_id: str, migration_version: str = None, workspace_id: str = None) -> str: - """Resets a development branch. 
Any untracked data or schema changes will be lost.""" - return await workspace_tools.reset_branch(branch_id, migration_version, workspace_id) + return delete_branch + + +def _build_restore_branch(runtime: SupabaseRuntime): + workspace_tools = runtime.workspace_tools + + async def restore_branch(branch_id: str, workspace_id: str = None) -> str: + """Restores a development branch to its baseline state. Any untracked data or schema changes will be lost.""" + return await workspace_tools.restore_branch(branch_id, workspace_id) + + return restore_branch + + +TOOL_DEFINITIONS = ( + ToolDefinition("list_workspaces", "account", False, False, _build_list_workspaces), + ToolDefinition("get_workspace", "account", True, False, _build_get_workspace), + ToolDefinition("create_workspace", "account", False, True, _build_create_workspace), + ToolDefinition("pause_workspace", "account", True, True, _build_pause_workspace), + ToolDefinition("restore_workspace", "account", True, True, _build_restore_workspace), + ToolDefinition("execute_sql", "database", True, True, _build_execute_sql), + ToolDefinition("list_tables", "database", True, False, _build_list_tables), + ToolDefinition("list_migrations", "database", True, False, _build_list_migrations), + ToolDefinition("list_extensions", "database", True, False, _build_list_extensions), + ToolDefinition("apply_migration", "database", True, True, _build_apply_migration), + ToolDefinition("get_workspace_url", "development", True, False, _build_get_workspace_url), + ToolDefinition("get_publishable_keys", "development", True, False, _build_get_publishable_keys), + ToolDefinition("generate_typescript_types", "development", True, False, _build_generate_typescript_types), + ToolDefinition("list_edge_functions", "functions", True, False, _build_list_edge_functions), + ToolDefinition("get_edge_function", "functions", True, False, _build_get_edge_function), + ToolDefinition("deploy_edge_function", "functions", True, True, 
_build_deploy_edge_function), + ToolDefinition("delete_edge_function", "functions", True, True, _build_delete_edge_function), + ToolDefinition("list_storage_buckets", "storage", True, False, _build_list_storage_buckets), + ToolDefinition("create_storage_bucket", "storage", True, True, _build_create_storage_bucket), + ToolDefinition("delete_storage_bucket", "storage", True, True, _build_delete_storage_bucket), + ToolDefinition("get_storage_config", "storage", True, False, _build_get_storage_config), + ToolDefinition("list_branches", "branching", True, False, _build_list_branches), + ToolDefinition("create_branch", "branching", True, True, _build_create_branch), + ToolDefinition("delete_branch", "branching", True, True, _build_delete_branch), + ToolDefinition("restore_branch", "branching", True, True, _build_restore_branch), +) + + +def register_tools(mcp: FastMCP, runtime: SupabaseRuntime) -> None: + for tool_definition in TOOL_DEFINITIONS: + mcp.tool()(tool_definition.build(runtime)) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py index 2d6c0ed8..3c85c718 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/base.py @@ -14,16 +14,11 @@ def _resolve_workspace_id(self, workspace_id: Optional[str]) -> str: return resolved_workspace_id async def _get_client(self, workspace_id: str) -> SupabaseClient: - import logging - logger = logging.getLogger(__name__) - endpoint = await self.aidap.get_endpoint(workspace_id) - logger.info(f"[DEBUG] Got endpoint for {workspace_id}: {endpoint}") if not endpoint: raise ValueError(f"Could not get endpoint for workspace {workspace_id}") api_key = await self.aidap.get_api_key(workspace_id, "service_role") - logger.info(f"[DEBUG] Got API key for {workspace_id}: {'yes' if api_key else 'no'}") if not api_key: raise ValueError(f"Could not get API key for 
workspace {workspace_id}") diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py index 75d00191..501758b0 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/database_tools.py @@ -2,19 +2,18 @@ import logging from datetime import datetime, timezone from .base import BaseTools -from ..utils import handle_errors, read_only_check +from ..utils import handle_errors logger = logging.getLogger(__name__) class DatabaseTools(BaseTools): - """使用 REST API 方式执行 SQL""" async def _execute_sql_raw(self, query: str, workspace_id: Optional[str] = None) -> List[dict]: if not query or not query.strip(): raise ValueError("SQL query cannot be empty") ws_id = self._resolve_workspace_id(workspace_id) - logger.info( + logger.debug( "Executing SQL query", extra={"workspace_id": ws_id, "query_length": len(query)} ) @@ -30,21 +29,22 @@ async def _execute_sql_raw(self, query: str, workspace_id: Optional[str] = None) logger.debug(f"SQL query returned {len(result)} rows") return result + def _normalize_schemas(self, schemas: Optional[List[str]] = None) -> List[str]: + normalized = [schema.strip() for schema in (schemas or ["public"]) if schema and schema.strip()] + if not normalized: + raise ValueError("At least one schema is required") + for schema in normalized: + if not schema.replace('_', '').isalnum(): + raise ValueError(f"Invalid schema name: {schema}") + return normalized + @handle_errors async def execute_sql(self, query: str, workspace_id: Optional[str] = None) -> List[dict]: return await self._execute_sql_raw(query, workspace_id) @handle_errors async def list_tables(self, schemas: List[str] = None, workspace_id: Optional[str] = None) -> List[dict]: - if schemas is None: - schemas = ["public"] - - # 验证 schema 名称,防止 SQL 注入 - for schema in schemas: - if not schema.replace('_', 
'').isalnum(): - raise ValueError(f"Invalid schema name: {schema}") - - schema_list = "', '".join(schemas) + schema_list = "', '".join(self._normalize_schemas(schemas)) query = f""" SELECT schemaname as schema, @@ -95,7 +95,6 @@ async def list_extensions(self, workspace_id: Optional[str] = None) -> List[dict return await self._execute_sql_raw(query, workspace_id) @handle_errors - @read_only_check async def apply_migration(self, name: str, query: str, workspace_id: Optional[str] = None) -> dict: if not name or not name.strip(): raise ValueError("Migration name cannot be empty") @@ -165,13 +164,7 @@ async def generate_typescript_types( schemas: List[str] = None, workspace_id: Optional[str] = None ) -> str: - if schemas is None: - schemas = ["public"] - for schema in schemas: - if not schema.replace('_', '').isalnum(): - raise ValueError(f"Invalid schema name: {schema}") - - schema_list = "', '".join(schemas) + schema_list = "', '".join(self._normalize_schemas(schemas)) query = f""" SELECT table_schema, diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py index 0d8a118f..9b1912c5 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/edge_function_tools.py @@ -6,13 +6,12 @@ import re from urllib.parse import quote from .base import BaseTools -from ..utils import handle_errors, read_only_check +from ..utils import handle_errors from ..models import EdgeFunction from ..platform.supabase_client import SupabaseApiError logger = logging.getLogger(__name__) -# 运行时配置 RUNTIME_CONFIG = { "native-node20/v1": { "entrypoint": "index.ts", @@ -36,10 +35,9 @@ } } -# 保留的函数名 RESERVED_SLUGS = {"deploy", "body", "health", "metrics"} MAX_SLUG_LENGTH = 127 -MAX_CODE_SIZE = 10 * 1024 * 1024 # 10MB +MAX_CODE_SIZE = 10 * 1024 * 1024 WORKSPACE_SLUG = 
os.getenv("SUPABASE_WORKSPACE_SLUG", "default").strip() or "default" @@ -99,7 +97,15 @@ def _normalize_function_payload(self, payload: object) -> object: return result def _validate_function_name(self, function_name: str) -> None: - return + normalized = (function_name or "").strip() + if not normalized: + raise ValueError("Function name cannot be empty") + if len(normalized) > MAX_SLUG_LENGTH: + raise ValueError(f"Function name too long: {len(normalized)} characters (max {MAX_SLUG_LENGTH})") + if normalized.lower() in RESERVED_SLUGS: + raise ValueError(f"Function name '{normalized}' is reserved") + if not re.fullmatch(r"[A-Za-z0-9][A-Za-z0-9_-]*", normalized): + raise ValueError("Function name must start with a letter or digit and contain only letters, digits, hyphens, or underscores") def _validate_runtime(self, runtime: str) -> None: """验证运行时""" @@ -132,20 +138,20 @@ def _extract_error_text(self, payload: object) -> str: @handle_errors async def list_edge_functions(self, workspace_id: Optional[str] = None) -> List[EdgeFunction]: ws_id = self._resolve_workspace_id(workspace_id) - logger.info(f"Listing edge functions for workspace {ws_id}") + logger.debug("Listing edge functions for workspace %s", ws_id) client = await self._get_client(ws_id) result = await client.call_api(f"/v1/projects/{WORKSPACE_SLUG}/functions") functions = [EdgeFunction(**func) for func in result] - logger.info(f"Found {len(functions)} edge functions") + logger.debug("Found %s edge functions", len(functions)) return functions @handle_errors async def get_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> dict: self._validate_function_name(function_name) ws_id = self._resolve_workspace_id(workspace_id) - logger.info(f"Getting edge function '{function_name}' from workspace {ws_id}") + logger.debug("Getting edge function '%s' from workspace %s", function_name, ws_id) client = await self._get_client(ws_id) encoded_name = quote(function_name, safe="") @@ -162,7 +168,6 
@@ async def get_edge_function(self, function_name: str, workspace_id: Optional[str return EdgeFunction(**result).model_dump() @handle_errors - @read_only_check async def deploy_edge_function( self, function_name: str, @@ -172,31 +177,12 @@ async def deploy_edge_function( import_map: Optional[str] = None, workspace_id: Optional[str] = None ) -> dict: - """ - 部署边缘函数 - - Args: - function_name: 函数名称 - source_code: 源代码 - verify_jwt: 是否验证 JWT - runtime: 运行时环境 (native-node20/v1, native-python3.9/v1, etc.) - import_map: 可选的 import map JSON - workspace_id: 工作空间 ID - - Returns: - 部署结果字典 - - Raises: - ValueError: 参数验证失败 - """ - # 验证输入 self._validate_function_name(function_name) self._validate_runtime(runtime) if not source_code or not source_code.strip(): raise ValueError("Source code cannot be empty") - # HTML 反转义,防止代码中的特殊字符被转义 source_code = html.unescape(source_code) self._validate_code_size(source_code) @@ -235,7 +221,6 @@ async def deploy_edge_function( except json.JSONDecodeError as e: raise ValueError(f"Invalid import map JSON: {e}") - # AIDAP 部署 API 路径 result = await client.call_api( f"/v1/projects/{WORKSPACE_SLUG}/functions/deploy?slug={encoded_name}", method="POST", @@ -252,7 +237,6 @@ async def deploy_edge_function( return result @handle_errors - @read_only_check async def delete_edge_function(self, function_name: str, workspace_id: Optional[str] = None) -> dict: self._validate_function_name(function_name) ws_id = self._resolve_workspace_id(workspace_id) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py index 70288b8e..52b5d62e 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/storage_tools.py @@ -2,7 +2,7 @@ import logging import json from .base import BaseTools -from ..utils import handle_errors, read_only_check +from ..utils import handle_errors from 
..models import StorageConfig logger = logging.getLogger(__name__) @@ -36,16 +36,15 @@ def _normalize_allowed_mime_types(self, allowed_mime_types: Optional[str | list[ @handle_errors async def list_storage_buckets(self, workspace_id: Optional[str] = None) -> List[dict]: ws_id = self._resolve_workspace_id(workspace_id) - logger.info(f"Listing storage buckets for workspace {ws_id}") + logger.debug("Listing storage buckets for workspace %s", ws_id) client = await self._get_client(ws_id) result = await client.call_api("/storage/v1/bucket") - logger.info(f"Found {len(result)} storage buckets") + logger.debug("Found %s storage buckets", len(result)) return result @handle_errors - @read_only_check async def create_storage_bucket( self, bucket_name: str, @@ -78,7 +77,6 @@ async def create_storage_bucket( return await client.call_api("/storage/v1/bucket", method="POST", json_data=data) @handle_errors - @read_only_check async def delete_storage_bucket(self, bucket_name: str, workspace_id: Optional[str] = None) -> dict: if not bucket_name or not bucket_name.strip(): raise ValueError("Bucket name cannot be empty") diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index 1ae5b4ad..685e051c 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -3,40 +3,55 @@ import logging from typing import Any, Optional -from ..utils import compact_dict, pick_value, read_only_check, resolve_workspace_id, to_json +from .base import BaseTools +from ..utils import compact_dict, pick_value, to_json logger = logging.getLogger(__name__) -class WorkspaceTools: - def __init__(self, aidap_client): - self.aidap_client = aidap_client +class WorkspaceTools(BaseTools): + _filter_supports_mode: bool | None = None - def _to_json(self, payload: dict) -> str: - return 
to_json(payload) + @classmethod + def _supports_workspace_filter_mode(cls) -> bool: + if cls._filter_supports_mode is None: + from volcenginesdkaidap.models import FilterForDescribeWorkspacesInput - def _compact(self, payload: dict) -> dict: - return compact_dict(payload) + cls._filter_supports_mode = "mode" in inspect.signature(FilterForDescribeWorkspacesInput).parameters + return cls._filter_supports_mode - def _pick(self, source: Any, *field_names: str) -> Any: - return pick_value(source, *field_names) + def _resolve_workspace_or_response( + self, + workspace_id: Optional[str], + detailed: bool = False, + ) -> tuple[str | None, str | None]: + try: + return self._resolve_workspace_id(workspace_id), None + except ValueError: + return None, self._workspace_required_response(detailed) - def _resolve_workspace_id(self, workspace_id: Optional[str]) -> Optional[str]: - return resolve_workspace_id(workspace_id) + def _workspace_required_response(self, detailed: bool = False) -> str: + payload = { + "success": False, + "error": "workspace_id is required", + } + if detailed: + payload["error_detail"] = self._error_detail("MissingWorkspaceId", "workspace_id is required", False) + return to_json(payload) def _workspace_view(self, source: Any) -> dict: payload = { - "workspace_id": self._pick(source, "workspace_id"), - "workspace_name": self._pick(source, "workspace_name"), - "status": self._pick(source, "workspace_status", "status"), - "region": self._pick(source, "region_id", "region"), - "created_at": self._pick(source, "create_time", "created_at"), - "updated_at": self._pick(source, "update_time", "updated_at"), - "engine_type": self._pick(source, "engine_type"), - "engine_version": self._pick(source, "engine_version"), - "deletion_protection_status": self._pick(source, "deletion_protection_status"), + "workspace_id": pick_value(source, "workspace_id"), + "workspace_name": pick_value(source, "workspace_name"), + "status": pick_value(source, "workspace_status", "status"), 
+ "region": pick_value(source, "region_id", "region"), + "created_at": pick_value(source, "create_time", "created_at"), + "updated_at": pick_value(source, "update_time", "updated_at"), + "engine_type": pick_value(source, "engine_type"), + "engine_version": pick_value(source, "engine_version"), + "deletion_protection_status": pick_value(source, "deletion_protection_status"), } - return self._compact(payload) + return compact_dict(payload) def _branch_view(self, branch: dict, workspace_payload: Optional[dict] = None) -> dict: workspace_payload = workspace_payload or {} @@ -55,26 +70,25 @@ def _branch_view(self, branch: dict, workspace_payload: Optional[dict] = None) - "deletion_protection_status": workspace_payload.get("deletion_protection_status"), "target_type": "branch", } - return self._compact(payload) + return compact_dict(payload) def _describe_workspaces_response(self): from volcenginesdkaidap.models import DescribeWorkspacesRequest, FilterForDescribeWorkspacesInput - parameters = inspect.signature(FilterForDescribeWorkspacesInput).parameters filter_kwargs = { "name": "DBEngineVersion", "value": "Supabase_1_24", } - if "mode" in parameters: + if self._supports_workspace_filter_mode(): filter_kwargs["mode"] = "Exact" filters = [FilterForDescribeWorkspacesInput(**filter_kwargs)] request = DescribeWorkspacesRequest(filters=filters) - return self.aidap_client.client.describe_workspaces(request) + return self.aidap.client.describe_workspaces(request) def _find_workspace_source(self, workspace_id: str) -> Optional[Any]: response = self._describe_workspaces_response() for workspace in list(getattr(response, "workspaces", []) or []): - if self._pick(workspace, "workspace_id") == workspace_id: + if pick_value(workspace, "workspace_id") == workspace_id: return workspace return None @@ -99,14 +113,14 @@ async def list_workspaces(self) -> str: response = self._describe_workspaces_response() raw_workspaces = list(getattr(response, "workspaces", []) or []) workspaces = 
[self._workspace_view(workspace) for workspace in raw_workspaces] - return self._to_json({ + return to_json({ "success": True, "workspaces": workspaces, "count": len(workspaces), }) except Exception as e: logger.error(f"Error listing workspaces: {e}") - return self._to_json({ + return to_json({ "success": False, "error": str(e), }) @@ -114,30 +128,24 @@ async def list_workspaces(self) -> str: async def get_workspace(self, workspace_id: str) -> str: try: ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({ - "success": False, - "error": "workspace_id is required", - }) workspace_source = self._find_workspace_source(ws_id) if workspace_source is None: - return self._to_json({ + return to_json({ "success": False, "error": "Workspace not found", }) workspace_info = self._workspace_view(workspace_source) - return self._to_json({ + return to_json({ "success": True, "workspace": workspace_info, }) except Exception as e: logger.error(f"Error getting workspace: {e}") - return self._to_json({ + return to_json({ "success": False, "error": str(e), }) - @read_only_check async def create_workspace( self, workspace_name: str, @@ -145,14 +153,14 @@ async def create_workspace( engine_type: str = "Supabase", ) -> str: if not workspace_name or not workspace_name.strip(): - return self._to_json({"success": False, "error": "workspace_name is required"}) - result = await self.aidap_client.create_workspace( + return to_json({"success": False, "error": "workspace_name is required"}) + result = await self.aidap.create_workspace( workspace_name=workspace_name.strip(), engine_type=engine_type, engine_version=engine_version, ) if not isinstance(result, dict): - return self._to_json({"success": False, "error": "Unexpected create workspace response"}) + return to_json({"success": False, "error": "Unexpected create workspace response"}) if result.get("success"): mapped = { "success": True, @@ -161,36 +169,33 @@ async def create_workspace( "engine_type": 
result.get("engine_type"), "engine_version": result.get("engine_version"), } - return self._to_json(self._compact(mapped)) - return self._to_json(result) + return to_json(compact_dict(mapped)) + return to_json(result) - @read_only_check async def restore_workspace(self, workspace_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) - result = await self.aidap_client.start_workspace(ws_id) - return self._to_json(result if isinstance(result, dict) else {"success": bool(result), "workspace_id": ws_id}) + ws_id, error_response = self._resolve_workspace_or_response(workspace_id) + if error_response: + return error_response + result = await self.aidap.start_workspace(ws_id) + return to_json(result if isinstance(result, dict) else {"success": bool(result), "workspace_id": ws_id}) - @read_only_check async def pause_workspace(self, workspace_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) - result = await self.aidap_client.stop_workspace(ws_id) - return self._to_json(result if isinstance(result, dict) else {"success": bool(result), "workspace_id": ws_id}) + ws_id, error_response = self._resolve_workspace_or_response(workspace_id) + if error_response: + return error_response + result = await self.aidap.stop_workspace(ws_id) + return to_json(result if isinstance(result, dict) else {"success": bool(result), "workspace_id": ws_id}) - @read_only_check async def create_branch( self, name: str = "develop", workspace_id: Optional[str] = None, ) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) + ws_id, error_response = self._resolve_workspace_or_response(workspace_id) + if error_response: + return error_response - result = 
await self.aidap_client.create_branch(ws_id, name) + result = await self.aidap.create_branch(ws_id, name) if result.get("success") and result.get("branch_id"): branch_payload = self._branch_view(result, {"workspace_id": ws_id}) branch_payload["branch_name"] = branch_payload.get("branch_name") or name @@ -198,38 +203,31 @@ async def create_branch( "success": True, **branch_payload, } - endpoint = await self.aidap_client.get_endpoint(ws_id, branch_id=result["branch_id"], use_cache=False) + endpoint = await self.aidap.get_endpoint(ws_id, branch_id=result["branch_id"]) if endpoint: response_payload["workspace_url"] = endpoint response_payload["api_url"] = endpoint - return self._to_json(self._compact(response_payload)) - return self._to_json(result) + return to_json(compact_dict(response_payload)) + return to_json(result) async def list_branches(self, workspace_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) try: + ws_id = self._resolve_workspace_id(workspace_id) workspace_source = self._find_workspace_source(ws_id) workspace_payload = self._workspace_view(workspace_source) if workspace_source is not None else {"workspace_id": ws_id} - branches = await self.aidap_client.list_branches(ws_id) + branches = await self.aidap.list_branches(ws_id) normalized_branches = [self._branch_view(branch, workspace_payload) for branch in branches] - return self._to_json({"success": True, "branches": normalized_branches}) + return to_json({"success": True, "branches": normalized_branches}) except Exception as e: logger.error(f"Error listing branches: {e}") - return self._to_json({"success": False, "error": str(e)}) + return to_json({"success": False, "error": str(e)}) - @read_only_check async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return 
self._to_json({ - "success": False, - "error": "workspace_id is required", - "error_detail": self._error_detail("MissingWorkspaceId", "workspace_id is required", False), - }) + ws_id, error_response = self._resolve_workspace_or_response(workspace_id, detailed=True) + if error_response: + return error_response if not branch_id or not branch_id.strip(): - return self._to_json({ + return to_json({ "success": False, "error": "branch_id is required", "error_detail": self._error_detail("MissingBranchId", "branch_id is required", False), @@ -237,10 +235,10 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None normalized_branch_id = branch_id.strip() try: - branches = await self.aidap_client.list_branches(ws_id) + branches = await self.aidap.list_branches(ws_id) exists = any(branch.get("branch_id") == normalized_branch_id for branch in branches) if not exists: - return self._to_json({ + return to_json({ "success": False, "error": f"Branch '{normalized_branch_id}' not found in workspace '{ws_id}'", "error_detail": self._error_detail( @@ -251,16 +249,16 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None }) except Exception as e: logger.error(f"Error checking branch before delete: {e}") - return self._to_json({ + return to_json({ "success": False, "error": str(e), "error_detail": self._error_detail("ListBranchesFailed", str(e), True), }) - result = await self.aidap_client.delete_branch(ws_id, normalized_branch_id) + result = await self.aidap.delete_branch(ws_id, normalized_branch_id) if not result.get("success"): error_text = result.get("error", "delete branch failed") - return self._to_json({ + return to_json({ "success": False, "error": error_text, "error_detail": self._error_detail( @@ -275,15 +273,15 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None for _ in range(max_confirm_attempts): await asyncio.sleep(1) try: - branches = await self.aidap_client.list_branches(ws_id) + 
branches = await self.aidap.list_branches(ws_id) exists = any(branch.get("branch_id") == normalized_branch_id for branch in branches) if not exists: - return self._to_json({"success": True, "branch_id": normalized_branch_id, "workspace_id": ws_id}) + return to_json({"success": True, "branch_id": normalized_branch_id, "workspace_id": ws_id}) except Exception as e: last_list_error = str(e) if last_list_error: - return self._to_json({ + return to_json({ "success": False, "error": f"Delete requested for branch '{normalized_branch_id}' but verification failed: {last_list_error}", "error_detail": self._error_detail( @@ -292,7 +290,7 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None True, ), }) - return self._to_json({ + return to_json({ "success": False, "error": f"Delete requested for branch '{normalized_branch_id}' but branch still exists", "error_detail": self._error_detail( @@ -303,13 +301,13 @@ async def delete_branch(self, branch_id: str, workspace_id: Optional[str] = None }) async def get_workspace_url(self, workspace_id: Optional[str] = None) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) + ws_id, error_response = self._resolve_workspace_or_response(workspace_id) + if error_response: + return error_response - endpoint = await self.aidap_client.get_endpoint(ws_id) + endpoint = await self.aidap.get_endpoint(ws_id) if not endpoint: - return self._to_json({ + return to_json({ "success": False, "error": f"Could not get endpoint for workspace {ws_id}", }) @@ -320,13 +318,13 @@ async def get_workspace_url(self, workspace_id: Optional[str] = None) -> str: "workspace_url": endpoint, "api_url": endpoint, } - return self._to_json(payload) + return to_json(payload) async def _get_api_keys_payload(self, workspace_id: str, reveal: bool = False) -> dict: - resolved_branch_id = await self.aidap_client.get_default_branch_id(workspace_id) + 
resolved_branch_id = await self.aidap.get_default_branch_id(workspace_id) if not resolved_branch_id: raise RuntimeError(f"Could not resolve default branch for workspace {workspace_id}") - keys = await self.aidap_client.get_api_keys(workspace_id, branch_id=resolved_branch_id) + keys = await self.aidap.get_api_keys(workspace_id, branch_id=resolved_branch_id) publishable_key = None anon_key = None service_role_key = None @@ -358,44 +356,31 @@ async def _get_api_keys_payload(self, workspace_id: str, reveal: bool = False) - return payload async def get_publishable_keys(self, workspace_id: Optional[str] = None, reveal: bool = False) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({"success": False, "error": "workspace_id is required"}) - try: + ws_id = self._resolve_workspace_id(workspace_id) payload = await self._get_api_keys_payload(ws_id, reveal=reveal) - return self._to_json(payload) + return to_json(payload) except Exception as e: logger.error(f"Error getting publishable keys: {e}") - return self._to_json({"success": False, "error": str(e)}) + return to_json({"success": False, "error": str(e)}) - @read_only_check - async def reset_branch( + async def restore_branch( self, branch_id: str, - migration_version: Optional[str] = None, workspace_id: Optional[str] = None, ) -> str: - ws_id = self._resolve_workspace_id(workspace_id) - if not ws_id: - return self._to_json({ - "success": False, - "error": "workspace_id is required", - }) - try: - result = await self.aidap_client.reset_branch(ws_id, branch_id) + ws_id = self._resolve_workspace_id(workspace_id) + result = await self.aidap.restore_branch(ws_id, branch_id) if not isinstance(result, dict): result = {"success": bool(result)} if result.get("success"): result.setdefault("workspace_id", ws_id) result.setdefault("branch_id", branch_id) - if migration_version: - result["warning"] = "migration_version is ignored because the current Volcengine reset_branch API does not 
support version-targeted reset" - return self._to_json(result) + return to_json(result) except Exception as e: - logger.error(f"Error resetting branch: {e}") - return self._to_json({ + logger.error(f"Error restoring branch: {e}") + return to_json({ "success": False, "error": str(e), }) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py b/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py index 2f342a03..d1a3bd03 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/utils/__init__.py @@ -1,13 +1,11 @@ from .common import compact_dict, pick_value, to_json -from .decorators import format_error, handle_errors, read_only_check +from .decorators import handle_errors from .targets import resolve_workspace_id __all__ = [ 'compact_dict', - 'format_error', 'handle_errors', 'pick_value', - 'read_only_check', 'resolve_workspace_id', 'to_json', ] diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/utils/decorators.py b/server/mcp_server_supabase/src/mcp_server_supabase/utils/decorators.py index aee00dcc..cd77bc31 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/utils/decorators.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/utils/decorators.py @@ -1,14 +1,13 @@ -import json import logging from functools import wraps -from typing import Any, Callable +from typing import Callable from .common import to_json logger = logging.getLogger(__name__) -def format_error(e: Exception) -> str: +def _format_error(e: Exception) -> str: error_msg = str(e) if str(e) else f"{type(e).__name__}" return error_msg @@ -27,17 +26,7 @@ async def wrapper(*args, **kwargs) -> str: result = result.model_dump() return to_json(result) except Exception as e: - error_msg = format_error(e) + error_msg = _format_error(e) logger.error(f"Error in {func.__name__}: {error_msg}") return to_json({"error": error_msg}) return wrapper - - -def 
read_only_check(func: Callable) -> Callable: - @wraps(func) - async def wrapper(*args, **kwargs) -> Any: - from ..config import READ_ONLY - if READ_ONLY: - return to_json({"error": f"Cannot execute {func.__name__} in read-only mode"}) - return await func(*args, **kwargs) - return wrapper From 49e72dc41f33ce5e5ba8e6baaca9495010caeb20 Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Tue, 10 Mar 2026 21:17:13 +0800 Subject: [PATCH 29/32] =?UTF-8?q?feat:=E6=94=AF=E6=8C=81sse=E5=92=8Cstream?= =?UTF-8?q?able?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 24 ++---------------------- server/mcp_server_supabase/README_zh.md | 24 ++---------------------- 2 files changed, 4 insertions(+), 44 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index e2391599..b8d0fc83 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -77,11 +77,7 @@ No tools are currently exposed. ## Authentication -This server supports both local static credentials and cloud-deployment credentials. - - Local deployment: use `VOLCENGINE_ACCESS_KEY`, `VOLCENGINE_SECRET_KEY`, and optional `VOLCENGINE_SESSION_TOKEN` -- Cloud deployment: pass a base64-encoded STS JSON payload in the `authorization` header, or expose the same value through the `authorization` environment variable -- VeFaaS deployment: if no explicit credentials are provided, the server can also read `/var/run/secrets/iam/credential` Static AK/SK can be obtained from the [Volcengine API Access Key console](https://console.volcengine.com/iam/keymanage/). 
@@ -89,8 +85,8 @@ Static AK/SK can be obtained from the [Volcengine API Access Key console](https: | Name | Required | Default | Description | | ---- | ---- | ---- | ---- | -| `VOLCENGINE_ACCESS_KEY` | No | - | Volcengine access key for local static authentication | -| `VOLCENGINE_SECRET_KEY` | No | - | Volcengine secret key for local static authentication | +| `VOLCENGINE_ACCESS_KEY` | Yes | - | Volcengine access key for local static authentication | +| `VOLCENGINE_SECRET_KEY` | Yes | - | Volcengine secret key for local static authentication | | `VOLCENGINE_SESSION_TOKEN` | No | - | Optional session token used with temporary local credentials | | `VOLCENGINE_REGION` | No | `cn-beijing` | Region used for the Volcengine API | | `WORKSPACE_REF` | No | - | Startup-level hard scope. When set, `account` tools are hidden and workspace-scoped calls are forced to this target | @@ -186,20 +182,6 @@ python3 -m mcp_server_supabase.server --port 8000 python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 ``` -### Cloud deployment credential format - -When the server runs behind a remote MCP gateway or another agent platform, you can provide STS credentials through the `authorization` header. The value should be a base64-encoded JSON object such as: - -```json -{ - "AccessKeyId": "", - "SecretAccessKey": "", - "SessionToken": "", - "CurrentTime": "2026-03-10T10:00:00+08:00", - "ExpiredTime": "2026-03-10T12:00:00+08:00" -} -``` - The package exposes `mcp-server-supabase`, `mcp-server-supabase-sse`, and `mcp-server-supabase-streamable`. The examples above use `mcp-server-supabase`. ## Usage Notes @@ -210,7 +192,6 @@ The package exposes `mcp-server-supabase`, `mcp-server-supabase-sse`, and `mcp-s - If `FEATURES` is not set, the default enabled groups are `account`, `database`, `debugging`, `development`, `docs`, `functions`, and `branching`. `storage` stays disabled by default. - `READ_ONLY=true` hides all mutating tools for the server instance. 
- `DISABLED_TOOLS` takes tool names such as `execute_sql,deploy_edge_function` and removes them after the rest of the policy has been resolved. -- Credential precedence is: static env AK/SK, request `authorization`, env `authorization`, then VeFaaS IAM credentials. - `workspace_id` and `workspace_ref` accept workspace IDs only. Branch IDs such as `br-xxxx` are rejected. - `get_publishable_keys` resolves the default branch automatically when needed. - `restore_branch` does not support `migration_version`; it maps to the current Volcengine `BranchRestore` capability. @@ -246,7 +227,6 @@ If your agent runtime can spawn a local MCP process, you can keep using `stdio`. - `stdio`: have the agent spawn `mcp-server-supabase` as a child process - `streamable-http`: connect to `http://:/mcp` - `sse`: connect to `http://:/sse` and post messages to `http://:/messages/` -- Remote or cloud deployments can forward STS credentials with the `authorization` header instead of baking long-lived AK/SK into the server environment - Tool visibility and workspace scope are fixed when the server starts through env vars or CLI flags ## License diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index c6726bbf..bdda7e17 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -78,11 +78,7 @@ ## 鉴权方式 -同时支持本地静态凭证和云部署动态凭证。 - - 本地部署:使用 `VOLCENGINE_ACCESS_KEY`、`VOLCENGINE_SECRET_KEY` 和可选的 `VOLCENGINE_SESSION_TOKEN` -- 云部署:通过 `authorization` header 传入 base64 编码后的 STS JSON,也可以通过 `authorization` 环境变量传入同样的内容 -- VeFaaS 部署:如果没有显式凭证,服务也会尝试读取 `/var/run/secrets/iam/credential` 静态 AK/SK 可在[火山引擎 API 访问密钥控制台](https://console.volcengine.com/iam/keymanage/)获取。 @@ -90,8 +86,8 @@ | 变量名 | 必需 | 默认值 | 说明 | | ---- | ---- | ---- | ---- | -| `VOLCENGINE_ACCESS_KEY` | 否 | - | 本地静态鉴权使用的火山引擎 Access Key | -| `VOLCENGINE_SECRET_KEY` | 否 | - | 本地静态鉴权使用的火山引擎 Secret Key | +| `VOLCENGINE_ACCESS_KEY` | 是 | - | 本地静态鉴权使用的火山引擎 Access Key | +| 
`VOLCENGINE_SECRET_KEY` | 是 | - | 本地静态鉴权使用的火山引擎 Secret Key | | `VOLCENGINE_SESSION_TOKEN` | 否 | - | 临时本地凭证使用的 Session Token | | `VOLCENGINE_REGION` | 否 | `cn-beijing` | 火山引擎 API 所在地域 | | `WORKSPACE_REF` | 否 | - | 服务启动级 workspace scope,设置后会隐藏 `account` 组工具,并强制所有 workspace-scoped 调用只能访问这个目标 | @@ -187,20 +183,6 @@ python3 -m mcp_server_supabase.server --port 8000 python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 ``` -### 云部署凭证格式 - -如果服务部署在远程 MCP 网关、Agent 平台或其他服务端环境中,可以通过 `authorization` header 传入 STS 凭证。header 对应的值需要是下面这类 JSON 的 base64 编码结果: - -```json -{ - "AccessKeyId": "", - "SecretAccessKey": "", - "SessionToken": "", - "CurrentTime": "2026-03-10T10:00:00+08:00", - "ExpiredTime": "2026-03-10T12:00:00+08:00" -} -``` - 这个包同时暴露了 `mcp-server-supabase`、`mcp-server-supabase-sse` 和 `mcp-server-supabase-streamable` 三个入口,示例统一使用 `mcp-server-supabase`。 ## 使用说明 @@ -211,7 +193,6 @@ python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 - 如果没有设置 `FEATURES`,默认启用 `account`、`database`、`debugging`、`development`、`docs`、`functions`、`branching`,`storage` 默认关闭。 - `READ_ONLY=true` 会让整个服务实例进入只读模式,并隐藏所有写工具。 - `DISABLED_TOOLS` 填工具名,例如 `execute_sql,deploy_edge_function`,会在其他策略计算完成后做最终剔除。 -- 凭证优先级是:静态环境变量 AK/SK、请求 `authorization`、环境变量 `authorization`、VeFaaS IAM 凭证。 - `workspace_id` 和 `workspace_ref` 只接受 workspace ID,`br-xxxx` 这样的 branch ID 会被直接拒绝。 - `get_publishable_keys` 在需要时会自动解析默认分支。 - `restore_branch` 不支持 `migration_version`,当前实际映射到火山引擎的 `BranchRestore` 能力。 @@ -247,7 +228,6 @@ python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 - `stdio`:Agent 进程直接拉起 `mcp-server-supabase` - `streamable-http`:连接 `http://:/mcp` - `sse`:连接 `http://:/sse`,并向 `http://:/messages/` 投递消息 -- 远程或云部署场景可以通过 `authorization` header 透传 STS 凭证,而不是把长期 AK/SK 固化在服务环境变量里 - 工具可见性和 workspace scope 在服务启动时通过环境变量或 CLI 参数固定下来 ## License From 3beea41c73dcefae19b83fa35a99233df55f495c Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Tue, 10 Mar 
2026 21:19:48 +0800 Subject: [PATCH 30/32] =?UTF-8?q?feat:=E6=94=AF=E6=8C=81sse=E5=92=8Cstream?= =?UTF-8?q?able?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/mcp_server_supabase/platform/supabase_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py index 32e25988..6efe7d02 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/supabase_client.py @@ -2,7 +2,7 @@ import httpx import logging import json -from typing import Dict, Any +from typing import Dict, Any, Optional logger = logging.getLogger(__name__) From 2499050f5541018b4bdfb9c6f2dfcbb245a116ab Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Wed, 11 Mar 2026 12:22:19 +0800 Subject: [PATCH 31/32] =?UTF-8?q?feat:=E6=94=AF=E6=8C=81sse=E5=92=8Cstream?= =?UTF-8?q?able?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- server/mcp_server_supabase/README.md | 4 ++-- server/mcp_server_supabase/README_zh.md | 4 ++-- .../platform/aidap_client.py | 24 ++++++++++++++++--- .../src/mcp_server_supabase/tool_registry.py | 11 ++++++--- .../tools/workspace_tools.py | 11 +++++++-- server/mcp_server_vmp/uv.lock | 9 ++++--- 6 files changed, 48 insertions(+), 15 deletions(-) diff --git a/server/mcp_server_supabase/README.md b/server/mcp_server_supabase/README.md index b8d0fc83..6bd52dfa 100644 --- a/server/mcp_server_supabase/README.md +++ b/server/mcp_server_supabase/README.md @@ -64,7 +64,7 @@ No tools are currently exposed. 
| `list_branches` | List branches under a workspace | | `create_branch` | Create a development branch | | `delete_branch` | Delete a development branch | -| `restore_branch` | Restore a branch to its baseline state | +| `restore_branch` | Restore branch data to a specified point in time and return the restored branch ID | ### `storage` @@ -194,7 +194,7 @@ The package exposes `mcp-server-supabase`, `mcp-server-supabase-sse`, and `mcp-s - `DISABLED_TOOLS` takes tool names such as `execute_sql,deploy_edge_function` and removes them after the rest of the policy has been resolved. - `workspace_id` and `workspace_ref` accept workspace IDs only. Branch IDs such as `br-xxxx` are rejected. - `get_publishable_keys` resolves the default branch automatically when needed. -- `restore_branch` does not support `migration_version`; it maps to the current Volcengine `BranchRestore` capability. +- `restore_branch` supports optional `time` and `source_branch_id` arguments and returns `backup_branch_id`. - `deploy_edge_function` currently supports `native-node20/v1`, `native-python3.9/v1`, `native-python3.10/v1`, and `native-python3.12/v1`. - `--transport sse` serves the MCP SSE endpoint at `MCP_SSE_PATH` and the message endpoint at `MCP_MESSAGE_PATH`. - `--transport streamable-http` serves the MCP HTTP endpoint at `STREAMABLE_HTTP_PATH`. 
diff --git a/server/mcp_server_supabase/README_zh.md b/server/mcp_server_supabase/README_zh.md index bdda7e17..1359837f 100644 --- a/server/mcp_server_supabase/README_zh.md +++ b/server/mcp_server_supabase/README_zh.md @@ -65,7 +65,7 @@ | `list_branches` | 列出 workspace 下的分支 | | `create_branch` | 创建开发分支 | | `delete_branch` | 删除开发分支 | -| `restore_branch` | 将分支恢复到初始状态 | +| `restore_branch` | 将分支数据恢复到指定时间点,并返回恢复出的新分支 ID | ### `storage` @@ -195,7 +195,7 @@ python3 -m mcp_server_supabase.server --transport sse --host 0.0.0.0 --port 8000 - `DISABLED_TOOLS` 填工具名,例如 `execute_sql,deploy_edge_function`,会在其他策略计算完成后做最终剔除。 - `workspace_id` 和 `workspace_ref` 只接受 workspace ID,`br-xxxx` 这样的 branch ID 会被直接拒绝。 - `get_publishable_keys` 在需要时会自动解析默认分支。 -- `restore_branch` 不支持 `migration_version`,当前实际映射到火山引擎的 `BranchRestore` 能力。 +- `restore_branch` 支持可选的 `time` 和 `source_branch_id` 参数,并返回 `backup_branch_id`。 - `deploy_edge_function` 当前支持 `native-node20/v1`、`native-python3.9/v1`、`native-python3.10/v1`、`native-python3.12/v1`。 - `--transport sse` 会在 `MCP_SSE_PATH` 暴露 SSE 连接地址,并在 `MCP_MESSAGE_PATH` 暴露消息投递地址。 - `--transport streamable-http` 会在 `STREAMABLE_HTTP_PATH` 暴露 MCP HTTP 地址。 diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py index 2cc1b7a0..5beae88c 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/platform/aidap_client.py @@ -19,6 +19,7 @@ DescribeWorkspaceEndpointRequest, DescribeAPIKeysRequest, BranchRestoreRequest, + RestoreSettingsForBranchRestoreInput, CreateBranchRequest, DeleteBranchRequest, BranchSettingsForCreateBranchInput, @@ -307,16 +308,33 @@ async def get_endpoint(self, workspace_id: str, branch_id: Optional[str] = None) logger.error(f"Error getting endpoint: {e}") return None - async def restore_branch(self, workspace_id: str, branch_id: str) -> dict: + async 
def restore_branch( + self, + workspace_id: str, + branch_id: str, + source_branch_id: Optional[str] = None, + time: Optional[str] = None, + ) -> dict: max_attempts = 8 for attempt in range(1, max_attempts + 1): try: request = BranchRestoreRequest( workspace_id=workspace_id, branch_id=branch_id, + restore_settings=RestoreSettingsForBranchRestoreInput( + source_branch_id=source_branch_id or branch_id, + time=time, + ), ) - self.client.branch_restore(request) - return {"success": True} + response = self.client.branch_restore(request) + return { + "success": True, + "workspace_id": workspace_id, + "branch_id": branch_id, + "source_branch_id": source_branch_id or branch_id, + "time": time, + "backup_branch_id": self._pick_value(response, "backup_branch_id", "BackupBranchID"), + } except Exception as e: error_text = str(e) code = self._branch_error_code(error_text) diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py index 0c34081e..1ffb95e1 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tool_registry.py @@ -302,9 +302,14 @@ async def delete_branch(branch_id: str, workspace_id: str = None) -> str: def _build_restore_branch(runtime: SupabaseRuntime): workspace_tools = runtime.workspace_tools - async def restore_branch(branch_id: str, workspace_id: str = None) -> str: - """Restores a development branch to its baseline state. 
Any untracked data or schema changes will be lost.""" - return await workspace_tools.restore_branch(branch_id, workspace_id) + async def restore_branch( + branch_id: str, + source_branch_id: str = None, + time: str = None, + workspace_id: str = None, + ) -> str: + """Restores branch data to a specified point in time and returns the restored branch ID.""" + return await workspace_tools.restore_branch(branch_id, source_branch_id, time, workspace_id) return restore_branch diff --git a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py index 685e051c..9bffac99 100644 --- a/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py +++ b/server/mcp_server_supabase/src/mcp_server_supabase/tools/workspace_tools.py @@ -367,17 +367,24 @@ async def get_publishable_keys(self, workspace_id: Optional[str] = None, reveal: async def restore_branch( self, branch_id: str, + source_branch_id: Optional[str] = None, + time: Optional[str] = None, workspace_id: Optional[str] = None, ) -> str: try: ws_id = self._resolve_workspace_id(workspace_id) - result = await self.aidap.restore_branch(ws_id, branch_id) + result = await self.aidap.restore_branch( + ws_id, + branch_id, + source_branch_id=source_branch_id, + time=time, + ) if not isinstance(result, dict): result = {"success": bool(result)} if result.get("success"): result.setdefault("workspace_id", ws_id) result.setdefault("branch_id", branch_id) - return to_json(result) + return to_json(compact_dict(result)) except Exception as e: logger.error(f"Error restoring branch: {e}") return to_json({ diff --git a/server/mcp_server_vmp/uv.lock b/server/mcp_server_vmp/uv.lock index f9620352..c2ca2b4b 100644 --- a/server/mcp_server_vmp/uv.lock +++ b/server/mcp_server_vmp/uv.lock @@ -208,7 +208,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "mcp", extras = ["cli"], specifier = ">=1.12.0" }, - { name = 
"volcengine-python-sdk", specifier = ">=3.0.1" }, + { name = "volcengine-python-sdk", specifier = ">=4.0.30" }, ] [[package]] @@ -664,7 +664,7 @@ wheels = [ [[package]] name = "volcengine-python-sdk" -version = "3.0.1" +version = "5.0.15" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -672,4 +672,7 @@ dependencies = [ { name = "six" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/44/cf/d841e615a6d541888545281b15ab381e73c098edc11375ef231b9ccde7a8/volcengine-python-sdk-3.0.1.tar.gz", hash = "sha256:2f1b95ec46a2ad74be298724692d025f35ff870b26584edebee3db89b8a10e55", size = 3981206, upload-time = "2025-05-14T03:47:24.545Z" } +sdist = { url = "https://files.pythonhosted.org/packages/75/cb/40540371d26b56fd62dae9fa2ad9abd532fa47bc198682d6d6cc18aaf787/volcengine_python_sdk-5.0.15.tar.gz", hash = "sha256:3c0ada7b7ba4733d797ee1bb45b589f3836e8b3fa70c64d68ad6170489c7d575", size = 7805321, upload-time = "2026-03-10T10:07:34.715Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/d9/83efc36e4dfe16bd411ceecf5ac0ddca5e646ae9953b8610c5dfb56027a9/volcengine_python_sdk-5.0.15-py2.py3-none-any.whl", hash = "sha256:b62552fc316371a10955365f4ceb0ec014077984ba782c3ffc71b224ca394be8", size = 30690184, upload-time = "2026-03-10T10:07:26.319Z" }, +] From 86ab117481558062534d67ea2c33f2ca9ed8911e Mon Sep 17 00:00:00 2001 From: "sunjiachao.st" Date: Wed, 11 Mar 2026 14:26:17 +0800 Subject: [PATCH 32/32] revert: restore mcp_server_vmp uv.lock --- server/mcp_server_vmp/uv.lock | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/server/mcp_server_vmp/uv.lock b/server/mcp_server_vmp/uv.lock index c2ca2b4b..f9620352 100644 --- a/server/mcp_server_vmp/uv.lock +++ b/server/mcp_server_vmp/uv.lock @@ -208,7 +208,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "mcp", extras = ["cli"], specifier = ">=1.12.0" }, - { name = "volcengine-python-sdk", specifier = 
">=4.0.30" }, + { name = "volcengine-python-sdk", specifier = ">=3.0.1" }, ] [[package]] @@ -664,7 +664,7 @@ wheels = [ [[package]] name = "volcengine-python-sdk" -version = "5.0.15" +version = "3.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -672,7 +672,4 @@ dependencies = [ { name = "six" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/75/cb/40540371d26b56fd62dae9fa2ad9abd532fa47bc198682d6d6cc18aaf787/volcengine_python_sdk-5.0.15.tar.gz", hash = "sha256:3c0ada7b7ba4733d797ee1bb45b589f3836e8b3fa70c64d68ad6170489c7d575", size = 7805321, upload-time = "2026-03-10T10:07:34.715Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/58/d9/83efc36e4dfe16bd411ceecf5ac0ddca5e646ae9953b8610c5dfb56027a9/volcengine_python_sdk-5.0.15-py2.py3-none-any.whl", hash = "sha256:b62552fc316371a10955365f4ceb0ec014077984ba782c3ffc71b224ca394be8", size = 30690184, upload-time = "2026-03-10T10:07:26.319Z" }, -] +sdist = { url = "https://files.pythonhosted.org/packages/44/cf/d841e615a6d541888545281b15ab381e73c098edc11375ef231b9ccde7a8/volcengine-python-sdk-3.0.1.tar.gz", hash = "sha256:2f1b95ec46a2ad74be298724692d025f35ff870b26584edebee3db89b8a10e55", size = 3981206, upload-time = "2025-05-14T03:47:24.545Z" }