diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5837e4d5..7b1aa415 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -46,6 +46,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Fixed
+- Fixed unit tests that still referenced the removed `create_changelog_subagent` by migrating them to `create_docs_research_subagent` expectations and `/agents` output assertions.
- Fixed duplicate agent launches when issue labels are added, removed, and re-added by checking if DAIV has already reacted to the issue before processing label events.
- Fixed sandbox archive layout to avoid adding the repository root folder; repository contents are now archived at the top level (while still excluding `.git`).
- Fixed handling of empty GitHub repositories when reading config files; the client now gracefully returns `None` instead of raising an exception when attempting to read files from empty repositories.
diff --git a/Makefile b/Makefile
index 1d6e86bb..9b750168 100644
--- a/Makefile
+++ b/Makefile
@@ -40,7 +40,7 @@ compilemessages:
uv run django-admin compilemessages
integration-tests:
- uv run pytest --reuse-db tests/integration_tests --no-cov --log-level=INFO -k test_skill_activated -n 4
+ uv run pytest --reuse-db tests/integration_tests --no-cov --log-level=INFO -m subagents
swebench:
uv run evals/swebench.py --dataset-path "SWE-bench/SWE-bench_Lite" --dataset-split "dev" --output-path predictions.json --num-samples 1
@@ -62,4 +62,4 @@ docs-serve:
uv run --only-group=docs mkdocs serve -o -a localhost:4000 -w docs/
langsmith-fetch:
- uv run langsmith-fetch traces --project-uuid 00d1a04e-0087-4813-9a18-5995cd5bee5c --limit 1 ./my-traces
+ uv run langsmith-fetch traces --project-uuid 00d1a04e-0087-4813-9a18-5995cd5bee5c --limit 4 ./my-traces
diff --git a/daiv/automation/agent/conf.py b/daiv/automation/agent/conf.py
index 13210c34..fd2b4a0b 100644
--- a/daiv/automation/agent/conf.py
+++ b/daiv/automation/agent/conf.py
@@ -41,6 +41,10 @@ class DAIVAgentSettings(BaseSettings):
default=ModelName.CLAUDE_HAIKU_4_5,
description="Model for the explore subagent, a fast model with capabilities to call tools.",
)
+ DOCS_RESEARCH_MODEL_NAME: ModelName | str = Field(
+ default=ModelName.GPT_5_1_CODEX_MINI,
+ description="Model for the docs research subagent, a fast model with capabilities to call tools.",
+ )
settings = DAIVAgentSettings()
diff --git a/daiv/automation/agent/constants.py b/daiv/automation/agent/constants.py
index d428ff86..6862411e 100644
--- a/daiv/automation/agent/constants.py
+++ b/daiv/automation/agent/constants.py
@@ -36,11 +36,12 @@ class ModelName(StrEnum):
# OpenAI models
GPT_4_1_MINI = "openrouter:openai/gpt-4.1-mini"
+ GPT_4_1 = "openrouter:openai/gpt-4.1"
+ GPT_5_1_CODEX_MINI = "openrouter:openai/gpt-5.1-codex-mini"
GPT_5_2 = "openrouter:openai/gpt-5.2"
- GPT_5_2_CODEX = "openrouter:openai/gpt-5.2-codex"
+ GPT_5_3_CODEX = "openrouter:openai/gpt-5.3-codex"
# z-ai models
- Z_AI_GLM_4_7 = "openrouter:z-ai/glm-4.7"
Z_AI_GLM_5 = "openrouter:z-ai/glm-5"
# minimax models
diff --git a/daiv/automation/agent/graph.py b/daiv/automation/agent/graph.py
index e4a331cd..9da099c4 100644
--- a/daiv/automation/agent/graph.py
+++ b/daiv/automation/agent/graph.py
@@ -40,7 +40,7 @@
from automation.agent.middlewares.web_search import WebSearchMiddleware
from automation.agent.prompts import DAIV_SYSTEM_PROMPT, WRITE_TODOS_SYSTEM_PROMPT
from automation.agent.subagents import (
- create_changelog_subagent,
+ create_docs_research_subagent,
create_explore_subagent,
create_general_purpose_subagent,
)
@@ -161,12 +161,13 @@ async def create_daiv_agent(
web_search_enabled=web_search_enabled,
web_fetch_enabled=_web_fetch_enabled,
),
- create_explore_subagent(backend, ctx),
- create_changelog_subagent(
- model, backend, ctx, sandbox_enabled=_sandbox_enabled, web_search_enabled=web_search_enabled
- ),
+ create_explore_subagent(backend),
]
+ if _web_fetch_enabled:
+        # The docs research subagent relies on the web_fetch tool, so it is only created when web fetch is enabled
+ subagents.append(create_docs_research_subagent(backend))
+
agent_conditional_middlewares = []
if web_search_enabled:
diff --git a/daiv/automation/agent/middlewares/web_fetch.py b/daiv/automation/agent/middlewares/web_fetch.py
index f7232f47..e27bfcd0 100644
--- a/daiv/automation/agent/middlewares/web_fetch.py
+++ b/daiv/automation/agent/middlewares/web_fetch.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import hashlib
+import ipaddress
import logging
from typing import TYPE_CHECKING, Annotated
from urllib.parse import urljoin, urlparse, urlunparse
@@ -19,6 +20,8 @@
if TYPE_CHECKING:
from collections.abc import Awaitable, Callable
+ from pydantic import SecretStr
+
logger = logging.getLogger("daiv.tools")
WEB_FETCH_NAME = "web_fetch"
@@ -54,6 +57,26 @@
"""
+def _get_auth_headers_for_url(url: str) -> dict[str, str]:
+ """
+ Return authentication headers configured for the given URL's domain.
+
+ Matching is exact: a configured domain of ``context7.com`` matches
+ only ``context7.com`` and not ``api.context7.com`` or ``notcontext7.com``.
+ """
+ hostname = urlparse(url).hostname or ""
+ matched: list[tuple[str, dict[str, SecretStr]]] = []
+ for domain, headers in settings.WEB_FETCH_AUTH_HEADERS.items():
+ if hostname == domain:
+ matched.append((domain, headers))
+    # Matching is exact, so at most one domain can match; sorting just keeps the merge order deterministic.
+ matched.sort(key=lambda item: len(item[0]))
+ result: dict[str, str] = {}
+ for _domain, headers in matched:
+ result.update({key: value.get_secret_value() for key, value in headers.items()})
+ return result
+
+
def _upgrade_http_to_https(url: str) -> str:
parsed = urlparse(url)
if parsed.scheme == "http":
@@ -61,20 +84,47 @@ def _upgrade_http_to_https(url: str) -> str:
return url
+def _is_private_or_local(hostname: str) -> bool:
+ """
+ Check if a hostname is a private/local IP address or localhost.
+ """
+ # Check hostname literals first
+ if hostname.lower() in {"localhost", "localhost.localdomain"}:
+ return True
+
+ try:
+ ip = ipaddress.ip_address(hostname)
+ return ip.is_private or ip.is_loopback or ip.is_link_local or ip.is_reserved or ip.is_multicast
+ except ValueError:
+ # Not a valid IP address, could be a hostname
+ # Check for localhost-like patterns
+ return hostname.lower().endswith(".local") or hostname.lower().endswith(".localhost")
+
+
def _is_valid_http_url(url: str) -> bool:
parsed = urlparse(url)
return parsed.scheme in {"http", "https"} and bool(parsed.netloc)
-async def _fetch_url_text(url: str, *, timeout_seconds: int, proxy_url: str | None) -> tuple[str, str, str]:
+async def _fetch_url_text(
+ url: str, *, timeout_seconds: int, proxy_url: str | None, extra_headers: dict[str, str] | None = None
+) -> tuple[str, str, str]:
"""
Returns (final_url, content_type, page_raw).
"""
from httpx import AsyncClient, HTTPError
+ # SSRF protection: block private/local addresses
+ parsed = urlparse(url)
+ hostname = parsed.hostname or ""
+ if _is_private_or_local(hostname):
+ raise ValueError(f"Requests to private/local addresses are blocked: {url}")
+
+ request_headers = {"User-Agent": USER_AGENT, **(extra_headers or {})}
+
async with AsyncClient(proxy=proxy_url, follow_redirects=False) as client:
try:
- response = await client.get(url, headers={"User-Agent": USER_AGENT}, timeout=timeout_seconds)
+ response = await client.get(url, headers=request_headers, timeout=timeout_seconds)
except HTTPError as e:
raise ValueError(f"Failed to fetch {url}: {e!r}") from e
@@ -86,7 +136,9 @@ async def _fetch_url_text(url: str, *, timeout_seconds: int, proxy_url: str | No
raise RuntimeError(f"{redirect_url}")
# Same-host redirects are fine to follow automatically (e.g., path normalization).
- return await _fetch_url_text(redirect_url, timeout_seconds=timeout_seconds, proxy_url=proxy_url)
+ return await _fetch_url_text(
+ redirect_url, timeout_seconds=timeout_seconds, proxy_url=proxy_url, extra_headers=extra_headers
+ )
if response.status_code >= 400:
raise ValueError(f"Failed to fetch {url} - status code {response.status_code}")
@@ -111,8 +163,12 @@ async def _fetch_markdown_for_url(url: str) -> str:
"""
Fetch the URL and return markdown content.
"""
+ auth_headers = _get_auth_headers_for_url(url)
final_url, content_type, page_raw = await _fetch_url_text(
- url, timeout_seconds=settings.WEB_FETCH_TIMEOUT_SECONDS, proxy_url=settings.WEB_FETCH_PROXY_URL
+ url,
+ timeout_seconds=settings.WEB_FETCH_TIMEOUT_SECONDS,
+ proxy_url=settings.WEB_FETCH_PROXY_URL,
+ extra_headers=auth_headers or None,
)
is_html = " None:
async def awrap_model_call(
self, request: ModelRequest, handler: Callable[[ModelRequest], Awaitable[ModelResponse]]
) -> ModelResponse:
- request = request.override(system_prompt=request.system_prompt + "\n\n" + WEB_FETCH_SYSTEM_PROMPT)
- return await handler(request)
+ system_prompt = ""
+ if request.system_prompt:
+ system_prompt = request.system_prompt + "\n\n"
+ system_prompt += WEB_FETCH_SYSTEM_PROMPT
+
+ return await handler(request.override(system_prompt=system_prompt))
diff --git a/daiv/automation/agent/subagents.py b/daiv/automation/agent/subagents.py
index 39b46e3a..9b454049 100644
--- a/daiv/automation/agent/subagents.py
+++ b/daiv/automation/agent/subagents.py
@@ -26,192 +26,12 @@
GENERAL_PURPOSE_SYSTEM_PROMPT = """You are an agent for DAIV. Given the user's message, you should use the tools available to complete the task. Do exactly what has been asked. When you complete the task respond with a detailed writeup.
-- For file searches: Use Grep or Glob when you need to search broadly. Use Read when you know the specific file path.
+- For file searches: Use `grep` or `glob` when you need to search broadly. Use `read_file` when you know the specific file path.
- NEVER proactively create documentation files (*.md) or README files. Only create documentation files if explicitly requested.
- Any file paths you return in your response MUST be absolute. Do NOT use relative paths.
""" # noqa: E501
-EXPLORE_SYSTEM_PROMPT = """\
-You are a file search specialist for DAIV. You excel at thoroughly navigating and exploring codebases.
-
-=== CRITICAL: READ-ONLY MODE - NO FILE MODIFICATIONS === This is a READ-ONLY exploration task. You are STRICTLY PROHIBITED from:
-- Creating new files (no write_file, touch, or file creation of any kind)
-- Modifying existing files (no edit_file operations)
-- Deleting files (no rm or deletion)
-- Moving or copying files (no mv or cp)
-- Creating temporary files anywhere, including /tmp
-
-Your role is EXCLUSIVELY to search and analyze existing code. You do NOT have access to file editing tools - attempting to edit files will fail.
-
-Your strengths:
-- Rapidly finding files using glob patterns
-- Searching code and text with powerful regex patterns
-- Reading and analyzing file contents
-
-Guidelines:
-- Use `glob` for broad file pattern matching
-- Use `grep` for searching file contents with regex
-- Use `read` when you know the specific file path you need to read
-- Adapt your search approach based on the thoroughness level specified by the caller
-- Return file paths as absolute paths in your final response
-- For clear communication, avoid using emojis
-- Communicate your final report directly as a regular message - do NOT attempt to create files
-
-NOTE: You are meant to be a fast agent that returns output as quickly as possible. In order to achieve this you must:
-
-- Make efficient use of the tools that you have at your disposal: be smart about how you search for files and implementations
-- Wherever possible you should try to spawn multiple parallel tool calls for grepping and reading files
-
-Complete the user's search request efficiently and report your findings clearly.""" # noqa: E501
-
-EXPLORE_SUBAGENT_DESCRIPTION = """Fast agent specialized for exploring codebases. Use this when you need to quickly find files by patterns (eg. "src/components/**/*.tsx"), search code for keywords (eg. "API endpoints"), or answer questions about the codebase (eg. "how do API endpoints work?"). When calling this agent, specify the desired thoroughness level: "quick" for basic searches, "medium" for moderate exploration, or "very thorough" for comprehensive analysis across multiple locations and naming conventions.""" # noqa: E501
-
-
-CHANGELOG_SYSTEM_PROMPT = """\
-You are a meticulous release-notes editor and changelog specialist. Your job is to update the repository changelog by analyzing code changes from `git diff`, then writing concise, end-user-facing entries.
-
-## Core responsibilities
-1. Locate the changelog file (commonly `CHANGELOG.md`, but it may be `CHANGES.md`, `HISTORY.md`, or within `docs/`).
-2. Determine the changelog convention in use (e.g., Keep a Changelog with an **Unreleased** section; custom headings; versioned sections).
-3. Use the `bash` tool to run `git diff` (and related safe read-only git commands) to understand changes that should be reflected in the changelog.
-4. Update **only** the Unreleased section. Do not edit past released sections.
-5. Write entries for **end users** (what changed for them), not developers (no internal refactor notes unless they have user-visible impact).
-6. Ensure **one entry per logical change** (group multiple touched files/commits into a single bullet when they represent one user-facing change).
-
-## Early exit
-- BEFORE gathering detailed diffs, quickly check if changes exist for the requested scope.
- - If no changes exist, respond immediately: "No changes detected for [scope]. The changelog was not modified." Then stop.
- - For branch comparisons: run `git diff origin/...HEAD --stat` first to verify changes exist.
- - For uncommitted changes: run `git status --porcelain` first.
-
-## Tool usage (bash)
-- Scope interpretation rule:
- - If the request says **"uncommitted changes"** (or equivalent: "working tree changes", "local changes") and does not explicitly exclude untracked files, you MUST treat the scope as: unstaged + staged + untracked.
- - If the request explicitly says "tracked only", then exclude untracked files.
-- You MUST obtain changes via git by running commands such as:
- - `git diff` (default)
- - `git diff --name-only`
- - `git diff --stat`
- - `git diff origin/...HEAD` when a base reference is available
- - `git ls-files --others --exclude-standard -z | xargs -0 -I{} git diff --no-index -- /dev/null {}` to get untracked changes
- - `git log --oneline --decorate -n ` to help identify scope (optional)
-- Treat the repository as source of truth. Do not guess features beyond what diffs support.
-- Prefer a diff range when possible (e.g., last tag to HEAD). If you cannot infer the range safely, fall back to `git diff` against the default base configured by the environment.
-
-## Performance optimizations
-- When working in feature branches, prefer `origin/main` or `origin/master` over `main`/`master` for base references, since local main branches may not exist in shallow clones or CI environments.
-- For small changes (< 20 lines total per `git diff --stat`), skip intermediate commands and proceed directly to `git diff` for the full diff.
-- Run independent git commands in parallel when possible (e.g., `git diff --name-only` and `git diff --stat` can run together).
-
-## How to interpret diffs into changelog entries
-- Focus on user-visible outcomes:
- - New functionality → “Added”
- - Behavior changes → “Changed”
- - Bug fixes → “Fixed”
- - Removals → “Removed”
- - Deprecations → “Deprecated”
- - Security-related improvements → “Security” (only when clearly supported)
-- Ignore purely internal refactors unless they:
- - change behavior,
- - improve reliability/performance in a way users would notice,
- - fix a user-facing bug,
- - or change configuration/compatibility.
-- Grouping rule (one entry per logical change):
- - If multiple files changed to implement one feature/fix → one bullet.
- - If one file change includes multiple unrelated user-facing impacts → split into separate bullets.
-
-## Writing rules (end-user focused)
-- Use clear, non-technical language whenever possible.
-- Avoid implementation details (no class names, internal module names, PR numbers, commit hashes) unless the repo's changelog style explicitly includes them.
-- Prefer active, outcome-focused phrasing:
- - Good: “Fixed an issue where exports could fail on large files.”
- - Bad: “Refactored ExportService to handle stream backpressure.”
-- Each bullet should stand alone and be scannable.
-- Keep tense consistent with existing style (often past tense: Added/Fixed/Changed).
-- Don't overclaim: only include what you can support from diffs.
-
-## Existing entries & idempotency
-- Before adding new bullets, compare diffs to the current Unreleased entries.
-- If an entry already covers a change and is accurate, do NOT modify it (no rewording, no moving).
-- Only edit or remove an existing entry when it is inaccurate or contradicts the diff.
-- If all relevant changes are already documented accurately, do not modify the changelog; respond with a short confirmation.
-
-## Editing rules (Unreleased only)
-- You MUST NOT modify released sections (anything under a version heading/date).
-- You MUST preserve existing formatting, headings, and ordering.
-- If the Unreleased section has subsections (e.g., Added/Fixed/Changed), place bullets accordingly.
-- If the Unreleased section exists but lacks subsections, follow the file's existing pattern.
-- If no Unreleased section exists, do not invent a new structure silently:
- - Add an Unreleased section only if the changelog convention strongly implies it (e.g., Keep a Changelog). Otherwise, ask for guidance.
-
-## Workflow
-1. Discover conventions:
- - Find the changelog file.
- - Read the Unreleased section structure and any style rules.
- - If an `AGENTS.md` or contribution/release guide exists, follow its instructions.
-2. Gather change evidence with bash:
- - Run `git diff --name-only` and `git diff --stat`.
- - Run `git diff` to inspect relevant hunks.
- - For "uncommitted changes" scope, gather:
- - Unstaged changes: `git diff`
- - Staged changes: `git diff --cached`
- - Untracked files (diff each against `/dev/null`):
- - `while IFS= read -r -d '' f; do git diff --no-index -- /dev/null "$f"; done < <(git ls-files --others --exclude-standard -z)`
- - No-changes guard:
- - If the requested scope yields no changes (no output from `git diff`/`git diff --cached`, and `git ls-files --others --exclude-standard` is empty when untracked are in-scope), DO NOT modify the changelog.
- - Output a short message stating that no changes were detected for the specified scope and therefore the Unreleased section was left unchanged.
- - If an untracked file is binary or extremely large, avoid relying on raw diff output; instead, record a high-level description based on filename/context and any surrounding changes.
- - Optionally check recent commits with `git log` to help group changes.
-3. Identify logical changes:
- - Create a short internal list of user-visible changes.
- - Match these against existing Unreleased entries to avoid duplicates.
- - Map each to a category (Added/Changed/Fixed/etc.).
-4. Draft changelog bullets:
- - One bullet per logical change.
- - Match tone and formatting.
-5. Quality checks before applying edits:
- - Confirm every bullet is user-facing.
- - Confirm no duplicates.
- - Confirm only Unreleased is modified.
- - Confirm existing accurate entries are left unchanged.
- - Confirm bullets are supported by diffs.
-6. Apply the edit to the changelog file.
-7. Output:
- - Briefly summarize what you added (1-3 lines), without repeating the whole changelog.
-
-## Edge cases & fallback behavior
-- If the changelog file cannot be found:
- - Search typical locations and filenames.
- - If still missing, report what you checked and propose a default (`CHANGELOG.md`) but do not create a new changelog unless explicitly requested.
-- If the Unreleased section is ambiguous (multiple “Unreleased” headers, unusual structure):
- - Choose the one that matches the repo's primary changelog convention; if still unclear, ask a single targeted question.
-- If changes are purely internal and have no user impact:
- - Do not add entries just to add entries.
-- If the diff is extremely large:
- - Prioritize clearly user-visible changes (API changes, UI changes, configuration changes, bug fixes).
- - Group aggressively to maintain one entry per logical change.
-
-## Strict constraints
-- Update only the Unreleased section.
-- One entry per logical change.
-- Write for end users, not developers.
-- Do not invent details not supported by the git diff.""" # noqa: E501
-
-CHANGELOG_SUBAGENT_DESCRIPTION = """PROACTIVELY use this agent for any changelog-related task, including: updating changelogs, adding changelog entries, writing release notes, or documenting changes in CHANGELOG.md/CHANGES.md/HISTORY.md files.
-
-This agent is specialized for changelog updates and will, by default:
-- Analyze git diffs to discover user-visible changes automatically
-- Follow the repository's existing changelog format and conventions
-- APPLY the changelog update directly in the repository using `read_file` + `edit_file` (default behavior)
-
-When calling this agent, specify:
-1. WHERE to look for changes: "uncommitted changes including untracked files", "changes in branch ", "commits since last release tag", or "changes between .."
-2. (Optional) The changelog file path if known (e.g., "changelog is at CHANGELOG.md"). This avoids redundant file discovery.
-
-Do NOT specify WHAT to write—let the agent examine the diffs and infer user-facing changes. The agent will handle the entire changelog update workflow and return confirmation when complete.""" # noqa: E501
-
-
def create_general_purpose_subagent(
model: BaseChatModel,
backend: BackendProtocol,
@@ -263,7 +83,43 @@ def create_general_purpose_subagent(
)
-def create_explore_subagent(backend: BackendProtocol, runtime: RuntimeCtx) -> SubAgent:
+EXPLORE_SYSTEM_PROMPT = """\
+You are a file search specialist for DAIV. You excel at thoroughly navigating and exploring codebases.
+
+=== CRITICAL: READ-ONLY MODE - NO FILE MODIFICATIONS === This is a READ-ONLY exploration task. You are STRICTLY PROHIBITED from:
+- Creating new files (no write_file, touch, or file creation of any kind)
+- Modifying existing files (no edit_file operations)
+- Deleting files (no rm or deletion)
+- Moving or copying files (no mv or cp)
+- Creating temporary files anywhere, including /tmp
+
+Your role is EXCLUSIVELY to search and analyze existing code. You do NOT have access to file editing tools - attempting to edit files will fail.
+
+Your strengths:
+- Rapidly finding files using glob patterns
+- Searching code and text with powerful regex patterns
+- Reading and analyzing file contents
+
+Guidelines:
+- Use `glob` for broad file pattern matching
+- Use `grep` for searching file contents with regex
+- Use `read_file` when you know the specific file path you need to read
+- Adapt your search approach based on the thoroughness level specified by the caller
+- Return file paths as absolute paths in your final response
+- For clear communication, avoid using emojis
+- Communicate your final report directly as a regular message - do NOT attempt to create files
+
+NOTE: You are meant to be a fast agent that returns output as quickly as possible. In order to achieve this you must:
+
+- Make efficient use of the tools that you have at your disposal: be smart about how you search for files and implementations
+- Wherever possible you should try to spawn multiple parallel tool calls for grepping and reading files
+
+Complete the user's search request efficiently and report your findings clearly.""" # noqa: E501
+
+EXPLORE_SUBAGENT_DESCRIPTION = """Fast agent specialized for exploring codebases. Use this when you need to quickly find files by patterns (eg. "src/components/**/*.tsx"), search code for keywords (eg. "API endpoints"), or answer questions about the codebase (eg. "how do API endpoints work?"). When calling this agent, specify the desired thoroughness level: "quick" for basic searches, "medium" for moderate exploration, or "very thorough" for comprehensive analysis across multiple locations and naming conventions.""" # noqa: E501
+
+
+def create_explore_subagent(backend: BackendProtocol, **kwargs) -> SubAgent:
"""
Create the explore subagent.
"""
@@ -298,21 +154,177 @@ def create_explore_subagent(backend: BackendProtocol, runtime: RuntimeCtx) -> Su
)
-def create_changelog_subagent(
- model: BaseChatModel,
- backend: BackendProtocol,
- runtime: RuntimeCtx,
- sandbox_enabled: bool = True,
- web_search_enabled: bool = True,
-) -> SubAgent:
+DOCS_RESEARCH_SYSTEM_PROMPT = """\
+You are an expert documentation researcher specializing in fetching up-to-date library and framework documentation from the Context7 API using the `web_fetch` tool.
+
+**Your Core Responsibilities:**
+1. Resolve the correct library ID for any given library or framework name
+2. Fetch relevant documentation using the Context7 API
+3. Return concise, actionable answers grounded entirely in fetched documentation
+4. Reproduce code examples exactly as they appear in the source
+5. Identify and report version caveats, deprecation warnings, and ambiguities
+6. Handle edge cases — missing libraries, rate limits, ambiguous names — with structured responses
+
+**Pre-Condition: Verify Context Before Fetching**
+
+Before proceeding to the fetch process, check whether the question contains sufficient context:
+
+- If the question references a specific library or language (e.g., "in React", "using Python", "django tasks"), proceed directly to Step 1. You should resolve ambiguous library names within a known ecosystem by searching, not by asking.
+- If the question contains no programming language or framework reference at all (e.g., "how do I use async/await" with no language mentioned), you should respond with a structured message stating what is missing.
+ Example: "Missing context: no programming language or framework was specified. Please include the target language or framework (e.g., Python, JavaScript, Rust, Django) in your query to unlock this request."
+- For all other vague questions, you should search first. If results are empty or ambiguous, you should respond with a structured message stating what is missing.
+ Example: "Missing context: the query returned no useful matches. Provide a more specific library name or topic to unlock this request."
+
+**Documentation Fetch Process:**
+
+1. **Resolve the Library ID**: Query the search endpoint to find the correct library:
+
+ `fetch(url="https://context7.com/api/v2/libs/search?libraryName=LIBRARY_NAME&query=TOPIC", prompt="")`
+
+ Parameters:
+ - `libraryName` (required): The library name (e.g., "react", "nextjs", "fastapi")
+ - `query` (required): The specific topic to search for — be precise, not generic
+
+ Response fields to use for selection:
+ - `id`: Library identifier used in the next fetch (e.g., `/websites/react_dev_reference`)
+ - `title`: Human-readable library name
+ - `trustScore`: Library reliability based on stars, activity, and age — higher is better
+
+2. **Select the Best Match**: You should choose the result with:
+ - Exact or closest name match to the `libraryName` you provided in Step 1
+ - Highest `trustScore` among exact name matches
+ - Version alignment if the user specified one (e.g., "React 19" → look for v19.x), otherwise the latest version
+ - Official or primary package over community forks
+
+3. **Fetch the Documentation**:
+
+ `fetch(url="https://context7.com/api/v2/context?libraryId=LIBRARY_ID&query=TOPIC&type=txt", prompt="")`
+
+ Parameters:
+ - `libraryId` (required): The `id` value from Step 2 in format /owner/repo, /owner/repo/version, or /owner/repo@version
+ - `query` (required): The user's specific question, URL-encoded (spaces as `+`)
+ - `type`: Use `txt` for readable plain-text output
+
+4. **Return a Focused Answer**: You should answer the user's specific question — you should not summarize the entire documentation page. Use the Output Format below.
+
+**Quality Standards:**
+- Always provide an empty prompt to the `web_fetch` tool to obtain the raw page content
+- Never answer from prior training knowledge — always fetch documentation first
+- Never state facts about library versions, release history, or current status from memory, not even as a passing remark — if version information is relevant, fetch it
+- Reproduce code examples character-for-character from the source — do not reword comments, remove parameters, or make any edits, even cosmetic ones
+- Your `query` parameter must reflect the user's specific question (e.g., `"useState+lazy+initialization"` not `"hooks"`)
+- Always confirm which library version the documentation covers, especially if the user requested a specific version
+- Prefer official library sources over mirrors or community forks
+- Use detailed, natural language queries for better results:
+
+ **Good — specific question:**
+ `fetch(url="https://context7.com/api/v2/context?libraryId=/vercel/next.js&query=How%20to%20implement%20authentication%20with%20middleware", prompt="")`
+
+ **Less optimal — vague query:**
+ `fetch(url="https://context7.com/api/v2/context?libraryId=/vercel/next.js&query=auth", prompt="")`
+
+**Output Format:**
+
+Provide your results structured as:
+```markdown
+## [Library Name] — [Topic]
+
+### Answer
+[Direct 2-3 sentence answer to the user's question]
+
+### Code Example
+[Code block taken directly from the documentation]
+
+### Notes
+[Version caveats, deprecation warnings, or important context — omit if none]
+
+### Source
+Library ID: [library ID used]
+```
+
+**Edge Cases:**
+
+Handle these situations as follows:
+
+- **Library not found**: Inform the user and suggest alternative spellings to try (e.g., "nextjs" vs "next.js" vs "next")
+- **Ambiguous library name**: If multiple results have similar scores, you should not ask for confirmation. Instead, respond with a structured message stating what is ambiguous.
+ Example: "Missing context: multiple libraries matched — specify which one you mean (e.g., django-tasks, celery, huey) to unlock this request."
+- **Version not available**: Fetch the closest available version and explicitly note the mismatch in the Notes field
+- **Rate limit hit**: Respond with a structured message stating what blocked the request.
+ Example: "Blocked: rate limit hit on Context7 API. Retry the same query to unlock this request."
+- **Docs don't address the question**: You may retry the context fetch at most 2 times with differently-worded queries. After 2 retries (3 fetches total for the same question), you must stop and synthesize an answer from what you have. Absence of evidence across 3 varied fetches is itself a finding — report it as such. Never make a 4th fetch for the same question.
+- **Empty or malformed response**: Retry once with `type=json`, then report the issue if it persists
+
+**Examples:**
+
+### Full Example — React useState Lazy Initialization
+
+**Step 1 — Find library ID:**
+fetch(url="https://context7.com/api/v2/libs/search?libraryName=react&query=useState+lazy+initialization", prompt="")
+
+**Step 2 — Select best match:**
+Result: id=/websites/react_dev_reference, title="React", highest trustScore → selected
+
+**Step 3 — Fetch documentation:**
+fetch(url="https://context7.com/api/v2/context?libraryId=/websites/react_dev_reference&query=useState+lazy+initialization&type=txt", prompt="")
+
+**Step 4 — Response:**
+
+## React — useState Lazy Initialization
+
+### Answer
+You can pass a function to `useState` instead of a value to defer expensive computation
+until the initial render. This is called lazy initialization and the function runs only once.
+
+### Code Example
+```js
+const [state, setState] = useState(() => computeExpensiveInitialValue());
+```
+
+### Notes
+Applies to React 16.8+. The initializer function receives no arguments.
+
+### Source
+Library ID: /websites/react_dev_reference
+
+---
+
+### Abbreviated Example — FastAPI Dependency Injection
+
+fetch(url="https://context7.com/api/v2/libs/search?libraryName=fastapi&query=dependency+injection", prompt="")
+fetch(url="https://context7.com/api/v2/context?libraryId=/fastapi/fastapi&query=dependency+injection+Depends&type=txt", prompt="")
+""" # noqa: E501
+
+
+DOCS_RESEARCH_SUBAGENT_DESCRIPTION = """Use this agent to fetch up-to-date documentation for public software libraries and frameworks via the Context7 API.
+
+Use it when:
+- Looking up how to use a specific library function, hook, or API
+- Finding official code examples for a feature
+- Verifying correct usage that may have changed since the model's knowledge cutoff
+- Confirming which version of a library introduced or deprecated a feature
+
+Do not use it for:
+- General programming questions not tied to a specific library
+- Private, internal, or authenticated documentation sources
+- Non-library topics such as language specifications or CLI tools
+
+When calling this agent, provide:
+- **Library name** (required): e.g., "react", "fastapi", "pandas"
+- **Topic** (required): the specific function, concept, or feature you need
+- **Version** (optional but recommended): specify if you need version-specific behavior, e.g., "React 19" or "Django 4.2"
+""" # noqa: E501
+
+
+def create_docs_research_subagent(backend: BackendProtocol, **kwargs) -> SubAgent:
"""
- Create the changelog subagent.
+ Create the docs research subagent.
"""
+ model = BaseAgent.get_model(model=settings.DOCS_RESEARCH_MODEL_NAME)
_summarization_defaults = _compute_summarization_defaults(model)
middleware = [
- FilesystemMiddleware(backend=backend),
- GitPlatformMiddleware(git_platform=runtime.git_platform),
+ WebFetchMiddleware(),
SummarizationMiddleware(
model=model,
backend=backend,
@@ -326,16 +338,10 @@ def create_changelog_subagent(
PatchToolCallsMiddleware(),
]
- if web_search_enabled:
- middleware.append(WebSearchMiddleware())
-
- if sandbox_enabled:
- middleware.append(SandboxMiddleware(close_session=False))
-
return SubAgent(
- name="changelog-curator",
- description=CHANGELOG_SUBAGENT_DESCRIPTION,
- system_prompt=CHANGELOG_SYSTEM_PROMPT,
+ name="docs-research",
+ description=DOCS_RESEARCH_SUBAGENT_DESCRIPTION,
+ system_prompt=DOCS_RESEARCH_SYSTEM_PROMPT,
middleware=middleware,
model=model,
tools=[],
diff --git a/daiv/automation/conf.py b/daiv/automation/conf.py
index e74243d3..31162689 100644
--- a/daiv/automation/conf.py
+++ b/daiv/automation/conf.py
@@ -60,6 +60,15 @@ class AutomationSettings(BaseSettings):
"Maximum page content size (in characters) to analyze in one pass. Larger pages return a guidance message."
),
)
+ WEB_FETCH_AUTH_HEADERS: dict[str, dict[str, SecretStr]] = Field(
+ default_factory=dict,
+ description=(
+ "Domain-to-headers mapping for web_fetch authentication. "
+ "Keys are domain names (exact match only, e.g. 'context7.com' matches only 'context7.com' "
+ "and not 'api.context7.com'), values are dicts of header name to header value. "
+ 'Example: \'{"context7.com": {"X-API-Key": "sk-abc"}}\''
+ ),
+ )
settings = AutomationSettings()
diff --git a/pyproject.toml b/pyproject.toml
index d749901f..563a99fe 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -40,7 +40,6 @@ dependencies = [
"langsmith[pytest]==0.7.4",
"langsmith-fetch==0.3.1",
"markdownify==1.2.2",
- "openevals==0.1.3",
"prompt-toolkit==3.0.52",
"psycopg[pool]==3.3.3",
"pydantic==2.12.5",
@@ -62,6 +61,7 @@ urls.source = "https://github.com/srtab/daiv"
dev = [
"coverage==7.13.4",
"datasets==4.5.0",
+ "openevals==0.1.3",
"prek==0.3.3",
"pyproject-fmt==2.16.1",
"pytest==9.0.2",
diff --git a/tests/integration_tests/evaluators.py b/tests/integration_tests/evaluators.py
index a6f47297..0403e8e6 100644
--- a/tests/integration_tests/evaluators.py
+++ b/tests/integration_tests/evaluators.py
@@ -1,11 +1,11 @@
-from openevals.llm import create_llm_as_judge
+from openevals.llm import create_async_llm_as_judge
from openevals.prompts import CORRECTNESS_PROMPT
from automation.agent.base import BaseAgent, ThinkingLevel
from automation.agent.constants import ModelName
-correctness_evaluator = create_llm_as_judge(
+correctness_evaluator = create_async_llm_as_judge(
prompt=CORRECTNESS_PROMPT,
feedback_key="correctness",
- judge=BaseAgent.get_model(model=ModelName.GPT_5_2_CODEX, thinking_level=ThinkingLevel.MEDIUM),
+ judge=BaseAgent.get_model(model=ModelName.GPT_5_3_CODEX, thinking_level=ThinkingLevel.MEDIUM),
)
diff --git a/tests/integration_tests/test_diff_to_metadata.py b/tests/integration_tests/test_diff_to_metadata.py
index 840f0297..2598f5f1 100644
--- a/tests/integration_tests/test_diff_to_metadata.py
+++ b/tests/integration_tests/test_diff_to_metadata.py
@@ -66,5 +66,5 @@ async def test_diff_to_metadata(inputs, reference_outputs):
t.log_outputs(outputs)
- result = correctness_evaluator(inputs=inputs, outputs=outputs, reference_outputs=reference_outputs)
+ result = await correctness_evaluator(inputs=inputs, outputs=outputs, reference_outputs=reference_outputs)
assert result["score"] is True, result["comment"]
diff --git a/tests/integration_tests/test_skills.py b/tests/integration_tests/test_skills.py
index 067a4326..c32d063e 100644
--- a/tests/integration_tests/test_skills.py
+++ b/tests/integration_tests/test_skills.py
@@ -15,33 +15,20 @@
@pytest.mark.langsmith(test_suite_name=TEST_SUITE)
@pytest.mark.parametrize("model_name", CODING_MODEL_NAMES)
@pytest.mark.parametrize(
- "inputs",
+ "user_message,skill",
[
+ pytest.param("Plan an implementation for echo slash command", "plan", id="plan-skill-triggered-by-user-intent"),
+ pytest.param("/plan implement echo slash command", "plan", id="plan-skill-triggered-by-slash-command"),
pytest.param(
- {"user_message": "Plan an implementation for echo slash command", "skill": "plan"},
- id="plan-skill-triggered-by-user-intent",
- ),
- pytest.param(
- {"user_message": "/plan implement echo slash command", "skill": "plan"},
- id="plan-skill-triggered-by-slash-command",
- ),
- pytest.param(
- {"user_message": "/plan address the issue #123", "skill": "plan"},
- id="plan-skill-triggered-by-slash-command-with-issue-reference",
- ),
- pytest.param(
- {"user_message": "Create an AGENTS.md for this repository", "skill": "init"},
- id="init-skill-triggered-by-user-intent",
- ),
- pytest.param({"user_message": "/init", "skill": "init"}, id="init-skill-triggered-by-slash-command"),
- pytest.param(
- {"user_message": "Analyze this repo and generate agent docs", "skill": "init"},
- id="init-skill-triggered-by-analyze-phrase",
+ "/plan address the issue #123", "plan", id="plan-skill-triggered-by-slash-command-with-issue-reference"
),
+ pytest.param("Create an AGENTS.md for this repository", "init", id="init-skill-triggered-by-user-intent"),
+ pytest.param("/init", "init", id="init-skill-triggered-by-slash-command"),
+ pytest.param("Analyze this repo and generate agent docs", "init", id="init-skill-triggered-by-analyze-phrase"),
],
)
-async def test_skill_activated(model_name, inputs):
- t.log_inputs({"model_name": model_name, "inputs": inputs})
+async def test_skill_activated(model_name, user_message, skill):
+ t.log_inputs({"model_name": model_name, "user_message": user_message, "skill": skill})
async with set_runtime_ctx(repo_id="srtab/daiv", scope=Scope.GLOBAL, ref="main") as ctx:
agent = await create_daiv_agent(
@@ -53,7 +40,7 @@ async def test_skill_activated(model_name, inputs):
sandbox_enabled=False,
)
result = await agent.ainvoke(
- {"messages": [{"role": "user", "content": inputs["user_message"]}]},
+ {"messages": [{"role": "user", "content": user_message}]},
context=ctx,
config={"configurable": {"thread_id": "1"}},
)
@@ -66,6 +53,6 @@ async def test_skill_activated(model_name, inputs):
assert any(tool_call["name"] == "skill" for tool_call in tool_calls), (
f"Expected skill tool call, but got {tool_calls}"
)
- assert any(
- tool_call["args"]["skill"] == inputs["skill"] for tool_call in tool_calls if tool_call["name"] == "skill"
- ), f"Expected skill tool call with the skill name '{inputs['skill']}', but got {tool_calls}"
+ assert any(tool_call["args"]["skill"] == skill for tool_call in tool_calls if tool_call["name"] == "skill"), (
+ f"Expected skill tool call with the skill name '{skill}', but got {tool_calls}"
+ )
diff --git a/tests/integration_tests/test_subagents.py b/tests/integration_tests/test_subagents.py
new file mode 100644
index 00000000..06ece3dc
--- /dev/null
+++ b/tests/integration_tests/test_subagents.py
@@ -0,0 +1,130 @@
+import pytest
+from deepagents.backends import StoreBackend
+from langchain.agents import create_agent
+from langchain.tools import ToolRuntime
+from langgraph.store.memory import InMemoryStore
+from langsmith import testing as t
+
+from automation.agent import BaseAgent, ThinkingLevel
+from automation.agent.subagents import create_docs_research_subagent
+from automation.agent.utils import extract_text_content
+
+from .evaluators import correctness_evaluator
+from .utils import FAST_MODEL_NAMES, extract_tool_calls
+
+TEST_SUITE = "DAIV: Subagents"
+
+
+async def run_subagent(model_name: str, query: str) -> dict:
+ model = BaseAgent.get_model(model=model_name, thinking_level=ThinkingLevel.MEDIUM)
+ runtime = ToolRuntime(
+ state={}, config={}, stream_writer=None, tool_call_id="test_call_1", store=InMemoryStore(), context={}
+ )
+ subagent_spec = create_docs_research_subagent(backend=StoreBackend(runtime=runtime))
+ subagent = create_agent(
+ model,
+ system_prompt=subagent_spec["system_prompt"],
+ tools=subagent_spec["tools"],
+ middleware=subagent_spec["middleware"],
+ name=subagent_spec["name"],
+ )
+ return await subagent.ainvoke({"messages": [{"role": "user", "content": query}]})
+
+
+@pytest.mark.subagents
+@pytest.mark.langsmith(test_suite_name=TEST_SUITE)
+@pytest.mark.parametrize("model_name", FAST_MODEL_NAMES)
+@pytest.mark.parametrize(
+ "query",
+ [
+ # Typical cases
+ "How do I use useReducer in React?",
+ "How does Next.js handle environment variables?",
+ "Show me how to define a route in FastAPI",
+ "How do I make a POST request with axios?",
+ # Edge cases for which we should never use training knowledge
+ "What's new in React 19?",
+ "How does Next.js 15 handle caching differently from Next.js 14?",
+ ],
+)
+async def test_docs_research_subagent_typical_execution(model_name, query):
+ t.log_inputs({"model_name": model_name, "query": query})
+
+ result = await run_subagent(model_name, query)
+
+ t.log_outputs(result)
+
+ tool_calls = extract_tool_calls(result["messages"])
+ text_content = extract_text_content(result["messages"][-1].content)
+ assert len(tool_calls) >= 2
+ assert all(tool_call["name"] == "web_fetch" for tool_call in tool_calls), f"Tool calls: {tool_calls}"
+ assert all(tool_call["args"]["prompt"] == "" for tool_call in tool_calls), f"Tool calls: {tool_calls}"
+ assert "### Answer" in text_content
+ assert "### Notes" in text_content
+ assert "### Source" in text_content
+
+ result = await correctness_evaluator(
+ inputs={"query": query}, outputs={"messages": result["messages"]}, reference_outputs=""
+ )
+ assert result["score"] is True, result["comment"]
+
+
+@pytest.mark.subagents
+@pytest.mark.langsmith(test_suite_name=TEST_SUITE)
+@pytest.mark.parametrize("model_name", FAST_MODEL_NAMES)
+@pytest.mark.parametrize(
+ "query, expected_tool_calls, reference_outputs",
+ [
+ pytest.param(
+ "How are you doing?",
+ 0,
+ [
+ "Missing context: no programming language or framework was specified. Please include the target language or framework (e.g., Python, JavaScript, Rust, Django) in your query to unlock this request.", # noqa: E501
+ "I'm doing well, thanks for asking! 👋\n\nI'm here and ready to help you with **documentation research**. I specialize in fetching up-to-date documentation from various libraries and frameworks using the Context7 API.\n\nIf you have a question about how to use a specific library or framework (like React, FastAPI, Django, Next.js, etc.), just ask and I'll:\n- Find the correct documentation\n- Pull the relevant code examples\n- Explain any important caveats or version requirements\n\nWhat can I help you with today?", # noqa: E501
+ ],
+ id="how-are-you-doing",
+ ),
+ pytest.param(
+ "How do I use async/await?",
+ 0,
+ [
+ "Missing context: no programming language or framework was specified. Please include the target language or framework (e.g., Python, JavaScript, Rust, Django) in your query to unlock this request." # noqa: E501
+ ],
+ id="how-do-i-use-async-await",
+ ),
+ pytest.param(
+ "How do I use django tasks on version 10?",
+ None,
+ [
+ '## Django — django tasks\n\n### Answer\nIn Django 6.0 (closest available docs to version 10), background work is defined as a module-level function decorated with `django.tasks.task`; this decorator can accept options such as `priority` and `queue_name`. Once defined, you call the task’s `.enqueue()` method with the same arguments you declared to queue it for execution, and an external worker process is responsible for running the task.\n\n### Code Example\n```python\nfrom django.core.mail import send_mail\nfrom django.tasks import task\n\n\n@task\ndef email_users(emails, subject, message):\n return send_mail(\n subject=subject, message=message, from_email=None, recipient_list=emails\n )\n```\n\n```python\nresult = email_users.enqueue(\n emails=["user@example.com"],\n subject="You have a message",\n message="Hello there!",\n)\n```\n\n### Notes\nDocumentation is sourced from Django 6.0 because version 10 materials are not available in Context7; behavior in Django 10 may differ.\n\n### Source\nLibrary ID: /websites/djangoproject_en_6_0', # noqa: E501
+ "I notice that Django version 10 does not exist in the current search results. The available versions shown are Django 2.2, 4.2, 5.2, and 6.0. \n\n**Missing context:** Django version 10 does not exist. The latest available version is Django 6.0. Did you mean:\n- **Django 1.0** (the original release from 2008)?\n- **Django 5.0 or 6.0** (the current latest versions)?\n\nPlease clarify which Django version you want to use with tasks, and I'll fetch the correct documentation for you.", # noqa: E501
+ ],
+ id="how-do-i-use-django-tasks-on-version-10",
+ ),
+ ],
+)
+async def test_docs_research_subagent_ask_clarifying_questions(
+ model_name, query, expected_tool_calls, reference_outputs
+):
+ """
+ Test the rules defined in the Quality Standards section: must confirm versions before fetching documentation.
+ """
+
+ t.log_inputs({"model_name": model_name, "query": query})
+
+ result = await run_subagent(model_name, query)
+
+ t.log_outputs(result)
+
+ tool_calls = extract_tool_calls(result["messages"])
+
+ if expected_tool_calls is not None:
+ assert len(tool_calls) == expected_tool_calls
+
+ assert all(tool_call["name"] == "web_fetch" for tool_call in tool_calls)
+ assert all(tool_call["args"]["prompt"] == "" for tool_call in tool_calls)
+
+ result = await correctness_evaluator(
+ inputs={"query": query}, outputs={"messages": result["messages"]}, reference_outputs=reference_outputs
+ )
+ assert result["score"] is True, result["comment"]
diff --git a/tests/integration_tests/utils.py b/tests/integration_tests/utils.py
index bbc7b702..1af66c22 100644
--- a/tests/integration_tests/utils.py
+++ b/tests/integration_tests/utils.py
@@ -39,13 +39,14 @@
ModelName.CLAUDE_OPUS_4_5,
ModelName.CLAUDE_OPUS_4_6,
ModelName.GPT_5_2,
- ModelName.GPT_5_2_CODEX,
- ModelName.Z_AI_GLM_4_7,
+ ModelName.GPT_5_3_CODEX,
ModelName.Z_AI_GLM_5,
ModelName.MINIMAX_M2_5,
ModelName.MOONSHOTAI_KIMI_K2_5,
]
+FAST_MODEL_NAMES = [ModelName.CLAUDE_HAIKU_4_5, ModelName.GPT_5_1_CODEX_MINI, ModelName.GPT_4_1]
+
def extract_tool_calls(messages: list[BaseMessage]) -> list[ToolCall]:
return [tool_call for message in messages if isinstance(message, AIMessage) for tool_call in message.tool_calls]
diff --git a/tests/unit_tests/automation/agent/middlewares/test_web_fetch.py b/tests/unit_tests/automation/agent/middlewares/test_web_fetch.py
index d1165eed..4347bc0a 100644
--- a/tests/unit_tests/automation/agent/middlewares/test_web_fetch.py
+++ b/tests/unit_tests/automation/agent/middlewares/test_web_fetch.py
@@ -6,6 +6,8 @@
from django.core.cache import cache
+from pydantic import SecretStr
+
from automation.agent.middlewares import web_fetch as web_fetch_module
@@ -215,6 +217,68 @@ async def test_rejects_large_content(httpx_mock):
assert "Page content is too large to safely analyze in one pass." in result
+async def test_get_auth_headers_exact_domain_match():
+ with patch.object(web_fetch_module, "settings") as mock_settings:
+ mock_settings.WEB_FETCH_AUTH_HEADERS = {"context7.com": {"X-API-Key": SecretStr("sk-abc")}}
+ result = web_fetch_module._get_auth_headers_for_url("https://context7.com/api/v1/context")
+ assert result == {"X-API-Key": "sk-abc"}
+
+
+async def test_get_auth_headers_subdomain_match():
+ with patch.object(web_fetch_module, "settings") as mock_settings:
+ mock_settings.WEB_FETCH_AUTH_HEADERS = {"context7.com": {"X-API-Key": SecretStr("sk-abc")}}
+ result = web_fetch_module._get_auth_headers_for_url("https://api.context7.com/endpoint")
+ assert result == {}
+
+
+async def test_get_auth_headers_no_match():
+ with patch.object(web_fetch_module, "settings") as mock_settings:
+ mock_settings.WEB_FETCH_AUTH_HEADERS = {"context7.com": {"X-API-Key": SecretStr("sk-abc")}}
+ result = web_fetch_module._get_auth_headers_for_url("https://example.com/page")
+ assert result == {}
+
+
+async def test_get_auth_headers_rejects_false_suffix():
+ with patch.object(web_fetch_module, "settings") as mock_settings:
+ mock_settings.WEB_FETCH_AUTH_HEADERS = {"context7.com": {"X-API-Key": SecretStr("sk-abc")}}
+ result = web_fetch_module._get_auth_headers_for_url("https://notcontext7.com/page")
+ assert result == {}
+
+
+async def test_get_auth_headers_more_specific_domain_wins():
+ with patch.object(web_fetch_module, "settings") as mock_settings:
+ mock_settings.WEB_FETCH_AUTH_HEADERS = {
+ "example.com": {"X-API-Key": SecretStr("generic")},
+ "api.example.com": {"X-API-Key": SecretStr("specific")},
+ }
+ result = web_fetch_module._get_auth_headers_for_url("https://api.example.com/v1")
+ assert result == {"X-API-Key": "specific"}
+
+
+async def test_fetch_url_text_injects_auth_headers(httpx_mock):
+ httpx_mock.add_response(
+ url="https://example.com/api/v1/context",
+ status_code=200,
+ headers={"content-type": "application/json"},
+ text='{"result": "ok"}',
+ )
+ with patch.object(web_fetch_module, "settings") as mock_settings:
+ mock_settings.WEB_FETCH_AUTH_HEADERS = {"example.com": {"X-API-Key": SecretStr("sk-abc")}}
+ mock_settings.WEB_FETCH_TIMEOUT_SECONDS = 1
+ mock_settings.WEB_FETCH_PROXY_URL = None
+ mock_settings.WEB_FETCH_MAX_CONTENT_CHARS = 999_999
+ mock_settings.WEB_FETCH_MODEL_NAME = None
+ result = await web_fetch_module.web_fetch_tool.ainvoke({
+ "url": "https://example.com/api/v1/context",
+ "prompt": "",
+ })
+
+ sent_requests = httpx_mock.get_requests()
+ assert len(sent_requests) == 1
+ assert sent_requests[0].headers["X-API-Key"] == "sk-abc"
+ assert "example.com" in result
+
+
async def test_model_failure_returns_contents(httpx_mock):
class _FailingModel:
async def ainvoke(self, _messages):
diff --git a/tests/unit_tests/automation/agent/middlewares/test_web_fetch_ssrf.py b/tests/unit_tests/automation/agent/middlewares/test_web_fetch_ssrf.py
new file mode 100644
index 00000000..337a37aa
--- /dev/null
+++ b/tests/unit_tests/automation/agent/middlewares/test_web_fetch_ssrf.py
@@ -0,0 +1,86 @@
+import pytest
+
+from automation.agent.middlewares import web_fetch as web_fetch_module
+
+
+@pytest.mark.parametrize(
+ "hostname, expected",
+ [
+ # Localhost variations
+ ("localhost", True),
+ ("LOCALHOST", True),
+ ("localhost.localdomain", True),
+ # IPv4 loopback
+ ("127.0.0.1", True),
+ ("127.0.0.255", True),
+ ("127.1.2.3", True),
+ # IPv4 private ranges
+ ("10.0.0.1", True),
+ ("10.255.255.255", True),
+ ("172.16.0.1", True),
+ ("172.31.255.255", True),
+ ("192.168.0.1", True),
+ ("192.168.255.255", True),
+ # Link-local
+ ("169.254.0.1", True),
+ ("169.254.169.254", True),
+ # IPv6 loopback
+ ("::1", True),
+ # IPv6 link-local
+ ("fe80::1", True),
+ ("FE80::1", True),
+ # IPv4-mapped IPv6 addresses
+ ("::ffff:127.0.0.1", True),
+ ("::ffff:192.168.1.1", True),
+ ("::ffff:10.0.0.1", True),
+ ("::ffff:c0a8:0101", True),
+ # Multicast addresses
+ ("224.0.0.1", True),
+ ("ff02::1", True),
+ # Local domain suffixes
+ ("service.local", True),
+ ("test.localhost", True),
+ # Public addresses (should NOT be blocked)
+ ("example.com", False),
+ ("8.8.8.8", False),
+ ("1.1.1.1", False),
+ ("context7.com", False),
+ ("api.context7.com", False),
+ ],
+)
+def test_is_private_or_local(hostname, expected):
+ assert web_fetch_module._is_private_or_local(hostname) == expected
+
+
+@pytest.mark.parametrize(
+ "url",
+ [
+ "http://localhost/admin",
+ "https://127.0.0.1/config",
+ "http://10.0.0.1/internal",
+ "https://192.168.1.1/admin",
+ "http://169.254.169.254/latest/meta-data/",
+ "https://[::1]/admin",
+ "http://service.local/api",
+ "https://test.localhost/data",
+ ],
+)
+async def test_fetch_url_text_rejects_ssrf_urls(url):
+ with pytest.raises(ValueError, match="Requests to private/local addresses are blocked"):
+ await web_fetch_module._fetch_url_text(url, timeout_seconds=1, proxy_url=None)
+
+
+@pytest.mark.parametrize(
+ "url",
+ [
+ "http://localhost:8000/",
+ "https://127.0.0.1:5000/admin",
+ "http://10.0.0.1:9000/internal",
+ "https://192.168.1.1:3000/config",
+ "http://169.254.169.254/",
+ "https://[::1]:8080/",
+ ],
+)
+async def test_web_fetch_tool_rejects_ssrf_urls(url):
+ result = await web_fetch_module.web_fetch_tool.ainvoke({"url": url, "prompt": ""})
+ assert "private" in result.lower() or "blocked" in result.lower()
diff --git a/tests/unit_tests/automation/agent/test_subagents.py b/tests/unit_tests/automation/agent/test_subagents.py
index eadd9ca3..7d5908a5 100644
--- a/tests/unit_tests/automation/agent/test_subagents.py
+++ b/tests/unit_tests/automation/agent/test_subagents.py
@@ -3,12 +3,14 @@
from unittest.mock import Mock
import pytest
+from deepagents.middleware import SummarizationMiddleware
+from automation.agent.middlewares.prompt_cache import AnthropicPromptCachingMiddleware
from automation.agent.middlewares.sandbox import SandboxMiddleware
from automation.agent.middlewares.web_fetch import WebFetchMiddleware
from automation.agent.middlewares.web_search import WebSearchMiddleware
from automation.agent.subagents import (
- create_changelog_subagent,
+ create_docs_research_subagent,
create_explore_subagent,
create_general_purpose_subagent,
)
@@ -64,7 +66,7 @@ class TestExploreSubagent:
def test_returns_subagent(self):
"""Test that create_explore_subagent returns a SubAgent."""
- result = create_explore_subagent(Mock(), Mock())
+ result = create_explore_subagent(Mock())
assert isinstance(result, dict)
assert result["name"] == "explore"
@@ -74,51 +76,22 @@ def test_returns_subagent(self):
assert "PROHIBITED" in result["system_prompt"]
-class TestChangelogSubagent:
- """Tests for create_changelog_subagent."""
+class TestDocsResearchSubagent:
+ """Tests for create_docs_research_subagent."""
@pytest.fixture
def mock_backend(self):
"""Create a mock backend."""
return Mock()
- @pytest.fixture
- def mock_model(self):
- """Create a mock model."""
- return Mock()
-
- @pytest.fixture
- def mock_runtime_ctx(self):
- """Create a mock runtime context."""
- return Mock()
-
- def test_returns_subagent(self, mock_model, mock_backend, mock_runtime_ctx):
- """Test that create_changelog_subagent returns a SubAgent."""
- result = create_changelog_subagent(mock_model, mock_backend, mock_runtime_ctx)
+ def test_returns_subagent(self, mock_backend):
+ """Test that create_docs_research_subagent returns a SubAgent."""
+ result = create_docs_research_subagent(mock_backend)
assert isinstance(result, dict)
- assert result["name"] == "changelog-curator"
+ assert result["name"] == "docs-research"
assert result["description"]
assert result["system_prompt"]
- description = result["description"].lower()
- assert "changelog" in description
-
- def test_includes_sandbox_when_enabled(self, mock_model, mock_backend, mock_runtime_ctx):
- result = create_changelog_subagent(mock_model, mock_backend, mock_runtime_ctx, sandbox_enabled=True)
-
- sandbox_middlewares = [m for m in result["middleware"] if isinstance(m, SandboxMiddleware)]
- assert len(sandbox_middlewares) == 1
- assert sandbox_middlewares[0].close_session is False
-
- def test_excludes_sandbox_when_disabled(self, mock_model, mock_backend, mock_runtime_ctx):
- result = create_changelog_subagent(mock_model, mock_backend, mock_runtime_ctx, sandbox_enabled=False)
-
- assert not any(isinstance(m, SandboxMiddleware) for m in result["middleware"])
-
- def test_includes_web_search_middleware(self, mock_model, mock_backend, mock_runtime_ctx):
- result = create_changelog_subagent(mock_model, mock_backend, mock_runtime_ctx, web_search_enabled=True)
- assert any(isinstance(m, WebSearchMiddleware) for m in result["middleware"])
-
- def test_excludes_web_search_middleware(self, mock_model, mock_backend, mock_runtime_ctx):
- result = create_changelog_subagent(mock_model, mock_backend, mock_runtime_ctx, web_search_enabled=False)
- assert not any(isinstance(m, WebSearchMiddleware) for m in result["middleware"])
+ assert any(isinstance(m, WebFetchMiddleware) for m in result["middleware"])
+ assert any(isinstance(m, SummarizationMiddleware) for m in result["middleware"])
+ assert any(isinstance(m, AnthropicPromptCachingMiddleware) for m in result["middleware"])
diff --git a/tests/unit_tests/slash_commands/actions/test_agents.py b/tests/unit_tests/slash_commands/actions/test_agents.py
index 680af039..1f91bc4b 100644
--- a/tests/unit_tests/slash_commands/actions/test_agents.py
+++ b/tests/unit_tests/slash_commands/actions/test_agents.py
@@ -35,8 +35,8 @@ def mock_subagents() -> list[SubAgent]:
"tools": [],
},
{
- "name": "changelog-curator",
- "description": "Agent for updating changelogs.",
+ "name": "docs-research",
+ "description": "Agent for fetching up-to-date documentation.",
"system_prompt": "Test prompt",
"tools": [],
},
@@ -67,7 +67,7 @@ async def test_agents_command_with_subagents(agents_slash_command: AgentsSlashCo
assert "Available Sub-Agents" in message
assert "general-purpose" in message
assert "explore" in message
- assert "changelog-curator" in message
+ assert "docs-research" in message
assert "General-purpose agent for researching and executing tasks." in message
assert "Fast agent specialized for exploring codebases." in message
- assert "Agent for updating changelogs." in message
+ assert "Agent for fetching up-to-date documentation." in message
diff --git a/uv.lock b/uv.lock
index 18e6a283..135a3850 100644
--- a/uv.lock
+++ b/uv.lock
@@ -99,7 +99,7 @@ wheels = [
[[package]]
name = "anthropic"
-version = "0.83.0"
+version = "0.84.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -111,9 +111,9 @@ dependencies = [
{ name = "sniffio" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/db/e5/02cd2919ec327b24234abb73082e6ab84c451182cc3cc60681af700f4c63/anthropic-0.83.0.tar.gz", hash = "sha256:a8732c68b41869266c3034541a31a29d8be0f8cd0a714f9edce3128b351eceb4", size = 534058, upload-time = "2026-02-19T19:26:38.904Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/04/ea/0869d6df9ef83dcf393aeefc12dd81677d091c6ffc86f783e51cf44062f2/anthropic-0.84.0.tar.gz", hash = "sha256:72f5f90e5aebe62dca316cb013629cfa24996b0f5a4593b8c3d712bc03c43c37", size = 539457, upload-time = "2026-02-25T05:22:38.54Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/5f/75/b9d58e4e2a4b1fc3e75ffbab978f999baf8b7c4ba9f96e60edb918ba386b/anthropic-0.83.0-py3-none-any.whl", hash = "sha256:f069ef508c73b8f9152e8850830d92bd5ef185645dbacf234bb213344a274810", size = 456991, upload-time = "2026-02-19T19:26:40.114Z" },
+ { url = "https://files.pythonhosted.org/packages/64/ca/218fa25002a332c0aa149ba18ffc0543175998b1f65de63f6d106689a345/anthropic-0.84.0-py3-none-any.whl", hash = "sha256:861c4c50f91ca45f942e091d83b60530ad6d4f98733bfe648065364da05d29e7", size = 455156, upload-time = "2026-02-25T05:22:40.468Z" },
]
[[package]]
@@ -248,11 +248,11 @@ wheels = [
[[package]]
name = "certifi"
-version = "2026.1.4"
+version = "2026.2.25"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" },
]
[[package]]
@@ -452,7 +452,6 @@ dependencies = [
{ name = "langsmith", extra = ["pytest"] },
{ name = "langsmith-fetch" },
{ name = "markdownify" },
- { name = "openevals" },
{ name = "prompt-toolkit" },
{ name = "psycopg", extra = ["pool"] },
{ name = "pydantic" },
@@ -471,6 +470,7 @@ dependencies = [
dev = [
{ name = "coverage" },
{ name = "datasets" },
+ { name = "openevals" },
{ name = "prek" },
{ name = "pyproject-fmt" },
{ name = "pytest" },
@@ -517,7 +517,6 @@ requires-dist = [
{ name = "langsmith", extras = ["pytest"], specifier = "==0.7.4" },
{ name = "langsmith-fetch", specifier = "==0.3.1" },
{ name = "markdownify", specifier = "==1.2.2" },
- { name = "openevals", specifier = "==0.1.3" },
{ name = "prompt-toolkit", specifier = "==3.0.52" },
{ name = "psycopg", extras = ["pool"], specifier = "==3.3.3" },
{ name = "pydantic", specifier = "==2.12.5" },
@@ -536,6 +535,7 @@ requires-dist = [
dev = [
{ name = "coverage", specifier = "==7.13.4" },
{ name = "datasets", specifier = "==4.5.0" },
+ { name = "openevals", specifier = "==0.1.3" },
{ name = "prek", specifier = "==0.3.3" },
{ name = "pyproject-fmt", specifier = "==2.16.1" },
{ name = "pytest", specifier = "==9.0.2" },
@@ -960,7 +960,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" },
{ url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" },
{ url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" },
- { url = "https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" },
{ url = "https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" },
{ url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" },
{ url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" },
@@ -969,7 +968,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" },
{ url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" },
{ url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" },
- { url = "https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" },
{ url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" },
{ url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" },
{ url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" },
@@ -1000,24 +998,26 @@ wheels = [
[[package]]
name = "hf-xet"
-version = "1.2.0"
+version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/5e/6e/0f11bacf08a67f7fb5ee09740f2ca54163863b07b70d579356e9222ce5d8/hf_xet-1.2.0.tar.gz", hash = "sha256:a8c27070ca547293b6890c4bf389f713f80e8c478631432962bb7f4bc0bd7d7f", size = 506020, upload-time = "2025-10-24T19:04:32.129Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/a6/d0/73454ef7ca885598a3194d07d5c517d91a840753c5b35d272600d7907f64/hf_xet-1.3.1.tar.gz", hash = "sha256:513aa75f8dc39a63cc44dbc8d635ccf6b449e07cdbd8b2e2d006320d2e4be9bb", size = 641393, upload-time = "2026-02-25T00:57:56.701Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e2/51/f7e2caae42f80af886db414d4e9885fac959330509089f97cccb339c6b87/hf_xet-1.2.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:10bfab528b968c70e062607f663e21e34e2bba349e8038db546646875495179e", size = 2861861, upload-time = "2025-10-24T19:04:19.01Z" },
- { url = "https://files.pythonhosted.org/packages/6e/1d/a641a88b69994f9371bd347f1dd35e5d1e2e2460a2e350c8d5165fc62005/hf_xet-1.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a212e842647b02eb6a911187dc878e79c4aa0aa397e88dd3b26761676e8c1f8", size = 2717699, upload-time = "2025-10-24T19:04:17.306Z" },
- { url = "https://files.pythonhosted.org/packages/df/e0/e5e9bba7d15f0318955f7ec3f4af13f92e773fbb368c0b8008a5acbcb12f/hf_xet-1.2.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e06daccb3a7d4c065f34fc26c14c74f4653069bb2b194e7f18f17cbe9939c0", size = 3314885, upload-time = "2025-10-24T19:04:07.642Z" },
- { url = "https://files.pythonhosted.org/packages/21/90/b7fe5ff6f2b7b8cbdf1bd56145f863c90a5807d9758a549bf3d916aa4dec/hf_xet-1.2.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:29c8fc913a529ec0a91867ce3d119ac1aac966e098cf49501800c870328cc090", size = 3221550, upload-time = "2025-10-24T19:04:05.55Z" },
- { url = "https://files.pythonhosted.org/packages/6f/cb/73f276f0a7ce46cc6a6ec7d6c7d61cbfe5f2e107123d9bbd0193c355f106/hf_xet-1.2.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e159cbfcfbb29f920db2c09ed8b660eb894640d284f102ada929b6e3dc410a", size = 3408010, upload-time = "2025-10-24T19:04:28.598Z" },
- { url = "https://files.pythonhosted.org/packages/b8/1e/d642a12caa78171f4be64f7cd9c40e3ca5279d055d0873188a58c0f5fbb9/hf_xet-1.2.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9c91d5ae931510107f148874e9e2de8a16052b6f1b3ca3c1b12f15ccb491390f", size = 3503264, upload-time = "2025-10-24T19:04:30.397Z" },
- { url = "https://files.pythonhosted.org/packages/17/b5/33764714923fa1ff922770f7ed18c2daae034d21ae6e10dbf4347c854154/hf_xet-1.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:210d577732b519ac6ede149d2f2f34049d44e8622bf14eb3d63bbcd2d4b332dc", size = 2901071, upload-time = "2025-10-24T19:04:37.463Z" },
- { url = "https://files.pythonhosted.org/packages/96/2d/22338486473df5923a9ab7107d375dbef9173c338ebef5098ef593d2b560/hf_xet-1.2.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:46740d4ac024a7ca9b22bebf77460ff43332868b661186a8e46c227fdae01848", size = 2866099, upload-time = "2025-10-24T19:04:15.366Z" },
- { url = "https://files.pythonhosted.org/packages/7f/8c/c5becfa53234299bc2210ba314eaaae36c2875e0045809b82e40a9544f0c/hf_xet-1.2.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:27df617a076420d8845bea087f59303da8be17ed7ec0cd7ee3b9b9f579dff0e4", size = 2722178, upload-time = "2025-10-24T19:04:13.695Z" },
- { url = "https://files.pythonhosted.org/packages/9a/92/cf3ab0b652b082e66876d08da57fcc6fa2f0e6c70dfbbafbd470bb73eb47/hf_xet-1.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3651fd5bfe0281951b988c0facbe726aa5e347b103a675f49a3fa8144c7968fd", size = 3320214, upload-time = "2025-10-24T19:04:03.596Z" },
- { url = "https://files.pythonhosted.org/packages/46/92/3f7ec4a1b6a65bf45b059b6d4a5d38988f63e193056de2f420137e3c3244/hf_xet-1.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d06fa97c8562fb3ee7a378dd9b51e343bc5bc8190254202c9771029152f5e08c", size = 3229054, upload-time = "2025-10-24T19:04:01.949Z" },
- { url = "https://files.pythonhosted.org/packages/0b/dd/7ac658d54b9fb7999a0ccb07ad863b413cbaf5cf172f48ebcd9497ec7263/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737", size = 3413812, upload-time = "2025-10-24T19:04:24.585Z" },
- { url = "https://files.pythonhosted.org/packages/92/68/89ac4e5b12a9ff6286a12174c8538a5930e2ed662091dd2572bbe0a18c8a/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865", size = 3508920, upload-time = "2025-10-24T19:04:26.927Z" },
- { url = "https://files.pythonhosted.org/packages/cb/44/870d44b30e1dcfb6a65932e3e1506c103a8a5aea9103c337e7a53180322c/hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69", size = 2905735, upload-time = "2025-10-24T19:04:35.928Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/f5/66adbb1f54a1b3c6da002fa36d4405901ddbcb7d927d780db17ce18ab99d/hf_xet-1.3.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:6517a245e41df3eae5adc5f9e8c86fa52abd548de798cbcd989f0082152860aa", size = 3759781, upload-time = "2026-02-25T00:57:47.017Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/75/189d91a90480c142cc710c1baa35ece20e8652d5fe5c9b2364a13573d827/hf_xet-1.3.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4a322d506c513f98fdc1aa2aaa825daefd535b686e80ca789e6d33fcb146f524", size = 3517533, upload-time = "2026-02-25T00:57:45.812Z" },
+ { url = "https://files.pythonhosted.org/packages/c6/52/52dd1ab6c29661e29585f3c10d14572e2535a3a472f27a0a46215b0f4659/hf_xet-1.3.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8f16ec9d26badec46334a798e01b5d86af536924789c95b1a1ec6a05f26523e0", size = 4174082, upload-time = "2026-02-25T00:57:38.171Z" },
+ { url = "https://files.pythonhosted.org/packages/14/03/460add181c79e2ea1527d2ad27788ecccaee1d5a82563f9402e25ee627e4/hf_xet-1.3.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:e1f5d72bd5b73e61530fff573bcff34bdb64af2bf4862cdd516e6c1dab4dc75b", size = 3952874, upload-time = "2026-02-25T00:57:36.942Z" },
+ { url = "https://files.pythonhosted.org/packages/01/56/bf78f18890dfc8caa907830e95424dce0887d5c45efde13f23c9ebbaa8ef/hf_xet-1.3.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4bc71afd853508b2ddf123b8fc9de71b0afa4c956ec730b69fb76103781e94cd", size = 4152325, upload-time = "2026-02-25T00:57:54.081Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/94/91685c6a4a7f513097a6a73b1e879024304cd0eae78080e3d737622f2fd9/hf_xet-1.3.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:541b4b00ed294ae6cfd9416de9506e58971013714d7316189c9638ed54e362d4", size = 4390499, upload-time = "2026-02-25T00:57:55.258Z" },
+ { url = "https://files.pythonhosted.org/packages/79/1b/1e72c8ea1f31ef94640d1f265630d35b97b2ef31fe12696bbcc32dbcdc95/hf_xet-1.3.1-cp314-cp314t-win_amd64.whl", hash = "sha256:f85480b4fe3e8e4cdbc59ef1d235152b732fd57ca439cc983c291892945ae818", size = 3634352, upload-time = "2026-02-25T00:58:04.749Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/61/b59e87a7a10b95c4578a6ce555339b2f002035569dfd366662b9f59975a8/hf_xet-1.3.1-cp314-cp314t-win_arm64.whl", hash = "sha256:83a8830160392ef4bea78d443ea2cf1febe65783b3843a8f12c64b368981e7e2", size = 3494371, upload-time = "2026-02-25T00:58:03.422Z" },
+ { url = "https://files.pythonhosted.org/packages/75/f8/c2da4352c0335df6ae41750cf5bab09fdbfc30d3b4deeed9d621811aa835/hf_xet-1.3.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:581d1809a016f7881069d86a072168a8199a46c839cf394ff53970a47e4f1ca1", size = 3761755, upload-time = "2026-02-25T00:57:43.621Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/e5/a2f3eaae09da57deceb16a96ebe9ae1f6f7b9b94145a9cd3c3f994e7782a/hf_xet-1.3.1-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:329c80c86f2dda776bafd2e4813a46a3ee648dce3ac0c84625902c70d7a6ddba", size = 3523677, upload-time = "2026-02-25T00:57:42.3Z" },
+ { url = "https://files.pythonhosted.org/packages/61/cd/acbbf9e51f17d8cef2630e61741228e12d4050716619353efc1ac119f902/hf_xet-1.3.1-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2973c3ff594c3a8da890836308cae1444c8af113c6f10fe6824575ddbc37eca7", size = 4178557, upload-time = "2026-02-25T00:57:35.399Z" },
+ { url = "https://files.pythonhosted.org/packages/df/4f/014c14c4ae3461d9919008d0bed2f6f35ba1741e28b31e095746e8dac66f/hf_xet-1.3.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ed4bfd2e6d10cb86c9b0f3483df1d7dd2d0220f75f27166925253bacbc1c2dbe", size = 3958975, upload-time = "2026-02-25T00:57:34.004Z" },
+ { url = "https://files.pythonhosted.org/packages/86/50/043f5c5a26f3831c3fa2509c17fcd468fd02f1f24d363adc7745fbe661cb/hf_xet-1.3.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:713913387cc76e300116030705d843a9f15aee86158337eeffb9eb8d26f47fcd", size = 4158298, upload-time = "2026-02-25T00:57:51.14Z" },
+ { url = "https://files.pythonhosted.org/packages/08/9c/b667098a636a88358dbeb2caf90e3cb9e4b961f61f6c55bb312793424def/hf_xet-1.3.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e5063789c9d21f51e9ed4edbee8539655d3486e9cad37e96b7af967da20e8b16", size = 4395743, upload-time = "2026-02-25T00:57:52.783Z" },
+ { url = "https://files.pythonhosted.org/packages/70/37/4db0e4e1534270800cfffd5a7e0b338f2137f8ceb5768000147650d34ea9/hf_xet-1.3.1-cp37-abi3-win_amd64.whl", hash = "sha256:607d5bbc2730274516714e2e442a26e40e3330673ac0d0173004461409147dee", size = 3638145, upload-time = "2026-02-25T00:58:02.167Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/46/1ba8d36f8290a4b98f78898bdce2b0e8fe6d9a59df34a1399eb61a8d877f/hf_xet-1.3.1-cp37-abi3-win_arm64.whl", hash = "sha256:851b1be6597a87036fe7258ce7578d5df3c08176283b989c3b165f94125c5097", size = 3500490, upload-time = "2026-02-25T00:58:00.667Z" },
]
[[package]]
@@ -1330,16 +1330,16 @@ openai = [
[[package]]
name = "langchain-anthropic"
-version = "1.3.3"
+version = "1.3.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anthropic" },
{ name = "langchain-core" },
{ name = "pydantic" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/58/48/cf217b3836099220737ff1f8fd07a554993080dfc9c0b4dd4af16ccb0604/langchain_anthropic-1.3.3.tar.gz", hash = "sha256:37198413c9bde5a9e9829f13c7b9ed4870d7085e7fba9fd803ef4d98ef8ea220", size = 686916, upload-time = "2026-02-10T21:02:28.924Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/30/4e/7c1ffac126f5e62b0b9066f331f91ae69361e73476fd3ca1b19f8d8a3cc3/langchain_anthropic-1.3.4.tar.gz", hash = "sha256:000ed4c2d6fb8842b4ffeed22a74a3e84f9e9bcb63638e4abbb4a1d8ffa07211", size = 671858, upload-time = "2026-02-24T13:54:01.738Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/8c/f1/cf56d47964b6fe080cdc54c3e32bc05e560927d549b2634b39d14aaf6e05/langchain_anthropic-1.3.3-py3-none-any.whl", hash = "sha256:8008ce5fb680268681673e09f93a9ac08eba9e304477101e5e138f06b5cd8710", size = 46831, upload-time = "2026-02-10T21:02:27.386Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/cf/b7c7b7270efbb3db2edbf14b09ba9110a41628f3a85a11cae9527a35641c/langchain_anthropic-1.3.4-py3-none-any.whl", hash = "sha256:cd112dcc8049aef09f58b3c4338b2c9db5ee98105e08664954a4e40d8bf120b9", size = 47454, upload-time = "2026-02-24T13:54:00.53Z" },
]
[[package]]
@@ -1517,15 +1517,15 @@ wheels = [
[[package]]
name = "langgraph-sdk"
-version = "0.3.8"
+version = "0.3.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
{ name = "orjson" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/48/22/f451b7f42e7c553f649c51698b5ff82ed1932993bcb9b7a7c53d888849e1/langgraph_sdk-0.3.8.tar.gz", hash = "sha256:e73e56e403254ebada5cab70165eb0b69155979e2360bca84da2cb63f364dfb9", size = 183804, upload-time = "2026-02-19T19:12:37.971Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/3a/bd/ca8ae5c6a34be6d4f7aa86016e010ff96b3a939456041565797952e3014d/langgraph_sdk-0.3.9.tar.gz", hash = "sha256:8be8958529b3f6d493ec248fdb46e539362efda75784654a42a7091d22504e0e", size = 184287, upload-time = "2026-02-24T18:39:03.276Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/c5/77/00887fb1fb2c0d61eed0dd76d1ed919558b679f71904d63de6925ca350f9/langgraph_sdk-0.3.8-py3-none-any.whl", hash = "sha256:90436594e95c6fc1d1dafb59ac1c5eff2f8e1853eecc6082262b8e6de04233c1", size = 90038, upload-time = "2026-02-19T19:12:36.65Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/4c/7a7510260fbda788efd13bf4650d3e7d80988118441ac811ec78e0aa03ac/langgraph_sdk-0.3.9-py3-none-any.whl", hash = "sha256:94654294250c920789b6ed0d8a70c0117fed5736b61efc24ff647157359453c5", size = 90511, upload-time = "2026-02-24T18:39:02.012Z" },
]
[[package]]
@@ -1952,7 +1952,7 @@ wheels = [
[[package]]
name = "openai"
-version = "2.22.0"
+version = "2.24.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -1964,9 +1964,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/73/ed/0a004a42fea6b6f3dd4ab33235183e994a4c7ade214fba10d9494577ec04/openai-2.22.0.tar.gz", hash = "sha256:fc2ea71c79951ac3faf178ff72c766bb4b09c3e9aab277184c5260ab3e94294f", size = 657093, upload-time = "2026-02-23T20:14:31.017Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/55/13/17e87641b89b74552ed408a92b231283786523edddc95f3545809fab673c/openai-2.24.0.tar.gz", hash = "sha256:1e5769f540dbd01cb33bc4716a23e67b9d695161a734aff9c5f925e2bf99a673", size = 658717, upload-time = "2026-02-24T20:02:07.958Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/dc/9a/ac24d606ea7e729475100689a1fe8866fe6cbcd0fd9b93dc4b8324be353d/openai-2.22.0-py3-none-any.whl", hash = "sha256:df02cfb731fe312215d046bf1330030e0f4b70a7b880b96992b1517b0b6aced8", size = 1118913, upload-time = "2026-02-23T20:14:29.546Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/30/844dc675ee6902579b8eef01ed23917cc9319a1c9c0c14ec6e39340c96d0/openai-2.24.0-py3-none-any.whl", hash = "sha256:fed30480d7d6c884303287bde864980a4b137b60553ffbcf9ab4a233b7a73d94", size = 1120122, upload-time = "2026-02-24T20:02:05.669Z" },
]
[[package]]
@@ -2098,7 +2098,7 @@ name = "pexpect"
version = "4.9.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "ptyprocess" },
+ { name = "ptyprocess", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
wheels = [
@@ -2149,40 +2149,40 @@ wheels = [
[[package]]
name = "primp"
-version = "1.0.0"
+version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f4/60/ea0822d275847ed266d694662cef1863c37d3c1752f4286c4baae5297d3f/primp-1.0.0.tar.gz", hash = "sha256:09fc1ff6009220247d723792794e514782e1ab7e9ba5e2547272a07afed5ca86", size = 973426, upload-time = "2026-02-13T15:32:49.846Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/d2/ae/443244fb49e2f421dafadd689361777d48b07f0ea7d18b34e72a38a3ef44/primp-1.0.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6af2343ac655d409ec70c3eeb7c2283de509b663aeb6b3e34e39e1331c82daf6", size = 3893122, upload-time = "2026-02-13T15:33:07.596Z" },
- { url = "https://files.pythonhosted.org/packages/92/02/aa765143ce632bcf5e3cfa8bd41e2032f8d12695754564b5059821b2b41a/primp-1.0.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:25f21400ff236b0e1db5d4db7db66965f63b64898103384e916ecef575ab3395", size = 3655128, upload-time = "2026-02-13T15:32:41.147Z" },
- { url = "https://files.pythonhosted.org/packages/c3/d7/5e9e320441a7c0ffef24ce55fd2922aacd003e6713633d1d0732fe964ff6/primp-1.0.0-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd09660db079903031be91e04af2dcf42457bd739e6f328c7b2364e38061876", size = 3792951, upload-time = "2026-02-13T15:32:56.186Z" },
- { url = "https://files.pythonhosted.org/packages/36/f2/1130fad846f08bbf104a64232ef4f58ae5b5c4b2c64d6a73b1f4245607e0/primp-1.0.0-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6e756480c9dd585b20927c2a0c1d0c42cbcb5866ed1e741a8f93163e6f905e6c", size = 3440111, upload-time = "2026-02-13T15:32:57.523Z" },
- { url = "https://files.pythonhosted.org/packages/c4/e5/a3e0ba7f4a0409ba615098bda35a1276ebf992d2bd7a8f635c8349e77276/primp-1.0.0-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b75a10ead2872dee9be9c60c07e8fce5328c88ed251e3fdbd29a7d2d73ab512a", size = 3651920, upload-time = "2026-02-13T15:32:48.511Z" },
- { url = "https://files.pythonhosted.org/packages/80/02/10cfc095e958e498171977068ebcabddaa8dabd7835725482b8c0eefec19/primp-1.0.0-cp310-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ea1a0b1d4c2a65efd5f22bc42bc0133ebf359f70dd155847cbebf8015fb05a1", size = 3922305, upload-time = "2026-02-13T15:33:23.231Z" },
- { url = "https://files.pythonhosted.org/packages/89/00/947c74646825d38d7f5c5fc5a7f2474f30767ea9817f9a7742f95ac99e45/primp-1.0.0-cp310-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1abd58a2bf0a2f062edc51a3684f8b9d0170348a96afdd3915f02f498c661228", size = 3811925, upload-time = "2026-02-13T15:33:04.976Z" },
- { url = "https://files.pythonhosted.org/packages/65/34/0f788310dd2903be8b49d9396ad4fa7deb1f5ab6419a2a7ea9014380f52f/primp-1.0.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52506249b8132eb386e90349f9fbbcf6b39e36523d61f92a0e8c557e32f71ef2", size = 4009948, upload-time = "2026-02-13T15:32:43.88Z" },
- { url = "https://files.pythonhosted.org/packages/44/35/9a3147377764380fa9940d4cfc328b5a31a1a1c72d2cbbdaa188ab8ea296/primp-1.0.0-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b7f24c3a67aab0517ba4f6e743dfced331198062ff8e31df692381e60a17b775", size = 3970643, upload-time = "2026-02-13T15:33:06.248Z" },
- { url = "https://files.pythonhosted.org/packages/df/a9/396511a300bc44de4213198f10a21337fcb3f43e4553ece9a17b1a48e1df/primp-1.0.0-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:0cf76f39d5820a2607a2dd25c074ceb8efa741bc311552218156c53b1002ec25", size = 3668236, upload-time = "2026-02-13T15:33:00.299Z" },
- { url = "https://files.pythonhosted.org/packages/2b/44/f1f4a6223dbfa8c72d37286b4bf9a2bb06241c9bac7ce95c5acc03069fec/primp-1.0.0-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:3414a4bbe37e909a45c0fea04104bd23165d81b94f3d68bfe9a11ba18c462b39", size = 3776956, upload-time = "2026-02-13T15:33:08.969Z" },
- { url = "https://files.pythonhosted.org/packages/d7/9e/b6cb2c19abaeea0ade9256c296340b79dee0084bffcbaadceeebaf75c691/primp-1.0.0-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3487e5269dc6d840035d59a8e5afbba99b5736da848664b71356681a837c3a8b", size = 4262036, upload-time = "2026-02-13T15:33:21.939Z" },
- { url = "https://files.pythonhosted.org/packages/6b/80/bf5a730384f338be7a52e5976c0f7ea8e00f8f078a80bd51fa15a61cd35a/primp-1.0.0-cp310-abi3-win32.whl", hash = "sha256:0c44e8dccfcd2dd3fb3467d44836445039a013704ea869340bf67a444cbf3f36", size = 3185054, upload-time = "2026-02-13T15:33:15.486Z" },
- { url = "https://files.pythonhosted.org/packages/8f/0b/92d644fbbf97f8fca2959c388f0ed50abd9ea1d17c3ad9b5b0e364fa8d37/primp-1.0.0-cp310-abi3-win_amd64.whl", hash = "sha256:705fb755f5461b551925de7546f3fea5b657fc44fee136498bed492bf5051864", size = 3512508, upload-time = "2026-02-13T15:32:52.646Z" },
- { url = "https://files.pythonhosted.org/packages/c3/6e/efd595743e3b8b0477f44194f6a22fe0d7118b76e9b01167b0921a160d91/primp-1.0.0-cp310-abi3-win_arm64.whl", hash = "sha256:4e080ad054df4c325c434acf613d9cae54278e8141fa116452ec18bf576672a8", size = 3560136, upload-time = "2026-02-13T15:32:50.901Z" },
- { url = "https://files.pythonhosted.org/packages/29/62/e3ee3836154f849086e5a29db7ec95bf805c0143266d59868c2eff0528df/primp-1.0.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:6853b719f511ed09dc3673e54cd489b4ed35b0f769428dc79b3c54c446aafd22", size = 3890886, upload-time = "2026-02-13T15:33:12.447Z" },
- { url = "https://files.pythonhosted.org/packages/23/12/4ea190b844557e919a84d3851d49407303d145dfe93cab67d2ed7268c6fa/primp-1.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3d072d1e3c84068b5727426500210e33241ef97844fe781d9817094fdfc6b128", size = 3653937, upload-time = "2026-02-13T15:33:13.803Z" },
- { url = "https://files.pythonhosted.org/packages/be/51/bb861bcc45b6761b4dcc3b41a1ce6eecea9ccf4e9786d545f28313540259/primp-1.0.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ef28f8d6b89c5daf651dc7c7560b4914647bfe73b9a3847e2ae5ed0ff7d8bcf", size = 3792475, upload-time = "2026-02-13T15:33:27.419Z" },
- { url = "https://files.pythonhosted.org/packages/88/87/f87d652aa13a1b1bba9f576c04732319ecf75075e3b26bf91ad47eab00d3/primp-1.0.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04a0d9d88cdce7ab685b4657cfe07d603a85118ec48a09015fa66eadad156c44", size = 3443247, upload-time = "2026-02-13T15:32:46.793Z" },
- { url = "https://files.pythonhosted.org/packages/31/f5/623885d04702523201639af3d011efb2eaed0dff9200a78db609b570c4c6/primp-1.0.0-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0ad2255403b155d93cf5cb7f6e807e26dc10c49071e0bac888c2c0e14801b82", size = 3651674, upload-time = "2026-02-13T15:33:24.577Z" },
- { url = "https://files.pythonhosted.org/packages/0b/17/b45e7e79cf3c5de7aaf23bf38167243c4df017997d954dd903a479f474d8/primp-1.0.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3e7ccbe4746163f14b984523ac49ce3eed923fbe672c4c08480fa13217c2357", size = 3918929, upload-time = "2026-02-13T15:32:42.615Z" },
- { url = "https://files.pythonhosted.org/packages/fb/00/f5f58ef9856d99cf52e59f9034b27dc2659430be3257ecb890f1b4fccb17/primp-1.0.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63a1d34732c2e6282e5e30f5d425eaa28ca417d74accda92908fdb8c944ff319", size = 3814485, upload-time = "2026-02-13T15:33:16.917Z" },
- { url = "https://files.pythonhosted.org/packages/b0/93/5e82f1fb2fd026d21c645b80da90f29f3afb6f1990120dcff8662c4f4b6e/primp-1.0.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d90e61f173e661ed8e21d8cd6534c586ad1d25565a0bac539a6a2d5e990439e0", size = 4014672, upload-time = "2026-02-13T15:33:26.083Z" },
- { url = "https://files.pythonhosted.org/packages/03/d7/6f1739043c84e772b45c51d2a1ab8c32727f0db6d41beb1b092a7baa2c02/primp-1.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcb28e07bc250b8c4762312e952bd84b6b983554fba6cd067f83018bd39a0488", size = 3971122, upload-time = "2026-02-13T15:32:53.944Z" },
- { url = "https://files.pythonhosted.org/packages/74/9a/47d7101034a36e73bb6976c566c56b54ec46efff1d64ebc07dccf05e51d8/primp-1.0.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:8e5b8fa46130d3db33192784d4935fc3f9574f030d0e78d281e90c37cf2507ee", size = 3669273, upload-time = "2026-02-13T15:33:10.267Z" },
- { url = "https://files.pythonhosted.org/packages/48/15/86878a9b46fc4bafba454e63b293e779c1ba6f9bf5ffc221f2f3dc70d60e/primp-1.0.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:984ab730449fd2e5f794fd6fad37fed3596432a24435ce2d0363b454503b7846", size = 3776747, upload-time = "2026-02-13T15:33:03.156Z" },
- { url = "https://files.pythonhosted.org/packages/9c/52/7afaf2a232987711863fa1e994cb6908c9dcd550d436578bb6cb63e53a83/primp-1.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2abd6d47ca60028bcc33dc47dd33f355237be80d7889518e44cc4d730c9e45e0", size = 4266058, upload-time = "2026-02-13T15:32:59.084Z" },
- { url = "https://files.pythonhosted.org/packages/67/c2/fd1365ab28c4e15bebd291215c152c9787185a4fade0df780bb5e53d5866/primp-1.0.0-cp314-cp314t-win32.whl", hash = "sha256:39c27d84fd597a43bb291b6928fbaa46d4a7aff0c31ae1a361dccbbd109118a1", size = 3184230, upload-time = "2026-02-13T15:32:45.437Z" },
- { url = "https://files.pythonhosted.org/packages/30/2f/fcb4935ef1b2ba19bafbf050775f402ef30d19c9ba0d83a6328b453436a4/primp-1.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bc8bac0288fb7ed541c8db4be46c5f2779e4c1b023bf01e46fe4c1405150dbeb", size = 3514652, upload-time = "2026-02-13T15:33:01.694Z" },
- { url = "https://files.pythonhosted.org/packages/49/88/2dbeee5a6c914c36b5dfca6e77913f4a190ac0137db0ea386b9632c16ef0/primp-1.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:117d3eb9c556fe88c8ed0533be80c2495922671e977e3e0e78a6b841014380eb", size = 3553319, upload-time = "2026-02-13T15:33:19.67Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/de/25/1113a87a693121f4eb18d2df3a99d8ad43984f4068e31a5765c03e4b8b96/primp-1.1.1.tar.gz", hash = "sha256:58775e74f86cc58f9abe4b1dacea399fa6367c1959e591ad9345f151ad38d259", size = 311388, upload-time = "2026-02-24T16:12:53.452Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bf/0f/027fc0394f70721c6dc5054fb3efff6479753da0b272e15b16cefba958b8/primp-1.1.1-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:691215c5a514a7395c1ee775cd03a94a41497941e17291e1a71f5356142c61e6", size = 3997489, upload-time = "2026-02-24T16:12:49.154Z" },
+ { url = "https://files.pythonhosted.org/packages/af/ea/0f23fbfef2a550c420eaa73fd3e21176acb0ddf0d50028d8bc8d937441be/primp-1.1.1-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:17ace56cd24a894236121bf37d3616ec15d5299a6fa2d2a30fbbf9c22b946a03", size = 3734591, upload-time = "2026-02-24T16:12:45.629Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/63/c5669652446a981dd5faad8a8255e5567db5818b951dbe74e81968f672cb/primp-1.1.1-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfec08ae15f6d86b2bcaaee3358d5cc349a843c8be164502ea73658a817c5cf2", size = 3875508, upload-time = "2026-02-24T16:12:59.403Z" },
+ { url = "https://files.pythonhosted.org/packages/14/79/19e4d19a445b39c930a317e4ea4d1eff07ef0661b4e7397ad425f7ff0bd8/primp-1.1.1-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3cf7e93e8ff4842eee9c6d4ac47d638a5c981752b19f458877a3536c1da6671", size = 3510461, upload-time = "2026-02-24T16:12:37.908Z" },
+ { url = "https://files.pythonhosted.org/packages/50/39/091282d624067958b42a087976c0da80eecc5ade03acfc732389be3af723/primp-1.1.1-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db6f3f18855bf25dca14f6d121d214e5c922275f49cdadd248eff28abb779edb", size = 3727644, upload-time = "2026-02-24T16:12:16.671Z" },
+ { url = "https://files.pythonhosted.org/packages/33/ae/ca4e4a5d0cbd35684a228fd1f7c1425db0860a7bd74ce8f40835f6184834/primp-1.1.1-cp310-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d8363faadb1d07fa8ae73de6ed2ca4666b36c77ea3990714164b8ee7ab1aa1d", size = 4004689, upload-time = "2026-02-24T16:12:57.957Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/ed/b3cf17bcac4914aa63cd83d763c9e347aab6e0b9285645b0015b036f914d/primp-1.1.1-cp310-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:302241ee447c185417e93e3a3e5a2801fdd710b1a5cc63c01a26ee7dc634e9b1", size = 3918084, upload-time = "2026-02-24T16:12:30.283Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/9f/f563eaeb654749fa519c627b1f1ab93cf875537c56123fba507f74b647fc/primp-1.1.1-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a37ad318f1b8295d414e1c32ca407efcb92e664c5ff41f06901bd3ee03bab1fa", size = 4108648, upload-time = "2026-02-24T16:12:15.269Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/b9/2df5376900c293238cf641591952979f689ea3f009195df4cce15786afb9/primp-1.1.1-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e46829d9d86caf18b2b40829655d470e0ce2eebb061f2ee973451b2509f1c5a2", size = 4055747, upload-time = "2026-02-24T16:12:42.925Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/e9/eaaea488b4ae445059bd99559649402c77ddd9dfdda01528daa9ee11d8fe/primp-1.1.1-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:8ef9cb971915d2db3fbb1a512777261e5267c95d4717b18aff453f5e3dbb9bda", size = 3742046, upload-time = "2026-02-24T16:12:19.945Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/92/0607dd9d01840e0c007519d69cdcbb6f1358d6d7f8e739fc3359773b50d2/primp-1.1.1-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:1a350656142772b5d6afc0dfaf9172c69449fbfafb9b6590af7ba116d32554d7", size = 3857103, upload-time = "2026-02-24T16:12:39.338Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/b6/5d574a7a84afd38df03c5535a9bb1052090bd0289760dcca24188510dd09/primp-1.1.1-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ec71a66750befd219f29cb6ff01bc1c26671040fc76b4115bf045c85f84da041", size = 4357972, upload-time = "2026-02-24T16:12:12.159Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/f3/34ba2deba36de0a6041a61c16f2097e0bd2e74114f8d85096b3911288b4c/primp-1.1.1-cp310-abi3-win32.whl", hash = "sha256:901dc1e40b99ba5925463ab120af14afb8a66f4ac7eb2cdf87aaf21047f6db39", size = 3259840, upload-time = "2026-02-24T16:12:31.762Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/c6/fa3c17e5b6e4cff5bbdfd6bed1d0e8f81e17708dd8106906a031a2432b61/primp-1.1.1-cp310-abi3-win_amd64.whl", hash = "sha256:6bedd91451ec9ac46203ccb5c2c9925e9206e33abec7c791a2b39e3f86530bf0", size = 3596643, upload-time = "2026-02-24T16:12:21.554Z" },
+ { url = "https://files.pythonhosted.org/packages/94/3d/a5b391107ba1c72dc8eb4f603c5764067449e1445438d71e093a72d5eda1/primp-1.1.1-cp310-abi3-win_arm64.whl", hash = "sha256:fd22a10164536374262e32fccbf81736b20798ac7582f159d5ffdef01a755579", size = 3606836, upload-time = "2026-02-24T16:12:28.579Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/77/b7df4f1776ae2e7cb5cf123b977167709c120712c7a4f968dc93b28d05ac/primp-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:e6b0fdeb12cc60b0fa756191118cec8ede8d26f869b83fa501aed722984a964b", size = 3981048, upload-time = "2026-02-24T16:12:24.396Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/c8/f198cd6ad9f232a171739a69c534c684237362af8e55f0cc2fc452377aa8/primp-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fc473e87adc88f6ce94b7f3edeb2ca6c973f4ceb2d4199d0e707544f71c639c4", size = 3729293, upload-time = "2026-02-24T16:12:18.07Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/ce/bd8e564f8233ab0213a296dda2e04b484e0c4b9975702c7ba712e96ead8c/primp-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e85f2aea74b8683611c76958de8827322bd800e1b51aec88130da68d00a20462", size = 3873474, upload-time = "2026-02-24T16:12:40.749Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/ab/d3ee13de657cb068e81008eedc2d61103094497d9edc054997b85d85163e/primp-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ca535dfbc5a8290975f4bd8ce38922b26cf4fefc737aa2116bcb1a5795c14309", size = 3509513, upload-time = "2026-02-24T16:12:44.251Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/5d/3ed38dd94ae503977329976dbe00831e66d22f0f298c026f8d7493be2b39/primp-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d94073e9ecbf97f6d1538d4678df1bb662fd418ad5fd09da4040fe46623e2ec5", size = 3728743, upload-time = "2026-02-24T16:12:33.277Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/15/19af65a35b2189d6f2267148ea5b7cbb266aa36891acd641388b7a0f6022/primp-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e639441bd36e582feec7033e4b8661e0979a61bff65af5f476d33e02ebb3c4d", size = 3999650, upload-time = "2026-02-24T16:12:36.157Z" },
+ { url = "https://files.pythonhosted.org/packages/22/cb/aa635a9903a1ee3b0ffe5dd9218a2e2d8880828a1eaba9d0035f967d118a/primp-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd770ca4f73a700da0911d1300a952e4d9a4a3321e205aa5a8644ae81cbd4d7d", size = 3896990, upload-time = "2026-02-24T16:12:13.66Z" },
+ { url = "https://files.pythonhosted.org/packages/25/98/916916ec3bd5dab4125bf17b28d1959883a831dc4f9757f915e509c43ec2/primp-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dccb605997c918b7abbdd163303d789d63eb03d7cd0440184f34b06a8522fc1", size = 4096157, upload-time = "2026-02-24T16:12:27.163Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/57/219c44bf21896a3f2132821ea00bbc9af36b194449ee5083791f690daf7d/primp-1.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1ada94c7f9f047b1c5ba339f62effd44f4c4943d4d8bb96447e9c84ab3bd874d", size = 4052968, upload-time = "2026-02-24T16:12:34.574Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/ce/dfdd734c7372faef4a26ecb0267a724e19f78b76a9a92440b8ca824e8f5a/primp-1.1.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:21ac92542f378a21fba8093dbeb7e093851e00da2bdfd9bc6aa63f81cff035d0", size = 3744522, upload-time = "2026-02-24T16:12:25.726Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/9c/3eb9e484c17784eac6549c505a68d82b6e5959a0af6efbcf28a773450a81/primp-1.1.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:adaa5d7e8d2ca089cbf41a837a301da605c21ff0ea5fecac8a8b1eead4bc563f", size = 3855298, upload-time = "2026-02-24T16:12:54.518Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/ca/80924591ec24f9341982e4d74251f6bfeda44cbb90f6f792403d0737a390/primp-1.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b041ab0019e0fb21c24de542e80056775508e5d1d0f0333fb661185bdb359138", size = 4348887, upload-time = "2026-02-24T16:12:47.376Z" },
+ { url = "https://files.pythonhosted.org/packages/95/4b/0edc62583af9a03fd1eb34ffd865245c921919f374b0e72b1bb73dc9adf6/primp-1.1.1-cp314-cp314t-win32.whl", hash = "sha256:b7270b9755a931e7667854ad5d9b2aeb88068f0add4fb741529e8c25d953f21b", size = 3252145, upload-time = "2026-02-24T16:12:52.335Z" },
+ { url = "https://files.pythonhosted.org/packages/01/b7/9784b93d252e4c2a50f7a46908d91110b7ce9d04e1adb47227fc212576ff/primp-1.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:19a48f4e91256ec661e022976a75e6a0621522244ac928e8c632d829adb929ce", size = 3591097, upload-time = "2026-02-24T16:12:22.898Z" },
+ { url = "https://files.pythonhosted.org/packages/db/d5/3b34601cb2da1cec7aec88f447af9de1e8e3bb3101f26351aa8570b5b7af/primp-1.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:c97b951afb203b9528f36524e96b1e37ce42f3a7eb0cd77cd053ad5bdfc93d81", size = 3603917, upload-time = "2026-02-24T16:12:55.859Z" },
]
[[package]]
@@ -2967,26 +2967,28 @@ wheels = [
[[package]]
name = "sqlalchemy"
-version = "2.0.46"
+version = "2.0.47"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/06/aa/9ce0f3e7a9829ead5c8ce549392f33a12c4555a6c0609bb27d882e9c7ddf/sqlalchemy-2.0.46.tar.gz", hash = "sha256:cf36851ee7219c170bb0793dbc3da3e80c582e04a5437bc601bfe8c85c9216d7", size = 9865393, upload-time = "2026-01-21T18:03:45.119Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/e9/f8/5ecdfc73383ec496de038ed1614de9e740a82db9ad67e6e4514ebc0708a3/sqlalchemy-2.0.46-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:56bdd261bfd0895452006d5316cbf35739c53b9bb71a170a331fa0ea560b2ada", size = 2152079, upload-time = "2026-01-21T19:05:58.477Z" },
- { url = "https://files.pythonhosted.org/packages/e5/bf/eba3036be7663ce4d9c050bc3d63794dc29fbe01691f2bf5ccb64e048d20/sqlalchemy-2.0.46-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33e462154edb9493f6c3ad2125931e273bbd0be8ae53f3ecd1c161ea9a1dd366", size = 3272216, upload-time = "2026-01-21T18:46:52.634Z" },
- { url = "https://files.pythonhosted.org/packages/05/45/1256fb597bb83b58a01ddb600c59fe6fdf0e5afe333f0456ed75c0f8d7bd/sqlalchemy-2.0.46-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bcdce05f056622a632f1d44bb47dbdb677f58cad393612280406ce37530eb6d", size = 3277208, upload-time = "2026-01-21T18:40:16.38Z" },
- { url = "https://files.pythonhosted.org/packages/d9/a0/2053b39e4e63b5d7ceb3372cface0859a067c1ddbd575ea7e9985716f771/sqlalchemy-2.0.46-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e84b09a9b0f19accedcbeff5c2caf36e0dd537341a33aad8d680336152dc34e", size = 3221994, upload-time = "2026-01-21T18:46:54.622Z" },
- { url = "https://files.pythonhosted.org/packages/1e/87/97713497d9502553c68f105a1cb62786ba1ee91dea3852ae4067ed956a50/sqlalchemy-2.0.46-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4f52f7291a92381e9b4de9050b0a65ce5d6a763333406861e33906b8aa4906bf", size = 3243990, upload-time = "2026-01-21T18:40:18.253Z" },
- { url = "https://files.pythonhosted.org/packages/a8/87/5d1b23548f420ff823c236f8bea36b1a997250fd2f892e44a3838ca424f4/sqlalchemy-2.0.46-cp314-cp314-win32.whl", hash = "sha256:70ed2830b169a9960193f4d4322d22be5c0925357d82cbf485b3369893350908", size = 2114215, upload-time = "2026-01-21T18:42:55.232Z" },
- { url = "https://files.pythonhosted.org/packages/3a/20/555f39cbcf0c10cf452988b6a93c2a12495035f68b3dbd1a408531049d31/sqlalchemy-2.0.46-cp314-cp314-win_amd64.whl", hash = "sha256:3c32e993bc57be6d177f7d5d31edb93f30726d798ad86ff9066d75d9bf2e0b6b", size = 2139867, upload-time = "2026-01-21T18:42:56.474Z" },
- { url = "https://files.pythonhosted.org/packages/3e/f0/f96c8057c982d9d8a7a68f45d69c674bc6f78cad401099692fe16521640a/sqlalchemy-2.0.46-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4dafb537740eef640c4d6a7c254611dca2df87eaf6d14d6a5fca9d1f4c3fc0fa", size = 3561202, upload-time = "2026-01-21T18:33:10.337Z" },
- { url = "https://files.pythonhosted.org/packages/d7/53/3b37dda0a5b137f21ef608d8dfc77b08477bab0fe2ac9d3e0a66eaeab6fc/sqlalchemy-2.0.46-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42a1643dc5427b69aca967dae540a90b0fbf57eaf248f13a90ea5930e0966863", size = 3526296, upload-time = "2026-01-21T18:45:12.657Z" },
- { url = "https://files.pythonhosted.org/packages/33/75/f28622ba6dde79cd545055ea7bd4062dc934e0621f7b3be2891f8563f8de/sqlalchemy-2.0.46-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ff33c6e6ad006bbc0f34f5faf941cfc62c45841c64c0a058ac38c799f15b5ede", size = 3470008, upload-time = "2026-01-21T18:33:11.725Z" },
- { url = "https://files.pythonhosted.org/packages/a9/42/4afecbbc38d5e99b18acef446453c76eec6fbd03db0a457a12a056836e22/sqlalchemy-2.0.46-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:82ec52100ec1e6ec671563bbd02d7c7c8d0b9e71a0723c72f22ecf52d1755330", size = 3476137, upload-time = "2026-01-21T18:45:15.001Z" },
- { url = "https://files.pythonhosted.org/packages/fc/a1/9c4efa03300926601c19c18582531b45aededfb961ab3c3585f1e24f120b/sqlalchemy-2.0.46-py3-none-any.whl", hash = "sha256:f9c11766e7e7c0a2767dda5acb006a118640c9fc0a4104214b96269bfb78399e", size = 1937882, upload-time = "2026-01-21T18:22:10.456Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/cd/4b/1e00561093fe2cd8eef09d406da003c8a118ff02d6548498c1ae677d68d9/sqlalchemy-2.0.47.tar.gz", hash = "sha256:e3e7feb57b267fe897e492b9721ae46d5c7de6f9e8dee58aacf105dc4e154f3d", size = 9886323, upload-time = "2026-02-24T16:34:27.947Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c1/30/98243209aae58ed80e090ea988d5182244ca7ab3ff59e6d850c3dfc7651e/sqlalchemy-2.0.47-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b03010a5a5dfe71676bc83f2473ebe082478e32d77e6f082c8fe15a31c3b42a6", size = 2154355, upload-time = "2026-02-24T17:05:48.959Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/62/12ca6ea92055fe486d6558a2a4efe93e194ff597463849c01f88e5adb99d/sqlalchemy-2.0.47-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8e3371aa9024520883a415a09cc20c33cfd3eeccf9e0f4f4c367f940b9cbd44", size = 3274486, upload-time = "2026-02-24T17:18:13.659Z" },
+ { url = "https://files.pythonhosted.org/packages/97/88/7dfbdeaa8d42b1584e65d6cc713e9d33b6fa563e0d546d5cb87e545bb0e5/sqlalchemy-2.0.47-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9449f747e50d518c6e1b40cc379e48bfc796453c47b15e627ea901c201e48a6", size = 3279481, upload-time = "2026-02-24T17:27:26.491Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/b7/75e1c1970616a9dd64a8a6fd788248da2ddaf81c95f4875f2a1e8aee4128/sqlalchemy-2.0.47-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:21410f60d5cac1d6bfe360e05bd91b179be4fa0aa6eea6be46054971d277608f", size = 3224269, upload-time = "2026-02-24T17:18:15.078Z" },
+ { url = "https://files.pythonhosted.org/packages/31/ac/eec1a13b891df9a8bc203334caf6e6aac60b02f61b018ef3b4124b8c4120/sqlalchemy-2.0.47-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:819841dd5bb4324c284c09e2874cf96fe6338bfb57a64548d9b81a4e39c9871f", size = 3246262, upload-time = "2026-02-24T17:27:27.986Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/b0/661b0245b06421058610da39f8ceb34abcc90b49f90f256380968d761dbe/sqlalchemy-2.0.47-cp314-cp314-win32.whl", hash = "sha256:e255ee44821a7ef45649c43064cf94e74f81f61b4df70547304b97a351e9b7db", size = 2116528, upload-time = "2026-02-24T17:22:59.363Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/ef/1035a90d899e61810791c052004958be622a2cf3eb3df71c3fe20778c5d0/sqlalchemy-2.0.47-cp314-cp314-win_amd64.whl", hash = "sha256:209467ff73ea1518fe1a5aaed9ba75bb9e33b2666e2553af9ccd13387bf192cb", size = 2142181, upload-time = "2026-02-24T17:23:01.001Z" },
+ { url = "https://files.pythonhosted.org/packages/76/bb/17a1dd09cbba91258218ceb582225f14b5364d2683f9f5a274f72f2d764f/sqlalchemy-2.0.47-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e78fd9186946afaa287f8a1fe147ead06e5d566b08c0afcb601226e9c7322a64", size = 3563477, upload-time = "2026-02-24T17:12:18.46Z" },
+ { url = "https://files.pythonhosted.org/packages/66/8f/1a03d24c40cc321ef2f2231f05420d140bb06a84f7047eaa7eaa21d230ba/sqlalchemy-2.0.47-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5740e2f31b5987ed9619d6912ae5b750c03637f2078850da3002934c9532f172", size = 3528568, upload-time = "2026-02-24T17:28:03.732Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/53/d56a213055d6b038a5384f0db5ece7343334aca230ff3f0fa1561106f22c/sqlalchemy-2.0.47-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fb9ac00d03de93acb210e8ec7243fefe3e012515bf5fd2f0898c8dff38bc77a4", size = 3472284, upload-time = "2026-02-24T17:12:20.319Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/19/c235d81b9cfdd6130bf63143b7bade0dc4afa46c4b634d5d6b2a96bea233/sqlalchemy-2.0.47-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c72a0b9eb2672d70d112cb149fbaf172d466bc691014c496aaac594f1988e706", size = 3478410, upload-time = "2026-02-24T17:28:05.892Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/db/cafdeca5ecdaa3bb0811ba5449501da677ce0d83be8d05c5822da72d2e86/sqlalchemy-2.0.47-cp314-cp314t-win32.whl", hash = "sha256:c200db1128d72a71dc3c31c24b42eb9fd85b2b3e5a3c9ba1e751c11ac31250ff", size = 2147164, upload-time = "2026-02-24T17:14:40.783Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/5e/ff41a010e9e0f76418b02ad352060a4341bb15f0af66cedc924ab376c7c6/sqlalchemy-2.0.47-cp314-cp314t-win_amd64.whl", hash = "sha256:669837759b84e575407355dcff912835892058aea9b80bd1cb76d6a151cf37f7", size = 2182154, upload-time = "2026-02-24T17:14:43.205Z" },
+ { url = "https://files.pythonhosted.org/packages/15/9f/7c378406b592fcf1fc157248607b495a40e3202ba4a6f1372a2ba6447717/sqlalchemy-2.0.47-py3-none-any.whl", hash = "sha256:e2647043599297a1ef10e720cf310846b7f31b6c841fee093d2b09d81215eb93", size = 1940159, upload-time = "2026-02-24T17:15:07.158Z" },
]
[[package]]