diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1a857548..25c9bafb 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,7 +7,7 @@ repos:
args: ["--branch", "main"]
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.14.11
+ rev: v0.15.0
hooks:
- id: ruff
name: Run the ruff linter
@@ -16,6 +16,6 @@ repos:
name: Run the ruff formatter
- repo: https://github.com/tox-dev/pyproject-fmt
- rev: v2.11.1
+ rev: v2.14.2
hooks:
- id: pyproject-fmt
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5837e4d5..08d364c2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -52,6 +52,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Fixed repeated generation of GitHub CLI installation tokens by caching the token in the agent session state for subsequent `gh` tool calls.
- Fixed skill tool to properly return a `Command` object for state updates instead of returning messages directly.
- Fixed `daiv-auto` label to work as a trigger label that both launches the agent and enables auto-approval mode, eliminating the need to add two separate labels.
+- Fixed agent post-run failures when git push returns authentication/permission errors by handling push permission failures gracefully in git middleware and adding regression tests.
### Removed
diff --git a/Makefile b/Makefile
index d1db998a..fc4dd652 100644
--- a/Makefile
+++ b/Makefile
@@ -11,7 +11,7 @@ help:
@echo " make lint-fix - Fix linting and formatting issues"
@echo " make lint-typing - Run type checking with ty"
@echo " make lock - Update uv lock"
- @echo " make evals - Run evals"
+ @echo " make integration-tests - Run integration tests"
test:
LANGCHAIN_TRACING_V2=false uv run pytest -s tests/unit_tests
@@ -42,8 +42,8 @@ makemessages:
compilemessages:
uv run django-admin compilemessages
-evals:
- LANGSMITH_TEST_SUITE="DAIV: PR Describer" uv run pytest --reuse-db evals --no-cov --log-level=INFO -k test_pr_describer -n 2
+integration-tests:
+ LANGSMITH_TEST_SUITE="DAIV: Diff to Metadata" uv run pytest --reuse-db tests/integration_tests --no-cov --log-level=INFO -k test_diff_to_metadata -n 2
swebench:
uv run evals/swebench.py --dataset-path "SWE-bench/SWE-bench_Lite" --dataset-split "dev" --output-path predictions.json --num-samples 1
diff --git a/daiv/automation/agent/conf.py b/daiv/automation/agent/conf.py
index 4181f653..13210c34 100644
--- a/daiv/automation/agent/conf.py
+++ b/daiv/automation/agent/conf.py
@@ -25,7 +25,7 @@ class DAIVAgentSettings(BaseSettings):
description="Thinking level to be used for tasks. Set as `None` to disable thinking.",
)
MAX_MODEL_NAME: ModelName | str = Field(
- default=ModelName.CLAUDE_OPUS_4_5,
+ default=ModelName.CLAUDE_OPUS_4_6,
description=(
"Model for tasks when daiv-max label is present, a multi-modal (image and text) model with "
"capabilities to call tools."
@@ -37,6 +37,10 @@ class DAIVAgentSettings(BaseSettings):
"Thinking level to be used for tasks when daiv-max label is present. Set as `None` to disable thinking."
),
)
+ EXPLORE_MODEL_NAME: ModelName | str = Field(
+ default=ModelName.CLAUDE_HAIKU_4_5,
+ description="Model for the explore subagent, a fast model with capabilities to call tools.",
+ )
settings = DAIVAgentSettings()
diff --git a/daiv/automation/agent/constants.py b/daiv/automation/agent/constants.py
index 42b8db39..8a285016 100644
--- a/daiv/automation/agent/constants.py
+++ b/daiv/automation/agent/constants.py
@@ -6,16 +6,15 @@
BUILTIN_SKILLS_PATH = PROJECT_DIR / "automation" / "agent" / "skills"
# Path where the skills are stored in repository.
-DAIV_SKILLS_PATH = ".daiv/skills"
CURSOR_SKILLS_PATH = ".cursor/skills"
-CLAUDE_CODER_SKILLS_PATH = ".claude/skills"
+CLAUDE_CODE_SKILLS_PATH = ".claude/skills"
AGENTS_SKILLS_PATH = ".agents/skills"
# Paths where the skills are stored in repository.
-SKILLS_SOURCES = [DAIV_SKILLS_PATH, CURSOR_SKILLS_PATH, CLAUDE_CODER_SKILLS_PATH, AGENTS_SKILLS_PATH]
+SKILLS_SOURCES = [CURSOR_SKILLS_PATH, CLAUDE_CODE_SKILLS_PATH, AGENTS_SKILLS_PATH]
# Path where the memory is stored in repository.
-DAIV_MEMORY_PATH = ".daiv/AGENTS.md"
+AGENTS_MEMORY_PATH = ".agents/AGENTS.md"
class ModelName(StrEnum):
@@ -24,18 +23,17 @@ class ModelName(StrEnum):
You can also use `anthropic`, `google` or `openai` model providers directly to use any model that is supported
by Anthropic, Google or OpenAI.
+
+ Only models that have been tested and are working well are listed here for the sake of convenience.
"""
# Anthropic models
- CLAUDE_OPUS_4_5 = "openrouter:anthropic/claude-opus-4.5"
+ CLAUDE_OPUS_4_6 = "openrouter:anthropic/claude-opus-4.6"
CLAUDE_SONNET_4_5 = "openrouter:anthropic/claude-sonnet-4.5"
CLAUDE_HAIKU_4_5 = "openrouter:anthropic/claude-haiku-4.5"
# OpenAI models
GPT_4_1_MINI = "openrouter:openai/gpt-4.1-mini"
- GPT_5_1_CODEX_MINI = "openrouter:openai/gpt-5.1-codex-mini"
- GPT_5_1_CODEX = "openrouter:openai/gpt-5.1-codex"
- GPT_5_1_CODEX_MAX = "openrouter:openai/gpt-5.1-codex-max"
GPT_5_2 = "openrouter:openai/gpt-5.2"
GPT_5_2_CODEX = "openrouter:openai/gpt-5.2-codex"
diff --git a/daiv/automation/agent/pr_describer/__init__.py b/daiv/automation/agent/diff_to_metadata/__init__.py
similarity index 100%
rename from daiv/automation/agent/pr_describer/__init__.py
rename to daiv/automation/agent/diff_to_metadata/__init__.py
diff --git a/daiv/automation/agent/diff_to_metadata/conf.py b/daiv/automation/agent/diff_to_metadata/conf.py
new file mode 100644
index 00000000..23050a96
--- /dev/null
+++ b/daiv/automation/agent/diff_to_metadata/conf.py
@@ -0,0 +1,18 @@
+from pydantic import Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+from automation.agent.constants import ModelName
+
+
+class DiffToMetadataSettings(BaseSettings):
+ model_config = SettingsConfigDict(env_prefix="DIFF_TO_METADATA_", env_parse_none_str="None")
+
+ MODEL_NAME: ModelName | str = Field(
+ default=ModelName.CLAUDE_HAIKU_4_5, description="Model name to be used to transform a diff into metadata."
+ )
+ FALLBACK_MODEL_NAME: ModelName | str = Field(
+ default=ModelName.GPT_4_1_MINI, description="Fallback model name to be used when the primary model fails."
+ )
+
+
+settings = DiffToMetadataSettings()
diff --git a/daiv/automation/agent/diff_to_metadata/graph.py b/daiv/automation/agent/diff_to_metadata/graph.py
new file mode 100644
index 00000000..c1ca8ecc
--- /dev/null
+++ b/daiv/automation/agent/diff_to_metadata/graph.py
@@ -0,0 +1,167 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, cast
+
+from django.utils import timezone
+
+from deepagents.backends import FilesystemBackend
+from deepagents.graph import create_agent
+from deepagents.middleware.memory import MemoryMiddleware
+from langchain.agents.middleware import ModelFallbackMiddleware, dynamic_prompt
+from langchain_core.prompts import ChatPromptTemplate
+from langchain_core.runnables import RunnableLambda, RunnableParallel
+from prompt_toolkit import HTML, PromptSession
+
+from automation.agent import BaseAgent
+from automation.agent.constants import AGENTS_MEMORY_PATH, ModelName
+from automation.agent.middlewares.prompt_cache import AnthropicPromptCachingMiddleware
+from codebase.base import Scope
+from codebase.context import RuntimeCtx, set_runtime_ctx
+from codebase.utils import redact_diff_content
+
+from .conf import settings
+from .prompts import human_commit_message, human_pr_metadata, system
+from .schemas import CommitMetadata, PullRequestMetadata
+
+if TYPE_CHECKING:
+ from collections.abc import Sequence
+
+ from langchain.agents.middleware.types import ModelRequest
+ from langchain_core.runnables import Runnable
+
+
+@dynamic_prompt
+def dynamic_system_prompt(request: ModelRequest) -> str:
+ """
+ Dynamic system prompt for the diff-to-metadata agent.
+ """
+ system_prompt = ""
+ if request.system_prompt:
+ system_prompt = request.system_prompt + "\n\n"
+ return system_prompt + cast("str", system.format(current_date_time=timezone.now().strftime("%d %B, %Y")).content)
+
+
+def create_diff_to_metadata_graph(
+ model_names: Sequence[ModelName | str] = (settings.MODEL_NAME, settings.FALLBACK_MODEL_NAME),
+ *,
+ ctx: RuntimeCtx,
+ include_pr_metadata: bool = True,
+ include_commit_message: bool = True,
+) -> Runnable:
+ """
+ Create a graph to describe changes to feed into a pull request and optionally a commit message.
+
+ Args:
+ model_names: The models to use for the agent; the first is the primary model, the rest are fallbacks.
+ ctx: The runtime context.
+
+ Returns:
+ The diff-to-metadata graph.
+ """
+ assert include_pr_metadata or include_commit_message, (
+ "At least one of include_pr_metadata or include_commit_message must be True"
+ )
+
+ agent_path = Path(ctx.repo.working_dir)
+
+ backend = FilesystemBackend(root_dir=agent_path.parent, virtual_mode=True)
+
+ model = BaseAgent.get_model(model=model_names[0])
+ fallback_models = [BaseAgent.get_model(model=model_name) for model_name in model_names[1:]]
+
+ middleware = [
+ MemoryMiddleware(
+ backend=backend,
+ sources=[f"/{agent_path.name}/{ctx.config.context_file_name}", f"/{agent_path.name}/{AGENTS_MEMORY_PATH}"],
+ ),
+ AnthropicPromptCachingMiddleware(),
+ dynamic_system_prompt,
+ ]
+
+ if fallback_models:
+ middleware.append(ModelFallbackMiddleware(fallback_models[0], *fallback_models[1:]))
+
+ graphs: dict[str, Runnable] = {}
+
+ if include_pr_metadata:
+ graphs["pr_metadata"] = (
+ ChatPromptTemplate.from_messages([human_pr_metadata]).partial(extra_context="")
+ | create_agent(
+ model=model,
+ tools=[], # No tools are needed for this agent, it only uses the memory and the system prompt
+ middleware=middleware,
+ response_format=PullRequestMetadata,
+ context_schema=RuntimeCtx,
+ )
+ ).with_config(run_name="PRMetadata")
+
+ if include_commit_message:
+ graphs["commit_message"] = (
+ ChatPromptTemplate.from_messages([human_commit_message])
+ | create_agent(
+ model=model,
+ tools=[], # No tools are needed for this agent, it only uses the memory and the system prompt
+ middleware=middleware,
+ response_format=CommitMetadata,
+ context_schema=RuntimeCtx,
+ )
+ ).with_config(run_name="CommitMessage")
+
+ def _input_selector(x: dict[str, Any]) -> dict[str, str]:
+ input_data = {}
+ if include_pr_metadata:
+ input_data["pr_metadata_diff"] = x.get("pr_metadata_diff", x.get("diff", ""))
+ if include_commit_message:
+ input_data["commit_message_diff"] = x.get("commit_message_diff", x.get("diff", ""))
+ return input_data
+
+ def _output_selector(x: dict[str, Any]) -> dict[str, PullRequestMetadata | CommitMetadata]:
+ output: dict[str, PullRequestMetadata | CommitMetadata] = {}
+ if include_pr_metadata and "pr_metadata" in x:
+ output["pr_metadata"] = x["pr_metadata"]["structured_response"]
+ if include_commit_message and "commit_message" in x:
+ output["commit_message"] = x["commit_message"]["structured_response"]
+ return output
+
+ run_name = "DiffToMetadata"
+ return (RunnableLambda(_input_selector) | RunnableParallel(graphs) | RunnableLambda(_output_selector)).with_config(
+ run_name=run_name,
+ tags=[run_name],
+ metadata={"include_pr_metadata": include_pr_metadata, "include_commit_message": include_commit_message},
+ )
+
+
+async def main():
+ session = PromptSession(
+ message=HTML(' '),
+ complete_while_typing=True, # Show completions as you type
+ complete_in_thread=True, # Async completion prevents menu freezing
+ mouse_support=False,
+ enable_open_in_editor=True, # Allow Ctrl+X Ctrl+E to open external editor
+ enable_history_search=True,
+ wrap_lines=True,
+ reserve_space_for_menu=7, # Reserve space for completion menu to show 5-6 results
+ )
+ async with set_runtime_ctx(repo_id="srtab/daiv", scope=Scope.GLOBAL, ref="main") as ctx:
+ diff_to_metadata_graph = create_diff_to_metadata_graph(ctx=ctx, model_names=[ModelName.CLAUDE_HAIKU_4_5])
+ while True:
+ user_input = await session.prompt_async()
+ output = await diff_to_metadata_graph.ainvoke(
+ {"diff": redact_diff_content(user_input, ctx.config.omit_content_patterns)},
+ context=ctx,
+ config={"configurable": {"thread_id": "1"}},
+ )
+ if output and "pr_metadata" in output:
+ print(output["pr_metadata"].model_dump_json(indent=2)) # noqa: T201
+ if output and "commit_message" in output:
+ print(output["commit_message"].model_dump_json(indent=2)) # noqa: T201
+
+
+if __name__ == "__main__":
+ import asyncio
+
+ import django
+
+ django.setup()
+ asyncio.run(main())
diff --git a/daiv/automation/agent/pr_describer/prompts.py b/daiv/automation/agent/diff_to_metadata/prompts.py
similarity index 74%
rename from daiv/automation/agent/pr_describer/prompts.py
rename to daiv/automation/agent/diff_to_metadata/prompts.py
index c9c41562..45e4a841 100644
--- a/daiv/automation/agent/pr_describer/prompts.py
+++ b/daiv/automation/agent/diff_to_metadata/prompts.py
@@ -16,17 +16,16 @@
- Use a sensible default:
- branch: / where type ∈ {feat, fix, chore, docs, refactor, test}
- commit_message: Conventional Commits style ": " (subject only)
-5) Output MUST match the requested structured format exactly (no extra keys).
-""",
+5) Output MUST match the requested structured format exactly (no extra keys).""",
"mustache",
)
-human = HumanMessagePromptTemplate.from_template(
- """Generate PR metadata from the repo instructions and code changes.
+human_pr_metadata = HumanMessagePromptTemplate.from_template(
+ """Generate PR metadata from the memory and code changes.
Diff hunks (unified diff; may include multiple files):
~~~diff
-{{diff}}
+{{pr_metadata_diff}}
~~~
{{#extra_context}}
@@ -56,7 +55,26 @@
- branch:
- If memory defines a naming convention, follow it.
- Otherwise use: "/".
- - Keep it lowercase, ascii, no spaces, avoid > 50 chars.
-""",
+ - Keep it lowercase, ascii, no spaces, avoid > 50 chars.""",
+ "mustache",
+)
+
+
+human_commit_message = HumanMessagePromptTemplate.from_template(
+ """Generate a commit message from the memory and code changes.
+
+Diff hunks (unified diff; may include multiple files):
+~~~diff
+{{commit_message_diff}}
+~~~
+
+Output requirements:
+- Return a single JSON object with EXACTLY this key:
+ - commit_message
+
+Field rules:
+- commit_message:
+ - If memory defines a format, follow it.
+ - Otherwise use: "<type>: <subject>" (Conventional Commits), single line.
"mustache",
)
diff --git a/daiv/automation/agent/pr_describer/schemas.py b/daiv/automation/agent/diff_to_metadata/schemas.py
similarity index 63%
rename from daiv/automation/agent/pr_describer/schemas.py
rename to daiv/automation/agent/diff_to_metadata/schemas.py
index 051b46e8..05fcf246 100644
--- a/daiv/automation/agent/pr_describer/schemas.py
+++ b/daiv/automation/agent/diff_to_metadata/schemas.py
@@ -3,8 +3,11 @@
from pydantic import BaseModel, Field
+class CommitMetadata(BaseModel):
+ commit_message: str
+
+
class PullRequestMetadata(BaseModel):
- title: str = Field()
+ title: str
branch: str = Field(pattern=r"[a-z0-9-_/]")
- description: str = Field()
- commit_message: str = Field()
+ description: str
diff --git a/daiv/automation/agent/graph.py b/daiv/automation/agent/graph.py
index 5fd37242..40515736 100644
--- a/daiv/automation/agent/graph.py
+++ b/daiv/automation/agent/graph.py
@@ -1,6 +1,6 @@
import asyncio
from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, cast
from django.conf import django
from django.utils import timezone
@@ -25,7 +25,7 @@
from automation.agent.base import BaseAgent, ThinkingLevel
from automation.agent.conf import settings
-from automation.agent.constants import DAIV_MEMORY_PATH, SKILLS_SOURCES, ModelName
+from automation.agent.constants import AGENTS_MEMORY_PATH, SKILLS_SOURCES, ModelName
from automation.agent.mcp.toolkits import MCPToolkit
from automation.agent.middlewares.file_system import FilesystemMiddleware
from automation.agent.middlewares.git import GitMiddleware
@@ -48,6 +48,8 @@
from core.constants import BOT_NAME
if TYPE_CHECKING:
+ from collections.abc import Sequence
+
from langgraph.checkpoint.base import BaseCheckpointSaver
from langgraph.store.base import BaseStore
@@ -106,11 +108,11 @@ def dynamic_write_todos_system_prompt(bash_tool_enabled: bool) -> str:
"""
Dynamic prompt for the write todos system.
"""
- return WRITE_TODOS_SYSTEM_PROMPT.format(bash_tool_enabled=bash_tool_enabled).content
+ return cast("str", WRITE_TODOS_SYSTEM_PROMPT.format(bash_tool_enabled=bash_tool_enabled).content)
async def create_daiv_agent(
- model_names: list[ModelName | str] = (settings.MODEL_NAME, settings.FALLBACK_MODEL_NAME),
+ model_names: Sequence[ModelName | str] = (settings.MODEL_NAME, settings.FALLBACK_MODEL_NAME),
thinking_level: ThinkingLevel | None = settings.THINKING_LEVEL,
*,
ctx: RuntimeCtx,
@@ -118,7 +120,6 @@ async def create_daiv_agent(
checkpointer: BaseCheckpointSaver | None = None,
store: BaseStore | None = None,
debug: bool = False,
- cache: bool = False,
offline: bool = False,
):
"""
@@ -132,7 +133,6 @@ async def create_daiv_agent(
checkpointer: The checkpointer to use for the agent.
store: The store to use for the agent.
debug: Whether to enable debug mode for the agent.
- cache: Whether to enable cache for the agent.
offline: Whether to enable offline mode for the agent.
Returns:
@@ -190,7 +190,7 @@ async def create_daiv_agent(
),
MemoryMiddleware(
backend=backend,
- sources=[f"/{agent_path.name}/{ctx.config.context_file_name}", f"/{agent_path.name}/{DAIV_MEMORY_PATH}"],
+ sources=[f"/{agent_path.name}/{ctx.config.context_file_name}", f"/{agent_path.name}/{AGENTS_MEMORY_PATH}"],
),
SkillsMiddleware(
backend=backend, sources=[f"/{agent_path.name}/{source}" for source in SKILLS_SOURCES], subagents=subagents
@@ -223,7 +223,6 @@ async def create_daiv_agent(
store=store,
debug=debug,
name="DAIV Agent",
- cache=cache,
).with_config({"recursion_limit": settings.RECURSION_LIMIT})
@@ -240,7 +239,7 @@ async def main():
)
async with set_runtime_ctx(repo_id="srtab/daiv", scope=Scope.GLOBAL, ref="main") as ctx:
agent = await create_daiv_agent(
- ctx=ctx, model_names=[ModelName.MOONSHOTAI_KIMI_K2_5], store=InMemoryStore(), checkpointer=InMemorySaver()
+ ctx=ctx, model_names=["openrouter:z-ai/glm-5"], store=InMemoryStore(), checkpointer=InMemorySaver()
)
while True:
user_input = await session.prompt_async()
diff --git a/daiv/automation/agent/middlewares/git.py b/daiv/automation/agent/middlewares/git.py
index c08795f1..907e8f37 100644
--- a/daiv/automation/agent/middlewares/git.py
+++ b/daiv/automation/agent/middlewares/git.py
@@ -1,29 +1,24 @@
from __future__ import annotations
import logging
-from textwrap import dedent
-from typing import TYPE_CHECKING, Any
-
-from django.template.loader import render_to_string
+from typing import TYPE_CHECKING, Annotated, Any, cast
from langchain.agents import AgentState
from langchain.agents.middleware import AgentMiddleware, ModelRequest, ModelResponse
+from langchain.agents.middleware.types import PrivateStateAttr
from langchain_core.prompts import SystemMessagePromptTemplate
+from langsmith import get_current_run_tree
-from automation.agent.pr_describer.graph import create_pr_describer_agent
-from codebase.base import GitPlatform, MergeRequest, Scope
-from codebase.clients import RepoClient
+from automation.agent.publishers import GitChangePublisher
+from codebase.base import MergeRequest, Scope
from codebase.context import RuntimeCtx # noqa: TC001
-from codebase.utils import GitManager, redact_diff_content
-from core.constants import BOT_LABEL, BOT_NAME
+from codebase.utils import GitManager, GitPushPermissionError
if TYPE_CHECKING:
from collections.abc import Awaitable, Callable
from langgraph.runtime import Runtime
- from automation.agent.pr_describer.schemas import PullRequestMetadata
-
logger = logging.getLogger("daiv.tools")
@@ -58,18 +53,13 @@ class GitState(AgentState):
State for the git middleware.
"""
- branch_name: str
- """
- The branch name used to commit the changes.
- """
-
- merge_request_id: int
+ merge_request: Annotated[MergeRequest | None, PrivateStateAttr]
"""
- The merge request ID used to commit the changes.
+ The merge request used to commit the changes.
"""
-class GitMiddleware(AgentMiddleware):
+class GitMiddleware(AgentMiddleware[GitState, RuntimeCtx]):
"""
Middleware to handle the git operations and persist changes made by the DAIV agent to the repository.
@@ -109,32 +99,29 @@ async def abefore_agent(self, state: GitState, runtime: Runtime[RuntimeCtx]) ->
"""
Before the agent starts, set the branch name and merge request ID.
"""
- branch_name = state.get("branch_name")
- merge_request_id = state.get("merge_request_id")
+ merge_request = state.get("merge_request")
if runtime.context.scope == Scope.MERGE_REQUEST:
# In this case, ignore the branch name and merge request ID from the state,
# and use the source branch and merge request ID from the merge request.
- branch_name = runtime.context.merge_request.source_branch
- merge_request_id = runtime.context.merge_request.merge_request_id
+ merge_request = runtime.context.merge_request
- if branch_name and branch_name != runtime.context.repo.active_branch.name:
+ if merge_request and merge_request.source_branch != runtime.context.repo.active_branch.name:
git_manager = GitManager(runtime.context.repo)
- logger.info("[%s] Checking out to branch '%s'", self.name, branch_name)
+ logger.info("[%s] Checking out to branch '%s'", self.name, merge_request.source_branch)
try:
- git_manager.checkout(branch_name)
+ git_manager.checkout(merge_request.source_branch)
except ValueError as e:
# The branch does not exist in the repository, so we need to create it.
- logger.warning("[%s] Failed to checkout to branch '%s': %s", self.name, branch_name, e)
- branch_name = None
- merge_request_id = None
+ logger.warning("[%s] Failed to checkout to branch '%s': %s", self.name, merge_request.source_branch, e)
+ merge_request = None
- return {"branch_name": branch_name, "merge_request_id": merge_request_id}
+ return {"merge_request": merge_request}
async def awrap_model_call(
- self, request: ModelRequest, handler: Callable[[ModelRequest], Awaitable[ModelResponse]]
+ self, request: ModelRequest[RuntimeCtx], handler: Callable[[ModelRequest[RuntimeCtx]], Awaitable[ModelResponse]]
) -> ModelResponse:
"""
Update the system prompt with the git system prompt.
@@ -150,11 +137,13 @@ async def awrap_model_call(
else None,
}
- system_prompt = GIT_SYSTEM_PROMPT.format(**context).content
+ system_prompt = ""
+ if request.system_prompt:
+ system_prompt = request.system_prompt + "\n\n"
- request = request.override(system_prompt=request.system_prompt + "\n\n" + system_prompt)
+ system_prompt += cast("str", GIT_SYSTEM_PROMPT.format(**context).content)
- return await handler(request)
+ return await handler(request.override(system_prompt=system_prompt))
async def aafter_agent(self, state: GitState, runtime: Runtime[RuntimeCtx]) -> dict[str, Any] | None:
"""
@@ -163,119 +152,18 @@ async def aafter_agent(self, state: GitState, runtime: Runtime[RuntimeCtx]) -> d
if not self.auto_commit_changes:
return None
- git_manager = GitManager(runtime.context.repo)
-
- if not git_manager.is_dirty():
+ publisher = GitChangePublisher(runtime.context)
+ try:
+ merge_request = await publisher.publish(merge_request=state.get("merge_request"), skip_ci=self.skip_ci)
+ except GitPushPermissionError as e:
+ logger.warning("[%s] Failed to publish changes due to git push permissions: %s", self.name, e)
return None
- pr_metadata = await self._get_mr_metadata(runtime, git_manager.get_diff())
- branch_name = state.get("branch_name") or pr_metadata.branch
-
- logger.info("[%s] Committing and pushing changes to branch '%s'", self.name, branch_name)
-
- unique_branch_name = git_manager.commit_and_push_changes(
- pr_metadata.commit_message,
- branch_name=branch_name,
- skip_ci=self.skip_ci,
- use_branch_if_exists=bool(state.get("branch_name")),
- )
-
- merge_request_id = state.get("merge_request_id")
- if runtime.context.scope != Scope.MERGE_REQUEST and not merge_request_id:
- logger.info(
- "[%s] Creating merge request: '%s' -> '%s'",
- self.name,
- unique_branch_name,
- runtime.context.config.default_branch,
- )
- merge_request = self._update_or_create_merge_request(
- runtime, unique_branch_name, pr_metadata.title, pr_metadata.description
- )
- merge_request_id = merge_request.merge_request_id
- logger.info("[%s] Merge request created: %s", self.name, merge_request.web_url)
-
- return {"branch_name": unique_branch_name, "merge_request_id": merge_request_id}
-
- async def _get_mr_metadata(self, runtime: Runtime[RuntimeCtx], diff: str) -> PullRequestMetadata:
- """
- Get the PR metadata from the diff.
+ if merge_request:
+ if runtime.context.scope == Scope.ISSUE and (rt := get_current_run_tree()):
+ # If an issue resulted in a merge request, we send it to LangSmith for tracking.
+ rt.metadata["merge_request_id"] = merge_request.merge_request_id
- Args:
- runtime: The runtime context.
- diff: The diff of the changes.
+ return {"merge_request": merge_request}
- Returns:
- The PR metadata.
- """
- pr_describer = create_pr_describer_agent(
- model=runtime.context.config.models.pr_describer.model, ctx=runtime.context
- )
-
- extra_context = ""
- if runtime.context.scope == Scope.ISSUE:
- extra_context = dedent(
- """\
- This changes were made to address the following issue:
-
- Issue ID: {issue.iid}
- Issue title: {issue.title}
- Issue description: {issue.description}
- """
- ).format(issue=runtime.context.issue)
-
- result = await pr_describer.ainvoke(
- {
- "diff": redact_diff_content(diff, runtime.context.config.omit_content_patterns),
- "extra_context": extra_context,
- },
- config={
- "tags": [pr_describer.get_name(), runtime.context.git_platform.value],
- "metadata": {"scope": runtime.context.scope, "repo_id": runtime.context.repo_id},
- },
- )
- if result and "structured_response" in result:
- return result["structured_response"]
-
- raise ValueError("Failed to get PR metadata from the diff.")
-
- def _update_or_create_merge_request(
- self, runtime: Runtime[RuntimeCtx], branch_name: str, title: str, description: str
- ) -> MergeRequest:
- """
- Update or create the merge request.
-
- Args:
- runtime: The runtime context.
- branch_name: The branch name.
- title: The title of the merge request.
- description: The description of the merge request.
- """
- assignee_id = None
-
- if runtime.context.issue and runtime.context.issue.assignee:
- assignee_id = (
- runtime.context.issue.assignee.id
- if runtime.context.git_platform == GitPlatform.GITLAB
- else runtime.context.issue.assignee.username
- )
-
- client = RepoClient.create_instance()
- return client.update_or_create_merge_request(
- repo_id=runtime.context.repo_id,
- source_branch=branch_name,
- target_branch=runtime.context.config.default_branch,
- labels=[BOT_LABEL],
- title=title,
- assignee_id=assignee_id,
- description=render_to_string(
- "codebase/issue_merge_request.txt",
- {
- "description": description,
- "source_repo_id": runtime.context.repo_id,
- "issue_id": runtime.context.issue.iid if runtime.context.issue else None,
- "bot_name": BOT_NAME,
- "bot_username": runtime.context.bot_username,
- "is_gitlab": runtime.context.git_platform == GitPlatform.GITLAB,
- },
- ),
- )
+ return None
diff --git a/daiv/automation/agent/middlewares/git_platform.py b/daiv/automation/agent/middlewares/git_platform.py
index 89cb366d..99f97905 100644
--- a/daiv/automation/agent/middlewares/git_platform.py
+++ b/daiv/automation/agent/middlewares/git_platform.py
@@ -465,7 +465,8 @@ async def github_tool(
args = ["gh"]
args += splitted_subcommand
- args += ["--repo", runtime.context.repo_id]
+ if resource != "api":
+ args += ["--repo", runtime.context.repo_id]
try:
process = await asyncio.create_subprocess_exec(
diff --git a/daiv/automation/agent/middlewares/skills.py b/daiv/automation/agent/middlewares/skills.py
index 7256f4be..7acf99cd 100644
--- a/daiv/automation/agent/middlewares/skills.py
+++ b/daiv/automation/agent/middlewares/skills.py
@@ -12,7 +12,7 @@
from langgraph.runtime import Runtime # noqa: TC002
from langgraph.types import Command
-from automation.agent.constants import BUILTIN_SKILLS_PATH, DAIV_SKILLS_PATH
+from automation.agent.constants import AGENTS_SKILLS_PATH, BUILTIN_SKILLS_PATH
from automation.agent.utils import extract_body_from_frontmatter, extract_text_content
from codebase.context import RuntimeCtx # noqa: TC001
from slash_commands.parser import SlashCommandCommand, parse_slash_command
@@ -99,9 +99,9 @@ async def abefore_agent(
Apply builtin slash commands early in the conversation and copy builtin skills to the project skills directory
to make them available to the agent.
"""
- if "skills_metadata" in state:
- return None
-
+ # We need to always copy builtin skills before calling the super method to make them available in the filesystem
+ # not just to be captured and registered in "skills_metadata" on first run, but also to be available in the
+ # filesystem so that the agent can use them using the `skill` tool, otherwise a not_found error will be raised.
builtin_skills = await self._copy_builtin_skills(agent_path=Path(runtime.context.repo.working_dir))
skills_update = await super().abefore_agent(state, runtime, config)
@@ -115,8 +115,12 @@ async def abefore_agent(
else:
skill["metadata"].pop("is_builtin", None)
+ # If the super method returns None, it means that the skills metadata was already captured and registered in
+ # the state.
+ skills_metadata = skills_update["skills_metadata"] if skills_update else state["skills_metadata"]
+
builtin_slash_commands = await self._apply_builtin_slash_commands(
- state["messages"], runtime.context, skills_update["skills_metadata"]
+ state["messages"], runtime.context, skills_metadata
)
if builtin_slash_commands:
@@ -143,7 +147,7 @@ async def _copy_builtin_skills(self, agent_path: Path) -> list[str]:
"""
builtin_skills = []
files_to_upload = []
- project_skills_path = Path(f"/{agent_path.name}/{DAIV_SKILLS_PATH}")
+ project_skills_path = Path(f"/{agent_path.name}/{AGENTS_SKILLS_PATH}")
for builtin_skill_dir in BUILTIN_SKILLS_PATH.iterdir():
if not builtin_skill_dir.is_dir() or builtin_skill_dir.name == "__pycache__":
diff --git a/daiv/automation/agent/pr_describer/conf.py b/daiv/automation/agent/pr_describer/conf.py
deleted file mode 100644
index 87ed64e3..00000000
--- a/daiv/automation/agent/pr_describer/conf.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from pydantic import Field
-from pydantic_settings import BaseSettings, SettingsConfigDict
-
-from automation.agent.constants import ModelName
-
-
-class PRDescriberSettings(BaseSettings):
- model_config = SettingsConfigDict(env_prefix="PR_DESCRIBER_", env_parse_none_str="None")
-
- MODEL_NAME: ModelName | str = Field(
- default=ModelName.GPT_4_1_MINI, description="Model name to be used for PR describer."
- )
-
-
-settings = PRDescriberSettings()
diff --git a/daiv/automation/agent/pr_describer/graph.py b/daiv/automation/agent/pr_describer/graph.py
deleted file mode 100644
index e8922a09..00000000
--- a/daiv/automation/agent/pr_describer/graph.py
+++ /dev/null
@@ -1,70 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import TYPE_CHECKING
-
-from django.utils import timezone
-
-from deepagents.backends import FilesystemBackend
-from deepagents.graph import create_agent
-from deepagents.middleware.memory import MemoryMiddleware
-from langchain.agents.middleware import dynamic_prompt
-from langchain_core.prompts import ChatPromptTemplate
-
-from automation.agent import BaseAgent
-from automation.agent.constants import DAIV_MEMORY_PATH
-from automation.agent.middlewares.prompt_cache import AnthropicPromptCachingMiddleware
-from codebase.context import RuntimeCtx
-
-from .prompts import human, system
-from .schemas import PullRequestMetadata
-
-if TYPE_CHECKING:
- from langchain.agents.middleware.types import ModelRequest
- from langchain_core.runnables import Runnable
-
- from automation.agent.constants import ModelName
-
-
-@dynamic_prompt
-def dynamic_pr_describer_system_prompt(request: ModelRequest) -> str:
- """
- Dynamic system prompt for the PR describer agent.
- """
- return (
- request.system_prompt + "\n\n" + system.format(current_date_time=timezone.now().strftime("%d %B, %Y")).content
- )
-
-
-def create_pr_describer_agent(model: ModelName | str, *, ctx: RuntimeCtx) -> Runnable:
- """
- Create the PR describer agent.
-
- Args:
- model: The model to use for the agent.
- ctx: The runtime context.
-
- Returns:
- The PR describer agent.
- """
- agent_path = Path(ctx.repo.working_dir)
- backend = FilesystemBackend(root_dir=agent_path.parent, virtual_mode=True)
-
- return ChatPromptTemplate.from_messages([human]).partial(extra_context="") | create_agent(
- model=BaseAgent.get_model(model=model),
- tools=[], # No tools are needed for this agent, it only uses the memory and the system prompt
- middleware=[
- MemoryMiddleware(
- backend=backend,
- sources=[
- f"/{agent_path.name}/{ctx.config.context_file_name}",
- f"/{agent_path.name}/{DAIV_MEMORY_PATH}",
- ],
- ),
- AnthropicPromptCachingMiddleware(),
- dynamic_pr_describer_system_prompt,
- ],
- response_format=PullRequestMetadata,
- context_schema=RuntimeCtx,
- name="PR Describer Agent",
- )
diff --git a/daiv/automation/agent/publishers.py b/daiv/automation/agent/publishers.py
new file mode 100644
index 00000000..b4e997b9
--- /dev/null
+++ b/daiv/automation/agent/publishers.py
@@ -0,0 +1,206 @@
+from __future__ import annotations
+
+import logging
+from abc import abstractmethod
+from textwrap import dedent
+from typing import TYPE_CHECKING, Any, cast
+
+from django.template.loader import render_to_string
+
+from codebase.base import GitPlatform, MergeRequest, Scope
+from codebase.clients import RepoClient
+from codebase.utils import GitManager, redact_diff_content
+from core.constants import BOT_LABEL, BOT_NAME
+
+from .diff_to_metadata.graph import create_diff_to_metadata_graph
+
+if TYPE_CHECKING:
+ from codebase.context import RuntimeCtx
+
+
+logger = logging.getLogger("daiv.tools")
+
+
+class ChangePublisher:
+ """
+ Publisher for changes made by the agent.
+ """
+
+ def __init__(self, ctx: RuntimeCtx):
+ """
+ Initialize the publisher.
+ """
+ self.ctx = ctx
+ self.client = RepoClient.create_instance()
+
+ @abstractmethod
+ async def publish(self, **kwargs) -> Any:
+ """
+ Publish the changes.
+ """
+
+
+class GitChangePublisher(ChangePublisher):
+ """
+ Publisher for changes made by the agent to the Git repository.
+ """
+
+ async def publish(
+ self, *, merge_request: MergeRequest | None = None, skip_ci: bool = False, as_draft: bool = False, **kwargs
+ ) -> MergeRequest | None:
+ """
+ Save the changes made by the agent to the repository.
+
+ Args:
+ merge_request: The merge request to commit and push the changes to. If None, a new merge request will be
+ generated based on the diff.
+ skip_ci: Whether to skip the CI.
+            as_draft: Whether to create the merge request as a draft if the merge request doesn't exist.
+
+ Returns:
+ The merge request if it was created or updated, otherwise None.
+ """
+ git_manager = GitManager(self.ctx.repo)
+
+ if not git_manager.is_dirty():
+ logger.info("No changes to publish.")
+ return None
+
+ # Compute full diff metadata when creating a new merge request or updating a draft merge request
+ # to ensure we have the most up-to-date information.
+ pr_metadata_diff = (
+ git_manager.get_diff(f"origin/{self.ctx.config.default_branch}")
+ if merge_request is None or (merge_request.draft and as_draft is False)
+ else None
+ )
+
+ changes_metadata = await self._diff_to_metadata(
+ pr_metadata_diff=pr_metadata_diff, commit_message_diff=git_manager.get_diff()
+ )
+
+ unique_branch_name = git_manager.commit_and_push_changes(
+ changes_metadata["commit_message"].commit_message,
+ branch_name=(
+ changes_metadata["pr_metadata"].branch if merge_request is None else merge_request.source_branch
+ ),
+ use_branch_if_exists=merge_request is not None,
+ skip_ci=skip_ci,
+ )
+
+ logger.info("Published changes to branch: '%s' [skip_ci: %s]", unique_branch_name, skip_ci)
+
+ if merge_request is None:
+ merge_request = self._create_merge_request(
+ unique_branch_name,
+ changes_metadata["pr_metadata"].title,
+ changes_metadata["pr_metadata"].description,
+ as_draft=as_draft,
+ )
+ logger.info(
+ "Created merge request: %s [merge_request_id: %s, draft: %r]",
+ merge_request.web_url,
+ merge_request.merge_request_id,
+ merge_request.draft,
+ )
+ elif merge_request.draft and as_draft is False:
+ merge_request = self.client.update_merge_request(
+ merge_request.repo_id, merge_request.merge_request_id, as_draft=as_draft
+ )
+ logger.info(
+ "Updated merge request: %s [merge_request_id: %s, draft: %r]",
+ merge_request.web_url,
+ merge_request.merge_request_id,
+ merge_request.draft,
+ )
+
+ return merge_request
+
+ async def _diff_to_metadata(self, commit_message_diff: str, pr_metadata_diff: str | None = None) -> dict[str, Any]:
+ """
+ Get the PR metadata from the diff.
+
+ Args:
+            commit_message_diff: The diff used to generate the commit message.
+            pr_metadata_diff: The diff used to generate the PR metadata. If None, the PR metadata
+                will not be computed.
+
+ Returns:
+ The pull request metadata and commit message.
+ """
+
+ input_data = {
+ "commit_message_diff": redact_diff_content(commit_message_diff, self.ctx.config.omit_content_patterns)
+ }
+ if self.ctx.scope == Scope.ISSUE:
+ input_data["extra_context"] = dedent(
+ """\
+                These changes were made to address the following issue:
+
+ Issue ID: {issue.iid}
+ Issue title: {issue.title}
+ Issue description: {issue.description}
+ """
+ ).format(issue=self.ctx.issue)
+
+ if pr_metadata_diff:
+ input_data["pr_metadata_diff"] = redact_diff_content(
+ pr_metadata_diff, self.ctx.config.omit_content_patterns
+ )
+
+ changes_metadata_graph = create_diff_to_metadata_graph(ctx=self.ctx, include_pr_metadata=bool(pr_metadata_diff))
+ result = await changes_metadata_graph.ainvoke(
+ input_data,
+ config={
+ "tags": [self.ctx.git_platform.value],
+ "metadata": {"scope": self.ctx.scope, "repo_id": self.ctx.repo_id},
+ },
+ )
+ if result and ("pr_metadata" in result or "commit_message" in result):
+ return result
+
+ raise ValueError("Failed to get PR metadata from the diff.")
+
+ def _create_merge_request(
+ self, branch_name: str, title: str, description: str, as_draft: bool = False
+ ) -> MergeRequest:
+ """
+        Create the merge request.
+
+ Args:
+ branch_name: The branch name.
+ title: The title of the merge request.
+ description: The description of the merge request.
+ as_draft: Whether to create the merge request as a draft.
+
+ Returns:
+ The merge request.
+ """
+ assignee_id = None
+
+ if self.ctx.issue and self.ctx.issue.assignee:
+ assignee_id = (
+ self.ctx.issue.assignee.id
+ if self.ctx.git_platform == GitPlatform.GITLAB
+ else self.ctx.issue.assignee.username
+ )
+
+ return self.client.update_or_create_merge_request(
+ repo_id=self.ctx.repo_id,
+ source_branch=branch_name,
+ target_branch=cast("str", self.ctx.config.default_branch),
+ labels=[BOT_LABEL],
+ title=title,
+ assignee_id=assignee_id,
+ as_draft=as_draft,
+ description=render_to_string(
+ "codebase/issue_merge_request.txt",
+ {
+ "description": description,
+ "source_repo_id": self.ctx.repo_id,
+ "issue_id": self.ctx.issue.iid if self.ctx.issue else None,
+ "bot_name": BOT_NAME,
+ "bot_username": self.ctx.bot_username,
+ "is_gitlab": self.ctx.git_platform == GitPlatform.GITLAB,
+ },
+ ),
+ )
diff --git a/daiv/automation/agent/subagents.py b/daiv/automation/agent/subagents.py
index 5739b122..3b58a531 100644
--- a/daiv/automation/agent/subagents.py
+++ b/daiv/automation/agent/subagents.py
@@ -3,6 +3,8 @@
from deepagents.graph import SubAgent
from langchain.agents.middleware import TodoListMiddleware
+from automation.agent import BaseAgent
+from automation.agent.conf import settings
from automation.agent.middlewares.file_system import FilesystemMiddleware
from automation.agent.middlewares.git_platform import GitPlatformMiddleware
from automation.agent.middlewares.sandbox import SandboxMiddleware
@@ -247,6 +249,7 @@ def create_explore_subagent(backend: BackendProtocol, runtime: RuntimeCtx) -> Su
description=EXPLORE_SUBAGENT_DESCRIPTION,
system_prompt=EXPLORE_SYSTEM_PROMPT,
middleware=middleware,
+ model=BaseAgent.get_model(model=settings.EXPLORE_MODEL_NAME),
)
diff --git a/daiv/automation/agent/utils.py b/daiv/automation/agent/utils.py
index 784e297f..1cae677a 100644
--- a/daiv/automation/agent/utils.py
+++ b/daiv/automation/agent/utils.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import base64
import re
from pathlib import Path
@@ -6,12 +8,12 @@
from langchain_core.messages.content import create_image_block
-from automation.agent.schemas import Image
from codebase.base import GitPlatform
from codebase.clients import RepoClient
from core.utils import extract_valid_image_mimetype, is_valid_url
from .conf import settings
+from .schemas import Image
if TYPE_CHECKING:
from langchain_core.messages import ImageContentBlock
diff --git a/daiv/automation/checks.py b/daiv/automation/checks.py
index 2a2aab54..b3f86847 100644
--- a/daiv/automation/checks.py
+++ b/daiv/automation/checks.py
@@ -2,13 +2,14 @@
from .agent.base import BaseAgent, ModelProvider
from .agent.conf import settings as agent_settings
-from .agent.pr_describer.conf import settings as pr_describer_settings
+from .agent.diff_to_metadata.conf import settings as diff_to_metadata_settings
from .conf import settings
declared_model_names = {
agent_settings.MODEL_NAME,
agent_settings.FALLBACK_MODEL_NAME,
- pr_describer_settings.MODEL_NAME,
+ diff_to_metadata_settings.MODEL_NAME,
+ diff_to_metadata_settings.FALLBACK_MODEL_NAME,
settings.WEB_FETCH_MODEL_NAME,
}
diff --git a/daiv/codebase/base.py b/daiv/codebase/base.py
index aaa3436d..5ac8d472 100644
--- a/daiv/codebase/base.py
+++ b/daiv/codebase/base.py
@@ -78,6 +78,7 @@ class MergeRequest(BaseModel):
web_url: str | None = None
sha: str | None = None
author: User
+ draft: bool = False
class MergeRequestDiff(BaseModel):
diff --git a/daiv/codebase/clients/base.py b/daiv/codebase/clients/base.py
index 43df21b8..9b255251 100644
--- a/daiv/codebase/clients/base.py
+++ b/daiv/codebase/clients/base.py
@@ -3,6 +3,7 @@
import abc
import functools
import logging
+from contextlib import contextmanager
from enum import StrEnum
from functools import cached_property
from typing import TYPE_CHECKING, Any
@@ -46,7 +47,7 @@ def get_repository_file(self, repo_id: str, file_path: str, ref: str) -> str | N
pass
@abc.abstractmethod
- def get_project_uploaded_file(self, repo_id: str, file_path: str) -> bytes | None:
+ async def get_project_uploaded_file(self, repo_id: str, file_path: str) -> bytes | None:
pass
@abc.abstractmethod
@@ -73,7 +74,21 @@ def update_or_create_merge_request(
title: str,
description: str,
labels: list[str] | None = None,
- assignee_id: int | None = None,
+ assignee_id: str | int | None = None,
+ as_draft: bool = False,
+ ) -> MergeRequest:
+ pass
+
+ @abc.abstractmethod
+ def update_merge_request(
+ self,
+ repo_id: str,
+ merge_request_id: int,
+ as_draft: bool | None = None,
+ title: str | None = None,
+ description: str | None = None,
+ labels: list[str] | None = None,
+ assignee_id: str | int | None = None,
) -> MergeRequest:
pass
@@ -90,6 +105,7 @@ def create_merge_request_comment(
pass
@abc.abstractmethod
+ @contextmanager
def load_repo(self, repository: Repository, sha: str) -> Iterator[Repo]:
pass
@@ -126,7 +142,7 @@ def update_issue_comment(
pass
@abc.abstractmethod
- def create_issue_emoji(self, repo_id: str, issue_id: int, emoji: Emoji, note_id: str):
+ def create_issue_emoji(self, repo_id: str, issue_id: int, emoji: Emoji, note_id: int | None = None):
pass
@abc.abstractmethod
@@ -180,7 +196,7 @@ def get_merge_request_comment(self, repo_id: str, merge_request_id: int, comment
pass
@abc.abstractmethod
- def create_merge_request_note_emoji(self, repo_id: str, merge_request_id: int, emoji: Emoji, note_id: str):
+ def create_merge_request_note_emoji(self, repo_id: str, merge_request_id: int, emoji: Emoji, note_id: int):
pass
@abc.abstractmethod
diff --git a/daiv/codebase/clients/github/api/callbacks.py b/daiv/codebase/clients/github/api/callbacks.py
index 595c82d4..1726ffc3 100644
--- a/daiv/codebase/clients/github/api/callbacks.py
+++ b/daiv/codebase/clients/github/api/callbacks.py
@@ -6,11 +6,11 @@
from codebase.clients import RepoClient
from codebase.clients.base import Emoji
from codebase.repo_config import RepositoryConfig
-from codebase.tasks import address_issue_task, address_mr_comments_task, address_mr_review_task
+from codebase.tasks import address_issue_task, address_mr_comments_task
from codebase.utils import note_mentions_daiv
from core.constants import BOT_AUTO_LABEL, BOT_LABEL, BOT_MAX_LABEL
-from .models import Comment, Issue, Label, PullRequest, Repository, Review # noqa: TC001
+from .models import Comment, Issue, Label, Repository # noqa: TC001
logger = logging.getLogger("daiv.webhooks")
@@ -113,8 +113,9 @@ async def process_callback(self):
)
elif self._is_merge_request_review:
- # The webhook doesn't provide the source branch, so we need to fetch it from the merge request.
-
+ self._client.create_merge_request_note_emoji(
+ self.repository.full_name, self.issue.number, Emoji.EYES, self.comment.id
+ )
await address_mr_comments_task.aenqueue(
repo_id=self.repository.full_name,
merge_request_id=self.issue.number,
@@ -149,42 +150,6 @@ def _is_issue_comment(self) -> bool:
)
-class PullRequestReviewCallback(GitHubCallback):
- """
- GitHub Pull Request Review Webhook for automatically address the review feedback.
- """
-
- action: Literal["submitted", "edited", "dismissed"]
- pull_request: PullRequest
- review: Review
-
- def model_post_init(self, __context: Any):
- self._client = RepoClient.create_instance()
-
- def accept_callback(self) -> bool:
- """
- Check if the webhook is accepted.
- """
- return (
- self.action in ["submitted", "edited"]
- and self.pull_request.state == "open"
- # Ignore the DAIV review itself
- and self.review.user.id != self._client.current_user.id
- )
-
- async def process_callback(self):
- """
- Trigger the task to address the review feedback or issue comment like the plan approval use case.
-
- GitLab Note Webhook is called multiple times, one per note/discussion.
- """
- await address_mr_review_task.aenqueue(
- repo_id=self.repository.full_name,
- merge_request_id=self.pull_request.number,
- merge_request_source_branch=self.pull_request.head.ref,
- )
-
-
class PushCallback(GitHubCallback):
"""
GitHub Push Webhook for automatically invalidate the cache for the repository configurations.
diff --git a/daiv/codebase/clients/github/api/views.py b/daiv/codebase/clients/github/api/views.py
index bcabb7f9..b81f5b71 100644
--- a/daiv/codebase/clients/github/api/views.py
+++ b/daiv/codebase/clients/github/api/views.py
@@ -5,7 +5,7 @@
from codebase.base import GitPlatform
from codebase.conf import settings
-from .callbacks import IssueCallback, IssueCommentCallback, PullRequestReviewCallback, PushCallback # noqa: TC001
+from .callbacks import IssueCallback, IssueCommentCallback, PushCallback # noqa: TC001
from .security import validate_github_webhook
logger = logging.getLogger("daiv.webhooks")
@@ -13,7 +13,7 @@
@router.post("/callbacks/github", response={204: None, 401: None, 403: None, 422: UnprocessableEntityResponse})
@router.post("/callbacks/github/", response={204: None, 401: None, 403: None, 422: UnprocessableEntityResponse})
-async def callback(request, payload: IssueCallback | IssueCommentCallback | PushCallback | PullRequestReviewCallback):
+async def callback(request, payload: IssueCallback | IssueCommentCallback | PushCallback):
"""
GitHub callback endpoint for processing callbacks.
diff --git a/daiv/codebase/clients/github/client.py b/daiv/codebase/clients/github/client.py
index 55fd5aed..d51bd9aa 100644
--- a/daiv/codebase/clients/github/client.py
+++ b/daiv/codebase/clients/github/client.py
@@ -61,6 +61,18 @@ def __init__(self, integration: GithubIntegration, installation_id: int):
self.client_installation = integration.get_app_installation(installation_id)
self.client = self.client_installation.get_github_for_installation()
+ def _configure_commit_identity(self, repo: Repo) -> None:
+ """
+ Configure repository-local git identity to match the GitHub App bot user.
+ """
+ bot_login = f"{self.client_installation.app_slug}[bot]"
+ bot_user_id = self.current_user.id
+ bot_email = f"{bot_user_id}+{bot_login}@users.noreply.github.com"
+
+ with repo.config_writer() as writer:
+ writer.set_value("user", "name", bot_login)
+ writer.set_value("user", "email", bot_email)
+
def get_repository(self, repo_id: str) -> Repository:
"""
Get a repository.
@@ -104,6 +116,7 @@ def list_repositories(self, search: str | None = None, topics: list[str] | None
default_branch=repo.default_branch,
git_platform=self.git_platform,
topics=repo.topics,
+ clone_url=repo.clone_url,
)
for repo in self.client_installation.get_repos()
if topics is None or any(topic in repo.topics for topic in topics)
@@ -217,7 +230,7 @@ def create_issue(self, repo_id: str, title: str, description: str, labels: list[
issue = repo.create_issue(title=title, body=description, labels=labels or [])
return issue.number
- def create_issue_emoji(self, repo_id: str, issue_id: int, emoji: Emoji, note_id: str | None = None):
+ def create_issue_emoji(self, repo_id: str, issue_id: int, emoji: Emoji, note_id: int | None = None):
"""
Create an emoji in a note of an issue.
"""
@@ -226,7 +239,7 @@ def create_issue_emoji(self, repo_id: str, issue_id: int, emoji: Emoji, note_id:
issue = self.client.get_repo(repo_id, lazy=True).get_issue(issue_id)
if note_id is not None:
- issue.get_comment(int(note_id)).create_reaction(emoji_reaction)
+ issue.get_comment(note_id).create_reaction(emoji_reaction)
else:
issue.create_reaction(emoji_reaction)
@@ -393,7 +406,7 @@ def create_merge_request_comment(
to_return = pr.create_issue_comment(body).id
return to_return
- def create_merge_request_note_emoji(self, repo_id: str, merge_request_id: int, emoji: Emoji, note_id: str):
+ def create_merge_request_note_emoji(self, repo_id: str, merge_request_id: int, emoji: Emoji, note_id: int):
"""
Create an emoji on a note of a merge request.
@@ -408,9 +421,9 @@ def create_merge_request_note_emoji(self, repo_id: str, merge_request_id: int, e
pr = self.client.get_repo(repo_id, lazy=True).get_pull(merge_request_id)
try:
- pr.get_review_comment(int(note_id)).create_reaction(emoji_reaction)
+ pr.get_review_comment(note_id).create_reaction(emoji_reaction)
except UnknownObjectException:
- pr.get_issue_comment(int(note_id)).create_reaction(emoji_reaction)
+ pr.get_issue_comment(note_id).create_reaction(emoji_reaction)
def get_issue_related_merge_requests(
self, repo_id: str, issue_id: int, assignee_id: int | None = None, label: str | None = None
@@ -755,12 +768,16 @@ def load_repo(self, repository: Repository, sha: str) -> Iterator[Repo]:
with tempfile.TemporaryDirectory(prefix=f"{safe_slug(repository.slug)}-{repository.pk}") as tmpdir:
logger.debug("Cloning repository %s to %s", repository.clone_url, tmpdir)
# the access token is valid for 1 hour
- access_token = self._integration.get_access_token(self.client_installation.id)
+ access_token = self._integration.get_access_token(
+ self.client_installation.id, permissions={"contents": "write"}
+ )
parsed = urlparse(repository.clone_url)
clone_url = f"{parsed.scheme}://oauth2:{access_token.token}@{parsed.netloc}{parsed.path}"
clone_dir = Path(tmpdir) / "repo"
clone_dir.mkdir(exist_ok=True)
- yield Repo.clone_from(clone_url, clone_dir, branch=sha)
+ repo = Repo.clone_from(clone_url, clone_dir, branch=sha)
+ self._configure_commit_identity(repo)
+ yield repo
def update_or_create_merge_request(
self,
@@ -770,10 +787,11 @@ def update_or_create_merge_request(
title: str,
description: str,
labels: list[str] | None = None,
- assignee_id: int | None = None,
+ assignee_id: str | int | None = None,
+ as_draft: bool = False,
) -> MergeRequest:
"""
- Update or create a merge request.
+ Create a merge request or update an existing one if it already exists based on the source and target branches.
Args:
repo_id: The repository ID.
@@ -783,6 +801,7 @@ def update_or_create_merge_request(
description: The description.
labels: The labels.
assignee_id: The assignee ID.
+ as_draft: Whether to create the merge request as a draft.
Returns:
The merge request data.
@@ -790,9 +809,12 @@ def update_or_create_merge_request(
repo = self.client.get_repo(repo_id, lazy=True)
try:
- pr = repo.create_pull(base=target_branch, head=source_branch, title=title, body=description)
+ pr = repo.create_pull(base=target_branch, head=source_branch, title=title, body=description, draft=as_draft)
except GithubException as e:
- if e.status != 409:
+ if e.status != 422 or not any(
+ error.get("message").startswith("A pull request already exists for")
+ for error in e.data.get("errors", [])
+ ):
raise e
prs = repo.get_pulls(base=target_branch, head=source_branch, state="open")
@@ -803,7 +825,12 @@ def update_or_create_merge_request(
pr = prs[0]
pr.edit(title=title, body=description)
- if labels is not None:
+ if pr.draft and not as_draft:
+ pr.mark_ready_for_review()
+ elif not pr.draft and as_draft:
+ pr.convert_to_draft()
+
+ if labels is not None and not any(label.name in labels for label in pr.labels):
pr.add_to_labels(*labels)
if assignee_id and not any(assignee.id == assignee_id for assignee in pr.assignees):
@@ -820,6 +847,70 @@ def update_or_create_merge_request(
web_url=pr.html_url,
sha=pr.head.sha,
author=User(id=pr.user.id, username=pr.user.login, name=pr.user.name),
+ draft=pr.draft,
+ )
+
+ def update_merge_request(
+ self,
+ repo_id: str,
+ merge_request_id: int,
+ as_draft: bool | None = None,
+ title: str | None = None,
+ description: str | None = None,
+ labels: list[str] | None = None,
+ assignee_id: str | int | None = None,
+ ) -> MergeRequest:
+ """
+ Update an existing merge request if it has changes.
+
+ Args:
+ repo_id: The repository ID.
+ merge_request_id: The merge request ID.
+ as_draft: Whether to set the merge request as a draft.
+ title: The title of the merge request.
+ description: The description of the merge request.
+ labels: The labels of the merge request.
+ assignee_id: The assignee ID of the merge request.
+
+ Returns:
+ The merge request.
+ """
+ repo = self.client.get_repo(repo_id, lazy=True)
+ pr = repo.get_pull(merge_request_id)
+
+ if as_draft is not None and pr.draft and not as_draft:
+ pr.mark_ready_for_review()
+ elif as_draft is not None and not pr.draft and as_draft:
+ pr.convert_to_draft()
+
+ edit_fields = {}
+ if title is not None:
+ edit_fields["title"] = title
+
+ if description is not None:
+ edit_fields["body"] = description
+
+ if edit_fields:
+ pr.edit(**edit_fields)
+
+ if labels is not None and not any(label.name in labels for label in pr.labels):
+ pr.add_to_labels(*labels)
+
+ if assignee_id is not None and not any(assignee.id == assignee_id for assignee in pr.assignees):
+ pr.add_to_assignees(assignee_id)
+
+ return MergeRequest(
+ repo_id=repo_id,
+ merge_request_id=pr.number,
+ source_branch=pr.head.ref,
+ target_branch=pr.base.ref,
+ title=pr.title,
+ description=pr.body or "",
+ labels=[label.name for label in pr.labels],
+ web_url=pr.html_url,
+ sha=pr.head.sha,
+ author=User(id=pr.user.id, username=pr.user.login, name=pr.user.name),
+ draft=pr.draft,
)
def _serialize_comments(
diff --git a/daiv/codebase/clients/gitlab/api/callbacks.py b/daiv/codebase/clients/gitlab/api/callbacks.py
index af80b500..8a2f179a 100644
--- a/daiv/codebase/clients/gitlab/api/callbacks.py
+++ b/daiv/codebase/clients/gitlab/api/callbacks.py
@@ -3,11 +3,10 @@
from typing import Any, Literal
from codebase.api.callbacks import BaseCallback
-from codebase.base import NoteType
from codebase.clients import RepoClient
from codebase.clients.base import Emoji
from codebase.repo_config import RepositoryConfig
-from codebase.tasks import address_issue_task, address_mr_comments_task, address_mr_review_task
+from codebase.tasks import address_issue_task, address_mr_comments_task
from codebase.utils import note_mentions_daiv
from core.constants import BOT_AUTO_LABEL, BOT_LABEL, BOT_MAX_LABEL
@@ -112,7 +111,7 @@ def accept_callback(self) -> bool:
):
return False
- return bool(self._is_issue_comment or self._is_merge_request_review)
+ return bool(self._is_issue_comment or self._is_merge_request_comment)
async def process_callback(self):
"""
@@ -120,7 +119,7 @@ async def process_callback(self):
GitLab Note Webhook is called multiple times, one per note/discussion.
"""
- if self._is_issue_comment:
+ if self.issue and self._is_issue_comment:
self._client.create_issue_emoji(
self.project.path_with_namespace, self.issue.iid, Emoji.EYES, self.object_attributes.id
)
@@ -130,30 +129,24 @@ async def process_callback(self):
mention_comment_id=self.object_attributes.discussion_id,
)
- elif self._is_merge_request_review:
- if self.object_attributes.type in [NoteType.DIFF_NOTE, NoteType.DISCUSSION_NOTE]:
- await address_mr_review_task.aenqueue(
- repo_id=self.project.path_with_namespace,
- merge_request_id=self.merge_request.iid,
- merge_request_source_branch=self.merge_request.source_branch,
- )
- elif self.object_attributes.type is None: # This is a comment note.
- await address_mr_comments_task.aenqueue(
- repo_id=self.project.path_with_namespace,
- merge_request_id=self.merge_request.iid,
- merge_request_source_branch=self.merge_request.source_branch,
- mention_comment_id=self.object_attributes.discussion_id,
- )
- else:
- logger.warning("Unsupported note type: %s", self.object_attributes.type)
+ elif self.merge_request and self._is_merge_request_comment:
+ self._client.create_merge_request_note_emoji(
+ self.project.path_with_namespace, self.merge_request.iid, Emoji.EYES, self.object_attributes.id
+ )
+ await address_mr_comments_task.aenqueue(
+ repo_id=self.project.path_with_namespace,
+ merge_request_id=self.merge_request.iid,
+ mention_comment_id=self.object_attributes.discussion_id,
+ )
@cached_property
- def _is_merge_request_review(self) -> bool:
+ def _is_merge_request_comment(self) -> bool:
"""
Accept the webhook if the note is a merge request comment that mentions DAIV.
"""
return bool(
self._repo_config.code_review.enabled
+ and self.object_attributes.type is None # This is a comment note.
and self.object_attributes.noteable_type == NoteableType.MERGE_REQUEST
and self.object_attributes.action in [NoteAction.CREATE, NoteAction.UPDATE]
and self.merge_request
@@ -190,13 +183,10 @@ def accept_callback(self) -> bool:
"""
Accept the webhook if the push is to the default branch or to any branch with MR created.
"""
- return self.ref.endswith(self.project.default_branch)
+ return bool(self.project.default_branch and self.ref.endswith(self.project.default_branch))
async def process_callback(self):
"""
- Process the push webhook to update the codebase index and invalidate the cache for the
- repository configurations.
+ Process the push webhook to invalidate the cache for the repository configurations.
"""
- if self.project.default_branch and self.ref.endswith(self.project.default_branch):
- # Invalidate the cache for the repository configurations, they could have changed.
- RepositoryConfig.invalidate_cache(self.project.path_with_namespace)
+ RepositoryConfig.invalidate_cache(self.project.path_with_namespace)
diff --git a/daiv/codebase/clients/gitlab/client.py b/daiv/codebase/clients/gitlab/client.py
index ec11ad22..aa3774f6 100644
--- a/daiv/codebase/clients/gitlab/client.py
+++ b/daiv/codebase/clients/gitlab/client.py
@@ -34,7 +34,7 @@
if TYPE_CHECKING:
from collections.abc import Iterator
- from gitlab.v4.objects import ProjectHook
+ from gitlab.v4.objects import ProjectHook, ProjectMergeRequest
from codebase.clients.base import Emoji
@@ -59,6 +59,30 @@ def __init__(self, auth_token: str, url: str | None = None):
user_agent=USER_AGENT,
)
+ def _get_commit_email(self) -> str:
+ """
+ Resolve the best available email for commit attribution.
+ """
+ self.client.auth()
+ if user := self.client.user:
+ for email_attr in ("commit_email", "public_email", "email"):
+ if (email := getattr(user, email_attr, None)) and isinstance(email, str) and email.strip():
+ return email
+ return f"{user.username}@users.noreply.gitlab.com"
+
+ return f"{self.current_user.username}@users.noreply.gitlab.com"
+
+ def _configure_commit_identity(self, repo: Repo) -> None:
+ """
+ Configure repository-local git identity to match the GitLab bot user.
+ """
+ bot_username = self.current_user.username
+ bot_email = self._get_commit_email()
+
+ with repo.config_writer() as writer:
+ writer.set_value("user", "name", bot_username)
+ writer.set_value("user", "email", bot_email)
+
@property
def _codebase_url(self) -> str:
return self.client.url
@@ -295,7 +319,8 @@ def update_or_create_merge_request(
title: str,
description: str,
labels: list[str] | None = None,
- assignee_id: int | None = None,
+ assignee_id: str | int | None = None,
+ as_draft: bool = False,
) -> MergeRequest:
"""
Create a merge request in a repository or update an existing one if it already exists.
@@ -308,6 +333,7 @@ def update_or_create_merge_request(
description: The description of the merge request.
labels: The list of labels.
assignee_id: The assignee ID.
+ as_draft: Whether to create the merge request as a draft.
Returns:
The merge request data.
@@ -321,23 +347,9 @@ def update_or_create_merge_request(
"description": description,
"labels": labels or [],
"assignee_id": assignee_id,
+ "work_in_progress": as_draft,
})
- return MergeRequest(
- repo_id=repo_id,
- merge_request_id=cast("int", merge_request.get_id()),
- source_branch=merge_request.source_branch,
- target_branch=merge_request.target_branch,
- title=merge_request.title,
- description=merge_request.description,
- labels=merge_request.labels,
- web_url=merge_request.web_url,
- sha=merge_request.sha,
- author=User(
- id=merge_request.author.get("id"),
- username=merge_request.author.get("username"),
- name=merge_request.author.get("name"),
- ),
- )
+ return self._serialize_merge_request(repo_id, merge_request)
except GitlabCreateError as e:
if e.response_code != 409:
raise e
@@ -349,25 +361,81 @@ def update_or_create_merge_request(
merge_request.description = description
merge_request.labels = labels or []
merge_request.assignee_id = assignee_id
+ merge_request.work_in_progress = as_draft
merge_request.save()
- return MergeRequest(
- repo_id=repo_id,
- merge_request_id=cast("int", merge_request.get_id()),
- source_branch=merge_request.source_branch,
- target_branch=merge_request.target_branch,
- title=merge_request.title,
- description=merge_request.description,
- labels=merge_request.labels,
- web_url=merge_request.web_url,
- sha=merge_request.sha,
- author=User(
- id=merge_request.author.get("id"),
- username=merge_request.author.get("username"),
- name=merge_request.author.get("name"),
- ),
- )
+ return self._serialize_merge_request(repo_id, merge_request)
raise e
+ def update_merge_request(
+ self,
+ repo_id: str,
+ merge_request_id: int,
+ as_draft: bool | None = None,
+ title: str | None = None,
+ description: str | None = None,
+ labels: list[str] | None = None,
+ assignee_id: str | int | None = None,
+ ) -> MergeRequest:
+ """
+ Update an existing merge request if it has changes.
+
+ Args:
+ repo_id: The repository ID.
+ merge_request_id: The merge request ID.
+ as_draft: Whether to set the merge request as a draft.
+ title: The title of the merge request.
+ description: The description of the merge request.
+ labels: The labels of the merge request.
+ assignee_id: The assignee ID of the merge request.
+
+ Returns:
+ The merge request.
+ """
+ project = self.client.projects.get(repo_id, lazy=True)
+ merge_request = project.mergerequests.get(merge_request_id)
+
+ has_changes = False
+ if as_draft is not None and merge_request.work_in_progress != as_draft:
+ merge_request.work_in_progress = as_draft
+ has_changes = True
+ if title is not None and merge_request.title != title:
+ merge_request.title = title
+ has_changes = True
+ if description is not None and merge_request.description != description:
+ merge_request.description = description
+ has_changes = True
+ if labels is not None and any(label.title not in labels for label in merge_request.labels):
+ mr_label_titles = [label.title for label in merge_request.labels]
+ merge_request.labels += [label for label in labels if label not in mr_label_titles]
+ has_changes = True
+ if assignee_id is not None and merge_request.assignee_id != assignee_id:
+ merge_request.assignee_id = assignee_id
+ has_changes = True
+
+ if has_changes:
+ merge_request.save()
+
+ return self._serialize_merge_request(repo_id, merge_request)
+
+ def _serialize_merge_request(self, repo_id: str, merge_request: ProjectMergeRequest) -> MergeRequest:
+ return MergeRequest(
+ repo_id=repo_id,
+ merge_request_id=cast("int", merge_request.get_id()),
+ source_branch=merge_request.source_branch,
+ target_branch=merge_request.target_branch,
+ title=merge_request.title,
+ description=merge_request.description,
+ labels=merge_request.labels,
+ web_url=merge_request.web_url,
+ sha=merge_request.sha,
+ author=User(
+ id=merge_request.author.get("id"),
+ username=merge_request.author.get("username"),
+ name=merge_request.author.get("name"),
+ ),
+ draft=merge_request.work_in_progress,
+ )
+
@contextmanager
def load_repo(self, repository: Repository, sha: str) -> Iterator[Repo]:
"""
@@ -390,7 +458,9 @@ def load_repo(self, repository: Repository, sha: str) -> Iterator[Repo]:
clone_dir = Path(tmpdir) / "repo"
clone_dir.mkdir(exist_ok=True)
- yield Repo.clone_from(clone_url, clone_dir, branch=sha)
+ repo = Repo.clone_from(clone_url, clone_dir, branch=sha)
+ self._configure_commit_identity(repo)
+ yield repo
def get_issue(self, repo_id: str, issue_id: int) -> Issue:
"""
@@ -445,7 +515,7 @@ def create_issue(self, repo_id: str, title: str, description: str, labels: list[
issue = project.issues.create(issue_data)
return issue.iid
- def create_issue_emoji(self, repo_id: str, issue_id: int, emoji: Emoji, note_id: str | None = None):
+ def create_issue_emoji(self, repo_id: str, issue_id: int, emoji: Emoji, note_id: int | None = None):
"""
Create an emoji direclty on an issue or on an issue note.
"""
@@ -770,7 +840,7 @@ def create_merge_request_comment(
return to_return
- def create_merge_request_note_emoji(self, repo_id: str, merge_request_id: int, emoji: Emoji, note_id: str):
+ def create_merge_request_note_emoji(self, repo_id: str, merge_request_id: int, emoji: Emoji, note_id: int):
"""
Create an emoji in a note of a merge request.
diff --git a/daiv/codebase/clients/swe.py b/daiv/codebase/clients/swe.py
index 73ece450..31d441e0 100644
--- a/daiv/codebase/clients/swe.py
+++ b/daiv/codebase/clients/swe.py
@@ -236,7 +236,21 @@ def update_or_create_merge_request(
title: str,
description: str,
labels: list[str] | None = None,
- assignee_id: int | None = None,
+ assignee_id: str | int | None = None,
+ as_draft: bool = False,
+ ) -> MergeRequest:
+ """Not supported for SWE client."""
+ raise NotImplementedError("SWERepoClient does not support merge requests")
+
+ def update_merge_request(
+ self,
+ repo_id: str,
+ merge_request_id: int,
+ as_draft: bool | None = None,
+ title: str | None = None,
+ description: str | None = None,
+ labels: list[str] | None = None,
+ assignee_id: str | int | None = None,
) -> MergeRequest:
"""Not supported for SWE client."""
raise NotImplementedError("SWERepoClient does not support merge requests")
@@ -273,7 +287,7 @@ def update_issue_comment(
"""Not supported for SWE client."""
raise NotImplementedError("SWERepoClient does not support issue comments")
- def create_issue_emoji(self, repo_id: str, issue_id: int, emoji: Emoji, note_id: str):
+ def create_issue_emoji(self, repo_id: str, issue_id: int, emoji: Emoji, note_id: int | None = None):
"""Not supported for SWE client."""
raise NotImplementedError("SWERepoClient does not support issue emojis")
@@ -311,7 +325,7 @@ def get_merge_request_comment(self, repo_id: str, merge_request_id: int, comment
"""Not supported for SWE client."""
raise NotImplementedError("SWERepoClient does not support merge request comments")
- def create_merge_request_note_emoji(self, repo_id: str, merge_request_id: int, emoji: Emoji, note_id: str):
+ def create_merge_request_note_emoji(self, repo_id: str, merge_request_id: int, emoji: Emoji, note_id: int):
"""Not supported for SWE client."""
raise NotImplementedError("SWERepoClient does not support merge request emojis")
diff --git a/daiv/codebase/managers/issue_addressor.py b/daiv/codebase/managers/issue_addressor.py
index 2e2a6d7a..2f63d55c 100644
--- a/daiv/codebase/managers/issue_addressor.py
+++ b/daiv/codebase/managers/issue_addressor.py
@@ -9,19 +9,22 @@
from langgraph.checkpoint.postgres.aio import AsyncPostgresSaver
from automation.agent.graph import create_daiv_agent
+from automation.agent.publishers import GitChangePublisher
from automation.agent.utils import extract_text_content, get_daiv_agent_kwargs
-from codebase.base import GitPlatform, Issue
+from codebase.base import GitPlatform
+from core.constants import BOT_NAME
from core.utils import generate_uuid
from .base import BaseManager
if TYPE_CHECKING:
+ from codebase.base import Issue
from codebase.context import RuntimeCtx
logger = logging.getLogger("daiv.managers")
-PLAN_ISSUE_PROMPT = "Present a plan to address this issue and wait for approval before executing it."
+PLAN_ISSUE_PROMPT = "Present a detailed plan to address this issue and wait for approval before executing it."
ADDRESS_ISSUE_PROMPT = "Address this issue."
@@ -33,7 +36,7 @@ class IssueAddressorManager(BaseManager):
def __init__(self, *, issue: Issue, mention_comment_id: str | None = None, runtime_ctx: RuntimeCtx):
super().__init__(runtime_ctx=runtime_ctx)
self.issue = issue
- self.thread_id = generate_uuid(f"{self.ctx.repo_id}:{self.ctx.scope.value}/{issue.iid}")
+ self.thread_id = generate_uuid(f"{self.ctx.repo_id}:{self.ctx.scope}/{issue.iid}")
self.mention_comment_id = mention_comment_id
@classmethod
@@ -79,68 +82,69 @@ async def _address_issue(self):
store=self.store,
**get_daiv_agent_kwargs(model_config=self.ctx.config.models.agent, use_max=self.issue.has_max_label()),
)
-
- result = await daiv_agent.ainvoke(
- {"messages": messages},
- config=RunnableConfig(
- tags=[daiv_agent.get_name(), self.client.git_platform.value],
- metadata={
- "author": self.issue.author.username,
- "issue_id": self.issue.iid,
- "scope": self.ctx.scope,
- "use_max_model": self.issue.has_max_label(),
- },
- configurable={"thread_id": self.thread_id},
- ),
- context=self.ctx,
+ agent_config = RunnableConfig(
+ configurable={"thread_id": self.thread_id},
+ tags=[daiv_agent.get_name(), self.client.git_platform.value],
+ metadata={
+ "author": self.issue.author.username,
+ "issue_id": self.issue.iid,
+ "labels": [label.lower() for label in self.issue.labels],
+ "scope": self.ctx.scope,
+ },
)
-
- response = result and extract_text_content(result["messages"][-1].content)
-
- if merge_request_id := result.get("merge_request_id"):
- self._add_issue_addressed_note(merge_request_id, response)
+ try:
+ result = await daiv_agent.ainvoke({"messages": messages}, config=agent_config, context=self.ctx)
+ except Exception:
+ snapshot = await daiv_agent.aget_state(config=agent_config)
+
+ # If an unexpected error occurs while addressing the issue, a draft merge request is created to avoid
+ # losing the changes made by the agent.
+ merge_request = snapshot.values.get("merge_request")
+ publisher = GitChangePublisher(self.ctx)
+ merge_request = await publisher.publish(
+ merge_request=merge_request, as_draft=(merge_request is None or merge_request.draft)
+ )
+
+ # If the draft merge request is created successfully, we update the state to reflect the new MR.
+ if merge_request:
+ await daiv_agent.aupdate_state(config=agent_config, values={"merge_request": merge_request})
+
+ self._add_unable_to_address_issue_note(draft_published=bool(merge_request))
else:
- self._create_or_update_comment(response)
-
- def _add_unable_to_address_issue_note(self):
+ if (
+ result
+ and "messages" in result
+ and result["messages"]
+ and (response_text := extract_text_content(result["messages"][-1].content).strip())
+ ):
+ self._leave_comment(response_text)
+ else:
+ self._add_unable_to_address_issue_note()
+
+ def _add_unable_to_address_issue_note(self, *, draft_published: bool = False):
"""
Add a note to the issue to inform the user that the response could not be generated.
"""
- self._create_or_update_comment(
- render_to_string("codebase/issue_unable_address_issue.txt", {"bot_username": self.ctx.bot_username}),
- reply_to_id=self.mention_comment_id,
- )
-
- def _add_issue_addressed_note(self, merge_request_id: int, message: str):
- """
- Add a note to the issue to inform the user that the issue has been addressed.
- """
- self._create_or_update_comment(
+ self._leave_comment(
render_to_string(
- "codebase/issue_addressed.txt",
+ "codebase/unable_address_issue.txt",
{
- "source_repo_id": self.ctx.repo_id,
- "merge_request_id": merge_request_id,
- # GitHub already shows the merge request link right after the comment.
- "show_merge_request_link": self.client.git_platform == GitPlatform.GITLAB,
- "message": message,
+ "bot_name": BOT_NAME,
+ "bot_username": self.ctx.bot_username,
+ "draft_published": draft_published,
+ "is_gitlab": self.ctx.git_platform == GitPlatform.GITLAB,
},
- )
+ ),
+ # GitHub doesn't support replying to comments, so we need to provide a reply_to_id only for GitLab.
+ reply_to_id=self.mention_comment_id if self.ctx.git_platform == GitPlatform.GITLAB else None,
)
- def _create_or_update_comment(self, note_message: str, reply_to_id: str | None = None):
+ def _leave_comment(self, body: str, reply_to_id: str | None = None):
"""
- Create or update a comment on the issue.
+ Leave a comment on the issue.
Args:
- note_message: The message to add to the comment.
- reply_to_id: The ID of the comment to reply to.
+ body: The body of the comment.
+ reply_to_id: The ID of the comment to reply to. This is not supported for GitHub.
"""
- if self._comment_id is not None:
- self.client.update_issue_comment(
- self.ctx.repo_id, self.issue.iid, self._comment_id, note_message, reply_to_id=reply_to_id
- )
- else:
- self._comment_id = self.client.create_issue_comment(
- self.ctx.repo_id, self.issue.iid, note_message, reply_to_id=reply_to_id
- )
+ return self.client.create_issue_comment(self.ctx.repo_id, self.issue.iid, body, reply_to_id=reply_to_id)
diff --git a/daiv/codebase/managers/review_addressor.py b/daiv/codebase/managers/review_addressor.py
index cab0f867..6171ed9d 100644
--- a/daiv/codebase/managers/review_addressor.py
+++ b/daiv/codebase/managers/review_addressor.py
@@ -4,6 +4,7 @@
from typing import TYPE_CHECKING
from django.conf import settings as django_settings
+from django.template.loader import render_to_string
from langchain_core.messages import HumanMessage
from langchain_core.runnables import RunnableConfig
@@ -12,8 +13,10 @@
from unidiff.patch import Line
from automation.agent.graph import create_daiv_agent
+from automation.agent.publishers import GitChangePublisher
from automation.agent.utils import extract_text_content, get_daiv_agent_kwargs
-from codebase.base import MergeRequest, Note, NoteDiffPosition, NoteDiffPositionType, NotePositionType
+from codebase.base import GitPlatform, MergeRequest, Note, NoteDiffPosition, NoteDiffPositionType, NotePositionType
+from core.constants import BOT_NAME
from core.utils import generate_uuid
from .base import BaseManager
@@ -191,9 +194,7 @@ def __init__(self, *, merge_request: MergeRequest, mention_comment_id: str, runt
super().__init__(runtime_ctx=runtime_ctx)
self.merge_request = merge_request
self.mention_comment_id = mention_comment_id
- self.thread_id = generate_uuid(
- f"{self.ctx.repo_id}:{self.ctx.scope.value}/{self.merge_request.merge_request_id}"
- )
+ self.thread_id = generate_uuid(f"{self.ctx.repo_id}:{self.ctx.scope}/{self.merge_request.merge_request_id}")
@classmethod
async def address_comments(cls, *, merge_request: MergeRequest, mention_comment_id: str, runtime_ctx: RuntimeCtx):
@@ -207,7 +208,11 @@ async def address_comments(cls, *, merge_request: MergeRequest, mention_comment_
"""
manager = cls(merge_request=merge_request, mention_comment_id=mention_comment_id, runtime_ctx=runtime_ctx)
- await manager._address_comments()
+ try:
+ await manager._address_comments()
+ except Exception:
+ logger.exception("Error addressing comments for merge request: %d", merge_request.merge_request_id)
+ manager._add_unable_to_address_review_note()
async def _address_comments(self):
"""
@@ -224,28 +229,86 @@ async def _address_comments(self):
store=self.store,
**get_daiv_agent_kwargs(model_config=self.ctx.config.models.agent),
)
+ agent_config = RunnableConfig(
+ configurable={"thread_id": self.thread_id},
+ tags=[daiv_agent.get_name(), self.client.git_platform.value],
+ metadata={
+ "author": self.merge_request.author.username,
+ "merge_request_id": self.merge_request.merge_request_id,
+ "scope": self.ctx.scope,
+ },
+ )
+
+ try:
+ result = await daiv_agent.ainvoke(
+ {
+ "messages": [
+ HumanMessage(
+ name=mention_comment.notes[0].author.username,
+ id=mention_comment.notes[0].id,
+ content=mention_comment.notes[0].body,
+ )
+ ]
+ },
+ config=agent_config,
+ context=self.ctx,
+ )
+ except Exception:
+ snapshot = await daiv_agent.aget_state(config=agent_config)
+
+ # If an unexpected error occurs while addressing the review, a draft merge request is created to avoid
+ # losing the changes made by the agent.
+ publisher = GitChangePublisher(self.ctx)
+ merge_request = snapshot.values.get("merge_request")
+ merge_request = await publisher.publish(
+ merge_request=merge_request, as_draft=(merge_request is None or merge_request.draft)
+ )
+
+ if merge_request:
+ await daiv_agent.aupdate_state(config=agent_config, values={"merge_request": merge_request})
+
+ self._add_unable_to_address_review_note(draft_published=bool(merge_request))
+ else:
+ if (
+ result
+ and "messages" in result
+ and result["messages"]
+ and (response_text := extract_text_content(result["messages"][-1].content).strip())
+ ):
+ self._leave_comment(response_text)
+ else:
+ self._add_unable_to_address_review_note()
+
- result = await daiv_agent.ainvoke(
+ def _add_unable_to_address_review_note(self, *, draft_published: bool = False):
+ """
+ Add a note to the merge request to inform the user that the review could not be addressed.
+
+ Args:
+ draft_published: Whether the draft merge request was published to the repository.
+ """
+ self._leave_comment(
+ render_to_string(
+ "codebase/unable_address_review.txt",
{
- "messages": [
- HumanMessage(
- name=mention_comment.notes[0].author.username,
- id=mention_comment.notes[0].id,
- content=mention_comment.notes[0].body,
- )
- ]
+ "bot_name": BOT_NAME,
+ "bot_username": self.ctx.bot_username,
+ "draft_published": draft_published,
+ "is_gitlab": self.ctx.git_platform == GitPlatform.GITLAB,
},
- config=RunnableConfig(
- tags=[daiv_agent.get_name(), self.client.git_platform.value],
- metadata={
- "author": self.merge_request.author.username,
- "merge_request_id": self.merge_request.merge_request_id,
- "scope": self.ctx.scope,
- },
- configurable={"thread_id": self.thread_id},
- ),
- context=self.ctx,
- )
+ ),
+ # GitHub doesn't support replying to comments, so we need to provide a reply_to_id only for GitLab.
+ reply_to_id=self.mention_comment_id if self.ctx.git_platform == GitPlatform.GITLAB else None,
+ )
- response = result and extract_text_content(result["messages"][-1].content)
- self.client.create_merge_request_comment(self.ctx.repo_id, self.merge_request.merge_request_id, response)
+ def _leave_comment(self, body: str, reply_to_id: str | None = None):
+ """
+ Create a comment on the merge request.
+
+ Args:
+ body: The body of the comment.
+ reply_to_id: The ID of the comment to reply to.
+ """
+ return self.client.create_merge_request_comment(
+ self.ctx.repo_id, self.merge_request.merge_request_id, body, reply_to_id=reply_to_id
+ )
diff --git a/daiv/codebase/repo_config.py b/daiv/codebase/repo_config.py
index 4464e876..6399b6a3 100644
--- a/daiv/codebase/repo_config.py
+++ b/daiv/codebase/repo_config.py
@@ -13,7 +13,7 @@
from automation.agent.base import ThinkingLevel # noqa: TC001
from automation.agent.conf import settings as deepagent_settings
from automation.agent.constants import ModelName # noqa: TC001
-from automation.agent.pr_describer.conf import settings as pr_describer_settings
+from automation.agent.diff_to_metadata.conf import settings as diff_to_metadata_settings
from core.conf import settings as core_settings
if TYPE_CHECKING:
@@ -112,14 +112,24 @@ class AgentModelConfig(BaseModel):
)
-class PRDescriberModelConfig(BaseModel):
+class DiffToMetadataModelConfig(BaseModel):
"""
- Model configuration for the PR describer agent.
+ Model configuration for the diff to metadata agent.
"""
model: ModelName | str = Field(
- default=pr_describer_settings.MODEL_NAME,
- description="Model name for PR description. Overrides PR_DESCRIBER_MODEL_NAME environment variable.",
+ default=diff_to_metadata_settings.MODEL_NAME,
+ description=(
+ "Model name to transform a diff into metadata for a pull request/commit message. "
+ "Overrides DIFF_TO_METADATA_MODEL_NAME environment variable."
+ ),
+ )
+ fallback_model: ModelName | str = Field(
+ default=diff_to_metadata_settings.FALLBACK_MODEL_NAME,
+ description=(
+ "Fallback model name for diff to metadata. "
+ "Overrides DIFF_TO_METADATA_FALLBACK_MODEL_NAME environment variable."
+ ),
)
@@ -129,8 +139,8 @@ class Models(BaseModel):
"""
agent: AgentModelConfig = Field(default_factory=AgentModelConfig, description="Configuration for the DAIV agent.")
- pr_describer: PRDescriberModelConfig = Field(
- default_factory=PRDescriberModelConfig, description="Configuration for the PR describer agent."
+ diff_to_metadata: DiffToMetadataModelConfig = Field(
+ default_factory=DiffToMetadataModelConfig, description="Configuration for the diff to metadata agent."
)
diff --git a/daiv/codebase/tasks.py b/daiv/codebase/tasks.py
index e0468b0b..1621c546 100644
--- a/daiv/codebase/tasks.py
+++ b/daiv/codebase/tasks.py
@@ -20,7 +20,7 @@
@cron("*/5 * * * *") # every 5 minute
@task
- async def setup_webhooks_cron_task():
+ def setup_webhooks_cron_task():
"""
Setup webhooks for all repositories every 5 minutes.
"""
@@ -48,20 +48,6 @@ async def address_issue_task(
)
-@task(dedup=True)
-async def address_mr_review_task(repo_id: str, merge_request_id: int, merge_request_source_branch: str):
- """
- Address a review feedback by applying the changes described or answering questions about the codebase.
-
- Args:
- repo_id (str): The repository id.
- merge_request_id (int): The merge request id.
- merge_request_source_branch (str): The merge request source branch.
- """
- # async with set_runtime_ctx(repo_id, ref=merge_request_source_branch, scope="merge_request") as runtime_ctx:
- # await ReviewAddressorManager.process_review_comments(merge_request_id=merge_request_id, runtime_ctx=runtime_ctx) # noqa: E501 ERA001
-
-
@task(dedup=True)
async def address_mr_comments_task(repo_id: str, merge_request_id: int, mention_comment_id: str):
"""
diff --git a/daiv/codebase/templates/codebase/issue_addressed.txt b/daiv/codebase/templates/codebase/issue_addressed.txt
deleted file mode 100644
index 390b0549..00000000
--- a/daiv/codebase/templates/codebase/issue_addressed.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-{% load l10n %}### ✅ ***Process Completed***
-
-{{ message }}
-
----
-
-💡 **Next Steps:**
-
-- **Review Changes:** Please review the changes in the merge request.
-- **Follow Instructions:** Follow the instructions provided in the merge request description.
-{% if show_merge_request_link %}
-🔗 {{ source_repo_id }}!{{ merge_request_id|unlocalize }}+
-{% endif %}
-
diff --git a/daiv/codebase/templates/codebase/issue_merge_request.txt b/daiv/codebase/templates/codebase/issue_merge_request.txt
index 79e1aac5..117d8f26 100644
--- a/daiv/codebase/templates/codebase/issue_merge_request.txt
+++ b/daiv/codebase/templates/codebase/issue_merge_request.txt
@@ -9,10 +9,4 @@ Closes: {{ source_repo_id }}#{{ issue_id|unlocalize }}{% if is_gitlab %}+{% endi
---
#### 💡 Instructions for the reviewer:
- - 💬 {{ bot_name }} will address comments for you in the following ways:
- - Open a discussion on the merge request overview and mention @{{ bot_username }};
- - Leave comments on the files and mention @{{ bot_username }};
- - Leave comments on specific lines of the file and mention @{{ bot_username }}.
-{% if issue_id %}
- - 📝 Edit the original issue ({{ source_repo_id }}#{{ issue_id|unlocalize }}) to get {{ bot_name }} to recreate the MR from scratch.
-{% endif %}
\ No newline at end of file
+ - 💬 Just leave comments/reviews mentioning @{{ bot_username }} to get {{ bot_name }} to address them for you.
\ No newline at end of file
diff --git a/daiv/codebase/templates/codebase/issue_unable_address_issue.txt b/daiv/codebase/templates/codebase/issue_unable_address_issue.txt
deleted file mode 100644
index 17e0d4b3..00000000
--- a/daiv/codebase/templates/codebase/issue_unable_address_issue.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-### ⚠️ Sorry, something went wrong while responding to your comment.
-
----
-
-💡 **Next Steps:**
-
-- 🔄 Leave a new comment mentioning @{{ bot_username }} to trigger a fresh response attempt.
diff --git a/daiv/codebase/templates/codebase/unable_address_issue.txt b/daiv/codebase/templates/codebase/unable_address_issue.txt
new file mode 100644
index 00000000..d53b244a
--- /dev/null
+++ b/daiv/codebase/templates/codebase/unable_address_issue.txt
@@ -0,0 +1,6 @@
+An unexpected error occurred while working on this issue.
+{% if draft_published %}
+To avoid losing progress, {{ bot_name }} created a draft {% if is_gitlab %}merge request{% else %}pull request{% endif %}.
+{% endif %}
+
+To have {{ bot_name }} resume, leave a new comment mentioning @{{ bot_username }}.
\ No newline at end of file
diff --git a/daiv/codebase/templates/codebase/unable_address_review.txt b/daiv/codebase/templates/codebase/unable_address_review.txt
new file mode 100644
index 00000000..6c116a7f
--- /dev/null
+++ b/daiv/codebase/templates/codebase/unable_address_review.txt
@@ -0,0 +1,6 @@
+An unexpected error occurred while working on this {% if is_gitlab %}merge request{% else %}pull request{% endif %}.
+{% if draft_published %}
+To avoid losing progress, {{ bot_name }} committed the changes done so far.
+{% endif %}
+
+To have {{ bot_name }} resume, leave a new comment mentioning @{{ bot_username }}.
\ No newline at end of file
diff --git a/daiv/codebase/utils.py b/daiv/codebase/utils.py
index 76296926..5d6accd5 100644
--- a/daiv/codebase/utils.py
+++ b/daiv/codebase/utils.py
@@ -137,7 +137,7 @@ def __init__(self, repo: Repo):
"""
self.repo = repo
- def get_diff(self) -> str:
+ def get_diff(self, ref: str = "HEAD") -> str:
"""
Get the diff of the repository's including unstaged changes.
@@ -145,7 +145,7 @@ def get_diff(self) -> str:
The diff of the repository.
"""
try:
- diff = self.repo.git.diff("HEAD")
+ diff = self.repo.git.diff(ref)
except GitCommandError:
# No commits yet, get diff of all files
diff = self.repo.git.diff("--cached", "--no-prefix")
@@ -234,7 +234,14 @@ def commit_and_push_changes(
self.repo.git.add("-A")
self.repo.index.commit(commit_message if not skip_ci else f"[skip ci] {commit_message}")
- self.repo.remotes.origin.push(branch_name, force=override_commits)
+ try:
+ self.repo.remotes.origin.push(branch_name, force=override_commits)
+ except GitCommandError as e:
+ if _is_push_auth_error(e):
+ raise GitPushPermissionError(
+ "Failed to push changes to the remote repository due to authentication or permission issues."
+ ) from e
+ raise
return branch_name
def checkout(self, branch_name: str):
@@ -315,3 +322,28 @@ def apply_patch(self, patch: str):
finally:
with contextlib.suppress(OSError):
Path(tmp_path).unlink()
+
+
+class GitPushPermissionError(RuntimeError):
+ """
+ Raised when pushing changes fails due to authentication or permission issues.
+ """
+
+
+def _is_push_auth_error(error: GitCommandError) -> bool:
+ """
+ Check if a git push error is likely caused by authentication or permission issues.
+ """
+ error_text = str(error).lower()
+ return any(
+ marker in error_text
+ for marker in (
+ "returned error: 403",
+ "authentication failed",
+ "permission denied",
+ "access denied",
+ "http basic: access denied",
+ "could not read username",
+ "not authorized",
+ )
+ )
diff --git a/daiv/core/tasks.py b/daiv/core/tasks.py
index 977aa7fb..b96d8f9c 100644
--- a/daiv/core/tasks.py
+++ b/daiv/core/tasks.py
@@ -6,7 +6,7 @@
@cron("0 0 * * *") # every day at midnight
@task
-async def prune_db_task_results_cron_task():
+def prune_db_task_results_cron_task():
"""
Prune database task results every day at midnight.
"""
diff --git a/daiv/slash_commands/actions/help.py b/daiv/slash_commands/actions/help.py
index 44aa77c5..d5187863 100644
--- a/daiv/slash_commands/actions/help.py
+++ b/daiv/slash_commands/actions/help.py
@@ -61,4 +61,4 @@ def _format_skill_help(self, skill: SkillMetadata) -> str:
Returns:
The help message for the skill.
"""
- return f" * `/{skill['name']}` - {skill['description']}"
+ return f"| `/{skill['name']}` | {skill['description']} |"
diff --git a/daiv/slash_commands/base.py b/daiv/slash_commands/base.py
index 87812af1..02d89a74 100644
--- a/daiv/slash_commands/base.py
+++ b/daiv/slash_commands/base.py
@@ -37,7 +37,7 @@ def help(self) -> str:
"""
Get the help message for the command.
"""
- return f" * `{self.command_to_invoke}` - {self.description}"
+ return f"| `{self.command_to_invoke}` | {self.description} |"
@abstractmethod
async def execute_for_agent(self, *, args: str, **kwargs) -> str:
diff --git a/daiv/slash_commands/templates/slash_commands/slash_commands_help.txt b/daiv/slash_commands/templates/slash_commands/slash_commands_help.txt
index 8cca6350..875ce585 100644
--- a/daiv/slash_commands/templates/slash_commands/slash_commands_help.txt
+++ b/daiv/slash_commands/templates/slash_commands/slash_commands_help.txt
@@ -1,5 +1,7 @@
### 🤖 {{ bot_name }} Slash Commands
+| Command | Description |
+|---------|-------------|
{% for action in actions %}{{ action }}
{% endfor -%}
diff --git a/docker/local/app/config.env b/docker/local/app/config.env
index cd813f47..f8d7a50e 100644
--- a/docker/local/app/config.env
+++ b/docker/local/app/config.env
@@ -27,5 +27,4 @@ LANGCHAIN_PROJECT=default
DAIV_SANDBOX_NETWORK_ENABLED=True
# AUTOMATION
-# PLAN_AND_EXECUTE_PLANNING_MODEL_NAME=openrouter:qwen/qwen3-max
-# PLAN_AND_EXECUTE_EXECUTION_MODEL_NAME=openrouter:qwen/qwen3-coder-plus
+DAIV_AGENT_MODEL_NAME=openrouter:z-ai/glm-5 # moonshotai/kimi-k2.5
diff --git a/docs/configuration/env-config.md b/docs/configuration/env-config.md
index 52b7622a..7c827d8d 100644
--- a/docs/configuration/env-config.md
+++ b/docs/configuration/env-config.md
@@ -260,20 +260,12 @@ All the default models where chosen to be the most effective models. You can cha
| `PLAN_AND_EXECUTE_MAX_EXECUTION_MODEL_NAME` | Model for execution tasks when `daiv-max` label is present. | `openrouter:anthropic/claude-opus-4.5` |
| `PLAN_AND_EXECUTE_MAX_EXECUTION_THINKING_LEVEL` | Thinking level for execution tasks when `daiv-max` label is present. | `high` |
-### Review Addressor
-
-| Variable | Description | Default |
-|----------------------------------------|----------------------------------------------------------|--------------------|
-| `REVIEW_ADDRESSOR_REVIEW_COMMENT_MODEL_NAME` | Model for review assessment. | `openrouter:openai/gpt-4.1-mini` |
-| `REVIEW_ADDRESSOR_REPLY_MODEL_NAME` | Model for reply to comments/questions. | `openrouter:anthropic/claude-haiku-4.5` |
-| `REVIEW_ADDRESSOR_REPLY_TEMPERATURE` | Temperature for the reply model. | `0.2` |
-| `REVIEW_ADDRESSOR_RECURSION_LIMIT` | Recursion limit for the agent to address all the review comments in a single run. | `100` |
-
-### Pull Request Describer
+### Diff to Metadata
| Variable | Description | Default |
|-------------------------------|----------------------------------------------|------------------------|
-| `PR_DESCRIBER_MODEL_NAME` | Model for PR describer. | `openrouter:openai/gpt-4.1-mini` |
+| `DIFF_TO_METADATA_MODEL_NAME` | Model for diff to metadata. | `openrouter:anthropic/claude-haiku-4.5` |
+| `DIFF_TO_METADATA_FALLBACK_MODEL_NAME` | Fallback model for diff to metadata. | `openrouter:openai/gpt-4.1-mini` |
### Codebase Chat
diff --git a/docs/configuration/yaml-config.md b/docs/configuration/yaml-config.md
index af3b7564..4772b5cc 100644
--- a/docs/configuration/yaml-config.md
+++ b/docs/configuration/yaml-config.md
@@ -242,20 +242,21 @@ Configure models for the codebase chat agent.
| `model` | `str \| null` | `null` | Model name for codebase chat. Overrides `CODEBASE_CHAT_MODEL_NAME` environment variable. |
| `temperature` | `float \| null` | `null` | Temperature for codebase chat. Overrides `CODEBASE_CHAT_TEMPERATURE` environment variable. |
-### PR Describer Agent
+### Diff to Metadata Agent
-Configure models for the PR describer agent.
+Configure models for the diff to metadata agent.
| Option | Type | Default | Description |
|-------------------|-----------------------------------------------------------|---------|-----------------------------------------------------------------------------|
-| `model` | `str \| null` | `null` | Model name for PR description. Overrides `PR_DESCRIBER_MODEL_NAME` environment variable. |
+| `model` | `str \| null` | `null` | Model name to transform a diff into metadata for a pull request/commit message. Overrides `DIFF_TO_METADATA_MODEL_NAME` environment variable. |
+| `fallback_model` | `str \| null` | `null` | Fallback model name for diff to metadata. Overrides `DIFF_TO_METADATA_FALLBACK_MODEL_NAME` environment variable. |
**Example configuration:**
```yaml
models:
- pr_describer:
+ diff_to_metadata:
model: "openrouter:openai/gpt-4.1-mini"
```
!!! note
- The PR describer agent automatically reads your `AGENTS.md` context file to understand branch naming and commit message conventions. See [Branch Naming and Commit Message Conventions](#branch-naming-and-commit-message-conventions) for details.
+ The diff to metadata agent automatically reads your `AGENTS.md` context file to understand branch naming and commit message conventions. See [Branch Naming and Commit Message Conventions](#branch-naming-and-commit-message-conventions) for details.
diff --git a/pyproject.toml b/pyproject.toml
index 3c9b1f6c..7351a31c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,10 @@
[project]
name = "daiv"
version = "1.1.0"
-description = "Async SWE agents seamlessly integrated on your git platform to automate code issues implementation, reviews, and pipeline repairs."
+description = """\
+ Async SWE agents seamlessly integrated on your git platform to automate code issues implementation, reviews, and \
+ pipeline repairs.\
+ """
readme = "README.md"
license = { file = "LICENSE" }
maintainers = [
@@ -17,7 +20,7 @@ classifiers = [
]
dependencies = [
"ddgs==9.10.0",
- "deepagents==0.3.12",
+ "deepagents==0.4.1",
"django==6.0.2",
"django-crontask==1.1.3",
"django-extensions==4.1.0",
@@ -30,12 +33,12 @@ dependencies = [
"httpx==0.28.1",
"ipython==9.10.0",
"jinja2==3.1.6",
- "langchain[anthropic,community,google-genai,openai]==1.2.8",
+ "langchain[anthropic,community,google-genai,openai]==1.2.10",
"langchain-mcp-adapters==0.2.1",
"langgraph==1.0.8",
"langgraph-checkpoint-postgres==3.0.4",
- "langsmith[pytest]==0.6.9",
- "langsmith-fetch>=0.3.1",
+ "langsmith[pytest]==0.7.1",
+ "langsmith-fetch==0.3.1",
"markdownify==1.2.2",
"openevals==0.1.3",
"prompt-toolkit==3.0.52",
@@ -51,29 +54,27 @@ dependencies = [
"unidiff==0.7.5",
"uvicorn[standard]==0.40.0",
]
-
urls.changelog = "https://github.com/srtab/daiv/blob/main/CHANGELOG.md"
-
urls.issues = "https://github.com/srtab/daiv/issues"
urls.source = "https://github.com/srtab/daiv"
[dependency-groups]
dev = [
- "coverage==7.13.3",
+ "coverage==7.13.4",
"datasets==4.5.0",
"prek==0.3.2",
- "pyproject-fmt==2.12.1",
+ "pyproject-fmt==2.15.2",
"pytest==9.0.2",
"pytest-asyncio==1.3.0",
"pytest-cov==7.0.0",
"pytest-django==4.11.1",
- "pytest-env==1.2.0",
+ "pytest-env==1.3.2",
"pytest-httpx==0.36.0",
"pytest-mock==3.15.1",
"pytest-xdist==3.8.0",
"python-dotenv==1.2.1",
"ruff==0.15.0",
- "ty==0.0.15",
+ "ty==0.0.16",
"types-pyyaml==6.0.12.20250915",
"watchdog==6.0.0",
]
@@ -93,7 +94,6 @@ extend-exclude = [ "*/node_modules/*", "*/static/*" ]
force-exclude = true
preview = true
unsafe-fixes = true
-
show-fixes = true
format.skip-magic-trailing-comma = true
lint.select = [
@@ -158,13 +158,13 @@ lint.isort.split-on-trailing-comma = false
[tool.pyproject-fmt]
keep_full_version = true
-[tool.pytest.ini_options]
-DJANGO_SETTINGS_MODULE = "daiv.settings.test"
-pythonpath = "daiv"
-python_files = "test_*.py"
-python_classes = "Test *Test"
-testpaths = [ "tests" ]
-norecursedirs = [
+[tool.pytest]
+ini_options.DJANGO_SETTINGS_MODULE = "daiv.settings.test"
+ini_options.pythonpath = "daiv"
+ini_options.python_files = "test_*.py"
+ini_options.python_classes = "Test *Test"
+ini_options.testpaths = [ "tests" ]
+ini_options.norecursedirs = [
".venv",
"data",
"docker",
@@ -173,16 +173,15 @@ norecursedirs = [
"node_modules",
"static",
]
-addopts = "--cov --cov-config=.coveragerc -p no:warnings"
-asyncio_mode = "auto"
-asyncio_default_fixture_loop_scope = "function"
+ini_options.addopts = "--cov --cov-config=.coveragerc -p no:warnings"
+ini_options.asyncio_mode = "auto"
+ini_options.asyncio_default_fixture_loop_scope = "function"
[tool.pytest_env]
DJANGO_SETTINGS_MODULE = "daiv.settings.test"
DJANGO_SECRET_KEY = "not-so-secret"
ANTHROPIC_API_KEY = "anthropic-api-key"
OPENAI_API_KEY = "openai-api-key"
-OPENROUTER_API_KEY = "openrouter-api-key"
CODEBASE_GITLAB_WEBHOOK_SECRET = "gitlab-webhook-secret"
NINJA_SKIP_REGISTRY = true
diff --git a/tests/conftest.py b/tests/conftest.py
deleted file mode 100644
index c0355935..00000000
--- a/tests/conftest.py
+++ /dev/null
@@ -1,102 +0,0 @@
-from contextlib import contextmanager
-from pathlib import Path
-from tempfile import TemporaryDirectory
-from unittest.mock import AsyncMock, Mock, patch
-
-import pytest
-from pydantic import SecretStr
-
-from codebase.base import GitPlatform, MergeRequest, Repository, User
-from codebase.clients import RepoClient
-from codebase.conf import settings
-
-
-@pytest.fixture(autouse=True)
-def mock_settings():
- """Fixture to mock the secret token for testing."""
- with (
- patch.object(settings, "GITLAB_WEBHOOK_SECRET", SecretStr("test_secret")),
- patch.object(settings, "GITHUB_WEBHOOK_SECRET", SecretStr("test_secret")),
- patch.object(settings, "CLIENT", GitPlatform.GITLAB),
- ):
- yield settings
-
-
-@pytest.fixture(autouse=True)
-def mock_repo_client():
- """
- Global fixture that automatically mocks RepoClient.create_instance for all tests.
-
- This fixture returns a comprehensive mock that implements all the abstract methods
- of RepoClient to prevent AttributeError during tests.
- """
- with patch.object(RepoClient, "create_instance") as mock_create_instance:
- # Create a mock that implements the RepoClient interface
- mock_client = Mock(spec=RepoClient)
-
- # Set up commonly used properties and methods with reasonable defaults
- mock_client.current_user = User(id=1, username="test-user", name="Test User")
- mock_client.codebase_url = "https://test-repo.com"
- mock_client.git_platform = GitPlatform.GITLAB
-
- # Mock basic repository operations
- mock_client.get_repository.return_value = Repository(
- pk=1,
- slug="test/test-repo",
- name="test-repo",
- default_branch="main",
- git_platform=GitPlatform.GITLAB,
- clone_url="https://test-repo.com",
- )
- mock_client.list_repositories.return_value = []
- mock_client.get_repository_file.return_value = None
- mock_client.get_project_uploaded_file = AsyncMock(return_value=b"image content")
- mock_client.repository_branch_exists.return_value = True
-
- # Mock repository modification operations
- mock_client.set_repository_webhooks.return_value = True
-
- # Mock issue operations
- mock_client.get_issue.return_value = Mock()
- mock_client.create_issue_comment.return_value = None
- mock_client.update_issue_comment.return_value = None
- mock_client.create_issue_emoji.return_value = None
- mock_client.get_issue_comment.return_value = Mock()
- mock_client.get_issue_related_merge_requests.return_value = []
-
- # Mock merge request operations
- merge_request = MergeRequest(
- repo_id="test/test-repo",
- merge_request_id=1,
- source_branch="feature/test",
- target_branch="main",
- title="Test merge request",
- description="Test merge request description",
- labels=["daiv"],
- web_url="https://test-repo.com/merge_requests/1",
- sha="testsha",
- author=mock_client.current_user,
- )
- mock_client.update_or_create_merge_request.return_value = merge_request
- mock_client.get_merge_request.return_value = merge_request
- mock_client.get_merge_request_latest_pipelines.return_value = []
- mock_client.get_merge_request_review_comments.return_value = []
- mock_client.get_merge_request_comments.return_value = []
- mock_client.get_merge_request_comment.return_value = Mock()
- mock_client.create_merge_request_comment.return_value = None
- mock_client.create_merge_request_note_emoji.return_value = None
- mock_client.mark_merge_request_comment_as_resolved.return_value = None
- mock_client.job_log_trace.return_value = "trace"
-
- # Mock load_repo to return a temporary directory context manager
- @contextmanager
- def mock_load_repo(repo_id: str, sha: str):
- with TemporaryDirectory() as temp_dir:
- yield Path(temp_dir)
-
- mock_client.load_repo = mock_load_repo
-
- # Set up the create_instance mock to return our comprehensive mock
- mock_create_instance.return_value = mock_client
-
- yield mock_client
diff --git a/tests/integration_tests/__init__.py b/tests/integration_tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/evals/data/pr_describer/cases.jsonl b/tests/integration_tests/data/diff_to_metadata/cases.jsonl
similarity index 100%
rename from evals/data/pr_describer/cases.jsonl
rename to tests/integration_tests/data/diff_to_metadata/cases.jsonl
diff --git a/evals/data/pr_describer/context_files/005-AGENTS.md b/tests/integration_tests/data/diff_to_metadata/context_files/005-AGENTS.md
similarity index 100%
rename from evals/data/pr_describer/context_files/005-AGENTS.md
rename to tests/integration_tests/data/diff_to_metadata/context_files/005-AGENTS.md
diff --git a/evals/data/pr_describer/context_files/006-AGENTS.md b/tests/integration_tests/data/diff_to_metadata/context_files/006-AGENTS.md
similarity index 100%
rename from evals/data/pr_describer/context_files/006-AGENTS.md
rename to tests/integration_tests/data/diff_to_metadata/context_files/006-AGENTS.md
diff --git a/evals/data/pr_describer/diffs/001-feat-health.diff b/tests/integration_tests/data/diff_to_metadata/diffs/001-feat-health.diff
similarity index 100%
rename from evals/data/pr_describer/diffs/001-feat-health.diff
rename to tests/integration_tests/data/diff_to_metadata/diffs/001-feat-health.diff
diff --git a/evals/data/pr_describer/diffs/002-fix-parse-number.diff b/tests/integration_tests/data/diff_to_metadata/diffs/002-fix-parse-number.diff
similarity index 100%
rename from evals/data/pr_describer/diffs/002-fix-parse-number.diff
rename to tests/integration_tests/data/diff_to_metadata/diffs/002-fix-parse-number.diff
diff --git a/evals/data/pr_describer/diffs/003-refactor-order-service.diff b/tests/integration_tests/data/diff_to_metadata/diffs/003-refactor-order-service.diff
similarity index 100%
rename from evals/data/pr_describer/diffs/003-refactor-order-service.diff
rename to tests/integration_tests/data/diff_to_metadata/diffs/003-refactor-order-service.diff
diff --git a/evals/data/pr_describer/diffs/004-docs-readme.diff b/tests/integration_tests/data/diff_to_metadata/diffs/004-docs-readme.diff
similarity index 100%
rename from evals/data/pr_describer/diffs/004-docs-readme.diff
rename to tests/integration_tests/data/diff_to_metadata/diffs/004-docs-readme.diff
diff --git a/evals/data/pr_describer/diffs/005-fix-email-regex.diff b/tests/integration_tests/data/diff_to_metadata/diffs/005-fix-email-regex.diff
similarity index 100%
rename from evals/data/pr_describer/diffs/005-fix-email-regex.diff
rename to tests/integration_tests/data/diff_to_metadata/diffs/005-fix-email-regex.diff
diff --git a/evals/data/pr_describer/diffs/006-fix-date-utc.diff b/tests/integration_tests/data/diff_to_metadata/diffs/006-fix-date-utc.diff
similarity index 100%
rename from evals/data/pr_describer/diffs/006-fix-date-utc.diff
rename to tests/integration_tests/data/diff_to_metadata/diffs/006-fix-date-utc.diff
diff --git a/evals/data/pr_describer/extras/005-context.md b/tests/integration_tests/data/diff_to_metadata/extras/005-context.md
similarity index 100%
rename from evals/data/pr_describer/extras/005-context.md
rename to tests/integration_tests/data/diff_to_metadata/extras/005-context.md
diff --git a/evals/data/pr_describer/extras/006-context.md b/tests/integration_tests/data/diff_to_metadata/extras/006-context.md
similarity index 100%
rename from evals/data/pr_describer/extras/006-context.md
rename to tests/integration_tests/data/diff_to_metadata/extras/006-context.md
diff --git a/evals/evaluators.py b/tests/integration_tests/evaluators.py
similarity index 76%
rename from evals/evaluators.py
rename to tests/integration_tests/evaluators.py
index ece231c7..a6f47297 100644
--- a/evals/evaluators.py
+++ b/tests/integration_tests/evaluators.py
@@ -7,5 +7,5 @@
correctness_evaluator = create_llm_as_judge(
prompt=CORRECTNESS_PROMPT,
feedback_key="correctness",
- judge=BaseAgent.get_model(model=ModelName.GPT_5_1_CODEX, thinking_level=ThinkingLevel.HIGH),
+ judge=BaseAgent.get_model(model=ModelName.GPT_5_2_CODEX, thinking_level=ThinkingLevel.MEDIUM),
)
diff --git a/evals/test_pr_describer.py b/tests/integration_tests/test_diff_to_metadata.py
similarity index 67%
rename from evals/test_pr_describer.py
rename to tests/integration_tests/test_diff_to_metadata.py
index d77acb1c..a650f393 100644
--- a/evals/test_pr_describer.py
+++ b/tests/integration_tests/test_diff_to_metadata.py
@@ -4,14 +4,13 @@
import pytest
from langsmith import testing as t
-from automation.agent.constants import ModelName
-from automation.agent.pr_describer.graph import create_pr_describer_agent
+from automation.agent.diff_to_metadata.graph import create_changes_metadata_graph
from codebase.base import GitPlatform, Scope
from codebase.context import set_runtime_ctx
from .evaluators import correctness_evaluator
-DATA_DIR = Path(__file__).parent / "data" / "pr_describer"
+DATA_DIR = Path(__file__).parent / "data" / "diff_to_metadata"
def _read_text(rel_path: str) -> str:
@@ -41,7 +40,7 @@ def load_cases() -> list[pytest.param]:
@pytest.mark.langsmith(output_keys=["reference_outputs"])
@pytest.mark.parametrize("inputs,reference_outputs", load_cases())
-async def test_pr_describer(inputs, reference_outputs):
+async def test_diff_to_metadata(inputs, reference_outputs):
t.log_inputs(inputs)
t.log_reference_outputs(reference_outputs)
@@ -53,16 +52,18 @@ async def test_pr_describer(inputs, reference_outputs):
(agent_path / ctx.config.context_file_name).write_text(inputs.pop("context_file_content"))
else:
(agent_path / ctx.config.context_file_name).unlink()
- pr_describer = create_pr_describer_agent(model=ModelName.GPT_4_1_MINI, ctx=ctx)
- outputs = await pr_describer.ainvoke(inputs)
+ changes_metadata_graph = create_changes_metadata_graph(ctx=ctx)
+ outputs = await changes_metadata_graph.ainvoke(inputs)
+ outputs = {
+ "pr_metadata": outputs["pr_metadata"].model_dump(mode="json") if "pr_metadata" in outputs else None,
+ "commit_message": outputs["commit_message"].model_dump(mode="json")
+ if "commit_message" in outputs
+ else None,
+ }
- assert "structured_response" in outputs, outputs
+    assert outputs["pr_metadata"] is not None or outputs["commit_message"] is not None, outputs
- t.log_outputs(outputs["structured_response"].model_dump(mode="json"))
+ t.log_outputs(outputs)
- result = correctness_evaluator(
- inputs=inputs,
- outputs=outputs["structured_response"].model_dump(mode="json"),
- reference_outputs=reference_outputs,
- )
+ result = correctness_evaluator(inputs=inputs, outputs=outputs, reference_outputs=reference_outputs)
assert result["score"] is True, result["comment"]
diff --git a/tests/unit_tests/automation/agent/middlewares/test_git.py b/tests/unit_tests/automation/agent/middlewares/test_git.py
new file mode 100644
index 00000000..564f9911
--- /dev/null
+++ b/tests/unit_tests/automation/agent/middlewares/test_git.py
@@ -0,0 +1,26 @@
+from unittest.mock import AsyncMock, Mock, patch
+
+from automation.agent.middlewares.git import GitMiddleware
+from codebase.base import Scope
+from codebase.utils import GitPushPermissionError
+
+
+def _make_runtime(*, scope: Scope = Scope.ISSUE) -> Mock:
+ runtime = Mock()
+ runtime.context = Mock()
+ runtime.context.scope = scope
+ return runtime
+
+
+class TestGitMiddleware:
+ async def test_aafter_agent_returns_none_when_publish_fails_with_push_permission_error(self):
+ middleware = GitMiddleware()
+ runtime = _make_runtime()
+
+ with patch(
+ "automation.agent.middlewares.git.GitChangePublisher.publish",
+ new=AsyncMock(side_effect=GitPushPermissionError("No permission to push")),
+ ):
+ result = await middleware.aafter_agent(state={"merge_request": None}, runtime=runtime)
+
+ assert result is None
diff --git a/tests/unit_tests/automation/agent/middlewares/test_skills.py b/tests/unit_tests/automation/agent/middlewares/test_skills.py
index 36e57631..379b64ba 100644
--- a/tests/unit_tests/automation/agent/middlewares/test_skills.py
+++ b/tests/unit_tests/automation/agent/middlewares/test_skills.py
@@ -7,6 +7,7 @@
from langchain_core.messages import AIMessage, HumanMessage, ToolMessage
from langgraph.types import Command
+from automation.agent.constants import AGENTS_SKILLS_PATH, CLAUDE_CODE_SKILLS_PATH, CURSOR_SKILLS_PATH, SKILLS_SOURCES
from automation.agent.middlewares.skills import SkillsMiddleware
from codebase.base import Scope
from slash_commands.base import SlashCommand
@@ -44,18 +45,6 @@ class TestSkillsMiddleware:
Test the SkillsMiddleware class.
"""
- async def test_skips_when_skills_metadata_present(self, tmp_path: Path):
- from deepagents.backends.filesystem import FilesystemBackend
-
- repo_name = "repoX"
- backend = FilesystemBackend(root_dir=tmp_path, virtual_mode=True)
- middleware = SkillsMiddleware(backend=backend, sources=[f"/{repo_name}/.daiv/skills"])
- runtime = _make_runtime(repo_working_dir=str(tmp_path / repo_name))
-
- result = await middleware.abefore_agent({"skills_metadata": []}, runtime, Mock())
- assert result is None
- assert not (tmp_path / repo_name / ".daiv" / "skills").exists()
-
async def test_copies_builtin_skills_then_delegates_to_super(self, tmp_path: Path):
from deepagents.backends.filesystem import FilesystemBackend
@@ -72,7 +61,7 @@ async def test_copies_builtin_skills_then_delegates_to_super(self, tmp_path: Pat
(builtin / "__pycache__" / "ignored.txt").write_text("ignored\n")
backend = FilesystemBackend(root_dir=tmp_path, virtual_mode=True)
- middleware = SkillsMiddleware(backend=backend, sources=[f"/{repo_name}/.daiv/skills"])
+ middleware = SkillsMiddleware(backend=backend, sources=[f"/{repo_name}/{AGENTS_SKILLS_PATH}"])
runtime = _make_runtime(repo_working_dir=str(tmp_path / repo_name))
with patch("automation.agent.middlewares.skills.BUILTIN_SKILLS_PATH", builtin):
@@ -83,8 +72,8 @@ async def test_copies_builtin_skills_then_delegates_to_super(self, tmp_path: Pat
assert set(skills) == {"skill-one", "skill-two"}
assert skills["skill-one"]["description"] == "does one"
assert skills["skill-two"]["description"] == "does two"
- assert skills["skill-one"]["path"] == f"/{repo_name}/.daiv/skills/skill-one/SKILL.md"
- assert skills["skill-two"]["path"] == f"/{repo_name}/.daiv/skills/skill-two/SKILL.md"
+ assert skills["skill-one"]["path"] == f"/{repo_name}/{AGENTS_SKILLS_PATH}/skill-one/SKILL.md"
+ assert skills["skill-two"]["path"] == f"/{repo_name}/{AGENTS_SKILLS_PATH}/skill-two/SKILL.md"
assert skills["skill-one"]["metadata"]["is_builtin"] is True
assert skills["skill-two"]["metadata"]["is_builtin"] is True
@@ -98,7 +87,7 @@ async def test_marks_builtin_metadata_and_clears_custom(self, tmp_path: Path):
(builtin / "skill-one" / "SKILL.md").write_text(_make_skill_md(name="skill-one", description="does one"))
(builtin / "skill-two" / "SKILL.md").write_text(_make_skill_md(name="skill-two", description="does two"))
- custom_skill = tmp_path / repo_name / ".daiv" / "skills" / "custom-skill"
+ custom_skill = tmp_path / repo_name / AGENTS_SKILLS_PATH / "custom-skill"
custom_skill.mkdir(parents=True)
(custom_skill / "SKILL.md").write_text(
_make_skill_md(
@@ -107,7 +96,7 @@ async def test_marks_builtin_metadata_and_clears_custom(self, tmp_path: Path):
)
backend = FilesystemBackend(root_dir=tmp_path, virtual_mode=True)
- middleware = SkillsMiddleware(backend=backend, sources=[f"/{repo_name}/.daiv/skills"])
+ middleware = SkillsMiddleware(backend=backend, sources=[f"/{repo_name}/{AGENTS_SKILLS_PATH}"])
runtime = _make_runtime(repo_working_dir=str(tmp_path / repo_name))
with patch("automation.agent.middlewares.skills.BUILTIN_SKILLS_PATH", builtin):
@@ -136,12 +125,12 @@ async def test_uploads_missing_files_and_gitignore(self, tmp_path: Path):
(builtin / "__pycache__" / "ignored.txt").write_text("ignored\n")
backend = FilesystemBackend(root_dir=tmp_path, virtual_mode=True)
- middleware = SkillsMiddleware(backend=backend, sources=[f"/{repo_name}/.daiv/skills"])
+ middleware = SkillsMiddleware(backend=backend, sources=[f"/{repo_name}/{AGENTS_SKILLS_PATH}"])
with patch("automation.agent.middlewares.skills.BUILTIN_SKILLS_PATH", builtin):
await middleware._copy_builtin_skills(agent_path=tmp_path / repo_name)
- project_skills = tmp_path / repo_name / ".daiv" / "skills"
+ project_skills = tmp_path / repo_name / AGENTS_SKILLS_PATH
assert (project_skills / "skill-one" / "SKILL.md").read_text() == _make_skill_md(
name="skill-one", description="does one"
)
@@ -164,9 +153,9 @@ async def test_skips_file_upload_when_dest_exists(self, tmp_path: Path):
(builtin / "skill-one" / "helpers" / "util.py").write_text("print('one')\n")
backend = FilesystemBackend(root_dir=tmp_path, virtual_mode=True)
- middleware = SkillsMiddleware(backend=backend, sources=[f"/{repo_name}/.daiv/skills"])
+ middleware = SkillsMiddleware(backend=backend, sources=[f"/{repo_name}/{AGENTS_SKILLS_PATH}"])
- project_skill_md = tmp_path / repo_name / ".daiv" / "skills" / "skill-one" / "SKILL.md"
+ project_skill_md = tmp_path / repo_name / AGENTS_SKILLS_PATH / "skill-one" / "SKILL.md"
project_skill_md.parent.mkdir(parents=True, exist_ok=True)
project_skill_md.write_text(_make_skill_md(name="skill-one", description="existing"))
@@ -187,10 +176,10 @@ def fake_exists(self: Path) -> bool:
# SKILL.md should not be overwritten, but other files should still be uploaded.
assert project_skill_md.read_text() == _make_skill_md(name="skill-one", description="existing")
- assert (tmp_path / repo_name / ".daiv" / "skills" / "skill-one" / "helpers" / "util.py").read_text() == (
+ assert (tmp_path / repo_name / AGENTS_SKILLS_PATH / "skill-one" / "helpers" / "util.py").read_text() == (
"print('one')\n"
)
- assert (tmp_path / repo_name / ".daiv" / "skills" / "skill-one" / ".gitignore").read_text() == "*"
+ assert (tmp_path / repo_name / AGENTS_SKILLS_PATH / "skill-one" / ".gitignore").read_text() == "*"
async def test_raises_when_backend_returns_error(self, tmp_path: Path):
builtin = tmp_path / "builtin_skills"
@@ -439,23 +428,20 @@ async def test_discovers_skills_from_multiple_sources(self, tmp_path: Path):
(builtin / "skill-one" / "SKILL.md").write_text(_make_skill_md(name="skill-one", description="builtin one"))
# Create skills in different source directories
- daiv_skill = tmp_path / repo_name / ".daiv" / "skills" / "daiv-skill"
+ daiv_skill = tmp_path / repo_name / AGENTS_SKILLS_PATH / "daiv-skill"
daiv_skill.mkdir(parents=True)
(daiv_skill / "SKILL.md").write_text(_make_skill_md(name="daiv-skill", description="from daiv"))
- agents_skill = tmp_path / repo_name / ".agents" / "skills" / "agents-skill"
+ agents_skill = tmp_path / repo_name / CLAUDE_CODE_SKILLS_PATH / "agents-skill"
agents_skill.mkdir(parents=True)
(agents_skill / "SKILL.md").write_text(_make_skill_md(name="agents-skill", description="from agents"))
- cursor_skill = tmp_path / repo_name / ".cursor" / "skills" / "cursor-skill"
+ cursor_skill = tmp_path / repo_name / CURSOR_SKILLS_PATH / "cursor-skill"
cursor_skill.mkdir(parents=True)
(cursor_skill / "SKILL.md").write_text(_make_skill_md(name="cursor-skill", description="from cursor"))
backend = FilesystemBackend(root_dir=tmp_path, virtual_mode=True)
- middleware = SkillsMiddleware(
- backend=backend,
- sources=[f"/{repo_name}/.daiv/skills", f"/{repo_name}/.agents/skills", f"/{repo_name}/.cursor/skills"],
- )
+ middleware = SkillsMiddleware(backend=backend, sources=[f"/{repo_name}/{source}" for source in SKILLS_SOURCES])
runtime = _make_runtime(repo_working_dir=str(tmp_path / repo_name))
with patch("automation.agent.middlewares.skills.BUILTIN_SKILLS_PATH", builtin):
diff --git a/tests/unit_tests/automation/agent/test_subagents.py b/tests/unit_tests/automation/agent/test_subagents.py
index d7c8ea49..2b0c54b3 100644
--- a/tests/unit_tests/automation/agent/test_subagents.py
+++ b/tests/unit_tests/automation/agent/test_subagents.py
@@ -3,6 +3,7 @@
from unittest.mock import Mock, patch
import pytest
+from pydantic import SecretStr
from automation.agent.middlewares.sandbox import SandboxMiddleware
from automation.agent.middlewares.web_search import WebSearchMiddleware
@@ -63,6 +64,7 @@ def test_excludes_sandbox_when_disabled(self, mock_backend, mock_runtime_ctx):
assert not any(isinstance(m, SandboxMiddleware) for m in result["middleware"])
+@patch("automation.agent.base.settings.OPENROUTER_API_KEY", SecretStr("test-key"))
class TestExploreSubagent:
"""Tests for create_explore_subagent."""
diff --git a/tests/unit_tests/codebase/clients/github/test_client.py b/tests/unit_tests/codebase/clients/github/test_client.py
index 89a390ad..dcb09ef5 100644
--- a/tests/unit_tests/codebase/clients/github/test_client.py
+++ b/tests/unit_tests/codebase/clients/github/test_client.py
@@ -5,6 +5,7 @@
from github.IssueComment import IssueComment
from github.PullRequestComment import PullRequestComment
+from codebase.base import GitPlatform, Repository, User
from codebase.clients.base import Emoji
from codebase.clients.github.client import GitHubClient
@@ -48,18 +49,6 @@ async def test_get_project_uploaded_file_failure(self, mock_download, github_cli
assert result is None
mock_download.assert_called_once_with(url, headers={"Authorization": "Bearer test-token-123"})
- @patch("codebase.clients.github.client.async_download_url")
- async def test_get_project_uploaded_file_uses_bearer_token(self, mock_download, github_client):
- """Test that the method uses Bearer token authentication."""
- mock_download.return_value = b"content"
-
- url = "https://github.com/user-attachments/assets/test.jpg"
- await github_client.get_project_uploaded_file("owner/repo", url)
-
- # Verify the Authorization header format
- call_args = mock_download.call_args
- assert call_args[1]["headers"]["Authorization"] == "Bearer test-token-123"
-
def test_create_issue_emoji_converts_note_id_to_int(self, github_client):
"""Test that create_issue_emoji converts string note_id to int."""
mock_repo = Mock()
@@ -71,109 +60,39 @@ def test_create_issue_emoji_converts_note_id_to_int(self, github_client):
mock_issue.get_comment.return_value = mock_comment
# Pass note_id as a string
- github_client.create_issue_emoji("owner/repo", 123, Emoji.THUMBSUP, "3645723306")
+ github_client.create_issue_emoji("owner/repo", 123, Emoji.THUMBSUP, 3645723306)
# Verify that get_comment was called with an integer
mock_issue.get_comment.assert_called_once_with(3645723306)
mock_comment.create_reaction.assert_called_once_with("+1")
- def test_has_issue_reaction_returns_true_when_reaction_exists(self, github_client):
- """Test that has_issue_reaction returns True when the current user has reacted with the specified emoji."""
- from codebase.base import User
-
- mock_repo = Mock()
- mock_issue = Mock()
- mock_reaction1 = Mock()
- mock_reaction2 = Mock()
- mock_user1 = Mock()
- mock_user2 = Mock()
-
- # Mock current_user as a cached_property
- type(github_client).current_user = User(id=123, username="daiv", name="DAIV")
-
- # Set up reactions
- mock_user1.id = 456 # Different user
- mock_user2.id = 123 # Current user
- mock_reaction1.content = "eyes"
- mock_reaction1.user = mock_user1
- mock_reaction2.content = "eyes"
- mock_reaction2.user = mock_user2
-
- github_client.client.get_repo.return_value = mock_repo
- mock_repo.get_issue.return_value = mock_issue
- mock_issue.get_reactions.return_value = [mock_reaction1, mock_reaction2]
-
- result = github_client.has_issue_reaction("owner/repo", 123, Emoji.EYES)
-
- assert result is True
-
- def test_has_issue_reaction_returns_false_when_reaction_not_exists(self, github_client):
- """Test that has_issue_reaction returns False when the current user has not reacted."""
- from codebase.base import User
-
+ @pytest.mark.parametrize(
+ ("reactions", "emoji", "expected"),
+ [
+ pytest.param([("eyes", 456), ("eyes", 123)], Emoji.EYES, True, id="reaction-exists-for-current-user"),
+ pytest.param([("eyes", 456)], Emoji.EYES, False, id="reaction-from-different-user"),
+ pytest.param([("+1", 123)], Emoji.EYES, False, id="different-emoji"),
+ pytest.param([], Emoji.EYES, False, id="no-reactions"),
+ ],
+ )
+ def test_has_issue_reaction(self, github_client, monkeypatch, reactions, emoji, expected):
+ """Test issue reaction matching for user and emoji combinations."""
mock_repo = Mock()
mock_issue = Mock()
- mock_reaction = Mock()
- mock_user = Mock()
-
- # Mock current_user as a cached_property
- type(github_client).current_user = User(id=123, username="daiv", name="DAIV")
-
- # Set up reaction from different user
- mock_user.id = 456
- mock_reaction.content = "eyes"
- mock_reaction.user = mock_user
-
+ mock_reactions = []
+ for content, user_id in reactions:
+ reaction = Mock()
+ reaction.content = content
+ reaction.user = Mock(id=user_id)
+ mock_reactions.append(reaction)
+
+ monkeypatch.setattr(type(github_client), "current_user", User(id=123, username="daiv", name="DAIV"))
github_client.client.get_repo.return_value = mock_repo
mock_repo.get_issue.return_value = mock_issue
- mock_issue.get_reactions.return_value = [mock_reaction]
-
- result = github_client.has_issue_reaction("owner/repo", 123, Emoji.EYES)
-
- assert result is False
+ mock_issue.get_reactions.return_value = mock_reactions
- def test_has_issue_reaction_returns_false_when_different_emoji(self, github_client):
- """Test that has_issue_reaction returns False when the current user reacted with a different emoji."""
- from codebase.base import User
-
- mock_repo = Mock()
- mock_issue = Mock()
- mock_reaction = Mock()
- mock_user = Mock()
-
- # Mock current_user as a cached_property
- type(github_client).current_user = User(id=123, username="daiv", name="DAIV")
-
- # Set up reaction with different emoji
- mock_user.id = 123 # Current user
- mock_reaction.content = "+1" # Different emoji
- mock_reaction.user = mock_user
-
- github_client.client.get_repo.return_value = mock_repo
- mock_repo.get_issue.return_value = mock_issue
- mock_issue.get_reactions.return_value = [mock_reaction]
-
- result = github_client.has_issue_reaction("owner/repo", 123, Emoji.EYES)
-
- assert result is False
-
- def test_has_issue_reaction_returns_false_when_no_reactions(self, github_client):
- """Test that has_issue_reaction returns False when there are no reactions."""
- from codebase.base import User
-
- mock_repo = Mock()
- mock_issue = Mock()
-
- # Mock current_user as a cached_property
- type(github_client).current_user = User(id=123, username="daiv", name="DAIV")
-
- github_client.client.get_repo.return_value = mock_repo
- mock_repo.get_issue.return_value = mock_issue
- mock_issue.get_reactions.return_value = []
-
- result = github_client.has_issue_reaction("owner/repo", 123, Emoji.EYES)
-
- assert result is False
+ result = github_client.has_issue_reaction("owner/repo", 123, emoji)
+ assert result is expected
def test_create_merge_request_note_emoji_review_comment(self, github_client):
"""Test that create_merge_request_note_emoji converts string note_id to int for review comments."""
@@ -186,7 +105,7 @@ def test_create_merge_request_note_emoji_review_comment(self, github_client):
mock_pr.get_review_comment.return_value = mock_comment
# Pass note_id as a string
- github_client.create_merge_request_note_emoji("owner/repo", 712, Emoji.THUMBSUP, "3645723306")
+ github_client.create_merge_request_note_emoji("owner/repo", 712, Emoji.THUMBSUP, 3645723306)
# Verify that get_review_comment was called with an integer
mock_pr.get_review_comment.assert_called_once_with(3645723306)
@@ -205,7 +124,7 @@ def test_create_merge_request_note_emoji_issue_comment_fallback(self, github_cli
mock_pr.get_issue_comment.return_value = mock_comment
# Pass note_id as a string
- github_client.create_merge_request_note_emoji("owner/repo", 712, Emoji.THUMBSUP, "3645723306")
+ github_client.create_merge_request_note_emoji("owner/repo", 712, Emoji.THUMBSUP, 3645723306)
# Verify that both methods were called with an integer
mock_pr.get_review_comment.assert_called_once_with(3645723306)
@@ -273,3 +192,39 @@ def test_get_merge_request_comment_converts_comment_id_to_int_review_comment(sel
mock_pr.get_review_comment.assert_called_once_with(3645723306)
assert result.id == "3645723306"
assert len(result.notes) == 1
+
+ @patch("codebase.clients.github.client.Repo.clone_from")
+ def test_load_repo_configures_git_identity_with_app_bot(self, mock_clone_from, github_client, monkeypatch):
+ """Test load_repo configures local git identity to the app bot user."""
+ mock_repo = Mock()
+ mock_writer = Mock()
+ mock_repo.config_writer.return_value.__enter__ = Mock(return_value=mock_writer)
+ mock_repo.config_writer.return_value.__exit__ = Mock(return_value=None)
+ mock_clone_from.return_value = mock_repo
+
+ github_client.client_installation.id = 67890
+ github_client.client_installation.app_slug = "daiv-agent-test"
+ github_client._integration.get_access_token.return_value = Mock(token="token") # noqa: S106
+ monkeypatch.setattr(
+ type(github_client), "current_user", User(id=123456, username="daiv-agent-test", name="DAIV Agent Test")
+ )
+
+ repository = Repository(
+ pk=1,
+ slug="owner/repo",
+ name="repo",
+ clone_url="https://github.com/owner/repo.git",
+ default_branch="main",
+ git_platform=GitPlatform.GITHUB,
+ )
+
+ with github_client.load_repo(repository, "main") as loaded_repo:
+ assert loaded_repo == mock_repo
+
+ clone_url, clone_dir = mock_clone_from.call_args.args[:2]
+ branch = mock_clone_from.call_args.kwargs["branch"]
+ assert clone_url == "https://oauth2:token@github.com/owner/repo.git"
+ assert clone_dir.name == "repo"
+ assert branch == "main"
+ mock_writer.set_value.assert_any_call("user", "name", "daiv-agent-test[bot]")
+ mock_writer.set_value.assert_any_call("user", "email", "123456+daiv-agent-test[bot]@users.noreply.github.com")
diff --git a/tests/unit_tests/codebase/clients/gitlab/test_client.py b/tests/unit_tests/codebase/clients/gitlab/test_client.py
index 2fbff2de..bed8a72a 100644
--- a/tests/unit_tests/codebase/clients/gitlab/test_client.py
+++ b/tests/unit_tests/codebase/clients/gitlab/test_client.py
@@ -1,7 +1,8 @@
-from unittest.mock import Mock
+from unittest.mock import Mock, patch
import pytest
+from codebase.base import GitPlatform, Repository, User
from codebase.clients.base import Emoji
from codebase.clients.gitlab.client import GitLabClient
@@ -19,92 +20,68 @@ def gitlab_client(self):
client = GitLabClient(auth_token="test-token", url="https://gitlab.com") # noqa: S106
yield client
- def test_has_issue_reaction_returns_true_when_reaction_exists(self, gitlab_client):
- """Test that has_issue_reaction returns True when the current user has awarded the emoji."""
- from codebase.base import User
-
- mock_project = Mock()
- mock_issue = Mock()
- mock_award_emoji1 = Mock()
- mock_award_emoji2 = Mock()
-
- # Mock current_user as a cached_property
- type(gitlab_client).current_user = User(id=123, username="daiv", name="DAIV")
-
- # Set up award emojis
- mock_award_emoji1.name = "eyes"
- mock_award_emoji1.user = {"id": 456} # Different user
- mock_award_emoji2.name = "eyes"
- mock_award_emoji2.user = {"id": 123} # Current user
-
- gitlab_client.client.projects.get.return_value = mock_project
- mock_project.issues.get.return_value = mock_issue
- mock_issue.awardemojis.list.return_value = [mock_award_emoji1, mock_award_emoji2]
-
- result = gitlab_client.has_issue_reaction("group/repo", 123, Emoji.EYES)
-
- assert result is True
-
- def test_has_issue_reaction_returns_false_when_reaction_not_exists(self, gitlab_client):
- """Test that has_issue_reaction returns False when the current user has not awarded the emoji."""
- from codebase.base import User
-
- mock_project = Mock()
- mock_issue = Mock()
- mock_award_emoji = Mock()
-
- # Mock current_user as a cached_property
- type(gitlab_client).current_user = User(id=123, username="daiv", name="DAIV")
-
- # Set up award emoji from different user
- mock_award_emoji.name = "eyes"
- mock_award_emoji.user = {"id": 456}
-
- gitlab_client.client.projects.get.return_value = mock_project
- mock_project.issues.get.return_value = mock_issue
- mock_issue.awardemojis.list.return_value = [mock_award_emoji]
-
- result = gitlab_client.has_issue_reaction("group/repo", 123, Emoji.EYES)
-
- assert result is False
-
- def test_has_issue_reaction_returns_false_when_different_emoji(self, gitlab_client):
- """Test that has_issue_reaction returns False when the current user awarded a different emoji."""
- from codebase.base import User
-
+ @pytest.mark.parametrize(
+ ("award_emojis", "emoji", "expected"),
+ [
+ pytest.param([("eyes", 456), ("eyes", 123)], Emoji.EYES, True, id="reaction-exists-for-current-user"),
+ pytest.param([("eyes", 456)], Emoji.EYES, False, id="reaction-from-different-user"),
+ pytest.param([("thumbsup", 123)], Emoji.EYES, False, id="different-emoji"),
+ pytest.param([], Emoji.EYES, False, id="no-reactions"),
+ ],
+ )
+ def test_has_issue_reaction(self, gitlab_client, monkeypatch, award_emojis, emoji, expected):
+ """Test issue award emoji matching for user and emoji combinations."""
mock_project = Mock()
mock_issue = Mock()
- mock_award_emoji = Mock()
-
- # Mock current_user as a cached_property
- type(gitlab_client).current_user = User(id=123, username="daiv", name="DAIV")
-
- # Set up award emoji with different emoji
- mock_award_emoji.name = "thumbsup"
- mock_award_emoji.user = {"id": 123} # Current user
-
+ mock_reactions = []
+ for name, user_id in award_emojis:
+ award_emoji = Mock()
+ award_emoji.name = name
+ award_emoji.user = {"id": user_id}
+ mock_reactions.append(award_emoji)
+
+ monkeypatch.setattr(type(gitlab_client), "current_user", User(id=123, username="daiv", name="DAIV"))
gitlab_client.client.projects.get.return_value = mock_project
mock_project.issues.get.return_value = mock_issue
- mock_issue.awardemojis.list.return_value = [mock_award_emoji]
-
- result = gitlab_client.has_issue_reaction("group/repo", 123, Emoji.EYES)
-
- assert result is False
-
- def test_has_issue_reaction_returns_false_when_no_reactions(self, gitlab_client):
- """Test that has_issue_reaction returns False when there are no award emojis."""
- from codebase.base import User
-
- mock_project = Mock()
- mock_issue = Mock()
-
- # Mock current_user as a cached_property
- type(gitlab_client).current_user = User(id=123, username="daiv", name="DAIV")
-
- gitlab_client.client.projects.get.return_value = mock_project
- mock_project.issues.get.return_value = mock_issue
- mock_issue.awardemojis.list.return_value = []
-
- result = gitlab_client.has_issue_reaction("group/repo", 123, Emoji.EYES)
-
- assert result is False
+ mock_issue.awardemojis.list.return_value = mock_reactions
+
+ result = gitlab_client.has_issue_reaction("group/repo", 123, emoji)
+ assert result is expected
+
+ @patch("codebase.clients.gitlab.client.Repo.clone_from")
+ def test_load_repo_configures_git_identity_with_gitlab_user(self, mock_clone_from, gitlab_client, monkeypatch):
+ """Test load_repo configures local git identity to the GitLab user."""
+ mock_repo = Mock()
+ mock_writer = Mock()
+ mock_repo.config_writer.return_value.__enter__ = Mock(return_value=mock_writer)
+ mock_repo.config_writer.return_value.__exit__ = Mock(return_value=None)
+ mock_clone_from.return_value = mock_repo
+
+ gitlab_client.client.private_token = "token" # noqa: S105
+ gitlab_client.client.user = Mock(
+ username="daiv-agent-test", public_email="daiv-agent-test@users.noreply.gitlab.com"
+ )
+ gitlab_client.client.auth = Mock()
+ monkeypatch.setattr(
+ type(gitlab_client), "current_user", User(id=123456, username="daiv-agent-test", name="DAIV Agent Test")
+ )
+
+ repository = Repository(
+ pk=1,
+ slug="group/repo",
+ name="repo",
+ clone_url="https://gitlab.com/group/repo.git",
+ default_branch="main",
+ git_platform=GitPlatform.GITLAB,
+ )
+
+ with gitlab_client.load_repo(repository, "main") as loaded_repo:
+ assert loaded_repo == mock_repo
+
+ clone_url, clone_dir = mock_clone_from.call_args.args[:2]
+ branch = mock_clone_from.call_args.kwargs["branch"]
+ assert clone_url == "https://oauth2:token@gitlab.com/group/repo.git"
+ assert clone_dir.name == "repo"
+ assert branch == "main"
+ mock_writer.set_value.assert_any_call("user", "name", "daiv-agent-test")
+ mock_writer.set_value.assert_any_call("user", "email", "daiv-agent-test@users.noreply.gitlab.com")
diff --git a/tests/unit_tests/codebase/clients/test_swe.py b/tests/unit_tests/codebase/clients/test_swe.py
index b845df0a..f3aa5b90 100644
--- a/tests/unit_tests/codebase/clients/test_swe.py
+++ b/tests/unit_tests/codebase/clients/test_swe.py
@@ -199,6 +199,7 @@ def test_unsupported_methods_raise_not_implemented(self, swe_client):
unsupported_methods = [
("set_repository_webhooks", ("repo", "url")),
("update_or_create_merge_request", ("repo", "source", "target", "title", "desc")),
+ ("update_merge_request", ("repo", 1, "as_draft", "title", "description", "labels", "assignee_id")),
("create_merge_request_comment", ("repo", 1, "body")),
("get_issue", ("repo", 1)),
("create_issue_comment", ("repo", 1, "body")),
diff --git a/tests/unit_tests/codebase/test_config.py b/tests/unit_tests/codebase/test_config.py
index b1c1e213..fa58b4d7 100644
--- a/tests/unit_tests/codebase/test_config.py
+++ b/tests/unit_tests/codebase/test_config.py
@@ -118,7 +118,7 @@ def test_get_config_with_models_section(self, mock_cache, mock_repo_client):
agent:
model: "openrouter:anthropic/claude-haiku-4.5"
thinking_level: "low"
- pr_describer:
+ diff_to_metadata:
model: "openrouter:openai/gpt-4.1-mini"
"""
@@ -126,7 +126,7 @@ def test_get_config_with_models_section(self, mock_cache, mock_repo_client):
assert config.models.agent.model == "openrouter:anthropic/claude-haiku-4.5"
assert config.models.agent.thinking_level == "low"
- assert config.models.pr_describer.model == "openrouter:openai/gpt-4.1-mini"
+ assert config.models.diff_to_metadata.model == "openrouter:openai/gpt-4.1-mini"
@patch("codebase.repo_config.cache")
def test_get_config_with_partial_models_section(self, mock_cache, mock_repo_client):
diff --git a/tests/unit_tests/codebase/test_git_manager.py b/tests/unit_tests/codebase/test_git_manager.py
index d4b01ee8..38ad92f6 100644
--- a/tests/unit_tests/codebase/test_git_manager.py
+++ b/tests/unit_tests/codebase/test_git_manager.py
@@ -1,11 +1,12 @@
from __future__ import annotations
from pathlib import Path
+from unittest.mock import patch
import pytest
-from git import Repo
+from git import GitCommandError, Repo
-from codebase.utils import GitManager
+from codebase.utils import GitManager, GitPushPermissionError
def _configure_repo_identity(repo: Repo) -> None:
@@ -132,6 +133,26 @@ def test_git_manager_commit_and_push_generates_unique_branch_name(tmp_path: Path
assert repo.active_branch.name == "feature-1"
+def test_git_manager_commit_and_push_raises_permission_error_on_auth_failure(tmp_path: Path) -> None:
+ repo, _ = _init_repo_with_origin(tmp_path)
+ repo_dir = _repo_path(repo)
+ (repo_dir / "feature-auth.txt").write_text("feature-auth\n")
+
+ auth_error = GitCommandError(
+ command="git push",
+ status=128,
+ stderr=(
+ "fatal: unable to access 'https://github.com/example/repo.git/': The requested URL returned error: 403"
+ ),
+ )
+
+ with (
+ patch("git.remote.Remote.push", side_effect=auth_error),
+ pytest.raises(GitPushPermissionError, match="authentication or permission issues"),
+ ):
+ GitManager(repo).commit_and_push_changes("Add auth protected feature", branch_name="feature/auth-fail")
+
+
def test_git_manager_checkout_raises_for_missing_branch(tmp_path: Path) -> None:
repo, _ = _init_repo_with_origin(tmp_path)
diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py
index c0355935..4b1bf4fd 100644
--- a/tests/unit_tests/conftest.py
+++ b/tests/unit_tests/conftest.py
@@ -78,6 +78,7 @@ def mock_repo_client():
author=mock_client.current_user,
)
mock_client.update_or_create_merge_request.return_value = merge_request
+ mock_client.update_merge_request.return_value = merge_request
mock_client.get_merge_request.return_value = merge_request
mock_client.get_merge_request_latest_pipelines.return_value = []
mock_client.get_merge_request_review_comments.return_value = []
diff --git a/uv.lock b/uv.lock
index d672d78f..c24557d7 100644
--- a/uv.lock
+++ b/uv.lock
@@ -79,6 +79,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" },
]
+[[package]]
+name = "annotated-doc"
+version = "0.0.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" },
+]
+
[[package]]
name = "annotated-types"
version = "0.7.0"
@@ -90,7 +99,7 @@ wheels = [
[[package]]
name = "anthropic"
-version = "0.78.0"
+version = "0.79.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -102,9 +111,9 @@ dependencies = [
{ name = "sniffio" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/ec/51/32849a48f9b1cfe80a508fd269b20bd8f0b1357c70ba092890fde5a6a10b/anthropic-0.78.0.tar.gz", hash = "sha256:55fd978ab9b049c61857463f4c4e9e092b24f892519c6d8078cee1713d8af06e", size = 509136, upload-time = "2026-02-05T17:52:04.986Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/15/b1/91aea3f8fd180d01d133d931a167a78a3737b3fd39ccef2ae8d6619c24fd/anthropic-0.79.0.tar.gz", hash = "sha256:8707aafb3b1176ed6c13e2b1c9fb3efddce90d17aee5d8b83a86c70dcdcca871", size = 509825, upload-time = "2026-02-07T18:06:18.388Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/3b/03/2f50931a942e5e13f80e24d83406714672c57964be593fc046d81369335b/anthropic-0.78.0-py3-none-any.whl", hash = "sha256:2a9887d2e99d1b0f9fe08857a1e9fe5d2d4030455dbf9ac65aab052e2efaeac4", size = 405485, upload-time = "2026-02-05T17:52:03.674Z" },
+ { url = "https://files.pythonhosted.org/packages/95/b2/cc0b8e874a18d7da50b0fda8c99e4ac123f23bf47b471827c5f6f3e4a767/anthropic-0.79.0-py3-none-any.whl", hash = "sha256:04cbd473b6bbda4ca2e41dd670fe2f829a911530f01697d0a1e37321eb75f3cf", size = 405918, upload-time = "2026-02-07T18:06:20.246Z" },
]
[[package]]
@@ -327,90 +336,94 @@ wheels = [
[[package]]
name = "coverage"
-version = "7.13.3"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/11/43/3e4ac666cc35f231fa70c94e9f38459299de1a152813f9d2f60fc5f3ecaf/coverage-7.13.3.tar.gz", hash = "sha256:f7f6182d3dfb8802c1747eacbfe611b669455b69b7c037484bb1efbbb56711ac", size = 826832, upload-time = "2026-02-03T14:02:30.944Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/5e/b3/677bb43427fed9298905106f39c6520ac75f746f81b8f01104526a8026e4/coverage-7.13.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c6f6169bbdbdb85aab8ac0392d776948907267fcc91deeacf6f9d55f7a83ae3b", size = 219513, upload-time = "2026-02-03T14:01:34.29Z" },
- { url = "https://files.pythonhosted.org/packages/42/53/290046e3bbf8986cdb7366a42dab3440b9983711eaff044a51b11006c67b/coverage-7.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2f5e731627a3d5ef11a2a35aa0c6f7c435867c7ccbc391268eb4f2ca5dbdcc10", size = 219850, upload-time = "2026-02-03T14:01:35.984Z" },
- { url = "https://files.pythonhosted.org/packages/ea/2b/ab41f10345ba2e49d5e299be8663be2b7db33e77ac1b85cd0af985ea6406/coverage-7.13.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9db3a3285d91c0b70fab9f39f0a4aa37d375873677efe4e71e58d8321e8c5d39", size = 250886, upload-time = "2026-02-03T14:01:38.287Z" },
- { url = "https://files.pythonhosted.org/packages/72/2d/b3f6913ee5a1d5cdd04106f257e5fac5d048992ffc2d9995d07b0f17739f/coverage-7.13.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:06e49c5897cb12e3f7ecdc111d44e97c4f6d0557b81a7a0204ed70a8b038f86f", size = 253393, upload-time = "2026-02-03T14:01:40.118Z" },
- { url = "https://files.pythonhosted.org/packages/f0/f6/b1f48810ffc6accf49a35b9943636560768f0812330f7456aa87dc39aff5/coverage-7.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb25061a66802df9fc13a9ba1967d25faa4dae0418db469264fd9860a921dde4", size = 254740, upload-time = "2026-02-03T14:01:42.413Z" },
- { url = "https://files.pythonhosted.org/packages/57/d0/e59c54f9be0b61808f6bc4c8c4346bd79f02dd6bbc3f476ef26124661f20/coverage-7.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:99fee45adbb1caeb914da16f70e557fb7ff6ddc9e4b14de665bd41af631367ef", size = 250905, upload-time = "2026-02-03T14:01:44.163Z" },
- { url = "https://files.pythonhosted.org/packages/d5/f7/5291bcdf498bafbee3796bb32ef6966e9915aebd4d0954123c8eae921c32/coverage-7.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:318002f1fd819bdc1651c619268aa5bc853c35fa5cc6d1e8c96bd9cd6c828b75", size = 252753, upload-time = "2026-02-03T14:01:45.974Z" },
- { url = "https://files.pythonhosted.org/packages/a0/a9/1dcafa918c281554dae6e10ece88c1add82db685be123e1b05c2056ff3fb/coverage-7.13.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:71295f2d1d170b9977dc386d46a7a1b7cbb30e5405492529b4c930113a33f895", size = 250716, upload-time = "2026-02-03T14:01:48.844Z" },
- { url = "https://files.pythonhosted.org/packages/44/bb/4ea4eabcce8c4f6235df6e059fbc5db49107b24c4bdffc44aee81aeca5a8/coverage-7.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5b1ad2e0dc672625c44bc4fe34514602a9fd8b10d52ddc414dc585f74453516c", size = 250530, upload-time = "2026-02-03T14:01:50.793Z" },
- { url = "https://files.pythonhosted.org/packages/6d/31/4a6c9e6a71367e6f923b27b528448c37f4e959b7e4029330523014691007/coverage-7.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b2beb64c145593a50d90db5c7178f55daeae129123b0d265bdb3cbec83e5194a", size = 252186, upload-time = "2026-02-03T14:01:52.607Z" },
- { url = "https://files.pythonhosted.org/packages/27/92/e1451ef6390a4f655dc42da35d9971212f7abbbcad0bdb7af4407897eb76/coverage-7.13.3-cp314-cp314-win32.whl", hash = "sha256:3d1aed4f4e837a832df2f3b4f68a690eede0de4560a2dbc214ea0bc55aabcdb4", size = 222253, upload-time = "2026-02-03T14:01:55.071Z" },
- { url = "https://files.pythonhosted.org/packages/8a/98/78885a861a88de020c32a2693487c37d15a9873372953f0c3c159d575a43/coverage-7.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f9efbbaf79f935d5fbe3ad814825cbce4f6cdb3054384cb49f0c0f496125fa0", size = 223069, upload-time = "2026-02-03T14:01:56.95Z" },
- { url = "https://files.pythonhosted.org/packages/eb/fb/3784753a48da58a5337972abf7ca58b1fb0f1bda21bc7b4fae992fd28e47/coverage-7.13.3-cp314-cp314-win_arm64.whl", hash = "sha256:31b6e889c53d4e6687ca63706148049494aace140cffece1c4dc6acadb70a7b3", size = 221633, upload-time = "2026-02-03T14:01:58.758Z" },
- { url = "https://files.pythonhosted.org/packages/40/f9/75b732d9674d32cdbffe801ed5f770786dd1c97eecedef2125b0d25102dc/coverage-7.13.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c5e9787cec750793a19a28df7edd85ac4e49d3fb91721afcdc3b86f6c08d9aa8", size = 220243, upload-time = "2026-02-03T14:02:01.109Z" },
- { url = "https://files.pythonhosted.org/packages/cf/7e/2868ec95de5a65703e6f0c87407ea822d1feb3619600fbc3c1c4fa986090/coverage-7.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e5b86db331c682fd0e4be7098e6acee5e8a293f824d41487c667a93705d415ca", size = 220515, upload-time = "2026-02-03T14:02:02.862Z" },
- { url = "https://files.pythonhosted.org/packages/7d/eb/9f0d349652fced20bcaea0f67fc5777bd097c92369f267975732f3dc5f45/coverage-7.13.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:edc7754932682d52cf6e7a71806e529ecd5ce660e630e8bd1d37109a2e5f63ba", size = 261874, upload-time = "2026-02-03T14:02:04.727Z" },
- { url = "https://files.pythonhosted.org/packages/ee/a5/6619bc4a6c7b139b16818149a3e74ab2e21599ff9a7b6811b6afde99f8ec/coverage-7.13.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3a16d6398666510a6886f67f43d9537bfd0e13aca299688a19daa84f543122f", size = 264004, upload-time = "2026-02-03T14:02:06.634Z" },
- { url = "https://files.pythonhosted.org/packages/29/b7/90aa3fc645a50c6f07881fca4fd0ba21e3bfb6ce3a7078424ea3a35c74c9/coverage-7.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:303d38b19626c1981e1bb067a9928236d88eb0e4479b18a74812f05a82071508", size = 266408, upload-time = "2026-02-03T14:02:09.037Z" },
- { url = "https://files.pythonhosted.org/packages/62/55/08bb2a1e4dcbae384e638f0effef486ba5987b06700e481691891427d879/coverage-7.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:284e06eadfe15ddfee2f4ee56631f164ef897a7d7d5a15bca5f0bb88889fc5ba", size = 260977, upload-time = "2026-02-03T14:02:11.755Z" },
- { url = "https://files.pythonhosted.org/packages/9b/76/8bd4ae055a42d8fb5dd2230e5cf36ff2e05f85f2427e91b11a27fea52ed7/coverage-7.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d401f0864a1d3198422816878e4e84ca89ec1c1bf166ecc0ae01380a39b888cd", size = 263868, upload-time = "2026-02-03T14:02:13.565Z" },
- { url = "https://files.pythonhosted.org/packages/e3/f9/ba000560f11e9e32ec03df5aa8477242c2d95b379c99ac9a7b2e7fbacb1a/coverage-7.13.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3f379b02c18a64de78c4ccdddf1c81c2c5ae1956c72dacb9133d7dd7809794ab", size = 261474, upload-time = "2026-02-03T14:02:16.069Z" },
- { url = "https://files.pythonhosted.org/packages/90/4b/4de4de8f9ca7af4733bfcf4baa440121b7dbb3856daf8428ce91481ff63b/coverage-7.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:7a482f2da9086971efb12daca1d6547007ede3674ea06e16d7663414445c683e", size = 260317, upload-time = "2026-02-03T14:02:17.996Z" },
- { url = "https://files.pythonhosted.org/packages/05/71/5cd8436e2c21410ff70be81f738c0dddea91bcc3189b1517d26e0102ccb3/coverage-7.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:562136b0d401992118d9b49fbee5454e16f95f85b120a4226a04d816e33fe024", size = 262635, upload-time = "2026-02-03T14:02:20.405Z" },
- { url = "https://files.pythonhosted.org/packages/e7/f8/2834bb45bdd70b55a33ec354b8b5f6062fc90e5bb787e14385903a979503/coverage-7.13.3-cp314-cp314t-win32.whl", hash = "sha256:ca46e5c3be3b195098dd88711890b8011a9fa4feca942292bb84714ce5eab5d3", size = 223035, upload-time = "2026-02-03T14:02:22.323Z" },
- { url = "https://files.pythonhosted.org/packages/26/75/f8290f0073c00d9ae14056d2b84ab92dff21d5370e464cb6cb06f52bf580/coverage-7.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:06d316dbb3d9fd44cca05b2dbcfbef22948493d63a1f28e828d43e6cc505fed8", size = 224142, upload-time = "2026-02-03T14:02:24.143Z" },
- { url = "https://files.pythonhosted.org/packages/03/01/43ac78dfea8946c4a9161bbc034b5549115cb2b56781a4b574927f0d141a/coverage-7.13.3-cp314-cp314t-win_arm64.whl", hash = "sha256:299d66e9218193f9dc6e4880629ed7c4cd23486005166247c283fb98531656c3", size = 222166, upload-time = "2026-02-03T14:02:26.005Z" },
- { url = "https://files.pythonhosted.org/packages/7d/fb/70af542d2d938c778c9373ce253aa4116dbe7c0a5672f78b2b2ae0e1b94b/coverage-7.13.3-py3-none-any.whl", hash = "sha256:90a8af9dba6429b2573199622d72e0ebf024d6276f16abce394ad4d181bb0910", size = 211237, upload-time = "2026-02-03T14:02:27.986Z" },
+version = "7.13.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" },
+ { url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" },
+ { url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" },
+ { url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" },
+ { url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" },
+ { url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = "2026-02-09T12:58:29.441Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" },
+ { url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" },
+ { url = "https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" },
+ { url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" },
+ { url = "https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" },
+ { url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" },
+ { url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" },
+ { url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" },
+ { url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = "2026-02-09T12:58:56.936Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" },
]
[[package]]
name = "cryptography"
-version = "46.0.4"
+version = "46.0.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/78/19/f748958276519adf6a0c1e79e7b8860b4830dda55ccdf29f2719b5fc499c/cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59", size = 749301, upload-time = "2026-01-28T00:24:37.379Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/8d/99/157aae7949a5f30d51fcb1a9851e8ebd5c74bf99b5285d8bb4b8b9ee641e/cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:281526e865ed4166009e235afadf3a4c4cba6056f99336a99efba65336fd5485", size = 7173686, upload-time = "2026-01-28T00:23:07.515Z" },
- { url = "https://files.pythonhosted.org/packages/87/91/874b8910903159043b5c6a123b7e79c4559ddd1896e38967567942635778/cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc", size = 4275871, upload-time = "2026-01-28T00:23:09.439Z" },
- { url = "https://files.pythonhosted.org/packages/c0/35/690e809be77896111f5b195ede56e4b4ed0435b428c2f2b6d35046fbb5e8/cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0", size = 4423124, upload-time = "2026-01-28T00:23:11.529Z" },
- { url = "https://files.pythonhosted.org/packages/1a/5b/a26407d4f79d61ca4bebaa9213feafdd8806dc69d3d290ce24996d3cfe43/cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa", size = 4277090, upload-time = "2026-01-28T00:23:13.123Z" },
- { url = "https://files.pythonhosted.org/packages/0c/d8/4bb7aec442a9049827aa34cee1aa83803e528fa55da9a9d45d01d1bb933e/cryptography-46.0.4-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5aa3e463596b0087b3da0dbe2b2487e9fc261d25da85754e30e3b40637d61f81", size = 4947652, upload-time = "2026-01-28T00:23:14.554Z" },
- { url = "https://files.pythonhosted.org/packages/2b/08/f83e2e0814248b844265802d081f2fac2f1cbe6cd258e72ba14ff006823a/cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0a9ad24359fee86f131836a9ac3bffc9329e956624a2d379b613f8f8abaf5255", size = 4455157, upload-time = "2026-01-28T00:23:16.443Z" },
- { url = "https://files.pythonhosted.org/packages/0a/05/19d849cf4096448779d2dcc9bb27d097457dac36f7273ffa875a93b5884c/cryptography-46.0.4-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:dc1272e25ef673efe72f2096e92ae39dea1a1a450dd44918b15351f72c5a168e", size = 3981078, upload-time = "2026-01-28T00:23:17.838Z" },
- { url = "https://files.pythonhosted.org/packages/e6/89/f7bac81d66ba7cde867a743ea5b37537b32b5c633c473002b26a226f703f/cryptography-46.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:de0f5f4ec8711ebc555f54735d4c673fc34b65c44283895f1a08c2b49d2fd99c", size = 4276213, upload-time = "2026-01-28T00:23:19.257Z" },
- { url = "https://files.pythonhosted.org/packages/da/9f/7133e41f24edd827020ad21b068736e792bc68eecf66d93c924ad4719fb3/cryptography-46.0.4-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:eeeb2e33d8dbcccc34d64651f00a98cb41b2dc69cef866771a5717e6734dfa32", size = 4912190, upload-time = "2026-01-28T00:23:21.244Z" },
- { url = "https://files.pythonhosted.org/packages/a6/f7/6d43cbaddf6f65b24816e4af187d211f0bc536a29961f69faedc48501d8e/cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616", size = 4454641, upload-time = "2026-01-28T00:23:22.866Z" },
- { url = "https://files.pythonhosted.org/packages/9e/4f/ebd0473ad656a0ac912a16bd07db0f5d85184924e14fc88feecae2492834/cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0", size = 4405159, upload-time = "2026-01-28T00:23:25.278Z" },
- { url = "https://files.pythonhosted.org/packages/d1/f7/7923886f32dc47e27adeff8246e976d77258fd2aa3efdd1754e4e323bf49/cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0", size = 4666059, upload-time = "2026-01-28T00:23:26.766Z" },
- { url = "https://files.pythonhosted.org/packages/eb/a7/0fca0fd3591dffc297278a61813d7f661a14243dd60f499a7a5b48acb52a/cryptography-46.0.4-cp311-abi3-win32.whl", hash = "sha256:82a62483daf20b8134f6e92898da70d04d0ef9a75829d732ea1018678185f4f5", size = 3026378, upload-time = "2026-01-28T00:23:28.317Z" },
- { url = "https://files.pythonhosted.org/packages/2d/12/652c84b6f9873f0909374864a57b003686c642ea48c84d6c7e2c515e6da5/cryptography-46.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:6225d3ebe26a55dbc8ead5ad1265c0403552a63336499564675b29eb3184c09b", size = 3478614, upload-time = "2026-01-28T00:23:30.275Z" },
- { url = "https://files.pythonhosted.org/packages/b9/27/542b029f293a5cce59349d799d4d8484b3b1654a7b9a0585c266e974a488/cryptography-46.0.4-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:485e2b65d25ec0d901bca7bcae0f53b00133bf3173916d8e421f6fddde103908", size = 7116417, upload-time = "2026-01-28T00:23:31.958Z" },
- { url = "https://files.pythonhosted.org/packages/f8/f5/559c25b77f40b6bf828eabaf988efb8b0e17b573545edb503368ca0a2a03/cryptography-46.0.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:078e5f06bd2fa5aea5a324f2a09f914b1484f1d0c2a4d6a8a28c74e72f65f2da", size = 4264508, upload-time = "2026-01-28T00:23:34.264Z" },
- { url = "https://files.pythonhosted.org/packages/49/a1/551fa162d33074b660dc35c9bc3616fefa21a0e8c1edd27b92559902e408/cryptography-46.0.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dce1e4f068f03008da7fa51cc7abc6ddc5e5de3e3d1550334eaf8393982a5829", size = 4409080, upload-time = "2026-01-28T00:23:35.793Z" },
- { url = "https://files.pythonhosted.org/packages/b0/6a/4d8d129a755f5d6df1bbee69ea2f35ebfa954fa1847690d1db2e8bca46a5/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2067461c80271f422ee7bdbe79b9b4be54a5162e90345f86a23445a0cf3fd8a2", size = 4270039, upload-time = "2026-01-28T00:23:37.263Z" },
- { url = "https://files.pythonhosted.org/packages/4c/f5/ed3fcddd0a5e39321e595e144615399e47e7c153a1fb8c4862aec3151ff9/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:c92010b58a51196a5f41c3795190203ac52edfd5dc3ff99149b4659eba9d2085", size = 4926748, upload-time = "2026-01-28T00:23:38.884Z" },
- { url = "https://files.pythonhosted.org/packages/43/ae/9f03d5f0c0c00e85ecb34f06d3b79599f20630e4db91b8a6e56e8f83d410/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:829c2b12bbc5428ab02d6b7f7e9bbfd53e33efd6672d21341f2177470171ad8b", size = 4442307, upload-time = "2026-01-28T00:23:40.56Z" },
- { url = "https://files.pythonhosted.org/packages/8b/22/e0f9f2dae8040695103369cf2283ef9ac8abe4d51f68710bec2afd232609/cryptography-46.0.4-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:62217ba44bf81b30abaeda1488686a04a702a261e26f87db51ff61d9d3510abd", size = 3959253, upload-time = "2026-01-28T00:23:42.827Z" },
- { url = "https://files.pythonhosted.org/packages/01/5b/6a43fcccc51dae4d101ac7d378a8724d1ba3de628a24e11bf2f4f43cba4d/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:9c2da296c8d3415b93e6053f5a728649a87a48ce084a9aaf51d6e46c87c7f2d2", size = 4269372, upload-time = "2026-01-28T00:23:44.655Z" },
- { url = "https://files.pythonhosted.org/packages/17/b7/0f6b8c1dd0779df2b526e78978ff00462355e31c0a6f6cff8a3e99889c90/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:9b34d8ba84454641a6bf4d6762d15847ecbd85c1316c0a7984e6e4e9f748ec2e", size = 4891908, upload-time = "2026-01-28T00:23:46.48Z" },
- { url = "https://files.pythonhosted.org/packages/83/17/259409b8349aa10535358807a472c6a695cf84f106022268d31cea2b6c97/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:df4a817fa7138dd0c96c8c8c20f04b8aaa1fac3bbf610913dcad8ea82e1bfd3f", size = 4441254, upload-time = "2026-01-28T00:23:48.403Z" },
- { url = "https://files.pythonhosted.org/packages/9c/fe/e4a1b0c989b00cee5ffa0764401767e2d1cf59f45530963b894129fd5dce/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b1de0ebf7587f28f9190b9cb526e901bf448c9e6a99655d2b07fff60e8212a82", size = 4396520, upload-time = "2026-01-28T00:23:50.26Z" },
- { url = "https://files.pythonhosted.org/packages/b3/81/ba8fd9657d27076eb40d6a2f941b23429a3c3d2f56f5a921d6b936a27bc9/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9b4d17bc7bd7cdd98e3af40b441feaea4c68225e2eb2341026c84511ad246c0c", size = 4651479, upload-time = "2026-01-28T00:23:51.674Z" },
- { url = "https://files.pythonhosted.org/packages/00/03/0de4ed43c71c31e4fe954edd50b9d28d658fef56555eba7641696370a8e2/cryptography-46.0.4-cp314-cp314t-win32.whl", hash = "sha256:c411f16275b0dea722d76544a61d6421e2cc829ad76eec79280dbdc9ddf50061", size = 3001986, upload-time = "2026-01-28T00:23:53.485Z" },
- { url = "https://files.pythonhosted.org/packages/5c/70/81830b59df7682917d7a10f833c4dab2a5574cd664e86d18139f2b421329/cryptography-46.0.4-cp314-cp314t-win_amd64.whl", hash = "sha256:728fedc529efc1439eb6107b677f7f7558adab4553ef8669f0d02d42d7b959a7", size = 3468288, upload-time = "2026-01-28T00:23:55.09Z" },
- { url = "https://files.pythonhosted.org/packages/56/f7/f648fdbb61d0d45902d3f374217451385edc7e7768d1b03ff1d0e5ffc17b/cryptography-46.0.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a9556ba711f7c23f77b151d5798f3ac44a13455cc68db7697a1096e6d0563cab", size = 7169583, upload-time = "2026-01-28T00:23:56.558Z" },
- { url = "https://files.pythonhosted.org/packages/d8/cc/8f3224cbb2a928de7298d6ed4790f5ebc48114e02bdc9559196bfb12435d/cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef", size = 4275419, upload-time = "2026-01-28T00:23:58.364Z" },
- { url = "https://files.pythonhosted.org/packages/17/43/4a18faa7a872d00e4264855134ba82d23546c850a70ff209e04ee200e76f/cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d", size = 4419058, upload-time = "2026-01-28T00:23:59.867Z" },
- { url = "https://files.pythonhosted.org/packages/ee/64/6651969409821d791ba12346a124f55e1b76f66a819254ae840a965d4b9c/cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973", size = 4278151, upload-time = "2026-01-28T00:24:01.731Z" },
- { url = "https://files.pythonhosted.org/packages/20/0b/a7fce65ee08c3c02f7a8310cc090a732344066b990ac63a9dfd0a655d321/cryptography-46.0.4-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:a90e43e3ef65e6dcf969dfe3bb40cbf5aef0d523dff95bfa24256be172a845f4", size = 4939441, upload-time = "2026-01-28T00:24:03.175Z" },
- { url = "https://files.pythonhosted.org/packages/db/a7/20c5701e2cd3e1dfd7a19d2290c522a5f435dd30957d431dcb531d0f1413/cryptography-46.0.4-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a05177ff6296644ef2876fce50518dffb5bcdf903c85250974fc8bc85d54c0af", size = 4451617, upload-time = "2026-01-28T00:24:05.403Z" },
- { url = "https://files.pythonhosted.org/packages/00/dc/3e16030ea9aa47b63af6524c354933b4fb0e352257c792c4deeb0edae367/cryptography-46.0.4-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:daa392191f626d50f1b136c9b4cf08af69ca8279d110ea24f5c2700054d2e263", size = 3977774, upload-time = "2026-01-28T00:24:06.851Z" },
- { url = "https://files.pythonhosted.org/packages/42/c8/ad93f14118252717b465880368721c963975ac4b941b7ef88f3c56bf2897/cryptography-46.0.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e07ea39c5b048e085f15923511d8121e4a9dc45cee4e3b970ca4f0d338f23095", size = 4277008, upload-time = "2026-01-28T00:24:08.926Z" },
- { url = "https://files.pythonhosted.org/packages/00/cf/89c99698151c00a4631fbfcfcf459d308213ac29e321b0ff44ceeeac82f1/cryptography-46.0.4-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:d5a45ddc256f492ce42a4e35879c5e5528c09cd9ad12420828c972951d8e016b", size = 4903339, upload-time = "2026-01-28T00:24:12.009Z" },
- { url = "https://files.pythonhosted.org/packages/03/c3/c90a2cb358de4ac9309b26acf49b2a100957e1ff5cc1e98e6c4996576710/cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019", size = 4451216, upload-time = "2026-01-28T00:24:13.975Z" },
- { url = "https://files.pythonhosted.org/packages/96/2c/8d7f4171388a10208671e181ca43cdc0e596d8259ebacbbcfbd16de593da/cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4", size = 4404299, upload-time = "2026-01-28T00:24:16.169Z" },
- { url = "https://files.pythonhosted.org/packages/e9/23/cbb2036e450980f65c6e0a173b73a56ff3bccd8998965dea5cc9ddd424a5/cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b", size = 4664837, upload-time = "2026-01-28T00:24:17.629Z" },
- { url = "https://files.pythonhosted.org/packages/0a/21/f7433d18fe6d5845329cbdc597e30caf983229c7a245bcf54afecc555938/cryptography-46.0.4-cp38-abi3-win32.whl", hash = "sha256:0563655cb3c6d05fb2afe693340bc050c30f9f34e15763361cf08e94749401fc", size = 3009779, upload-time = "2026-01-28T00:24:20.198Z" },
- { url = "https://files.pythonhosted.org/packages/3a/6a/bd2e7caa2facffedf172a45c1a02e551e6d7d4828658c9a245516a598d94/cryptography-46.0.4-cp38-abi3-win_amd64.whl", hash = "sha256:fa0900b9ef9c49728887d1576fd8d9e7e3ea872fa9b25ef9b64888adc434e976", size = 3466633, upload-time = "2026-01-28T00:24:21.851Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
+ { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
+ { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
+ { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
+ { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
+ { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" },
+ { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" },
+ { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" },
+ { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
+ { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
+ { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
+ { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
+ { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
+ { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
+ { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
+ { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
+ { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
+ { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
]
[[package]]
@@ -484,7 +497,7 @@ docs = [
[package.metadata]
requires-dist = [
{ name = "ddgs", specifier = "==9.10.0" },
- { name = "deepagents", specifier = "==0.3.12" },
+ { name = "deepagents", specifier = "==0.4.1" },
{ name = "django", specifier = "==6.0.2" },
{ name = "django-crontask", specifier = "==1.1.3" },
{ name = "django-extensions", specifier = "==4.1.0" },
@@ -497,12 +510,12 @@ requires-dist = [
{ name = "httpx", specifier = "==0.28.1" },
{ name = "ipython", specifier = "==9.10.0" },
{ name = "jinja2", specifier = "==3.1.6" },
- { name = "langchain", extras = ["anthropic", "community", "google-genai", "openai"], specifier = "==1.2.8" },
+ { name = "langchain", extras = ["anthropic", "community", "google-genai", "openai"], specifier = "==1.2.10" },
{ name = "langchain-mcp-adapters", specifier = "==0.2.1" },
{ name = "langgraph", specifier = "==1.0.8" },
{ name = "langgraph-checkpoint-postgres", specifier = "==3.0.4" },
- { name = "langsmith", extras = ["pytest"], specifier = "==0.6.9" },
- { name = "langsmith-fetch", specifier = ">=0.3.1" },
+ { name = "langsmith", extras = ["pytest"], specifier = "==0.7.1" },
+ { name = "langsmith-fetch", specifier = "==0.3.1" },
{ name = "markdownify", specifier = "==1.2.2" },
{ name = "openevals", specifier = "==0.1.3" },
{ name = "prompt-toolkit", specifier = "==3.0.52" },
@@ -521,21 +534,21 @@ requires-dist = [
[package.metadata.requires-dev]
dev = [
- { name = "coverage", specifier = "==7.13.3" },
+ { name = "coverage", specifier = "==7.13.4" },
{ name = "datasets", specifier = "==4.5.0" },
{ name = "prek", specifier = "==0.3.2" },
- { name = "pyproject-fmt", specifier = "==2.12.1" },
+ { name = "pyproject-fmt", specifier = "==2.15.2" },
{ name = "pytest", specifier = "==9.0.2" },
{ name = "pytest-asyncio", specifier = "==1.3.0" },
{ name = "pytest-cov", specifier = "==7.0.0" },
{ name = "pytest-django", specifier = "==4.11.1" },
- { name = "pytest-env", specifier = "==1.2.0" },
+ { name = "pytest-env", specifier = "==1.3.2" },
{ name = "pytest-httpx", specifier = "==0.36.0" },
{ name = "pytest-mock", specifier = "==3.15.1" },
{ name = "pytest-xdist", specifier = "==3.8.0" },
{ name = "python-dotenv", specifier = "==1.2.1" },
{ name = "ruff", specifier = "==0.15.0" },
- { name = "ty", specifier = "==0.0.15" },
+ { name = "ty", specifier = "==0.0.16" },
{ name = "types-pyyaml", specifier = "==6.0.12.20250915" },
{ name = "watchdog", specifier = "==6.0.0" },
]
@@ -611,7 +624,7 @@ wheels = [
[[package]]
name = "deepagents"
-version = "0.3.12"
+version = "0.4.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "langchain" },
@@ -620,9 +633,9 @@ dependencies = [
{ name = "langchain-google-genai" },
{ name = "wcmatch" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/c0/0b/9d3512327d48e619567797dffb34c356b2e0c7b0aa505fd3aaef342903d3/deepagents-0.3.12.tar.gz", hash = "sha256:ab2d7e7c47040d364a20cc19cc775294c1e942456652d6c12e0f21011068633c", size = 77962, upload-time = "2026-02-06T21:20:43.511Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/32/71/3aa53332733cbe25e45c30ddc1a337c04b11650ba3b4fad18bddb488ae87/deepagents-0.4.1.tar.gz", hash = "sha256:054f3b3baff2405c5053a2d004e5eaaa06b1a8346018753804ec0292a627fe64", size = 78294, upload-time = "2026-02-11T15:59:38.64Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/5f/0a/2b8542a19bb22cf49827a38d04e045c6dba97d03dddc721d3ed2e7be0e5e/deepagents-0.3.12-py3-none-any.whl", hash = "sha256:42e707a1be48db3bc445fbe3243b6dc19333565cac4eab8cdc0e37d780c6cfe7", size = 88553, upload-time = "2026-02-06T21:20:42.575Z" },
+ { url = "https://files.pythonhosted.org/packages/37/fa/64cbb2b3c429d8cfb2417ff7aea67f53c2b507eae135bdeabed6e755f3a0/deepagents-0.4.1-py3-none-any.whl", hash = "sha256:9973c696b452ca050fbba860f4a997d8428dc56f629c5c0f7e01e483d768a4d9", size = 88477, upload-time = "2026-02-11T15:59:37.262Z" },
]
[[package]]
@@ -1288,16 +1301,16 @@ wheels = [
[[package]]
name = "langchain"
-version = "1.2.8"
+version = "1.2.10"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "langchain-core" },
{ name = "langgraph" },
{ name = "pydantic" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/52/b7/a1d95dbb58e5e82dbd05e3730e2d4b99f784a4c6d39435579a1c2b8a8d12/langchain-1.2.8.tar.gz", hash = "sha256:d2bc45f8279f6291b152f28df3bb060b27c9a71163fe2e2a1ac878bd314d0dec", size = 558326, upload-time = "2026-02-02T15:51:59.425Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/16/22/a4d4ac98fc2e393537130bbfba0d71a8113e6f884d96f935923e247397fe/langchain-1.2.10.tar.gz", hash = "sha256:bdcd7218d9c79a413cf15e106e4eb94408ac0963df9333ccd095b9ed43bf3be7", size = 570071, upload-time = "2026-02-10T14:56:49.74Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/66/1a/e1cabc08d8b12349fa6a898f033cc6b00a9a031b470582f4a9eb4cf8e55b/langchain-1.2.8-py3-none-any.whl", hash = "sha256:74a9595420b90e2fd6dc42e323e5e6c9f2a5d059b0ab51e4ad383893b86f8fbe", size = 108986, upload-time = "2026-02-02T15:51:58.465Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/06/c3394327f815fade875724c0f6cff529777c96a1e17fea066deb997f8cf5/langchain-1.2.10-py3-none-any.whl", hash = "sha256:e07a377204451fffaed88276b8193e894893b1003e25c5bca6539288ccca3698", size = 111738, upload-time = "2026-02-10T14:56:47.985Z" },
]
[package.optional-dependencies]
@@ -1316,16 +1329,16 @@ openai = [
[[package]]
name = "langchain-anthropic"
-version = "1.3.2"
+version = "1.3.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anthropic" },
{ name = "langchain-core" },
{ name = "pydantic" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/e7/dd/c5e094079bdd748ca3f0bd0a09189ed2fa46bba56b5a8351198dc7c19e1f/langchain_anthropic-1.3.2.tar.gz", hash = "sha256:e551726a6ebf20229bde06022b5149d33bd48d28e34bd002a744953667b8ad48", size = 686239, upload-time = "2026-02-06T16:14:46.199Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/58/48/cf217b3836099220737ff1f8fd07a554993080dfc9c0b4dd4af16ccb0604/langchain_anthropic-1.3.3.tar.gz", hash = "sha256:37198413c9bde5a9e9829f13c7b9ed4870d7085e7fba9fd803ef4d98ef8ea220", size = 686916, upload-time = "2026-02-10T21:02:28.924Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/c9/6b/2da16c32308f79bb4588cec7095edbc770722ae4b3c3a1c135e05b0bdc2e/langchain_anthropic-1.3.2-py3-none-any.whl", hash = "sha256:35bc30862696a493680b898eb76bd6c866841f8e48a57d5eca1420a4fd807ac0", size = 46751, upload-time = "2026-02-06T16:14:44.734Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/f1/cf56d47964b6fe080cdc54c3e32bc05e560927d549b2634b39d14aaf6e05/langchain_anthropic-1.3.3-py3-none-any.whl", hash = "sha256:8008ce5fb680268681673e09f93a9ac08eba9e304477101e5e138f06b5cd8710", size = 46831, upload-time = "2026-02-10T21:02:27.386Z" },
]
[[package]]
@@ -1371,7 +1384,7 @@ wheels = [
[[package]]
name = "langchain-core"
-version = "1.2.9"
+version = "1.2.11"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jsonpatch" },
@@ -1383,9 +1396,9 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "uuid-utils" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/a6/85/f501592b5d76b27a198f1102bafe365151a0a6f69444122fad6d10e6f4bf/langchain_core-1.2.9.tar.gz", hash = "sha256:a3768febc762307241d153b0f8bc58fd4b70c0ff077fda3274606741fca3f5a7", size = 815900, upload-time = "2026-02-05T14:21:43.942Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/12/17/1943cedfc118e04b8128e4c3e1dbf0fa0ea58eefddbb6198cfd699d19f01/langchain_core-1.2.11.tar.gz", hash = "sha256:f164bb36602dd74a3a50c1334fca75309ad5ed95767acdfdbb9fa95ce28a1e01", size = 831211, upload-time = "2026-02-10T20:35:28.35Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/94/46/77846a98913e444d0d564070a9056bd999daada52bd099dc1e8812272810/langchain_core-1.2.9-py3-none-any.whl", hash = "sha256:7e5ecba5ed7a65852e8d5288e9ceeba05340fa9baf32baf672818b497bbaea8f", size = 496296, upload-time = "2026-02-05T14:21:42.816Z" },
+ { url = "https://files.pythonhosted.org/packages/10/30/1f80e3fc674353cad975ed5294353d42512535d2094ef032c06454c2c873/langchain_core-1.2.11-py3-none-any.whl", hash = "sha256:ae11ceb8dda60d0b9d09e763116e592f1683327c17be5b715f350fd29aee65d3", size = 500062, upload-time = "2026-02-10T20:35:26.698Z" },
]
[[package]]
@@ -1419,16 +1432,16 @@ wheels = [
[[package]]
name = "langchain-openai"
-version = "1.1.7"
+version = "1.1.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "langchain-core" },
{ name = "openai" },
{ name = "tiktoken" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/38/b7/30bfc4d1b658a9ee524bcce3b0b2ec9c45a11c853a13c4f0c9da9882784b/langchain_openai-1.1.7.tar.gz", hash = "sha256:f5ec31961ed24777548b63a5fe313548bc6e0eb9730d6552b8c6418765254c81", size = 1039134, upload-time = "2026-01-07T19:44:59.728Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/49/ae/1dbeb49ab8f098f78ec52e21627e705e5d7c684dc8826c2c34cc2746233a/langchain_openai-1.1.9.tar.gz", hash = "sha256:fdee25dcf4b0685d8e2f59856f4d5405431ef9e04ab53afe19e2e8360fed8234", size = 1004828, upload-time = "2026-02-10T21:03:21.615Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/64/a1/50e7596aca775d8c3883eceeaf47489fac26c57c1abe243c00174f715a8a/langchain_openai-1.1.7-py3-none-any.whl", hash = "sha256:34e9cd686aac1a120d6472804422792bf8080a2103b5d21ee450c9e42d053815", size = 84753, upload-time = "2026-01-07T19:44:58.629Z" },
+ { url = "https://files.pythonhosted.org/packages/52/a1/8a20d19f69d022c10d34afa42d972cc50f971b880d0eb4a828cf3dd824a8/langchain_openai-1.1.9-py3-none-any.whl", hash = "sha256:ca2482b136c45fb67c0db84a9817de675e0eb8fb2203a33914c1b7a96f273940", size = 85769, upload-time = "2026-02-10T21:03:20.333Z" },
]
[[package]]
@@ -1503,20 +1516,20 @@ wheels = [
[[package]]
name = "langgraph-sdk"
-version = "0.3.4"
+version = "0.3.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
{ name = "orjson" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/11/37/1c18ebb9090a29cd360abce7ee0d3c639fa680e20a078b8c5e85044443d9/langgraph_sdk-0.3.4.tar.gz", hash = "sha256:a8055464027c70ff7b454c0d67caec9a91c6a2bc75c66d023d3ce48773a2a774", size = 132239, upload-time = "2026-02-06T00:44:14.309Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/60/2b/2dae368ac76e315197f07ab58077aadf20833c226fbfd450d71745850314/langgraph_sdk-0.3.5.tar.gz", hash = "sha256:64669e9885a908578eed921ef9a8e52b8d0cd38db1e3e5d6d299d4e6f8830ac0", size = 177470, upload-time = "2026-02-10T16:56:09.18Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/74/e6/df257026e1370320b60d54492c0847631729ad80ca8d8571b55ece594281/langgraph_sdk-0.3.4-py3-none-any.whl", hash = "sha256:eb73a2fb57a4167aeb31efeaf0c4daecd2cf0c942e8a376670fd1cc636992f49", size = 67833, upload-time = "2026-02-06T00:44:12.795Z" },
+ { url = "https://files.pythonhosted.org/packages/84/d5/a14d957c515ba7a9713bf0f03f2b9277979c403bc50f829bdfd54ae7dc9e/langgraph_sdk-0.3.5-py3-none-any.whl", hash = "sha256:bcfa1dcbddadb604076ce46f5e08969538735e5ac47fa863d4fac5a512dab5c9", size = 70851, upload-time = "2026-02-10T16:56:07.983Z" },
]
[[package]]
name = "langsmith"
-version = "0.6.9"
+version = "0.7.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
@@ -1529,9 +1542,9 @@ dependencies = [
{ name = "xxhash" },
{ name = "zstandard" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/9a/e0/463a70b43d6755b01598bb59932eec8e2029afcab455b5312c318ac457b5/langsmith-0.6.9.tar.gz", hash = "sha256:aae04cec6e6d8e133f63ba71c332ce0fbd2cda95260db7746ff4c3b6a3c41db1", size = 973557, upload-time = "2026-02-05T20:10:55.629Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/67/48/3151de6df96e0977b8d319b03905e29db0df6929a85df1d922a030b7e68d/langsmith-0.7.1.tar.gz", hash = "sha256:e3fec2f97f7c5192f192f4873d6a076b8c6469768022323dded07087d8cb70a4", size = 984367, upload-time = "2026-02-10T01:55:24.696Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e6/8e/063e09c5e8a3dcd77e2a8f0bff3f71c1c52a9d238da1bcafd2df3281da17/langsmith-0.6.9-py3-none-any.whl", hash = "sha256:86ba521e042397f6fbb79d63991df9d5f7b6a6dd6a6323d4f92131291478dcff", size = 319228, upload-time = "2026-02-05T20:10:54.248Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/87/6f2b008a456b4f5fd0fb1509bb7e1e9368c1a0c9641a535f224a9ddc10f3/langsmith-0.7.1-py3-none-any.whl", hash = "sha256:92cfa54253d35417184c297ad25bfd921d95f15d60a1ca75f14d4e7acd152a29", size = 322515, upload-time = "2026-02-10T01:55:22.531Z" },
]
[package.optional-dependencies]
@@ -1604,11 +1617,11 @@ wheels = [
[[package]]
name = "markdown"
-version = "3.10.1"
+version = "3.10.2"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/b7/b1/af95bcae8549f1f3fd70faacb29075826a0d689a27f232e8cee315efa053/markdown-3.10.1.tar.gz", hash = "sha256:1c19c10bd5c14ac948c53d0d762a04e2fa35a6d58a6b7b1e6bfcbe6fefc0001a", size = 365402, upload-time = "2026-01-21T18:09:28.206Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/2b/f4/69fa6ed85ae003c2378ffa8f6d2e3234662abd02c10d216c0ba96081a238/markdown-3.10.2.tar.gz", hash = "sha256:994d51325d25ad8aa7ce4ebaec003febcce822c3f8c911e3b17c52f7f589f950", size = 368805, upload-time = "2026-02-09T14:57:26.942Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/59/1b/6ef961f543593969d25b2afe57a3564200280528caa9bd1082eecdd7b3bc/markdown-3.10.1-py3-none-any.whl", hash = "sha256:867d788939fe33e4b736426f5b9f651ad0c0ae0ecf89df0ca5d1176c70812fe3", size = 107684, upload-time = "2026-01-21T18:09:27.203Z" },
+ { url = "https://files.pythonhosted.org/packages/de/1f/77fa3081e4f66ca3576c896ae5d31c3002ac6607f9747d2e3aa49227e464/markdown-3.10.2-py3-none-any.whl", hash = "sha256:e91464b71ae3ee7afd3017d9f358ef0baf158fd9a298db92f1d4761133824c36", size = 108180, upload-time = "2026-02-09T14:57:25.787Z" },
]
[[package]]
@@ -1938,7 +1951,7 @@ wheels = [
[[package]]
name = "openai"
-version = "2.17.0"
+version = "2.20.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -1950,9 +1963,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/9c/a2/677f22c4b487effb8a09439fb6134034b5f0a39ca27df8b95fac23a93720/openai-2.17.0.tar.gz", hash = "sha256:47224b74bd20f30c6b0a6a329505243cb2f26d5cf84d9f8d0825ff8b35e9c999", size = 631445, upload-time = "2026-02-05T16:27:40.953Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/6e/5a/f495777c02625bfa18212b6e3b73f1893094f2bf660976eb4bc6f43a1ca2/openai-2.20.0.tar.gz", hash = "sha256:2654a689208cd0bf1098bb9462e8d722af5cbe961e6bba54e6f19fb843d88db1", size = 642355, upload-time = "2026-02-10T19:02:54.145Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/44/97/284535aa75e6e84ab388248b5a323fc296b1f70530130dee37f7f4fbe856/openai-2.17.0-py3-none-any.whl", hash = "sha256:4f393fd886ca35e113aac7ff239bcd578b81d8f104f5aedc7d3693eb2af1d338", size = 1069524, upload-time = "2026-02-05T16:27:38.941Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/a0/cf4297aa51bbc21e83ef0ac018947fa06aea8f2364aad7c96cbf148590e6/openai-2.20.0-py3-none-any.whl", hash = "sha256:38d989c4b1075cd1f76abc68364059d822327cf1a932531d429795f4fc18be99", size = 1098479, upload-time = "2026-02-10T19:02:52.157Z" },
]
[[package]]
@@ -2063,11 +2076,11 @@ wheels = [
[[package]]
name = "parso"
-version = "0.8.5"
+version = "0.8.6"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/81/76/a1e769043c0c0c9fe391b702539d594731a4362334cdf4dc25d0c09761e7/parso-0.8.6.tar.gz", hash = "sha256:2b9a0332696df97d454fa67b81618fd69c35a7b90327cbe6ba5c92d2c68a7bfd", size = 401621, upload-time = "2026-02-09T15:45:24.425Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/61/fae042894f4296ec49e3f193aff5d7c18440da9e48102c3315e1bc4519a7/parso-0.8.6-py2.py3-none-any.whl", hash = "sha256:2c549f800b70a5c4952197248825584cb00f033b29c692671d3bf08bf380baff", size = 106894, upload-time = "2026-02-09T15:45:21.391Z" },
]
[[package]]
@@ -2465,20 +2478,20 @@ wheels = [
[[package]]
name = "pyproject-fmt"
-version = "2.12.1"
+version = "2.15.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "toml-fmt-common" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/f9/c5/eb967ebabf50ec45139fef3b70e9a47ab088771bae5d734388487e9171dd/pyproject_fmt-2.12.1.tar.gz", hash = "sha256:6b4cecce51e864bd096e2a5202aa02aa3dfcbf0c3b50589013ee040436074e1a", size = 83062, upload-time = "2026-01-31T16:56:22.673Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/7e/34/0586cba8a147011b7708ef08bd24f6e457669b3953bd2ac8c08d18d81395/pyproject_fmt-2.15.2.tar.gz", hash = "sha256:10b22effb4c1ac12033d41b089bee60aded60f2241e0b95f2794917fc7d5dac8", size = 126980, upload-time = "2026-02-10T23:19:03.435Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/6f/46/a6b59474114da09f8808988458ea6a3b6888b321543ad51f0b87e401f9e3/pyproject_fmt-2.12.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ee2e391ce7840ee6a163b029d417130b8399a87e2f3071d06c28fa3ee249b453", size = 1365794, upload-time = "2026-01-31T16:56:05.868Z" },
- { url = "https://files.pythonhosted.org/packages/17/fb/65cf73b3d4083e3fe9c1454af2ce0a5a75064c21b9108f1d0bd1c61dd90d/pyproject_fmt-2.12.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:ca3995b269684d2e0eb0c56e842c36011f29803caffd1845f63492b86f0209a7", size = 1297733, upload-time = "2026-01-31T16:56:07.4Z" },
- { url = "https://files.pythonhosted.org/packages/03/a6/4e76ca77311f52e5531bc8d8b1e88ac901283e5d160aaf419f6fa0013e4c/pyproject_fmt-2.12.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:32840c3464b7e88624b781da5583f3f19b978b36065f81adef2d6cd9fd1c3de5", size = 1379311, upload-time = "2026-01-31T16:56:09.298Z" },
- { url = "https://files.pythonhosted.org/packages/4d/88/ba4231bb0939a57ec43af84f3b37dcb58517888b7823b84f6e56065a3d86/pyproject_fmt-2.12.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f3458b6d1ebdbfcbc96b1f5479def9ae4fd6b4da4a500649a2248616acafe928", size = 1475652, upload-time = "2026-01-31T16:56:11.026Z" },
- { url = "https://files.pythonhosted.org/packages/f4/05/2ea2160e58a2ae187511b2cfef18684f058e9f252c4f3efcb5c201378c37/pyproject_fmt-2.12.1-cp39-abi3-manylinux_2_31_riscv64.whl", hash = "sha256:4b55f9d963309b6054efc322d0596c7152b2ef21a9acdc4f1cc010fa082a1243", size = 1388206, upload-time = "2026-01-31T16:56:12.929Z" },
- { url = "https://files.pythonhosted.org/packages/09/95/7bb78158997067eed796332518e35b624ca8478f16935c30885f7b334d80/pyproject_fmt-2.12.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a4225499d17bc4112463fdf360e1c5fe90e33b011ec640d5e7efaeedbe3cd724", size = 1689134, upload-time = "2026-01-31T16:56:14.693Z" },
- { url = "https://files.pythonhosted.org/packages/ee/db/3112285574e584a2aa663b70e44052911051818835585ce52ec64751ff34/pyproject_fmt-2.12.1-cp39-abi3-win_amd64.whl", hash = "sha256:b947aaa33a149ec19cfbb464953dba00a28bbba85b46e418bf361b0df0a0b2f6", size = 1343699, upload-time = "2026-01-31T16:56:16.452Z" },
+ { url = "https://files.pythonhosted.org/packages/64/81/f537fb52345096912ed86dac5805768349758593f7a0112dc25db412010f/pyproject_fmt-2.15.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:7ec5b8cc45125362ac29ef5e79c653a08865a73758bda905920d724f4e806f4b", size = 4708882, upload-time = "2026-02-10T23:18:41.996Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/4e/a8a6419a79586254d0f81f682e6de7dc6b49b99aa7e5ee5b9e34c666e9e2/pyproject_fmt-2.15.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:cebc1e2073730e66be7072110c0237bbe9ba1e751045d64397daf21ee4b44f50", size = 4521260, upload-time = "2026-02-10T23:18:44.366Z" },
+ { url = "https://files.pythonhosted.org/packages/83/4a/35b9b0b9da2c3799564580af44a5a851773b80fd519e7c2b41e5c939d008/pyproject_fmt-2.15.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7a0d6e2675831d00aa8fc54a028727e51e64fa2936f798c87661542da2945ea9", size = 4666216, upload-time = "2026-02-10T23:18:46.375Z" },
+ { url = "https://files.pythonhosted.org/packages/90/c0/bfc9ee58a73820933b7935ac710f96f0cece96dc94527f4bec3b3e796575/pyproject_fmt-2.15.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cdf004e3591e9450f0cbcfb9a39f198f4a5f50d40ad3a26e0f9e9ddf513d7bbc", size = 4970963, upload-time = "2026-02-10T23:18:48.188Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/d2/e0a57cb6f2812da6ea0d9c3e1ec83108c2efe737549be603cd7dccd3b4da/pyproject_fmt-2.15.2-cp39-abi3-manylinux_2_31_riscv64.whl", hash = "sha256:1f59674432fa93410ca2697d2d868793f411a055b9bb9a5a2127047c77fced40", size = 4707417, upload-time = "2026-02-10T23:18:49.92Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/dc/ac96ef4adf722809fb2bf048ec4888a3dfded4e4028072b03631cd4e4d6d/pyproject_fmt-2.15.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:dce488a7d99bdac0fde30addb2455ffe220f2063181079653d4754544ce57aed", size = 5173555, upload-time = "2026-02-10T23:18:52.398Z" },
+ { url = "https://files.pythonhosted.org/packages/96/db/7c0efc419d846407a966d3ee581505043828c1e43f97f5424f12fb4d1e8d/pyproject_fmt-2.15.2-cp39-abi3-win_amd64.whl", hash = "sha256:3767a4b185490ac65e73e5ff1d4bc304d48cdddde1efe95f8c364ae6ed1867ec", size = 4826862, upload-time = "2026-02-10T23:18:54.141Z" },
]
[[package]]
@@ -2537,14 +2550,15 @@ wheels = [
[[package]]
name = "pytest-env"
-version = "1.2.0"
+version = "1.3.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pytest" },
+ { name = "python-dotenv" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/13/12/9c87d0ca45d5992473208bcef2828169fa7d39b8d7fc6e3401f5c08b8bf7/pytest_env-1.2.0.tar.gz", hash = "sha256:475e2ebe8626cee01f491f304a74b12137742397d6c784ea4bc258f069232b80", size = 8973, upload-time = "2025-10-09T19:15:47.42Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/ad/dd32e4614fb68ad980c949fd4299f8c6a8d4874e24ec8d222c056efb4741/pytest_env-1.3.2.tar.gz", hash = "sha256:f091a2c6a8eb91befcae2b4c1bd2905a51f33bc1c6567707b7feed4e51b76b47", size = 12009, upload-time = "2026-02-11T22:09:49.168Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/27/98/822b924a4a3eb58aacba84444c7439fce32680592f394de26af9c76e2569/pytest_env-1.2.0-py3-none-any.whl", hash = "sha256:d7e5b7198f9b83c795377c09feefa45d56083834e60d04767efd64819fc9da00", size = 6251, upload-time = "2025-10-09T19:15:46.077Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/ad/d793670b26f4fb82e974dbff20d05782ebb23490b08987976cdc62d854bb/pytest_env-1.3.2-py3-none-any.whl", hash = "sha256:e8626b776a035112a8ad58fcc9e04926868c58f15225de484de7c8af4b4b526c", size = 7864, upload-time = "2026-02-11T22:09:47.775Z" },
]
[[package]]
@@ -3002,11 +3016,11 @@ wheels = [
[[package]]
name = "tenacity"
-version = "9.1.3"
+version = "9.1.4"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/1e/4a/c3357c8742f361785e3702bb4c9c68c4cb37a80aa657640b820669be5af1/tenacity-9.1.3.tar.gz", hash = "sha256:a6724c947aa717087e2531f883bde5c9188f603f6669a9b8d54eb998e604c12a", size = 49002, upload-time = "2026-02-05T06:33:12.866Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/47/c6/ee486fd809e357697ee8a44d3d69222b344920433d3b6666ccd9b374630c/tenacity-9.1.4.tar.gz", hash = "sha256:adb31d4c263f2bd041081ab33b498309a57c77f9acf2db65aadf0898179cf93a", size = 49413, upload-time = "2026-02-07T10:45:33.841Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/64/6b/cdc85edb15e384d8e934aad89638cc8646e118c80de94c60125d0fc0a185/tenacity-9.1.3-py3-none-any.whl", hash = "sha256:51171cfc6b8a7826551e2f029426b10a6af189c5ac6986adcd7eb36d42f17954", size = 28858, upload-time = "2026-02-05T06:33:11.219Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/c1/eb8f9debc45d3b7918a32ab756658a0904732f75e555402972246b0b8e71/tenacity-9.1.4-py3-none-any.whl", hash = "sha256:6095a360c919085f28c6527de529e76a06ad89b23659fa881ae0649b867a9d55", size = 28926, upload-time = "2026-02-07T10:45:32.24Z" },
]
[[package]]
@@ -3067,39 +3081,53 @@ wheels = [
[[package]]
name = "ty"
-version = "0.0.15"
+version = "0.0.16"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/4e/25/257602d316b9333089b688a7a11b33ebc660b74e8dacf400dc3dfdea1594/ty-0.0.15.tar.gz", hash = "sha256:4f9a5b8df208c62dba56e91b93bed8b5bb714839691b8cff16d12c983bfa1174", size = 5101936, upload-time = "2026-02-05T01:06:34.922Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/18/77f84d89db54ea0d1d1b09fa2f630ac4c240c8e270761cb908c06b6e735c/ty-0.0.16.tar.gz", hash = "sha256:a999b0db6aed7d6294d036ebe43301105681e0c821a19989be7c145805d7351c", size = 5129637, upload-time = "2026-02-10T20:24:16.48Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ce/c5/35626e732b79bf0e6213de9f79aff59b5f247c0a1e3ce0d93e675ab9b728/ty-0.0.15-py3-none-linux_armv6l.whl", hash = "sha256:68e092458516c61512dac541cde0a5e4e5842df00b4e81881ead8f745ddec794", size = 10138374, upload-time = "2026-02-05T01:07:03.804Z" },
- { url = "https://files.pythonhosted.org/packages/d5/8a/48fd81664604848f79d03879b3ca3633762d457a069b07e09fb1b87edd6e/ty-0.0.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:79f2e75289eae3cece94c51118b730211af4ba5762906f52a878041b67e54959", size = 9947858, upload-time = "2026-02-05T01:06:47.453Z" },
- { url = "https://files.pythonhosted.org/packages/b6/85/c1ac8e97bcd930946f4c94db85b675561d590b4e72703bf3733419fc3973/ty-0.0.15-py3-none-macosx_11_0_arm64.whl", hash = "sha256:112a7b26e63e48cc72c8c5b03227d1db280cfa57a45f2df0e264c3a016aa8c3c", size = 9443220, upload-time = "2026-02-05T01:06:44.98Z" },
- { url = "https://files.pythonhosted.org/packages/3c/d9/244bc02599d950f7a4298fbc0c1b25cc808646b9577bdf7a83470b2d1cec/ty-0.0.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71f62a2644972975a657d9dc867bf901235cde51e8d24c20311067e7afd44a56", size = 9949976, upload-time = "2026-02-05T01:07:01.515Z" },
- { url = "https://files.pythonhosted.org/packages/7e/ab/3a0daad66798c91a33867a3ececf17d314ac65d4ae2bbbd28cbfde94da63/ty-0.0.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e48b42be2d257317c85b78559233273b655dd636fc61e7e1d69abd90fd3cba4", size = 9965918, upload-time = "2026-02-05T01:06:54.283Z" },
- { url = "https://files.pythonhosted.org/packages/39/4e/e62b01338f653059a7c0cd09d1a326e9a9eedc351a0f0de9db0601658c3d/ty-0.0.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27dd5b52a421e6871c5bfe9841160331b60866ed2040250cb161886478ab3e4f", size = 10424943, upload-time = "2026-02-05T01:07:08.777Z" },
- { url = "https://files.pythonhosted.org/packages/65/b5/7aa06655ce69c0d4f3e845d2d85e79c12994b6d84c71699cfb437e0bc8cf/ty-0.0.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76b85c9ec2219e11c358a7db8e21b7e5c6674a1fb9b6f633836949de98d12286", size = 10964692, upload-time = "2026-02-05T01:06:37.103Z" },
- { url = "https://files.pythonhosted.org/packages/13/04/36fdfe1f3c908b471e246e37ce3d011175584c26d3853e6c5d9a0364564c/ty-0.0.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9e8204c61d8ede4f21f2975dce74efdb80fafb2fae1915c666cceb33ea3c90b", size = 10692225, upload-time = "2026-02-05T01:06:49.714Z" },
- { url = "https://files.pythonhosted.org/packages/13/41/5bf882649bd8b64ded5fbce7fb8d77fb3b868de1a3b1a6c4796402b47308/ty-0.0.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af87c3be7c944bb4d6609d6c63e4594944b0028c7bd490a525a82b88fe010d6d", size = 10516776, upload-time = "2026-02-05T01:06:52.047Z" },
- { url = "https://files.pythonhosted.org/packages/56/75/66852d7e004f859839c17ffe1d16513c1e7cc04bcc810edb80ca022a9124/ty-0.0.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:50dccf7398505e5966847d366c9e4c650b8c225411c2a68c32040a63b9521eea", size = 9928828, upload-time = "2026-02-05T01:06:56.647Z" },
- { url = "https://files.pythonhosted.org/packages/65/72/96bc16c7b337a3ef358fd227b3c8ef0c77405f3bfbbfb59ee5915f0d9d71/ty-0.0.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:bd797b8f231a4f4715110259ad1ad5340a87b802307f3e06d92bfb37b858a8f3", size = 9978960, upload-time = "2026-02-05T01:06:29.567Z" },
- { url = "https://files.pythonhosted.org/packages/a0/18/d2e316a35b626de2227f832cd36d21205e4f5d96fd036a8af84c72ecec1b/ty-0.0.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9deb7f20e18b25440a9aa4884f934ba5628ef456dbde91819d5af1a73da48af3", size = 10135903, upload-time = "2026-02-05T01:06:59.256Z" },
- { url = "https://files.pythonhosted.org/packages/02/d3/b617a79c9dad10c888d7c15cd78859e0160b8772273637b9c4241a049491/ty-0.0.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7b31b3de031255b90a5f4d9cb3d050feae246067c87130e5a6861a8061c71754", size = 10615879, upload-time = "2026-02-05T01:07:06.661Z" },
- { url = "https://files.pythonhosted.org/packages/fb/b0/2652a73c71c77296a6343217063f05745da60c67b7e8a8e25f2064167fce/ty-0.0.15-py3-none-win32.whl", hash = "sha256:9362c528ceb62c89d65c216336d28d500bc9f4c10418413f63ebc16886e16cc1", size = 9578058, upload-time = "2026-02-05T01:06:42.928Z" },
- { url = "https://files.pythonhosted.org/packages/84/6e/08a4aedebd2a6ce2784b5bc3760e43d1861f1a184734a78215c2d397c1df/ty-0.0.15-py3-none-win_amd64.whl", hash = "sha256:4db040695ae67c5524f59cb8179a8fa277112e69042d7dfdac862caa7e3b0d9c", size = 10457112, upload-time = "2026-02-05T01:06:39.885Z" },
- { url = "https://files.pythonhosted.org/packages/b3/be/1991f2bc12847ae2d4f1e3ac5dcff8bb7bc1261390645c0755bb55616355/ty-0.0.15-py3-none-win_arm64.whl", hash = "sha256:e5a98d4119e77d6136461e16ae505f8f8069002874ab073de03fbcb1a5e8bf25", size = 9937490, upload-time = "2026-02-05T01:06:32.388Z" },
+ { url = "https://files.pythonhosted.org/packages/67/b9/909ebcc7f59eaf8a2c18fb54bfcf1c106f99afb3e5460058d4b46dec7b20/ty-0.0.16-py3-none-linux_armv6l.whl", hash = "sha256:6d8833b86396ed742f2b34028f51c0e98dbf010b13ae4b79d1126749dc9dab15", size = 10113870, upload-time = "2026-02-10T20:24:11.864Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/2c/b963204f3df2fdbf46a4a1ea4a060af9bb676e065d59c70ad0f5ae0dbae8/ty-0.0.16-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:934c0055d3b7f1cf3c8eab78c6c127ef7f347ff00443cef69614bda6f1502377", size = 9936286, upload-time = "2026-02-10T20:24:08.695Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/4d/3d78294f2ddfdded231e94453dea0e0adef212b2bd6536296039164c2a3e/ty-0.0.16-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b55e8e8733b416d914003cd22e831e139f034681b05afed7e951cc1a5ea1b8d4", size = 9442660, upload-time = "2026-02-10T20:24:02.704Z" },
+ { url = "https://files.pythonhosted.org/packages/15/40/ce48c0541e3b5749b0890725870769904e6b043e077d4710e5325d5cf807/ty-0.0.16-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feccae8f4abd6657de111353bd604f36e164844466346eb81ffee2c2b06ea0f0", size = 9934506, upload-time = "2026-02-10T20:24:35.818Z" },
+ { url = "https://files.pythonhosted.org/packages/84/16/3b29de57e1ec6e56f50a4bb625ee0923edb058c5f53e29014873573a00cd/ty-0.0.16-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cad5e29d8765b92db5fa284940ac57149561f3f89470b363b9aab8a6ce553b0", size = 9933099, upload-time = "2026-02-10T20:24:43.003Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/a1/e546995c25563d318c502b2f42af0fdbed91e1fc343708241e2076373644/ty-0.0.16-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86f28797c7dc06f081238270b533bf4fc8e93852f34df49fb660e0b58a5cda9a", size = 10438370, upload-time = "2026-02-10T20:24:33.44Z" },
+ { url = "https://files.pythonhosted.org/packages/11/c1/22d301a4b2cce0f75ae84d07a495f87da193bcb68e096d43695a815c4708/ty-0.0.16-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be971a3b42bcae44d0e5787f88156ed2102ad07558c05a5ae4bfd32a99118e66", size = 10992160, upload-time = "2026-02-10T20:24:25.574Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/40/f1892b8c890db3f39a1bab8ec459b572de2df49e76d3cad2a9a239adcde9/ty-0.0.16-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c9f982b7c4250eb91af66933f436b3a2363c24b6353e94992eab6551166c8b7", size = 10717892, upload-time = "2026-02-10T20:24:05.914Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/1b/caf9be8d0c738983845f503f2e92ea64b8d5fae1dd5ca98c3fca4aa7dadc/ty-0.0.16-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d122edf85ce7bdf6f85d19158c991d858fc835677bd31ca46319c4913043dc84", size = 10510916, upload-time = "2026-02-10T20:24:00.252Z" },
+ { url = "https://files.pythonhosted.org/packages/60/ea/28980f5c7e1f4c9c44995811ea6a36f2fcb205232a6ae0f5b60b11504621/ty-0.0.16-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:497ebdddbb0e35c7758ded5aa4c6245e8696a69d531d5c9b0c1a28a075374241", size = 9908506, upload-time = "2026-02-10T20:24:28.133Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/80/8672306596349463c21644554f935ff8720679a14fd658fef658f66da944/ty-0.0.16-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e1e0ac0837bde634b030243aeba8499383c0487e08f22e80f5abdacb5b0bd8ce", size = 9949486, upload-time = "2026-02-10T20:24:18.62Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/8a/d8747d36f30bd82ea157835f5b70d084c9bb5d52dd9491dba8a149792d6a/ty-0.0.16-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1216c9bcca551d9f89f47a817ebc80e88ac37683d71504e5509a6445f24fd024", size = 10145269, upload-time = "2026-02-10T20:24:38.249Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/4c/753535acc7243570c259158b7df67e9c9dd7dab9a21ee110baa4cdcec45d/ty-0.0.16-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:221bbdd2c6ee558452c96916ab67fcc465b86967cf0482e19571d18f9c831828", size = 10608644, upload-time = "2026-02-10T20:24:40.565Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/05/8e8db64cf45a8b16757e907f7a3bfde8d6203e4769b11b64e28d5bdcd79a/ty-0.0.16-py3-none-win32.whl", hash = "sha256:d52c4eb786be878e7514cab637200af607216fcc5539a06d26573ea496b26512", size = 9582579, upload-time = "2026-02-10T20:24:30.406Z" },
+ { url = "https://files.pythonhosted.org/packages/25/bc/45759faea132cd1b2a9ff8374e42ba03d39d076594fbb94f3e0e2c226c62/ty-0.0.16-py3-none-win_amd64.whl", hash = "sha256:f572c216aa8ecf79e86589c6e6d4bebc01f1f3cb3be765c0febd942013e1e73a", size = 10436043, upload-time = "2026-02-10T20:23:57.51Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/02/70a491802e7593e444137ed4e41a04c34d186eb2856f452dd76b60f2e325/ty-0.0.16-py3-none-win_arm64.whl", hash = "sha256:430eadeb1c0de0c31ef7bef9d002bdbb5f25a31e3aad546f1714d76cd8da0a87", size = 9915122, upload-time = "2026-02-10T20:24:14.285Z" },
]
[[package]]
-name = "typer-slim"
-version = "0.21.1"
+name = "typer"
+version = "0.23.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
+ { name = "annotated-doc" },
{ name = "click" },
- { name = "typing-extensions" },
+ { name = "rich" },
+ { name = "shellingham" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7e/e6/44e073787aa57cd71c151f44855232feb0f748428fd5242d7366e3c4ae8b/typer-0.23.0.tar.gz", hash = "sha256:d8378833e47ada5d3d093fa20c4c63427cc4e27127f6b349a6c359463087d8cc", size = 120181, upload-time = "2026-02-11T15:22:18.637Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7a/ed/d6fca788b51d0d4640c4bc82d0e85bad4b49809bca36bf4af01b4dcb66a7/typer-0.23.0-py3-none-any.whl", hash = "sha256:79f4bc262b6c37872091072a3cb7cb6d7d79ee98c0c658b4364bdcde3c42c913", size = 56668, upload-time = "2026-02-11T15:22:21.075Z" },
+]
+
+[[package]]
+name = "typer-slim"
+version = "0.23.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typer" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/17/d4/064570dec6358aa9049d4708e4a10407d74c99258f8b2136bb8702303f1a/typer_slim-0.21.1.tar.gz", hash = "sha256:73495dd08c2d0940d611c5a8c04e91c2a0a98600cbd4ee19192255a233b6dbfd", size = 110478, upload-time = "2026-01-06T11:21:11.176Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/1f/8a/881cfd399a119db89619dc1b93d36e2fb6720ddb112bceff41203f1abd72/typer_slim-0.23.0.tar.gz", hash = "sha256:be8b60243df27cfee444c6db1b10a85f4f3e54d940574f31a996f78aa35a8254", size = 4773, upload-time = "2026-02-11T15:22:19.106Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/c8/0a/4aca634faf693e33004796b6cee0ae2e1dba375a800c16ab8d3eff4bb800/typer_slim-0.21.1-py3-none-any.whl", hash = "sha256:6e6c31047f171ac93cc5a973c9e617dbc5ab2bddc4d0a3135dc161b4e2020e0d", size = 47444, upload-time = "2026-01-06T11:21:12.441Z" },
+ { url = "https://files.pythonhosted.org/packages/07/3e/ba3a222c80ee070d9497ece3e1fe77253c142925dd4c90f04278aac0a9eb/typer_slim-0.23.0-py3-none-any.whl", hash = "sha256:1d693daf22d998a7b1edab8413cdcb8af07254154ce3956c1664dc11b01e2f8b", size = 3399, upload-time = "2026-02-11T15:22:17.792Z" },
]
[[package]]