From a973ee5333261eede6e5f4d4b15fc4bc8e3a4e75 Mon Sep 17 00:00:00 2001 From: Sandro Date: Tue, 20 Jan 2026 14:54:34 +0000 Subject: [PATCH 1/2] Migrated celery to django tasks. --- AGENTS.md | 4 +- CHANGELOG.md | 4 + README.md | 2 +- daiv/codebase/clients/github/api/callbacks.py | 76 +++----- daiv/codebase/clients/gitlab/api/callbacks.py | 72 +++---- daiv/codebase/context.py | 6 +- daiv/codebase/tasks.py | 9 +- daiv/core/utils.py | 2 +- daiv/daiv/__init__.py | 7 +- daiv/daiv/celeryapp.py | 94 --------- daiv/daiv/settings/components/celery.py | 12 -- daiv/daiv/settings/components/common.py | 2 +- daiv/daiv/settings/components/i18n.py | 5 - daiv/daiv/settings/components/sentry.py | 3 +- daiv/daiv/settings/components/tasks.py | 1 + daiv/daiv/settings/components/testing.py | 6 +- daiv/daiv/settings/local.py | 2 +- daiv/daiv/settings/production.py | 2 +- daiv/daiv/settings/test.py | 2 +- daiv/quick_actions/tasks.py | 6 +- docker-compose.yml | 8 +- docker/local/app/config.env | 1 - docker/local/app/start-crontask | 5 + docker/local/app/start-worker | 2 +- docker/production/app/Dockerfile | 3 +- docker/production/app/start-crontask | 5 + docker/production/app/start-worker | 4 +- docs/configuration/env-config.md | 11 -- docs/getting-started/up-and-running.md | 61 ++++-- pyproject.toml | 5 +- tests/unit_tests/daiv/__init__.py | 1 + tests/unit_tests/quick_actions/test_tasks.py | 16 +- uv.lock | 178 ++++++------------ 33 files changed, 221 insertions(+), 396 deletions(-) delete mode 100644 daiv/daiv/celeryapp.py delete mode 100644 daiv/daiv/settings/components/celery.py create mode 100644 daiv/daiv/settings/components/tasks.py create mode 100644 docker/local/app/start-crontask create mode 100644 docker/production/app/start-crontask create mode 100644 tests/unit_tests/daiv/__init__.py diff --git a/AGENTS.md b/AGENTS.md index 7836a933..399b776a 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,6 +1,6 @@ # Project Overview -DAIV is an AI-powered development assistant built on Django 
with Celery for async task processing, LangChain/LangGraph for LLM integration, and includes `daiv-sandbox` for sandboxed command execution. It integrates with GitLab and GitHub to automate issue resolution, code reviews, and CI/CD pipeline repairs. +DAIV is an AI-powered development assistant built on Django with Django Tasks for async task processing, LangChain/LangGraph for LLM integration, and includes `daiv-sandbox` for sandboxed command execution. It integrates with GitLab and GitHub to automate issue resolution, code reviews, and CI/CD pipeline repairs. ## Project Structure @@ -10,7 +10,7 @@ DAIV is an AI-powered development assistant built on Django with Celery for asyn * `chat/` - Chat module with the OpenAI compatible API. * `core/` - Core module with common logic. * `quick_actions/` - Quick actions module. - * `daiv/` - Main logic of the Django project: settings, urls, wsgi, asgi, celery, etc. + * `daiv/` - Main logic of the Django project: settings, urls, wsgi, asgi, tasks, etc. * `docker/` - Dockerfiles and configurations for local and production deployments. * `docs/` - Documentation for the project. * `evals/` - Evaluation suite for the project (openevals + langsmith + pytest). diff --git a/CHANGELOG.md b/CHANGELOG.md index f21c76b9..795d1823 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `daiv-max`: Use high-performance mode with `CLAUDE_OPUS_4_5` model and `HIGH` thinking level for both planning and execution - Added `MAX_PLANNING_MODEL_NAME`, `MAX_EXECUTION_MODEL_NAME`, `MAX_PLANNING_THINKING_LEVEL`, and `MAX_EXECUTION_THINKING_LEVEL` configuration settings for high-performance mode - Added support for `gpt-5.2` model from OpenAI +- Added `django-crontask` integration and scheduler service scaffolding for periodic tasks. 
### Changed @@ -33,6 +34,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Migrated `PullRequestDescriberAgent` evaluation tests to use data-driven approach with JSONL test cases and reference outputs - Deferred sandbox session creation until the first `bash` tool invocation. - Updated merge request creation to return full metadata, including web URLs, for GitHub and GitLab clients. +- Migrated background processing from Celery to Django Tasks using the `django-tasks` database backend. +- Simplified task definitions to use Django Tasks async support directly. ### Fixed @@ -42,6 +45,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Removed builtin `maintaining-changelog` skill in favor of the new changelog subagent - Removed `pull_request.branch_name_convention` from `.daiv.yml` configuration file. **BREAKING CHANGE**: Branch name convention must now be defined in the `AGENTS.md` file instead. +- Removed Celery worker configuration and bootstrap scripts. ## [1.1.0] - 2025-12-04 diff --git a/README.md b/README.md index 82a8c5fd..d7235c9b 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ DAIV is an open-source automation assistant designed to enhance developer produc ## Technology Stack - **Backend Framework**: [Django](https://www.djangoproject.com/) for building robust APIs and managing database models. -- **Async Tasks**: [Celery](https://docs.celeryproject.org/) with Redis, applying actions in the background and scaling the agents to handle multiple requests. +- **Async Tasks**: [Django Tasks](https://docs.djangoproject.com/en/6.0/topics/tasks/) with the [`django-tasks` backend](https://pypi.org/project/django-tasks/) and [`django-crontask`](https://pypi.org/project/django-crontask/) for periodic scheduling. 
- **LLM Frameworks**: [LangChain](https://python.langchain.com/) and [LangGraph](https://langchain-ai.github.io/langgraph), integrating various LLM agents for intent understanding, query transformation, and natural language reasoning about code changes. - **Code Executor**: [Sandbox](https://github.com/srtab/daiv-sandbox/) for running commands in a secure sandbox to allow the agents to perform actions on the codebase. - **Observability**: [LangSmith](https://www.langchain.com/langsmith) for tracing and monitoring all the interactions between DAIV and your codebase. diff --git a/daiv/codebase/clients/github/api/callbacks.py b/daiv/codebase/clients/github/api/callbacks.py index a18a89f3..baa9546a 100644 --- a/daiv/codebase/clients/github/api/callbacks.py +++ b/daiv/codebase/clients/github/api/callbacks.py @@ -2,8 +2,6 @@ from functools import cached_property from typing import Any, Literal -from asgiref.sync import sync_to_async - from codebase.api.callbacks import BaseCallback from codebase.clients import RepoClient from codebase.clients.base import Emoji @@ -53,13 +51,9 @@ def accept_callback(self) -> bool: ) async def process_callback(self): - await sync_to_async( - address_issue_task.si( - repo_id=self.repository.full_name, - issue_iid=self.issue.number, - should_reset_plan=self.should_reset_plan(), - ).delay - )() + await address_issue_task.aenqueue( + repo_id=self.repository.full_name, issue_iid=self.issue.number, should_reset_plan=self.should_reset_plan() + ) def should_reset_plan(self) -> bool: """ @@ -109,47 +103,39 @@ async def process_callback(self): ) if self._action_scope == Scope.ISSUE: - await sync_to_async( - execute_issue_task.si( - repo_id=self.repository.full_name, - comment_id=self.comment.id, - action_command=self._quick_action_command.command, - action_args=" ".join(self._quick_action_command.args), - issue_id=self.issue.number, - ).delay - )() + await execute_issue_task.aenqueue( + repo_id=self.repository.full_name, + 
comment_id=self.comment.id, + action_command=self._quick_action_command.command, + action_args=" ".join(self._quick_action_command.args), + issue_id=self.issue.number, + ) elif self._action_scope == Scope.MERGE_REQUEST: - await sync_to_async( - execute_merge_request_task.si( - repo_id=self.repository.full_name, - comment_id=self.comment.id, - action_command=self._quick_action_command.command, - action_args=" ".join(self._quick_action_command.args), - merge_request_id=self.issue.number, - ).delay - )() + await execute_merge_request_task.aenqueue( + repo_id=self.repository.full_name, + comment_id=self.comment.id, + action_command=self._quick_action_command.command, + action_args=" ".join(self._quick_action_command.args), + merge_request_id=self.issue.number, + ) elif self._is_issue_comment: self._client.create_issue_note_emoji( self.repository.full_name, self.issue.number, Emoji.EYES, self.comment.id ) - await sync_to_async( - address_issue_task.si( - repo_id=self.repository.full_name, issue_iid=self.issue.number, mention_comment_id=self.comment.id - ).delay - )() + await address_issue_task.aenqueue( + repo_id=self.repository.full_name, issue_iid=self.issue.number, mention_comment_id=self.comment.id + ) elif self._is_merge_request_review: # The webhook doesn't provide the source branch, so we need to fetch it from the merge request. 
merge_request = self._client.get_merge_request(self.repository.full_name, self.issue.number) - await sync_to_async( - address_mr_comments_task.si( - repo_id=self.repository.full_name, - merge_request_id=self.issue.number, - merge_request_source_branch=merge_request.source_branch, - ).delay - )() + await address_mr_comments_task.aenqueue( + repo_id=self.repository.full_name, + merge_request_id=self.issue.number, + merge_request_source_branch=merge_request.source_branch, + ) @property def _is_quick_action(self) -> bool: @@ -257,13 +243,11 @@ async def process_callback(self): GitLab Note Webhook is called multiple times, one per note/discussion. """ - await sync_to_async( - address_mr_review_task.si( - repo_id=self.repository.full_name, - merge_request_id=self.pull_request.number, - merge_request_source_branch=self.pull_request.head.ref, - ).delay - )() + await address_mr_review_task.aenqueue( + repo_id=self.repository.full_name, + merge_request_id=self.pull_request.number, + merge_request_source_branch=self.pull_request.head.ref, + ) class PushCallback(GitHubCallback): diff --git a/daiv/codebase/clients/gitlab/api/callbacks.py b/daiv/codebase/clients/gitlab/api/callbacks.py index efad082b..bb4a6a11 100644 --- a/daiv/codebase/clients/gitlab/api/callbacks.py +++ b/daiv/codebase/clients/gitlab/api/callbacks.py @@ -2,8 +2,6 @@ from functools import cached_property from typing import Any, Literal -from asgiref.sync import sync_to_async - from codebase.api.callbacks import BaseCallback from codebase.base import NoteType from codebase.clients import RepoClient @@ -64,59 +62,49 @@ async def process_callback(self): self._client.create_merge_request_note_emoji( self.project.path_with_namespace, self.merge_request.iid, Emoji.EYES, self.object_attributes.id ) - await sync_to_async( - execute_merge_request_task.si( - repo_id=self.project.path_with_namespace, - comment_id=self.object_attributes.discussion_id, - action_command=self._quick_action_command.command, - action_args=" 
".join(self._quick_action_command.args), - merge_request_id=self.merge_request.iid, - ).delay - )() + await execute_merge_request_task.aenqueue( + repo_id=self.project.path_with_namespace, + comment_id=self.object_attributes.discussion_id, + action_command=self._quick_action_command.command, + action_args=" ".join(self._quick_action_command.args), + merge_request_id=self.merge_request.iid, + ) elif self._action_scope == Scope.ISSUE: self._client.create_issue_note_emoji( self.project.path_with_namespace, self.issue.iid, Emoji.EYES, self.object_attributes.id ) - await sync_to_async( - execute_issue_task.si( - repo_id=self.project.path_with_namespace, - comment_id=self.object_attributes.discussion_id, - action_command=self._quick_action_command.command, - action_args=" ".join(self._quick_action_command.args), - issue_id=self.issue.iid, - ).delay - )() + await execute_issue_task.aenqueue( + repo_id=self.project.path_with_namespace, + comment_id=self.object_attributes.discussion_id, + action_command=self._quick_action_command.command, + action_args=" ".join(self._quick_action_command.args), + issue_id=self.issue.iid, + ) elif self._is_issue_comment: self._client.create_issue_note_emoji( self.project.path_with_namespace, self.issue.iid, Emoji.EYES, self.object_attributes.id ) - await sync_to_async( - address_issue_task.si( - repo_id=self.project.path_with_namespace, - issue_iid=self.issue.iid, - mention_comment_id=self.object_attributes.discussion_id, - ).delay - )() + await address_issue_task.aenqueue( + repo_id=self.project.path_with_namespace, + issue_iid=self.issue.iid, + mention_comment_id=self.object_attributes.discussion_id, + ) elif self._is_merge_request_review: if self.object_attributes.type in [NoteType.DIFF_NOTE, NoteType.DISCUSSION_NOTE]: - await sync_to_async( - address_mr_review_task.si( - repo_id=self.project.path_with_namespace, - merge_request_id=self.merge_request.iid, - merge_request_source_branch=self.merge_request.source_branch, - ).delay - )() + 
await address_mr_review_task.aenqueue( + repo_id=self.project.path_with_namespace, + merge_request_id=self.merge_request.iid, + merge_request_source_branch=self.merge_request.source_branch, + ) elif self.object_attributes.type is None: # This is a comment note. - await sync_to_async( - address_mr_comments_task.si( - repo_id=self.project.path_with_namespace, - merge_request_id=self.merge_request.iid, - merge_request_source_branch=self.merge_request.source_branch, - mention_comment_id=self.object_attributes.discussion_id, - ).delay - )() + await address_mr_comments_task.aenqueue( + repo_id=self.project.path_with_namespace, + merge_request_id=self.merge_request.iid, + merge_request_source_branch=self.merge_request.source_branch, + mention_comment_id=self.object_attributes.discussion_id, + ) else: logger.warning("Unsupported note type: %s", self.object_attributes.type) diff --git a/daiv/codebase/context.py b/daiv/codebase/context.py index 0bd57ca9..796a129e 100644 --- a/daiv/codebase/context.py +++ b/daiv/codebase/context.py @@ -17,12 +17,12 @@ class RuntimeCtx: """ Context to be used across the application layers. - It needs to be set as early as possible on the request lifecycle or celery task. + It needs to be set as early as possible on the request lifecycle or task execution. With this context, we ensure that application layers that need the repository files can access them without doing API calls by accessing the defined `repo_dir` directory, which is a temporary directory with the repository files. - The context is reset at the end of the request lifecycle or celery task. + The context is reset at the end of the request lifecycle or task execution. """ git_platform: GitPlatform @@ -115,7 +115,7 @@ def get_runtime_ctx() -> RuntimeCtx: if ctx is None: raise RuntimeError( "Runtime context not set. " - "It needs to be set as early as possible on the request lifecycle or celery task. 
" + "It needs to be set as early as possible on the request lifecycle or task execution. " "Use the `codebase.context.set_runtime_ctx` context manager to set the context." ) return ctx diff --git a/daiv/codebase/tasks.py b/daiv/codebase/tasks.py index a4b91213..6fd6c686 100644 --- a/daiv/codebase/tasks.py +++ b/daiv/codebase/tasks.py @@ -1,16 +1,17 @@ import logging +from django.tasks import task + from codebase.clients import RepoClient from codebase.context import set_runtime_ctx from codebase.managers.issue_addressor import IssueAddressorManager from codebase.managers.review_addressor import CommentsAddressorManager from core.utils import locked_task -from daiv import async_task logger = logging.getLogger("daiv.tasks") -@async_task() +@task @locked_task(key="{repo_id}:{issue_iid}") async def address_issue_task(repo_id: str, issue_iid: int, mention_comment_id: str, ref: str | None = None): """ @@ -30,7 +31,7 @@ async def address_issue_task(repo_id: str, issue_iid: int, mention_comment_id: s ) -@async_task() +@task @locked_task(key="{repo_id}:{merge_request_id}") async def address_mr_review_task(repo_id: str, merge_request_id: int, merge_request_source_branch: str): """ @@ -45,7 +46,7 @@ async def address_mr_review_task(repo_id: str, merge_request_id: int, merge_requ # await ReviewAddressorManager.process_review_comments(merge_request_id=merge_request_id, runtime_ctx=runtime_ctx) # noqa: E501 ERA001 -@async_task() +@task @locked_task(key="{repo_id}:{merge_request_id}") async def address_mr_comments_task( repo_id: str, merge_request_id: int, merge_request_source_branch: str, mention_comment_id: str diff --git a/daiv/core/utils.py b/daiv/core/utils.py index 6f5a238c..bf3bdbd6 100644 --- a/daiv/core/utils.py +++ b/daiv/core/utils.py @@ -115,7 +115,7 @@ def locked_task(key: str = "", blocking: bool = False): Default is False. 
Example: - @shared_task + @task @locked_task(key="{repo_id}:{issue_iid}") # Lock key will be: "task_name:repo123:issue456" def process_issue(repo_id: str, issue_iid: int): pass diff --git a/daiv/daiv/__init__.py b/daiv/daiv/__init__.py index cc6bf371..5e777f7c 100644 --- a/daiv/daiv/__init__.py +++ b/daiv/daiv/__init__.py @@ -1,9 +1,4 @@ -# This will make sure the app is always imported when -# Django starts so that shared_task will use this app. -from .celeryapp import app as celery_app -from .celeryapp import async_task - __version__ = "1.1.0" USER_AGENT = f"python-daiv-agent/{__version__}" -__all__ = ("celery_app", "async_task", "USER_AGENT") +__all__ = ("USER_AGENT",) diff --git a/daiv/daiv/celeryapp.py b/daiv/daiv/celeryapp.py deleted file mode 100644 index 6a199a72..00000000 --- a/daiv/daiv/celeryapp.py +++ /dev/null @@ -1,94 +0,0 @@ -import asyncio -import functools -import inspect -from typing import TYPE_CHECKING, Any - -from django.core.cache import caches -from django.db import close_old_connections, connections - -from asgiref.sync import ThreadSensitiveContext -from celery import Celery, signals -from langchain_core.tracers.langchain import wait_for_all_tracers - -if TYPE_CHECKING: - from collections.abc import Callable - -app = Celery("daiv") - -app.config_from_object("django.conf:settings", namespace="CELERY") -app.autodiscover_tasks() - - -def _async_to_sync_wrapper(async_func: Callable[..., Any]) -> Callable[..., Any]: - """ - Wraps async functions for Celery tasks with proper connection cleanup. 
- - Credits: https://mrdonbrown.blogspot.com/2025/10/using-async-functions-in-celery-with.html - """ - - @functools.wraps(async_func) - def sync_wrapper(*args: Any, **kwargs: Any) -> Any: - async def wrapped_with_context() -> Any: - try: - # Close stale connections BEFORE task execution - close_old_connections() - - async with ThreadSensitiveContext(): - return await async_func(*args, **kwargs) - finally: - # Close connections AFTER task execution - close_old_connections() - - # Check if we're already in an event loop (e.g., during tests with CELERY_TASK_ALWAYS_EAGER) - try: - asyncio.get_running_loop() - except RuntimeError: - # No event loop running, create a new one (normal Celery worker behavior) - return asyncio.run(wrapped_with_context()) - else: - # Already in an event loop (test scenario) - # Return the coroutine directly so it can be awaited by the async test - return wrapped_with_context() - - # Preserve function signature for inspection - sync_wrapper.__signature__ = inspect.signature(async_func) - sync_wrapper.__annotations__ = async_func.__annotations__ - return sync_wrapper - - -def async_task(**kwargs): - """Custom task decorator that supports async functions.""" - - def inner(func): - # Detect async functions and wrap them - if inspect.iscoroutinefunction(func): - func = _async_to_sync_wrapper(func) - - return app.task(**kwargs)(func) - - return inner - - -@signals.worker_process_init.connect -def init_worker_process(**kwargs: Any) -> None: - """Close all connections inherited from parent process during prefork.""" - # Close all database connections inherited from parent - for conn in connections.all(): - conn.close() - - # Also close cache connections - for cache in caches.all(): - if hasattr(cache, "close"): - cache.close() - - -@signals.worker_process_shutdown.connect -def shutdown_worker_process(**kwargs: Any) -> None: - """Close all connections when worker process shuts down.""" - for conn in connections.all(): - conn.close() - - 
-@signals.task_postrun.connect -def flush_after_tasks(**kwargs): - wait_for_all_tracers() diff --git a/daiv/daiv/settings/components/celery.py b/daiv/daiv/settings/components/celery.py deleted file mode 100644 index 5043a35c..00000000 --- a/daiv/daiv/settings/components/celery.py +++ /dev/null @@ -1,12 +0,0 @@ -from decouple import config -from get_docker_secret import get_docker_secret - -# CELERY - http://docs.celeryproject.org/en/latest/userguide/configuration.html - -CELERY_BROKER_URL = get_docker_secret("DJANGO_BROKER_URL", default="memory:///") -CELERY_BROKER_USE_SSL = config("DJANGO_BROKER_USE_SSL", default=False, cast=bool) -CELERY_TASK_COMPRESSION = "gzip" -CELERY_TASK_IGNORE_RESULT = True -CELERY_TASK_TIME_LIMIT = 30 * 60 # half hour -CELERY_WORKER_MAX_MEMORY_PER_CHILD = 200 * 1000 # 200Mb -CELERY_WORKER_HIJACK_ROOT_LOGGER = False diff --git a/daiv/daiv/settings/components/common.py b/daiv/daiv/settings/components/common.py index 2e5a8eb4..bf41aaae 100644 --- a/daiv/daiv/settings/components/common.py +++ b/daiv/daiv/settings/components/common.py @@ -11,7 +11,7 @@ LOCAL_APPS = ["accounts", "automation", "codebase", "core", "quick_actions"] -THIRD_PARTY_APPS = ["django_extensions"] +THIRD_PARTY_APPS = ["crontask", "django_extensions", "django_tasks", "django_tasks.backends.database"] DJANGO_APPS = ["django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions"] diff --git a/daiv/daiv/settings/components/i18n.py b/daiv/daiv/settings/components/i18n.py index 5a728c48..3725b413 100644 --- a/daiv/daiv/settings/components/i18n.py +++ b/daiv/daiv/settings/components/i18n.py @@ -11,8 +11,3 @@ USE_THOUSAND_SEPARATOR = True LOCALE_PATHS = (PROJECT_DIR / "accounts" / "locale", PROJECT_DIR / "codebase" / "locale") - -# CELERY - -CELERY_TIMEZONE = TIME_ZONE -CELERY_ENABLE_UTC = False diff --git a/daiv/daiv/settings/components/sentry.py b/daiv/daiv/settings/components/sentry.py index cd93193d..bf2aca4e 100644 --- 
a/daiv/daiv/settings/components/sentry.py +++ b/daiv/daiv/settings/components/sentry.py @@ -12,7 +12,6 @@ if SENTRY_DSN: import sentry_sdk - from sentry_sdk.integrations.celery import CeleryIntegration from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.redis import RedisIntegration @@ -26,7 +25,7 @@ enable_tracing=SENTRY_ENABLE_TRACING, profiles_sample_rate=1.0 if SENTRY_ENABLE_TRACING else 0.0, server_name=config("NODE_HOSTNAME", default=None), - integrations=[DjangoIntegration(), LoggingIntegration(), RedisIntegration(), CeleryIntegration()], + integrations=[DjangoIntegration(), LoggingIntegration(), RedisIntegration()], ) if SERVICE_NAME := config("SERVICE_NAME", default=None): diff --git a/daiv/daiv/settings/components/tasks.py b/daiv/daiv/settings/components/tasks.py new file mode 100644 index 00000000..e86e7b7f --- /dev/null +++ b/daiv/daiv/settings/components/tasks.py @@ -0,0 +1 @@ +TASKS = {"default": {"BACKEND": "django_tasks.backends.database.DatabaseBackend", "QUEUES": ["default"]}} diff --git a/daiv/daiv/settings/components/testing.py b/daiv/daiv/settings/components/testing.py index d609011e..8ea55d79 100644 --- a/daiv/daiv/settings/components/testing.py +++ b/daiv/daiv/settings/components/testing.py @@ -30,11 +30,9 @@ EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" -# CELERY +# TASKS -CELERY_TASK_ALWAYS_EAGER = True -CELERY_TASK_EAGER_PROPAGATES = True -CELERY_BROKER_URL = "memory:///" +TASKS = {"default": {"BACKEND": "django.tasks.backends.immediate.ImmediateBackend"}} # LOGGING diff --git a/daiv/daiv/settings/local.py b/daiv/daiv/settings/local.py index 1167ef52..b5445c40 100644 --- a/daiv/daiv/settings/local.py +++ b/daiv/daiv/settings/local.py @@ -13,5 +13,5 @@ "components/redis.py", "components/logs.py", "components/debug.py", - "components/celery.py", + "components/tasks.py", ) diff --git a/daiv/daiv/settings/production.py 
b/daiv/daiv/settings/production.py index 4c706f9e..bccb0c61 100644 --- a/daiv/daiv/settings/production.py +++ b/daiv/daiv/settings/production.py @@ -6,6 +6,6 @@ "components/database.py", "components/redis.py", "components/logs.py", - "components/celery.py", + "components/tasks.py", "components/sentry.py", ) diff --git a/daiv/daiv/settings/test.py b/daiv/daiv/settings/test.py index 627b7f58..1ae6b9df 100644 --- a/daiv/daiv/settings/test.py +++ b/daiv/daiv/settings/test.py @@ -1,3 +1,3 @@ from split_settings.tools import include -include("components/common.py", "components/i18n.py", "components/celery.py", "components/testing.py") +include("components/common.py", "components/i18n.py", "components/tasks.py", "components/testing.py") diff --git a/daiv/quick_actions/tasks.py b/daiv/quick_actions/tasks.py index 75520dcd..fb32cf32 100644 --- a/daiv/quick_actions/tasks.py +++ b/daiv/quick_actions/tasks.py @@ -1,9 +1,9 @@ import logging +from django.tasks import task from django.template.loader import render_to_string from codebase.clients import RepoClient -from daiv import async_task from .base import Scope from .registry import quick_action_registry @@ -11,7 +11,7 @@ logger = logging.getLogger("daiv.quick_actions") -@async_task(pydantic=True) +@task async def execute_issue_task(repo_id: str, action_command: str, action_args: str, comment_id: str, issue_id: int): """ Execute a quick action asynchronously. 
@@ -61,7 +61,7 @@ async def execute_issue_task(repo_id: str, action_command: str, action_args: str ) -@async_task(pydantic=True) +@task async def execute_merge_request_task( repo_id: str, action_command: str, action_args: str, comment_id: str, merge_request_id: int ) -> None: diff --git a/docker-compose.yml b/docker-compose.yml index 2a44b0c5..d0a5415d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -74,8 +74,12 @@ services: <<: *x_app_default container_name: daiv-worker command: sh /home/app/docker/start-worker - environment: - - C_FORCE_ROOT=true + ports: [] + + scheduler: + <<: *x_app_default + container_name: daiv-scheduler + command: sh /home/app/docker/start-crontask ports: [] gitlab: diff --git a/docker/local/app/config.env b/docker/local/app/config.env index e2e357f4..93ef9418 100644 --- a/docker/local/app/config.env +++ b/docker/local/app/config.env @@ -4,7 +4,6 @@ DJANGO_SECRET_KEY='v6_=1=)!_$aoeswo@hdo@5kov)v9844!1r&+w!!@@+_7&*!jb+' DJANGO_ALLOWED_HOSTS=* DJANGO_REDIS_URL=redis://redis:6379/0 DJANGO_DEBUG=True -DJANGO_BROKER_URL=redis://redis:6379/0 # Database secrets DB_HOST=db diff --git a/docker/local/app/start-crontask b/docker/local/app/start-crontask new file mode 100644 index 00000000..3d82a371 --- /dev/null +++ b/docker/local/app/start-crontask @@ -0,0 +1,5 @@ +#!/bin/sh + +set -eu + +exec django-admin crontask --skip-checks diff --git a/docker/local/app/start-worker b/docker/local/app/start-worker index 61499bc8..350d0c03 100644 --- a/docker/local/app/start-worker +++ b/docker/local/app/start-worker @@ -2,4 +2,4 @@ set -eu pipefail -exec watchmedo auto-restart --directory=./daiv --pattern=*.py --recursive --debounce-interval=5 -- celery -A daiv worker -l INFO -c 1 +exec django-admin db_worker --reload --skip-checks diff --git a/docker/production/app/Dockerfile b/docker/production/app/Dockerfile index bc2213df..8ee0a68d 100644 --- a/docker/production/app/Dockerfile +++ b/docker/production/app/Dockerfile @@ -69,11 +69,12 @@ 
COPY --chown=daiv:daiv ./daiv /home/daiv/app COPY --chown=daiv:daiv ./docker/production/app/entrypoint /home/daiv/entrypoint COPY --chown=daiv:daiv ./docker/production/app/start-app /home/daiv/start-app COPY --chown=daiv:daiv ./docker/production/app/start-worker /home/daiv/start-worker +COPY --chown=daiv:daiv ./docker/production/app/start-crontask /home/daiv/start-crontask USER daiv WORKDIR /home/daiv -RUN chmod +x entrypoint start-app start-worker \ +RUN chmod +x entrypoint start-app start-worker start-crontask \ && python -m compileall app \ && django-admin compilemessages --ignore=.venv/**/locale \ && mkdir -p data/tantivy_index data/media data/static data/mcp-proxy diff --git a/docker/production/app/start-crontask b/docker/production/app/start-crontask new file mode 100644 index 00000000..3d82a371 --- /dev/null +++ b/docker/production/app/start-crontask @@ -0,0 +1,5 @@ +#!/bin/sh + +set -eu + +exec django-admin crontask --skip-checks diff --git a/docker/production/app/start-worker b/docker/production/app/start-worker index cc4a9444..ccb5422c 100644 --- a/docker/production/app/start-worker +++ b/docker/production/app/start-worker @@ -2,6 +2,4 @@ set -eu pipefail -exec celery -A daiv worker \ - --loglevel=${CELERY_LOGLEVEL:-"INFO"} \ - --concurrency=${CELERY_CONCURRENCY:-2} +exec django-admin db_worker --skip-checks diff --git a/docs/configuration/env-config.md b/docs/configuration/env-config.md index b47f4174..7519fdc5 100644 --- a/docs/configuration/env-config.md +++ b/docs/configuration/env-config.md @@ -51,17 +51,6 @@ Variables marked with: |--------------------|----------------------------|:---------:|---------| | :material-asterisk: `DJANGO_REDIS_URL` :material-lock: | Redis connection URL | *(none)* | `redis://redis:6379/0` | -### Celery / Broker - -| Variable | Description | Default | Example | -|-------------------------|------------------------------------|:--------------:|-----------------| -| :material-asterisk: `DJANGO_BROKER_URL`
:material-lock: | Celery broker URL | `memory:///` | `redis://redis:6379/0` | -| `DJANGO_BROKER_USE_SSL` | Use SSL for broker connection | `False` | `True` | -| `CELERY_LOGLEVEL` | Celery log level | `INFO` | `DEBUG` | -| `CELERY_CONCURRENCY` | Number of Celery workers | `2` | `4` | - -!!! note - The `CELERY_CONCURRENCY` variable is used to specify the number of Celery workers to use. This is useful for scaling the application. The default value is `2` which is suitable for most use cases. ### Sentry diff --git a/docs/getting-started/up-and-running.md b/docs/getting-started/up-and-running.md index d24da481..133fef7f 100644 --- a/docs/getting-started/up-and-running.md +++ b/docs/getting-started/up-and-running.md @@ -9,14 +9,15 @@ This guide walks you through deploying DAIV using Docker Swarm or Docker Compose **Required Core Services:** * **[PostgreSQL](https://www.postgresql.org/)** - Stores application data; - * **[Redis](https://redis.io/)** - Handles caching and message queueing; +* **[Redis](https://redis.io/)** - Handles caching; * **[DAIV Application](https://github.com/srtab/daiv)** - Main API; - * **[DAIV Worker](https://docs.celeryq.dev/)** - Background task processor. +* **[DAIV Worker](https://docs.djangoproject.com/en/6.0/topics/tasks/)** - Background task processor. **Optional Service:** - * **[DAIV Sandbox](https://github.com/srtab/daiv-sandbox)** - Isolated environment for running arbitrary commands; - * **[MCP Proxy](https://github.com/TBXark/mcp-proxy/)** - Proxy MCP server to run other MCP servers inside a container. +* **[DAIV Scheduler](https://pypi.org/project/django-crontask/)** - Periodic task scheduler; +* **[DAIV Sandbox](https://github.com/srtab/daiv-sandbox)** - Isolated environment for running arbitrary commands; +* **[MCP Proxy](https://github.com/TBXark/mcp-proxy/)** - Proxy MCP server to run other MCP servers inside a container. 
--- @@ -68,7 +69,6 @@ x-app-environment-defaults: &app_environment_defaults DJANGO_SETTINGS_MODULE: daiv.settings.production DJANGO_ALLOWED_HOSTS: your-hostname.com,app,127.0.0.1 (1) DJANGO_REDIS_URL: redis://daiv_redis:6379/0 - DJANGO_BROKER_URL: redis://daiv_redis:6379/0 DAIV_EXTERNAL_URL: https://your-hostname.com (2) # DATABASE DB_NAME: daiv @@ -162,7 +162,6 @@ services: command: sh /home/daiv/start-worker environment: <<: *app_environment_defaults - CELERY_CONCURRENCY: 2 (6) secrets: - django_secret_key - db_password @@ -175,23 +174,40 @@ services: - internal volumes: - mcp-proxy-volume:/home/daiv/data/mcp-proxy - healthcheck: - test: celery -A daiv inspect ping - interval: 10s + deploy: + <<: *deploy_defaults + + scheduler: + image: ghcr.io/srtab/daiv:latest (5) + command: sh /home/daiv/start-crontask + environment: + <<: *app_environment_defaults + secrets: + - django_secret_key + - db_password + - codebase_gitlab_auth_token + - codebase_gitlab_webhook_secret + - daiv_sandbox_api_key + - openrouter_api_key + - mcp_proxy_auth_token + networks: + - internal + volumes: + - mcp-proxy-volume:/home/daiv/data/mcp-proxy deploy: <<: *deploy_defaults sandbox: image: ghcr.io/srtab/daiv-sandbox:latest (5) environment: - DAIV_SANDBOX_KEEP_TEMPLATE: true (7) + DAIV_SANDBOX_KEEP_TEMPLATE: true (6) networks: - internal secrets: - daiv_sandbox_api_key volumes: - - /var/run/docker.sock:/var/run/docker.sock (8) - - $HOME/.docker/config.json:/home/app/.docker/config.json (9) + - /var/run/docker.sock:/var/run/docker.sock (7) + - $HOME/.docker/config.json:/home/app/.docker/config.json (8) deploy: <<: *deploy_defaults @@ -243,10 +259,9 @@ secrets: 3. Set to your GitLab instance URL (e.g., `https://gitlab.com` for GitLab.com) 4. Points to the Sandbox service. Use `http://sandbox:8000` when deploying Sandbox in the same stack 5. **Recommended**: Replace `latest` with a specific version tag for production deployments -6. Number of parallel worker processes. 
Adjust based on your server resources and expected workload -7. See [DAIV Sandbox documentation](https://github.com/srtab/daiv-sandbox) for configuration details -8. **Required**: Sandbox needs Docker socket access to create isolated containers -9. **Optional**: Remove this volume if you don't need private registry access +6. See [DAIV Sandbox documentation](https://github.com/srtab/daiv-sandbox) for configuration details +7. **Required**: Sandbox needs Docker socket access to create isolated containers +8. **Optional**: Remove this volume if you don't need private registry access ### Step 3: Deploy the stack @@ -302,7 +317,6 @@ x-app-defaults: &x_app_default DJANGO_SECRET_KEY: secret-key (1) DJANGO_ALLOWED_HOSTS: your-hostname.com,app,127.0.0.1 (2) DJANGO_REDIS_URL: redis://redis:6379/0 - DJANGO_BROKER_URL: redis://redis:6379/0 DAIV_EXTERNAL_URL: https://your-hostname.com (12) # Database settings DB_HOST: db @@ -374,9 +388,16 @@ services: container_name: daiv-worker command: sh /home/daiv/start-worker ports: [] - healthcheck: - test: celery -A daiv inspect ping - interval: 10s + depends_on: + app: + condition: service_healthy + restart: true + + scheduler: + <<: *x_app_default + container_name: daiv-scheduler + command: sh /home/daiv/start-crontask + ports: [] depends_on: app: condition: service_healthy diff --git a/pyproject.toml b/pyproject.toml index ea8e7d50..71049d22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,13 +16,14 @@ classifiers = [ "Programming Language :: Python :: 3.14", ] dependencies = [ - "celery[redis]==5.6.2", "ddgs==9.10.0", "deepagents==0.3.6", "django==6.0.1", + "django-crontask==1.1.3", "django-extensions==4.1.0", "django-ninja==1.5.3", "django-split-settings==1.3.2", + "django-tasks==0.11.0", "get-docker-secret==2.0.0", "gitpython==3.1.46", "httpx==0.28.1", @@ -45,7 +46,7 @@ dependencies = [ "python-gitlab==7.1.0", "pyyaml==6.0.3", "redis==5.2.1", - "sentry-sdk==2.49.0", + "sentry-sdk==2.50.0", "unidiff==0.7.5", 
"uvicorn[standard]==0.40.0", ] diff --git a/tests/unit_tests/daiv/__init__.py b/tests/unit_tests/daiv/__init__.py new file mode 100644 index 00000000..60fc0ef5 --- /dev/null +++ b/tests/unit_tests/daiv/__init__.py @@ -0,0 +1 @@ +"""Unit tests for the daiv package.""" diff --git a/tests/unit_tests/quick_actions/test_tasks.py b/tests/unit_tests/quick_actions/test_tasks.py index dcffc6ee..224dfd05 100644 --- a/tests/unit_tests/quick_actions/test_tasks.py +++ b/tests/unit_tests/quick_actions/test_tasks.py @@ -42,7 +42,7 @@ async def test_execute_action_success_issue(self, mock_registry, mock_repo_clien mock_repo_client.get_issue.return_value = self.issue # Execute task with string action args - await execute_issue_task( + await execute_issue_task.aenqueue( repo_id="repo123", action_command="help", action_args="arg1 arg2", @@ -81,7 +81,7 @@ async def test_execute_action_success_merge_request(self, mock_registry, mock_re mock_repo_client.get_merge_request.return_value = self.merge_request # Execute task - await execute_merge_request_task( + await execute_merge_request_task.aenqueue( repo_id="repo123", action_command="help", action_args="", @@ -109,7 +109,7 @@ async def test_action_not_found(self, mock_registry): mock_registry.get_actions.return_value = [] # Execute task - await execute_issue_task( + await execute_issue_task.aenqueue( repo_id="repo123", action_command="nonexistent", action_args="", @@ -131,7 +131,7 @@ async def test_multiple_actions_found(self, mock_registry): mock_registry.get_actions.return_value = [mock_action_class1, mock_action_class2] # Execute task - await execute_issue_task( + await execute_issue_task.aenqueue( repo_id="repo123", action_command="duplicate", action_args="", @@ -159,7 +159,7 @@ async def test_action_execution_exception_issue(self, mock_registry, mock_repo_c mock_repo_client.get_issue.return_value = self.issue # Execute task - await execute_issue_task( + await execute_issue_task.aenqueue( repo_id="repo123", 
action_command="failing_action", action_args="", @@ -192,7 +192,7 @@ async def test_action_execution_exception_merge_request(self, mock_registry, moc mock_repo_client.get_merge_request.return_value = self.merge_request # Execute task - await execute_merge_request_task( + await execute_merge_request_task.aenqueue( repo_id="repo123", action_command="failing_action", action_args="", @@ -223,7 +223,7 @@ async def test_scope_conversion(self, mock_registry, mock_repo_client): mock_repo_client.get_merge_request.return_value = self.merge_request # Execute task with string scope - await execute_merge_request_task( + await execute_merge_request_task.aenqueue( repo_id="repo123", action_command="help", action_args="", @@ -251,7 +251,7 @@ async def test_execute_with_empty_action_args(self, mock_registry, mock_repo_cli mock_repo_client.get_issue.return_value = self.issue # Execute task with empty action args - await execute_issue_task( + await execute_issue_task.aenqueue( repo_id="repo123", action_command="help", action_args="", diff --git a/uv.lock b/uv.lock index 85686c7d..a9696f07 100644 --- a/uv.lock +++ b/uv.lock @@ -74,18 +74,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] -[[package]] -name = "amqp" -version = "5.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "vine" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", size = 129013, upload-time = "2024-11-12T19:55:44.051Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944, upload-time = "2024-11-12T19:55:41.782Z" }, -] - [[package]] name = "annotated-types" version = "0.7.0" @@ -126,6 +114,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, ] +[[package]] +name = "apscheduler" +version = "3.11.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzlocal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/07/12/3e4389e5920b4c1763390c6d371162f3784f86f85cd6d6c1bfe68eef14e2/apscheduler-3.11.2.tar.gz", hash = "sha256:2a9966b052ec805f020c8c4c3ae6e6a06e24b1bf19f2e11d91d8cca0473eef41", size = 108683, upload-time = "2025-12-22T00:39:34.884Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/64/2e54428beba8d9992aa478bb8f6de9e4ecaa5f8f513bcfd567ed7fb0262d/apscheduler-3.11.2-py3-none-any.whl", hash = "sha256:ce005177f741409db4e4dd40a7431b76feb856b9dd69d57e0da49d6715bfd26d", size = 64439, upload-time = "2025-12-22T00:39:33.303Z" }, +] + [[package]] name = "asgiref" version = "3.11.0" @@ -189,15 +189,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" }, ] -[[package]] -name = "billiard" -version = "4.2.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/58/23/b12ac0bcdfb7360d664f40a00b1bda139cbbbced012c34e375506dbd0143/billiard-4.2.4.tar.gz", hash = "sha256:55f542c371209e03cd5862299b74e52e4fbcba8250ba611ad94276b369b6a85f", size = 156537, upload-time = "2025-11-30T13:28:48.52Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/87/8bab77b323f16d67be364031220069f79159117dd5e43eeb4be2fef1ac9b/billiard-4.2.4-py3-none-any.whl", hash = "sha256:525b42bdec68d2b983347ac312f892db930858495db601b5836ac24e6477cde5", size = 87070, upload-time = "2025-11-30T13:28:47.016Z" }, -] - [[package]] name = "bracex" version = "2.6" @@ -241,31 +232,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/07/6b/6e92009df3b8b7272f85a0992b306b61c34b7ea1c4776643746e61c380ac/brotlicffi-1.2.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:f139a7cdfe4ae7859513067b736eb44d19fae1186f9e99370092f6915216451b", size = 378586, upload-time = "2025-11-21T18:17:50.531Z" }, ] -[[package]] -name = "celery" -version = "5.6.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "billiard" }, - { name = "click" }, - { name = "click-didyoumean" }, - { name = "click-plugins" }, - { name = "click-repl" }, - { name = "kombu" }, - { name = "python-dateutil" }, - { name = "tzlocal" }, - { name = "vine" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8f/9d/3d13596519cfa7207a6f9834f4b082554845eb3cd2684b5f8535d50c7c44/celery-5.6.2.tar.gz", hash = "sha256:4a8921c3fcf2ad76317d3b29020772103581ed2454c4c042cc55dcc43585009b", size = 1718802, upload-time = "2026-01-04T12:35:58.012Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/dd/bd/9ecd619e456ae4ba73b6583cc313f26152afae13e9a82ac4fe7f8856bfd1/celery-5.6.2-py3-none-any.whl", hash = "sha256:3ffafacbe056951b629c7abcf9064c4a2366de0bdfc9fdba421b97ebb68619a5", size = 445502, upload-time = "2026-01-04T12:35:55.894Z" }, -] - -[package.optional-dependencies] -redis = [ - { name = "kombu", extra = ["redis"] }, 
-] - [[package]] name = "certifi" version = "2026.1.4" @@ -345,43 +311,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, ] -[[package]] -name = "click-didyoumean" -version = "0.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089, upload-time = "2024-03-24T08:22:07.499Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631, upload-time = "2024-03-24T08:22:06.356Z" }, -] - -[[package]] -name = "click-plugins" -version = "1.1.1.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343, upload-time = "2025-06-25T00:47:37.555Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051, upload-time = "2025-06-25T00:47:36.731Z" }, -] - -[[package]] -name = "click-repl" -version = "0.3.0" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "prompt-toolkit" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449, upload-time = "2023-06-15T12:43:51.141Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289, upload-time = "2023-06-15T12:43:48.626Z" }, -] - [[package]] name = "colorama" version = "0.4.6" @@ -487,13 +416,14 @@ name = "daiv" version = "1.1.0" source = { virtual = "." } dependencies = [ - { name = "celery", extra = ["redis"] }, { name = "ddgs" }, { name = "deepagents" }, { name = "django" }, + { name = "django-crontask" }, { name = "django-extensions" }, { name = "django-ninja" }, { name = "django-split-settings" }, + { name = "django-tasks" }, { name = "get-docker-secret" }, { name = "gitpython" }, { name = "httpx" }, @@ -550,13 +480,14 @@ docs = [ [package.metadata] requires-dist = [ - { name = "celery", extras = ["redis"], specifier = "==5.6.2" }, { name = "ddgs", specifier = "==9.10.0" }, { name = "deepagents", specifier = "==0.3.6" }, { name = "django", specifier = "==6.0.1" }, + { name = "django-crontask", specifier = "==1.1.3" }, { name = "django-extensions", specifier = "==4.1.0" }, { name = "django-ninja", specifier = "==1.5.3" }, { name = "django-split-settings", specifier = "==1.3.2" }, + { name = "django-tasks", specifier = "==0.11.0" }, { name = "get-docker-secret", specifier = "==2.0.0" }, { name = "gitpython", specifier = "==3.1.46" }, { name = "httpx", specifier = "==0.28.1" }, @@ -579,7 +510,7 @@ requires-dist = [ { name = "python-gitlab", specifier = "==7.1.0" }, { name = "pyyaml", specifier = 
"==6.0.3" }, { name = "redis", specifier = "==5.2.1" }, - { name = "sentry-sdk", specifier = "==2.49.0" }, + { name = "sentry-sdk", specifier = "==2.50.0" }, { name = "unidiff", specifier = "==0.7.5" }, { name = "uvicorn", extras = ["standard"], specifier = "==0.40.0" }, ] @@ -722,6 +653,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/95/b5/814ed98bd21235c116fd3436a7ed44d47560329a6d694ec8aac2982dbb93/django-6.0.1-py3-none-any.whl", hash = "sha256:a92a4ff14f664a896f9849009cb8afaca7abe0d6fc53325f3d1895a15253433d", size = 8338791, upload-time = "2026-01-06T18:55:46.175Z" }, ] +[[package]] +name = "django-crontask" +version = "1.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "apscheduler" }, + { name = "django" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/0d/5cfa032e3fa03d4c17eb070735c26128866b522f869ed6e1fda880caff7e/django_crontask-1.1.3.tar.gz", hash = "sha256:0ac69ee9cf52d196112e54e92e19995ba2962c8f45a4c90b821b110facb4b043", size = 8583, upload-time = "2026-01-16T13:25:37.718Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/ea/e04f5959ad46c8fecac0012c23c90237d27d7406986872d0b9379259e0c9/django_crontask-1.1.3-py3-none-any.whl", hash = "sha256:60fc5eceaa159509372de7c82d07ae4540e1ff78ef4a482c6b87198760bfe468", size = 8948, upload-time = "2026-01-16T13:25:36.496Z" }, +] + [[package]] name = "django-extensions" version = "4.1" @@ -756,6 +700,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/63/69/d94db8dac55bcfb6b3243578a3096cfda6c42ea5da292c36919768152ec6/django_split_settings-1.3.2-py3-none-any.whl", hash = "sha256:72bd7dd9f12602585681074d1f859643fb4f6b196b584688fab86bdd73a57dff", size = 6435, upload-time = "2024-07-05T14:29:59.756Z" }, ] +[[package]] +name = "django-stubs-ext" +version = "5.2.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "django" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/14/a2/d67f4a5200ff7626b104eddceaf529761cba4ed318a73ffdb0677551be73/django_stubs_ext-5.2.8.tar.gz", hash = "sha256:b39938c46d7a547cd84e4a6378dbe51a3dd64d70300459087229e5fee27e5c6b", size = 6487, upload-time = "2025-12-01T08:12:37.486Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/2d/cb0151b780c3730cf0f2c0fcb1b065a5e88f877cf7a9217483c375353af1/django_stubs_ext-5.2.8-py3-none-any.whl", hash = "sha256:1dd5470c9675591362c78a157a3cf8aec45d0e7a7f0cf32f227a1363e54e0652", size = 9949, upload-time = "2025-12-01T08:12:36.397Z" }, +] + +[[package]] +name = "django-tasks" +version = "0.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "django" }, + { name = "django-stubs-ext" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/17/c7478fcae7c277a0f648f6e6334d318ad28f48372c3e0f84d1fdc79ec7f3/django_tasks-0.11.0.tar.gz", hash = "sha256:923bf4ac444daee5d879393daf09c7cdf4575c8b0e12726c9d9fceafdea5187f", size = 32971, upload-time = "2026-01-09T17:38:45.824Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/c4/1cb34c94078ee02fcf14eb9198d1816f3cd24fe0086d2ce8ecf8aab25628/django_tasks-0.11.0-py3-none-any.whl", hash = "sha256:28f00fcda4e2cc8fe09ca685fbe54d52602ab42077543f9164890781c7e58599", size = 45015, upload-time = "2026-01-09T17:38:44.704Z" }, +] + [[package]] name = "docstring-parser" version = "0.17.0" @@ -1293,26 +1264,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] -[[package]] -name = "kombu" -version = "5.6.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "amqp" }, - { name = "packaging" }, - { name = 
"tzdata" }, - { name = "vine" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b6/a5/607e533ed6c83ae1a696969b8e1c137dfebd5759a2e9682e26ff1b97740b/kombu-5.6.2.tar.gz", hash = "sha256:8060497058066c6f5aed7c26d7cd0d3b574990b09de842a8c5aaed0b92cc5a55", size = 472594, upload-time = "2025-12-29T20:30:07.779Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/0f/834427d8c03ff1d7e867d3db3d176470c64871753252b21b4f4897d1fa45/kombu-5.6.2-py3-none-any.whl", hash = "sha256:efcfc559da324d41d61ca311b0c64965ea35b4c55cc04ee36e55386145dace93", size = 214219, upload-time = "2025-12-29T20:30:05.74Z" }, -] - -[package.optional-dependencies] -redis = [ - { name = "redis" }, -] - [[package]] name = "langchain" version = "1.2.6" @@ -2959,15 +2910,15 @@ wheels = [ [[package]] name = "sentry-sdk" -version = "2.49.0" +version = "2.50.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/94/23ac26616a883f492428d9ee9ad6eee391612125326b784dbfc30e1e7bab/sentry_sdk-2.49.0.tar.gz", hash = "sha256:c1878599cde410d481c04ef50ee3aedd4f600e4d0d253f4763041e468b332c30", size = 387228, upload-time = "2026-01-08T09:56:25.642Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/8a/3c4f53d32c21012e9870913544e56bfa9e931aede080779a0f177513f534/sentry_sdk-2.50.0.tar.gz", hash = "sha256:873437a989ee1b8b25579847bae8384515bf18cfed231b06c591b735c1781fe3", size = 401233, upload-time = "2026-01-20T12:53:16.244Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/43/1c586f9f413765201234541857cb82fda076f4b0f7bad4a0ec248da39cf3/sentry_sdk-2.49.0-py2.py3-none-any.whl", hash = "sha256:6ea78499133874445a20fe9c826c9e960070abeb7ae0cdf930314ab16bb97aa0", size = 415693, upload-time = "2026-01-08T09:56:21.872Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/5b/cbc2bb9569f03c8e15d928357e7e6179e5cfab45544a3bbac8aec4caf9be/sentry_sdk-2.50.0-py2.py3-none-any.whl", hash = "sha256:0ef0ed7168657ceb5a0be081f4102d92042a125462d1d1a29277992e344e749e", size = 424961, upload-time = "2026-01-20T12:53:14.826Z" }, ] [[package]] @@ -3341,15 +3292,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/ce/3b6fee91c85626eaf769d617f1be9d2e15c1cca027bbdeb2e0d751469355/verspec-0.1.0-py3-none-any.whl", hash = "sha256:741877d5633cc9464c45a469ae2a31e801e6dbbaa85b9675d481cda100f11c31", size = 19640, upload-time = "2020-11-30T02:24:08.387Z" }, ] -[[package]] -name = "vine" -version = "5.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980, upload-time = "2023-11-05T08:46:53.857Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636, upload-time = "2023-11-05T08:46:51.205Z" }, -] - [[package]] name = "watchdog" version = "6.0.0" From 327c9ae465769f3cb2b73a7c2aa289c124582d1b Mon Sep 17 00:00:00 2001 From: Sandro Date: Tue, 20 Jan 2026 15:12:00 +0000 Subject: [PATCH 2/2] Updated version of mcp-proxy --- docker-compose.yml | 2 +- docs/getting-started/up-and-running.md | 4 ++-- pyproject.toml | 1 - 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index d0a5415d..1b62eded 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -130,7 +130,7 @@ services: - ./../daiv-sandbox/daiv_sandbox:/home/app/daiv_sandbox mcp-proxy: - image: ghcr.io/tbxark/mcp-proxy:v0.39.1 + image: ghcr.io/tbxark/mcp-proxy:v0.43.2 
restart: unless-stopped container_name: daiv-mcp-proxy env_file: diff --git a/docs/getting-started/up-and-running.md b/docs/getting-started/up-and-running.md index 133fef7f..c468fc88 100644 --- a/docs/getting-started/up-and-running.md +++ b/docs/getting-started/up-and-running.md @@ -212,7 +212,7 @@ services: <<: *deploy_defaults mcp-proxy: - image: ghcr.io/tbxark/mcp-proxy:v0.39.1 + image: ghcr.io/tbxark/mcp-proxy:v0.43.2 networks: - internal volumes: @@ -415,7 +415,7 @@ services: - /var/run/docker.sock:/var/run/docker.sock mcp-proxy: - image: ghcr.io/tbxark/mcp-proxy:v0.39.1 + image: ghcr.io/tbxark/mcp-proxy:v0.43.2 restart: unless-stopped container_name: daiv-mcp-proxy volumes: diff --git a/pyproject.toml b/pyproject.toml index 71049d22..741cd657 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -181,7 +181,6 @@ check_untyped_defs = true [[tool.mypy.overrides]] module = [ "appconf.*", - "celery.*", "chat.*", "decouple.*", "django.*",