diff --git a/src/code_indexer/server/app.py b/src/code_indexer/server/app.py
index 5f7734e3..068fe787 100644
--- a/src/code_indexer/server/app.py
+++ b/src/code_indexer/server/app.py
@@ -1931,18 +1931,7 @@ def create_app() -> FastAPI:
     Returns:
         Configured FastAPI app
     """
-    global \
-        jwt_manager, \
-        user_manager, \
-        refresh_token_manager, \
-        golden_repo_manager, \
-        background_job_manager, \
-        activated_repo_manager, \
-        repository_listing_manager, \
-        semantic_query_manager, \
-        _server_start_time, \
-        _server_hnsw_cache, \
-        _server_fts_cache
+    global jwt_manager, user_manager, refresh_token_manager, golden_repo_manager, background_job_manager, activated_repo_manager, repository_listing_manager, semantic_query_manager, _server_start_time, _server_hnsw_cache, _server_fts_cache

     # Story #526: Initialize server-side HNSW cache at bootstrap for 1800x performance
     # Import and initialize global cache instance
@@ -4233,9 +4222,12 @@ async def refresh_golden_repo(
         status_code=202,
     )
     async def add_golden_repo_index(
+        http_request: Request,
         alias: str,
         request: AddIndexRequest,
-        current_user: dependencies.User = Depends(dependencies.get_current_admin_user),
+        current_user: dependencies.User = Depends(
+            dependencies.get_current_admin_user_hybrid
+        ),
     ):
         """
         Add an index type to a golden repository (admin only) - async operation.
@@ -4338,8 +4330,9 @@ async def get_golden_repo_index_status(

     @app.get("/api/jobs/{job_id}", response_model=JobStatusResponse)
     async def get_job_status(
+        http_request: Request,
         job_id: str,
-        current_user: dependencies.User = Depends(dependencies.get_current_user),
+        current_user: dependencies.User = Depends(dependencies.get_current_user_hybrid),
    ):
        """
        Get status of a background job.
@@ -5935,30 +5928,28 @@ async def semantic_query(
         # Execute semantic search for hybrid or degraded mode
         if search_mode_actual in ["semantic", "hybrid"]:
             try:
-                semantic_results_raw = (
-                    semantic_query_manager.query_user_repositories(
-                        username=current_user.username,
-                        query_text=request.query_text,
-                        repository_alias=request.repository_alias,
-                        limit=request.limit,
-                        min_score=request.min_score,
-                        file_extensions=request.file_extensions,
-                        # Phase 1 parameters (Story #503)
-                        exclude_language=request.exclude_language,
-                        exclude_path=request.exclude_path,
-                        accuracy=request.accuracy,
-                        # Temporal parameters (Story #446)
-                        time_range=request.time_range,
-                        time_range_all=request.time_range_all,
-                        at_commit=request.at_commit,
-                        include_removed=request.include_removed,
-                        show_evolution=request.show_evolution,
-                        evolution_limit=request.evolution_limit,
-                        # Phase 3 temporal filtering parameters (Story #503)
-                        diff_type=request.diff_type,
-                        author=request.author,
-                        chunk_type=request.chunk_type,
-                    )
-                )
+                semantic_results_raw = semantic_query_manager.query_user_repositories(
+                    username=current_user.username,
+                    query_text=request.query_text,
+                    repository_alias=request.repository_alias,
+                    limit=request.limit,
+                    min_score=request.min_score,
+                    file_extensions=request.file_extensions,
+                    # Phase 1 parameters (Story #503)
+                    exclude_language=request.exclude_language,
+                    exclude_path=request.exclude_path,
+                    accuracy=request.accuracy,
+                    # Temporal parameters (Story #446)
+                    time_range=request.time_range,
+                    time_range_all=request.time_range_all,
+                    at_commit=request.at_commit,
+                    include_removed=request.include_removed,
+                    show_evolution=request.show_evolution,
+                    evolution_limit=request.evolution_limit,
+                    # Phase 3 temporal filtering parameters (Story #503)
+                    diff_type=request.diff_type,
+                    author=request.author,
+                    chunk_type=request.chunk_type,
+                )
                 semantic_results_list = [
                     QueryResultItem(**result)
diff --git a/src/code_indexer/server/auth/dependencies.py b/src/code_indexer/server/auth/dependencies.py
index aafb4040..a883761e 100644
--- a/src/code_indexer/server/auth/dependencies.py
+++ b/src/code_indexer/server/auth/dependencies.py
@@ -499,3 +499,152 @@ async def get_current_user_for_mcp(request: Request) -> User:
         detail="Authentication required",
         headers={"WWW-Authenticate": _build_www_authenticate_header()},
     )
+
+
+async def _hybrid_auth_impl(
+    request: Request,
+    credentials: Optional[HTTPAuthorizationCredentials],
+    require_admin: bool = False,
+) -> User:
+    """
+    Internal implementation for hybrid authentication.
+
+    Args:
+        request: FastAPI Request object
+        credentials: Optional bearer token credentials
+        require_admin: If True, require admin role
+
+    Returns:
+        Authenticated User object
+
+    Raises:
+        HTTPException: If authentication fails
+    """
+    from code_indexer.server.web.auth import get_session_manager, SESSION_COOKIE_NAME
+    import logging
+
+    logger = logging.getLogger(__name__)
+    auth_type = "admin" if require_admin else "user"
+
+    # Try session-based auth first (for web UI)
+    session_manager = get_session_manager()
+    session_cookie_value = request.cookies.get(SESSION_COOKIE_NAME)
+
+    logger.info(
+        f"Hybrid auth ({auth_type}): session_cookie={'present' if session_cookie_value else 'absent'}"
+    )
+
+    if session_cookie_value:
+        session = session_manager.get_session(request)
+        logger.info(
+            f"Hybrid auth ({auth_type}): session={'valid' if session else 'invalid'}, "
+            f"username={session.username if session else None}, "
+            f"role={session.role if session else None}"
+        )
+
+        # Check admin requirement for session auth
+        if session:
+            if require_admin and session.role != "admin":
+                logger.debug(f"Hybrid auth ({auth_type}): Session valid but not admin")
+            else:
+                # Create User object from session
+                if not user_manager:
+                    logger.error(
+                        f"Hybrid auth ({auth_type}): user_manager not initialized"
+                    )
+                    raise HTTPException(
+                        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                        detail="User manager not initialized",
+                    )
+                user = user_manager.get_user(session.username)
+                logger.debug(
+                    f"Hybrid auth ({auth_type}): user lookup for {session.username}: {user is not None}"
+                )
+                if user:
+                    logger.info(
+                        f"Hybrid auth ({auth_type}): Session auth SUCCESS for {session.username}"
+                    )
+                    return user
+                # Session is valid but user not found - this shouldn't happen
+                logger.error(
+                    f"Hybrid auth ({auth_type}): User {session.username} not found in database"
+                )
+                raise HTTPException(
+                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                    detail=f"User '{session.username}' not found in user database",
+                )
+        else:
+            logger.debug(f"Hybrid auth ({auth_type}): Session invalid")
+
+    # Fall back to token-based auth only if no session cookie exists
+    if not session_cookie_value and credentials:
+        try:
+            current_user = get_current_user(request, credentials)
+
+            # Check admin requirement for token auth
+            if require_admin and not current_user.has_permission("manage_users"):
+                raise HTTPException(
+                    status_code=status.HTTP_403_FORBIDDEN,
+                    detail="Admin access required",
+                )
+
+            logger.info(
+                f"Hybrid auth ({auth_type}): Token auth SUCCESS for {current_user.username}"
+            )
+            return current_user
+        except HTTPException:
+            raise
+
+    # No valid authentication found
+    logger.warning(f"Hybrid auth ({auth_type}): No valid authentication found")
+    raise HTTPException(
+        status_code=status.HTTP_401_UNAUTHORIZED,
+        detail="Authentication required",
+        headers={"WWW-Authenticate": _build_www_authenticate_header()},
+    )
+
+
+async def get_current_user_hybrid(
+    request: Request,
+    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
+) -> User:
+    """
+    Get current user supporting both session-based and token-based authentication.
+
+    This function tries session-based authentication first (for web UI),
+    then falls back to token-based authentication (for API clients).
+
+    Args:
+        request: FastAPI Request object
+        credentials: Optional bearer token credentials
+
+    Returns:
+        Authenticated User object
+
+    Raises:
+        HTTPException: If authentication fails
+    """
+    return await _hybrid_auth_impl(request, credentials, require_admin=False)
+
+
+async def get_current_admin_user_hybrid(
+    request: Request,
+    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
+) -> User:
+    """
+    Get current admin user supporting both session-based and token-based authentication.
+
+    This dependency tries session-based auth first (for web UI), then falls back to
+    token-based auth (for API clients).
+
+    Args:
+        request: FastAPI request object
+        credentials: Optional bearer token credentials
+
+    Returns:
+        User with admin role
+
+    Raises:
+        HTTPException: If not authenticated or not admin
+    """
+    return await _hybrid_auth_impl(request, credentials, require_admin=True)
diff --git a/src/code_indexer/server/jobs/manager.py b/src/code_indexer/server/jobs/manager.py
index 5b946a1c..f84252bf 100644
--- a/src/code_indexer/server/jobs/manager.py
+++ b/src/code_indexer/server/jobs/manager.py
@@ -110,6 +110,7 @@ def __init__(
             from code_indexer.server.storage.sqlite_backends import (
                 SyncJobsSqliteBackend,
             )
+
             self._sqlite_backend = SyncJobsSqliteBackend(db_path)

         self._jobs: Dict[str, SyncJob] = {}
@@ -446,8 +447,14 @@ def create_job(
                 job_id=job_id,
                 username=username,
                 user_alias=user_alias,
-                job_type=job_type.value if hasattr(job_type, "value") else str(job_type),
-                status=initial_status.value if hasattr(initial_status, "value") else str(initial_status),
+                job_type=(
+                    job_type.value if hasattr(job_type, "value") else str(job_type)
+                ),
+                status=(
+                    initial_status.value
+                    if hasattr(initial_status, "value")
+                    else str(initial_status)
+                ),
                 repository_url=repository_url,
             )

@@ -557,7 +564,11 @@ def mark_job_completed(
         if self._use_sqlite and self._sqlite_backend is not None:
             self._sqlite_backend.update_job(
                 job_id=job_id,
-                status=job.status.value if hasattr(job.status, "value") else str(job.status),
+                status=(
+                    job.status.value
+                    if hasattr(job.status, "value")
+                    else str(job.status)
+                ),
                 completed_at=completed_at.isoformat(),
                 progress=job.progress,
                 error_message=error_message,
@@ -619,7 +630,9 @@ def cancel_job(self, job_id: str) -> None:
             self._sqlite_backend.update_job(
                 job_id=job_id,
                 status=JobStatus.CANCELLED.value,
-                completed_at=job.completed_at.isoformat() if job.completed_at else None,
+                completed_at=(
+                    job.completed_at.isoformat() if job.completed_at else None
+                ),
             )

         # Persist changes (JSON file, no-op for SQLite)
diff --git a/src/code_indexer/server/models/auto_discovery.py b/src/code_indexer/server/models/auto_discovery.py
index 9adef1d4..1dbc0315 100644
--- a/src/code_indexer/server/models/auto_discovery.py
+++ b/src/code_indexer/server/models/auto_discovery.py
@@ -18,33 +18,19 @@ class DiscoveredRepository(BaseModel):
     platform: Literal["gitlab", "github"] = Field(
         ..., description="Platform source (gitlab or github)"
     )
-    name: str = Field(
-        ..., min_length=1, description="Full path (e.g., group/project)"
-    )
-    description: Optional[str] = Field(
-        None, description="Project description"
-    )
-    clone_url_https: str = Field(
-        ..., description="HTTPS clone URL"
-    )
-    clone_url_ssh: str = Field(
-        ..., description="SSH clone URL"
-    )
-    default_branch: str = Field(
-        ..., description="Default branch (main/master/etc)"
-    )
+    name: str = Field(..., min_length=1, description="Full path (e.g., group/project)")
+    description: Optional[str] = Field(None, description="Project description")
+    clone_url_https: str = Field(..., description="HTTPS clone URL")
+    clone_url_ssh: str = Field(..., description="SSH clone URL")
+    default_branch: str = Field(..., description="Default branch (main/master/etc)")
     last_commit_hash: Optional[str] = Field(
         None, description="Short hash of last commit"
     )
-    last_commit_author: Optional[str] = Field(
-        None, description="Author of last commit"
-    )
+    last_commit_author: Optional[str] = Field(None, description="Author of last commit")
     last_activity: Optional[datetime] = Field(
         None, description="Last activity timestamp"
     )
-    is_private: bool = Field(
-        ..., description="Whether repository is private"
-    )
+    is_private: bool = Field(..., description="Whether repository is private")

     @field_validator("clone_url_https")
     @classmethod
@@ -72,18 +58,10 @@ class RepositoryDiscoveryResult(BaseModel):
     total_count: int = Field(
         ..., ge=0, description="Total number of repositories available"
     )
-    page: int = Field(
-        ..., ge=1, description="Current page number (1-indexed)"
-    )
-    page_size: int = Field(
-        ..., ge=1, description="Number of items per page"
-    )
-    total_pages: int = Field(
-        ..., ge=0, description="Total number of pages"
-    )
-    platform: Literal["gitlab", "github"] = Field(
-        ..., description="Platform source"
-    )
+    page: int = Field(..., ge=1, description="Current page number (1-indexed)")
+    page_size: int = Field(..., ge=1, description="Number of items per page")
+    total_pages: int = Field(..., ge=0, description="Total number of pages")
+    platform: Literal["gitlab", "github"] = Field(..., description="Platform source")


 class DiscoveryProviderError(BaseModel):
@@ -95,9 +73,5 @@ class DiscoveryProviderError(BaseModel):
     error_type: Literal["not_configured", "api_error", "auth_error", "timeout"] = Field(
         ..., description="Type of error"
     )
-    message: str = Field(
-        ..., description="Human-readable error message"
-    )
-    details: Optional[str] = Field(
-        None, description="Additional error details"
-    )
+    message: str = Field(..., description="Human-readable error message")
+    details: Optional[str] = Field(None, description="Additional error details")
diff --git a/src/code_indexer/server/multi/__init__.py b/src/code_indexer/server/multi/__init__.py
index 60fb2e7e..0e508eec 100644
--- a/src/code_indexer/server/multi/__init__.py
+++ b/src/code_indexer/server/multi/__init__.py
@@ -13,7 +13,12 @@
 from .multi_result_aggregator import MultiResultAggregator
 from .multi_search_service import MultiSearchService
 from .models import MultiSearchRequest, MultiSearchResponse, MultiSearchMetadata
-from .scip_models import SCIPMultiRequest, SCIPMultiResponse, SCIPMultiMetadata, SCIPResult
+from .scip_models import (
+    SCIPMultiRequest,
+    SCIPMultiResponse,
+    SCIPMultiMetadata,
+    SCIPResult,
+)
 from .scip_multi_service import SCIPMultiService

 __all__ = [
diff --git a/src/code_indexer/server/multi/models.py b/src/code_indexer/server/multi/models.py
index d677c74f..d57931c4 100644
--- a/src/code_indexer/server/multi/models.py
+++ b/src/code_indexer/server/multi/models.py
@@ -58,7 +58,9 @@ class MultiSearchMetadata(BaseModel):
     """

     total_results: int = Field(..., description="Total number of results")
-    total_repos_searched: int = Field(..., description="Repositories successfully searched")
+    total_repos_searched: int = Field(
+        ..., description="Repositories successfully searched"
+    )
     execution_time_ms: int = Field(..., description="Execution time in milliseconds")

diff --git a/src/code_indexer/server/multi/multi_result_aggregator.py b/src/code_indexer/server/multi/multi_result_aggregator.py
index 5436c62f..e77f969a 100644
--- a/src/code_indexer/server/multi/multi_result_aggregator.py
+++ b/src/code_indexer/server/multi/multi_result_aggregator.py
@@ -31,7 +31,9 @@ def __init__(self, limit: int, min_score: Optional[float] = None):
         self.limit = limit
         self.min_score = min_score

-    def aggregate(self, repo_results: Dict[str, List[Dict[str, Any]]]) -> Dict[str, List[Dict[str, Any]]]:
+    def aggregate(
+        self, repo_results: Dict[str, List[Dict[str, Any]]]
+    ) -> Dict[str, List[Dict[str, Any]]]:
         """
         Aggregate results in per-repository mode with optional score filtering.
diff --git a/src/code_indexer/server/repositories/background_jobs.py b/src/code_indexer/server/repositories/background_jobs.py
index 758c4a49..98a9fb6b 100644
--- a/src/code_indexer/server/repositories/background_jobs.py
+++ b/src/code_indexer/server/repositories/background_jobs.py
@@ -718,8 +718,12 @@ def _persist_jobs_sqlite(self) -> None:
                     self._sqlite_backend.update_job(
                         job_id=job_id,
                         status=job.status.value,
-                        started_at=job.started_at.isoformat() if job.started_at else None,
-                        completed_at=job.completed_at.isoformat() if job.completed_at else None,
+                        started_at=(
+                            job.started_at.isoformat() if job.started_at else None
+                        ),
+                        completed_at=(
+                            job.completed_at.isoformat() if job.completed_at else None
+                        ),
                         result=job.result,
                         error=job.error,
                         progress=job.progress,
@@ -737,8 +741,12 @@ def _persist_jobs_sqlite(self) -> None:
                         operation_type=job.operation_type,
                         status=job.status.value,
                         created_at=job.created_at.isoformat(),
-                        started_at=job.started_at.isoformat() if job.started_at else None,
-                        completed_at=job.completed_at.isoformat() if job.completed_at else None,
+                        started_at=(
+                            job.started_at.isoformat() if job.started_at else None
+                        ),
+                        completed_at=(
+                            job.completed_at.isoformat() if job.completed_at else None
+                        ),
                         result=job.result,
                         error=job.error,
                         progress=job.progress,
diff --git a/src/code_indexer/server/repositories/golden_repo_manager.py b/src/code_indexer/server/repositories/golden_repo_manager.py
index 22a92db5..5a69d79e 100644
--- a/src/code_indexer/server/repositories/golden_repo_manager.py
+++ b/src/code_indexer/server/repositories/golden_repo_manager.py
@@ -1741,9 +1741,9 @@ async def get_golden_repo_branches(
             raise GoldenRepoError(f"Golden repository '{alias}' not found")

         branch_service = GoldenRepoBranchService(self)
-        branches: List[
-            "GoldenRepoBranchInfo"
-        ] = await branch_service.get_golden_repo_branches(alias)
+        branches: List["GoldenRepoBranchInfo"] = (
+            await branch_service.get_golden_repo_branches(alias)
+        )
         return branches

     def add_index_to_golden_repo(
diff --git a/src/code_indexer/server/services/claude_cli_manager.py b/src/code_indexer/server/services/claude_cli_manager.py
index ccfbdf15..ffe67532 100644
--- a/src/code_indexer/server/services/claude_cli_manager.py
+++ b/src/code_indexer/server/services/claude_cli_manager.py
@@ -52,7 +52,9 @@ def __init__(self, api_key: Optional[str] = None, max_workers: int = 4):
         """
         self._api_key = api_key
         self._max_workers = max_workers
-        self._work_queue: "queue.Queue[Optional[Tuple[Path, Callable[[bool, str], None]]]]" = queue.Queue()
+        self._work_queue: (
+            "queue.Queue[Optional[Tuple[Path, Callable[[bool, str], None]]]]"
+        ) = queue.Queue()
         self._worker_threads: List[threading.Thread] = []
         self._shutdown_event = threading.Event()
         self._cli_available: Optional[bool] = None
diff --git a/src/code_indexer/server/services/database_health_service.py b/src/code_indexer/server/services/database_health_service.py
index 35db6452..61140518 100644
--- a/src/code_indexer/server/services/database_health_service.py
+++ b/src/code_indexer/server/services/database_health_service.py
@@ -127,7 +127,9 @@ def get_all_database_health(self) -> List[DatabaseHealthResult]:
                 db_path = self.server_dir / "data" / file_name
             elif file_name == "payload_cache.db":
                 # Payload cache is in golden-repos cache directory
-                db_path = self.server_dir / "data" / "golden-repos" / ".cache" / file_name
+                db_path = (
+                    self.server_dir / "data" / "golden-repos" / ".cache" / file_name
+                )
             else:
                 # All other databases are in server root
                 db_path = self.server_dir / file_name
diff --git a/src/code_indexer/server/services/repository_providers/github_provider.py b/src/code_indexer/server/services/repository_providers/github_provider.py
index 91343dc1..da92a2d6 100644
--- a/src/code_indexer/server/services/repository_providers/github_provider.py
+++ b/src/code_indexer/server/services/repository_providers/github_provider.py
@@ -203,9 +203,7 @@ def _parse_repository(self, repo: dict) -> DiscoveredRepository:
         pushed_at = repo.get("pushed_at")
         if pushed_at:
             try:
-                last_activity = datetime.fromisoformat(
-                    pushed_at.replace("Z", "+00:00")
-                )
+                last_activity = datetime.fromisoformat(pushed_at.replace("Z", "+00:00"))
             except (ValueError, TypeError):
                 pass

@@ -246,13 +244,13 @@ def _check_rate_limit(self, response: httpx.Response) -> None:
             if reset_time:
                 try:
                     reset_dt = datetime.fromtimestamp(int(reset_time))
-                    reset_msg = f" Rate limit resets at {reset_dt.strftime('%H:%M:%S')}"
+                    reset_msg = (
+                        f" Rate limit resets at {reset_dt.strftime('%H:%M:%S')}"
+                    )
                 except (ValueError, TypeError):
                     pass

-            raise GitHubProviderError(
-                f"GitHub API rate limit exceeded.{reset_msg}"
-            )
+            raise GitHubProviderError(f"GitHub API rate limit exceeded.{reset_msg}")

     async def discover_repositories(
         self, page: int = 1, page_size: int = 50, search: Optional[str] = None
@@ -299,20 +297,16 @@ async def discover_repositories(
                 response.raise_for_status()

         except httpx.TimeoutException as e:
-            raise GitHubProviderError(
-                f"GitHub API request timed out: {e}"
-            ) from e
+            raise GitHubProviderError(f"GitHub API request timed out: {e}") from e
         except httpx.HTTPStatusError as e:
             # Check for rate limit in error response
-            if hasattr(e, 'response') and e.response is not None:
+            if hasattr(e, "response") and e.response is not None:
                 self._check_rate_limit(e.response)
             raise GitHubProviderError(
                 f"GitHub API error: {e.response.status_code if hasattr(e, 'response') and e.response else 'unknown'}"
             ) from e
         except httpx.RequestError as e:
-            raise GitHubProviderError(
-                f"GitHub API request failed: {e}"
-            ) from e
+            raise GitHubProviderError(f"GitHub API request failed: {e}") from e

         # Parse response
         repos = response.json()
@@ -342,11 +336,18 @@ async def discover_repositories(
         if search:
             search_lower = search.lower()
             repositories = [
-                repo for repo in repositories
+                repo
+                for repo in repositories
                 if search_lower in repo.name.lower()
                 or (repo.description and search_lower in repo.description.lower())
-                or (repo.last_commit_hash and search_lower in repo.last_commit_hash.lower())
-                or (repo.last_commit_author and search_lower in repo.last_commit_author.lower())
+                or (
+                    repo.last_commit_hash
+                    and search_lower in repo.last_commit_hash.lower()
+                )
+                or (
+                    repo.last_commit_author
+                    and search_lower in repo.last_commit_author.lower()
+                )
             ]

         return RepositoryDiscoveryResult(
diff --git a/src/code_indexer/server/services/repository_providers/gitlab_provider.py b/src/code_indexer/server/services/repository_providers/gitlab_provider.py
index e7e8ccf5..e7c701f7 100644
--- a/src/code_indexer/server/services/repository_providers/gitlab_provider.py
+++ b/src/code_indexer/server/services/repository_providers/gitlab_provider.py
@@ -236,17 +236,13 @@ async def discover_repositories(
             )
             response.raise_for_status()
         except httpx.TimeoutException as e:
-            raise GitLabProviderError(
-                f"GitLab API request timed out: {e}"
-            ) from e
+            raise GitLabProviderError(f"GitLab API request timed out: {e}") from e
         except httpx.HTTPStatusError as e:
             raise GitLabProviderError(
                 f"GitLab API error: {e.response.status_code}"
             ) from e
         except httpx.RequestError as e:
-            raise GitLabProviderError(
-                f"GitLab API request failed: {e}"
-            ) from e
+            raise GitLabProviderError(f"GitLab API request failed: {e}") from e

         # Parse response
         projects = response.json()
@@ -267,11 +263,18 @@ async def discover_repositories(
         if search:
             search_lower = search.lower()
             repositories = [
-                repo for repo in repositories
+                repo
+                for repo in repositories
                 if search_lower in repo.name.lower()
                 or (repo.description and search_lower in repo.description.lower())
-                or (repo.last_commit_hash and search_lower in repo.last_commit_hash.lower())
-                or (repo.last_commit_author and search_lower in repo.last_commit_author.lower())
+                or (
+                    repo.last_commit_hash
+                    and search_lower in repo.last_commit_hash.lower()
+                )
+                or (
+                    repo.last_commit_author
+                    and search_lower in repo.last_commit_author.lower()
+                )
             ]

         return RepositoryDiscoveryResult(
diff --git a/src/code_indexer/server/storage/sqlite_backends.py b/src/code_indexer/server/storage/sqlite_backends.py
index e5c76354..68c599c3 100644
--- a/src/code_indexer/server/storage/sqlite_backends.py
+++ b/src/code_indexer/server/storage/sqlite_backends.py
@@ -1354,9 +1354,11 @@ def operation(conn):
                     json.dumps(claude_actions) if claude_actions else None,
                     failure_reason,
                     json.dumps(extended_error) if extended_error else None,
-                    json.dumps(language_resolution_status)
-                    if language_resolution_status
-                    else None,
+                    (
+                        json.dumps(language_resolution_status)
+                        if language_resolution_status
+                        else None
+                    ),
                 ),
             )
             return None
diff --git a/src/code_indexer/server/web/routes.py b/src/code_indexer/server/web/routes.py
index 3b4e89c0..5e65de56 100644
--- a/src/code_indexer/server/web/routes.py
+++ b/src/code_indexer/server/web/routes.py
@@ -2530,7 +2530,9 @@ def _get_all_jobs(
     ]

     # Sort by started_at (most recently started first), fall back to created_at
-    all_jobs.sort(key=lambda x: x.get("started_at") or x.get("created_at") or "", reverse=True)
+    all_jobs.sort(
+        key=lambda x: x.get("started_at") or x.get("created_at") or "", reverse=True
+    )

     # Pagination
     total_count = len(all_jobs)
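Note: the dependencies.py additions and the app.py endpoint changes above work as a pair. A route that declares Depends(dependencies.get_current_user_hybrid) (or the admin variant) accepts either a web-UI session cookie or an API bearer token, with the cookie tried first. A minimal sketch of the wiring, assuming an existing FastAPI app object; the route path and handler name are hypothetical, while the dependency names come from this diff:

    from fastapi import Depends, Request

    from code_indexer.server.auth import dependencies

    @app.get("/api/example")  # hypothetical route for illustration
    async def example_endpoint(
        http_request: Request,
        current_user: dependencies.User = Depends(dependencies.get_current_user_hybrid),
    ):
        # _hybrid_auth_impl tries the session cookie first and consults the
        # bearer token only when no session cookie is present at all.
        return {"username": current_user.username}

The golden_repo_indexes.js change just below is the client-side half of the same fix: credentials: 'same-origin' makes explicit that fetch() sends the session_id cookie, so the hybrid dependency can see the session.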
diff --git a/src/code_indexer/server/web/static/js/golden_repo_indexes.js b/src/code_indexer/server/web/static/js/golden_repo_indexes.js
index 2e63ba93..ceb60de4 100644
--- a/src/code_indexer/server/web/static/js/golden_repo_indexes.js
+++ b/src/code_indexer/server/web/static/js/golden_repo_indexes.js
@@ -78,6 +78,7 @@ async function submitAddIndex(alias) {
                 'Content-Type': 'application/json',
                 'X-CSRF-Token': csrfToken
             },
+            credentials: 'same-origin', // Include cookies (session_id) in request
             body: JSON.stringify({ index_type: indexType })
         });
diff --git a/tests/unit/server/auth/test_user_manager_oidc.py b/tests/unit/server/auth/test_user_manager_oidc.py
index aa6e615f..35da98cb 100644
--- a/tests/unit/server/auth/test_user_manager_oidc.py
+++ b/tests/unit/server/auth/test_user_manager_oidc.py
@@ -136,7 +136,9 @@ def sqlite_db_path(self, tmp_path: Path) -> str:
         schema.initialize_database()
         return str(db_path)

-    def test_get_user_by_email_sqlite_mode_returns_user(self, sqlite_db_path: str) -> None:
+    def test_get_user_by_email_sqlite_mode_returns_user(
+        self, sqlite_db_path: str
+    ) -> None:
         """
         Given a UserManager in SQLite mode with a user that has an email
         When get_user_by_email() is called
@@ -154,7 +156,9 @@ def test_get_user_by_email_sqlite_mode_returns_user(self, sqlite_db_path: str) -
         assert user is not None
         assert user.username == "sqliteuser"

-    def test_get_user_by_email_sqlite_mode_case_insensitive(self, sqlite_db_path: str) -> None:
+    def test_get_user_by_email_sqlite_mode_case_insensitive(
+        self, sqlite_db_path: str
+    ) -> None:
         """
         Given a UserManager in SQLite mode with a user
         When get_user_by_email() is called with different case
@@ -172,7 +176,9 @@ def test_get_user_by_email_sqlite_mode_case_insensitive(self, sqlite_db_path: st
         assert user is not None
         assert user.username == "caseuser"

-    def test_get_user_by_email_sqlite_mode_returns_none_when_not_found(self, sqlite_db_path: str) -> None:
+    def test_get_user_by_email_sqlite_mode_returns_none_when_not_found(
+        self, sqlite_db_path: str
+    ) -> None:
         """
         Given a UserManager in SQLite mode without a matching user
         When get_user_by_email() is called
@@ -189,7 +195,9 @@ def test_get_user_by_email_sqlite_mode_returns_none_when_not_found(self, sqlite_

         assert user is None

-    def test_set_oidc_identity_sqlite_mode_stores_identity(self, sqlite_db_path: str) -> None:
+    def test_set_oidc_identity_sqlite_mode_stores_identity(
+        self, sqlite_db_path: str
+    ) -> None:
         """
         Given a UserManager in SQLite mode with an existing user
         When set_oidc_identity() is called
@@ -217,7 +225,9 @@ def test_set_oidc_identity_sqlite_mode_stores_identity(self, sqlite_db_path: str
         assert user_data["oidc_identity"] is not None
         assert user_data["oidc_identity"]["subject"] == "sqlite-oidc-123"

-    def test_set_oidc_identity_sqlite_mode_returns_false_for_nonexistent(self, sqlite_db_path: str) -> None:
+    def test_set_oidc_identity_sqlite_mode_returns_false_for_nonexistent(
+        self, sqlite_db_path: str
+    ) -> None:
         """
         Given a UserManager in SQLite mode without the specified user
         When set_oidc_identity() is called
@@ -230,7 +240,9 @@ def test_set_oidc_identity_sqlite_mode_returns_false_for_nonexistent(self, sqlit

         assert result is False

-    def test_set_oidc_identity_sqlite_mode_overwrites_existing(self, sqlite_db_path: str) -> None:
+    def test_set_oidc_identity_sqlite_mode_overwrites_existing(
+        self, sqlite_db_path: str
+    ) -> None:
         """
         Given a user with existing OIDC identity in SQLite mode
         When set_oidc_identity() is called with new identity
@@ -312,7 +324,9 @@ def test_delete_mcp_credential_sqlite_mode(self, sqlite_db_path: str) -> None:
         assert len(creds) == 1
         assert creds[0]["credential_id"] == "cred-2"

-    def test_update_mcp_credential_last_used_sqlite_mode(self, sqlite_db_path: str) -> None:
+    def test_update_mcp_credential_last_used_sqlite_mode(
+        self, sqlite_db_path: str
+    ) -> None:
         """
         Given a UserManager in SQLite mode with a user with MCP credentials
         When update_mcp_credential_last_used() is called
@@ -389,10 +403,13 @@ def test_remove_oidc_identity_sqlite_mode(self, sqlite_db_path: str) -> None:

         # Create user and set OIDC identity
         manager.create_user("oidcremoveuser", "SecurePass123!@#", UserRole.NORMAL_USER)
-        manager.set_oidc_identity("oidcremoveuser", {
-            "subject": "oidc-123",
-            "email": "oidc@example.com",
-        })
+        manager.set_oidc_identity(
+            "oidcremoveuser",
+            {
+                "subject": "oidc-123",
+                "email": "oidc@example.com",
+            },
+        )

         # Verify identity exists
         user_data = manager._sqlite_backend.get_user("oidcremoveuser")
@@ -438,7 +455,9 @@ def test_create_oidc_user_sqlite_mode(self, sqlite_db_path: str) -> None:
         assert user_data["email"] == "jit@example.com"
         assert user_data["oidc_identity"]["subject"] == "oidc-jit-123"

-    def test_get_mcp_credentials_with_secrets_sqlite_mode(self, sqlite_db_path: str) -> None:
+    def test_get_mcp_credentials_with_secrets_sqlite_mode(
+        self, sqlite_db_path: str
+    ) -> None:
         """
         Given a UserManager in SQLite mode with a user with MCP credentials
         When get_mcp_credentials_with_secrets() is called
@@ -464,4 +483,6 @@ def test_get_mcp_credentials_with_secrets_sqlite_mode(self, sqlite_db_path: str)
         assert len(creds) == 1
         assert creds[0]["credential_id"] == "secret-cred"
         assert creds[0]["client_id"] == "mcp_secret123"
-        assert creds[0]["client_secret_hash"] == "argon2$hash$goes$here"  # Hash should be included
+        assert (
+            creds[0]["client_secret_hash"] == "argon2$hash$goes$here"
+        )  # Hash should be included
diff --git a/tests/unit/server/cache/test_payload_cache_config.py b/tests/unit/server/cache/test_payload_cache_config.py
index 21fbfaab..a4f6ec6a 100644
--- a/tests/unit/server/cache/test_payload_cache_config.py
+++ b/tests/unit/server/cache/test_payload_cache_config.py
@@ -7,7 +7,6 @@
 """

 import os
-import pytest
 from unittest.mock import patch


diff --git a/tests/unit/server/cache/test_payload_cache_from_server_config.py b/tests/unit/server/cache/test_payload_cache_from_server_config.py
index ece2575c..cf1c57ff 100644
--- a/tests/unit/server/cache/test_payload_cache_from_server_config.py
+++ b/tests/unit/server/cache/test_payload_cache_from_server_config.py
@@ -7,7 +7,6 @@
 """

 import os
-import pytest
 from unittest.mock import patch

 from code_indexer.server.cache.payload_cache import PayloadCacheConfig
diff --git a/tests/unit/server/cache/test_payload_cache_truncation.py b/tests/unit/server/cache/test_payload_cache_truncation.py
index 01017c49..0523582a 100644
--- a/tests/unit/server/cache/test_payload_cache_truncation.py
+++ b/tests/unit/server/cache/test_payload_cache_truncation.py
@@ -88,4 +88,4 @@ async def test_truncate_result_cached_handle_is_retrievable(self, cache):
         handle = result["cache_handle"]

         retrieved = await cache.retrieve(handle, page=0)
-        assert retrieved.content == large_content[:cache.config.max_fetch_size_chars]
+        assert retrieved.content == large_content[: cache.config.max_fetch_size_chars]
diff --git a/tests/unit/server/handlers/test_fts_truncation_independent.py b/tests/unit/server/handlers/test_fts_truncation_independent.py
index 639178b0..840f8da3 100644
--- a/tests/unit/server/handlers/test_fts_truncation_independent.py
+++ b/tests/unit/server/handlers/test_fts_truncation_independent.py
@@ -90,10 +90,14 @@ async def test_independent_cache_retrieval(self, cache):
         match_text_retrieved = await cache.retrieve(match_text_handle, page=0)

         # Verify correct content was cached
-        assert snippet_content in snippet_retrieved.content or \
-            snippet_retrieved.content in snippet_content
-        assert match_text_content in match_text_retrieved.content or \
-            match_text_retrieved.content in match_text_content
+        assert (
+            snippet_content in snippet_retrieved.content
+            or snippet_retrieved.content in snippet_content
+        )
+        assert (
+            match_text_content in match_text_retrieved.content
+            or match_text_retrieved.content in match_text_content
+        )

     @pytest.mark.asyncio
     async def test_only_snippet_large(self, cache):
diff --git a/tests/unit/server/handlers/test_hybrid_truncation_caching.py b/tests/unit/server/handlers/test_hybrid_truncation_caching.py
index 77b3ee4d..42f23ed9 100644
--- a/tests/unit/server/handlers/test_hybrid_truncation_caching.py
+++ b/tests/unit/server/handlers/test_hybrid_truncation_caching.py
@@ -152,7 +152,6 @@ async def test_content_handle_pagination(self, cache):
     async def test_snippet_handle_pagination(self, cache):
         """Test that FTS snippet handle supports pagination."""
         from code_indexer.server.mcp.handlers import (
-            _apply_payload_truncation,
             _apply_fts_payload_truncation,
         )

@@ -186,7 +185,6 @@ async def test_snippet_handle_pagination(self, cache):
     async def test_match_text_handle_pagination(self, cache):
         """Test that FTS match_text handle supports pagination."""
         from code_indexer.server.mcp.handlers import (
-            _apply_payload_truncation,
             _apply_fts_payload_truncation,
         )
diff --git a/tests/unit/server/handlers/test_hybrid_truncation_handler.py b/tests/unit/server/handlers/test_hybrid_truncation_handler.py
index 9c55f98c..17825afd 100644
--- a/tests/unit/server/handlers/test_hybrid_truncation_handler.py
+++ b/tests/unit/server/handlers/test_hybrid_truncation_handler.py
@@ -8,7 +8,7 @@
 """

 import pytest
-from unittest.mock import patch, AsyncMock
+from unittest.mock import patch


 class TestHybridModeHandlerTruncationLogic:
diff --git a/tests/unit/server/handlers/test_multi_repo_truncation.py b/tests/unit/server/handlers/test_multi_repo_truncation.py
index c93df12d..ec4ce77f 100644
--- a/tests/unit/server/handlers/test_multi_repo_truncation.py
+++ b/tests/unit/server/handlers/test_multi_repo_truncation.py
@@ -153,7 +153,9 @@ async def test_multiple_repos_get_independent_handles(self, cache_100_chars):
             app_module.app.state.payload_cache = original

     @pytest.mark.asyncio
-    async def test_same_repo_multiple_results_get_independent_handles(self, cache_100_chars):
+    async def test_same_repo_multiple_results_get_independent_handles(
+        self, cache_100_chars
+    ):
         """Multiple results from SAME repo get independent cache handles."""
         from code_indexer.server.mcp.handlers import _apply_payload_truncation
         from code_indexer.server import app as app_module
@@ -207,7 +209,9 @@ class TestRepositoryAttributionPreservation:
     """AC6: Repository Attribution Preservation tests."""

     @pytest.mark.asyncio
-    async def test_all_metadata_fields_preserved_after_truncation(self, cache_100_chars):
+    async def test_all_metadata_fields_preserved_after_truncation(
+        self, cache_100_chars
+    ):
         """All metadata fields preserved after truncation (only content fields modified)."""
         from code_indexer.server.mcp.handlers import _apply_payload_truncation
         from code_indexer.server import app as app_module
@@ -343,7 +347,9 @@ class TestMixedResultsTruncation:
     """Tests for mixed truncated and non-truncated results from multiple repos."""

     @pytest.mark.asyncio
-    async def test_mixed_large_and_small_content_from_multiple_repos(self, cache_100_chars):
+    async def test_mixed_large_and_small_content_from_multiple_repos(
+        self, cache_100_chars
+    ):
         """Mix of large and small content from multiple repos handled correctly."""
         from code_indexer.server.mcp.handlers import _apply_payload_truncation
         from code_indexer.server import app as app_module
diff --git a/tests/unit/server/handlers/test_temporal_truncation_edge_cases.py b/tests/unit/server/handlers/test_temporal_truncation_edge_cases.py
index 42326dfc..3faff6d5 100644
--- a/tests/unit/server/handlers/test_temporal_truncation_edge_cases.py
+++ b/tests/unit/server/handlers/test_temporal_truncation_edge_cases.py
@@ -157,7 +157,7 @@ async def test_unicode_content_truncated_correctly(self, cache):
         from code_indexer.server.mcp.handlers import _apply_temporal_payload_truncation

         # Unicode content - emojis are 1+ chars but multiple bytes
-        unicode_content = "\U0001F600" * (PREVIEW_SIZE + 1000)
+        unicode_content = "\U0001f600" * (PREVIEW_SIZE + 1000)

         results = [{"content": unicode_content, "temporal_context": {}}]

@@ -172,7 +172,7 @@ async def test_unicode_content_truncated_correctly(self, cache):

         # Should truncate at char boundary, not byte boundary
         assert len(result["content_preview"]) == PREVIEW_SIZE
-        assert result["content_preview"] == "\U0001F600" * PREVIEW_SIZE
+        assert result["content_preview"] == "\U0001f600" * PREVIEW_SIZE
         assert result["content_total_size"] == PREVIEW_SIZE + 1000

     @pytest.mark.asyncio
diff --git a/tests/unit/server/handlers/test_temporal_truncation_evolution.py b/tests/unit/server/handlers/test_temporal_truncation_evolution.py
index df85791c..5b3468cb 100644
--- a/tests/unit/server/handlers/test_temporal_truncation_evolution.py
+++ b/tests/unit/server/handlers/test_temporal_truncation_evolution.py
@@ -325,9 +325,7 @@ async def test_empty_evolution_array_handled(self, cache):
         """Test that empty evolution array is handled correctly."""
         from code_indexer.server.mcp.handlers import _apply_temporal_payload_truncation

-        results = [
-            {"content": "main content", "temporal_context": {"evolution": []}}
-        ]
+        results = [{"content": "main content", "temporal_context": {"evolution": []}}]

         with patch(
             "code_indexer.server.mcp.handlers.app_module.app.state"
diff --git a/tests/unit/server/jobs/test_job_queue_verification.py b/tests/unit/server/jobs/test_job_queue_verification.py
index 92d4b66e..62061fc5 100644
--- a/tests/unit/server/jobs/test_job_queue_verification.py
+++ b/tests/unit/server/jobs/test_job_queue_verification.py
@@ -36,12 +36,10 @@ def test_respects_max_total_concurrent_jobs(self, job_manager):
             job_ids.append(job_id)

         running_jobs = [
-            jid for jid in job_ids
-            if job_manager.get_job(jid)["status"] == "running"
+            jid for jid in job_ids if job_manager.get_job(jid)["status"] == "running"
         ]
         queued_jobs = [
-            jid for jid in job_ids
-            if job_manager.get_job(jid)["status"] == "queued"
+            jid for jid in job_ids if job_manager.get_job(jid)["status"] == "queued"
         ]

         assert len(running_jobs) == 2
@@ -60,12 +58,10 @@ def test_respects_max_per_user_concurrent_jobs(self, job_manager):
             job_ids.append(job_id)

         running_jobs = [
-            jid for jid in job_ids
-            if job_manager.get_job(jid)["status"] == "running"
+            jid for jid in job_ids if job_manager.get_job(jid)["status"] == "running"
         ]
         queued_jobs = [
-            jid for jid in job_ids
-            if job_manager.get_job(jid)["status"] == "queued"
+            jid for jid in job_ids if job_manager.get_job(jid)["status"] == "queued"
         ]

         assert len(running_jobs) == 1
@@ -222,7 +218,7 @@ def test_estimated_wait_time(self, tmp_path):
         )

         # Create 3 jobs - 1 runs, 2 queued
-        job1 = manager.create_job(
+        manager.create_job(
             username="user1",
             user_alias="User 1",
             job_type=JobType.REPOSITORY_SYNC,
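Note: the _get_all_jobs sort reformatted in web/routes.py earlier, and the test_job_datetime_sorting.py changes near the end of this diff, both rest on the same property: ISO-8601 timestamp strings with a uniform layout and UTC offset compare lexicographically in chronological order, while `or ""` pushes jobs with neither timestamp to the end of a descending sort. A standalone illustration with invented values:

    jobs = [
        {"started_at": "2025-12-09T18:42:39.792746+00:00", "created_at": None},
        {"started_at": None, "created_at": "2025-12-09T17:00:00+00:00"},
        {"started_at": "2025-12-10T08:00:00+00:00", "created_at": None},
    ]
    # Same key as _get_all_jobs: prefer started_at, fall back to created_at.
    jobs.sort(key=lambda x: x.get("started_at") or x.get("created_at") or "", reverse=True)
    assert [j["started_at"] or j["created_at"] for j in jobs] == [
        "2025-12-10T08:00:00+00:00",
        "2025-12-09T18:42:39.792746+00:00",
        "2025-12-09T17:00:00+00:00",
    ]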
diff --git a/tests/unit/server/mcp/test_handlers.py b/tests/unit/server/mcp/test_handlers.py
index 002bc24b..9bf7b377 100644
--- a/tests/unit/server/mcp/test_handlers.py
+++ b/tests/unit/server/mcp/test_handlers.py
@@ -86,8 +86,8 @@ def test_all_22_handlers_registered(self):
         ]

         # Verify we have a reasonable number of handlers (at least the core set)
-        assert (
-            len(HANDLER_REGISTRY) >= len(core_handlers)
+        assert len(HANDLER_REGISTRY) >= len(
+            core_handlers
         ), f"Expected at least {len(core_handlers)} handlers, found {len(HANDLER_REGISTRY)}"

         # Verify all core handlers are registered
diff --git a/tests/unit/server/mcp/test_mcp_get_cached_content.py b/tests/unit/server/mcp/test_mcp_get_cached_content.py
index 4e566bb0..b8f9d431 100644
--- a/tests/unit/server/mcp/test_mcp_get_cached_content.py
+++ b/tests/unit/server/mcp/test_mcp_get_cached_content.py
@@ -153,9 +153,7 @@ async def test_handler_defaults_page_to_zero(self, mock_user):
             mock_state.payload_cache = mock_cache

             # Call without page parameter
-            result = await handle_get_cached_content(
-                {"handle": "test-handle"}, mock_user
-            )
+            await handle_get_cached_content({"handle": "test-handle"}, mock_user)

             # Verify retrieve was called with page=0
             mock_cache.retrieve.assert_called_once_with("test-handle", page=0)
@@ -180,4 +178,7 @@ async def test_handler_returns_error_when_cache_unavailable(self, mock_user):
         data = json.loads(result["content"][0]["text"])
         assert data["success"] is False
         assert "error" in data
-        assert "unavailable" in data["error"].lower() or "not available" in data["error"].lower()
+        assert (
+            "unavailable" in data["error"].lower()
+            or "not available" in data["error"].lower()
+        )
diff --git a/tests/unit/server/mcp/test_scip_truncation.py b/tests/unit/server/mcp/test_scip_truncation.py
index 1bb98516..d2785c8b 100644
--- a/tests/unit/server/mcp/test_scip_truncation.py
+++ b/tests/unit/server/mcp/test_scip_truncation.py
@@ -260,7 +260,9 @@ async def test_cache_unavailable_returns_unchanged(self):
         assert "context_has_more" not in result[0]

     @pytest.mark.asyncio
-    async def test_cache_error_returns_unchanged_with_metadata(self, mock_payload_cache):
+    async def test_cache_error_returns_unchanged_with_metadata(
+        self, mock_payload_cache
+    ):
         """Test that cache errors leave context unchanged but add metadata."""
         from code_indexer.server.mcp.handlers import _apply_scip_payload_truncation

@@ -313,7 +315,9 @@ class TestScipDefinitionPayloadTruncation:
     """Tests for SCIP definition handler with payload truncation."""

     @pytest.mark.asyncio
-    async def test_scip_definition_applies_truncation(self, mock_user, mock_payload_cache):
+    async def test_scip_definition_applies_truncation(
+        self, mock_user, mock_payload_cache
+    ):
         """Test that scip_definition applies payload truncation to results."""
         from code_indexer.server.mcp.handlers import scip_definition

@@ -336,12 +340,17 @@ async def test_scip_definition_applies_truncation(self, mock_user, mock_payload_
         # Set up store() mock for truncation (AsyncMock imported at line 12)
         mock_payload_cache.store = AsyncMock(return_value="uuid-def-123")

-        with patch(
-            "code_indexer.server.mcp.handlers.app_module.app.state"
-        ) as mock_state, patch(
-            "code_indexer.server.mcp.handlers._find_scip_files"
-        ) as mock_find_files, patch(
-            "code_indexer.scip.query.primitives.SCIPQueryEngine", return_value=mock_engine
+        with (
+            patch(
+                "code_indexer.server.mcp.handlers.app_module.app.state"
+            ) as mock_state,
+            patch(
+                "code_indexer.server.mcp.handlers._find_scip_files"
+            ) as mock_find_files,
+            patch(
+                "code_indexer.scip.query.primitives.SCIPQueryEngine",
+                return_value=mock_engine,
+            ),
         ):
             mock_state.payload_cache = mock_payload_cache
             mock_find_files.return_value = [Mock()]  # One mock SCIP file
@@ -365,7 +374,9 @@ class TestScipReferencesPayloadTruncation:
     """Tests for SCIP references handler with payload truncation."""

     @pytest.mark.asyncio
-    async def test_scip_references_applies_truncation(self, mock_user, mock_payload_cache):
+    async def test_scip_references_applies_truncation(
+        self, mock_user, mock_payload_cache
+    ):
         """Test that scip_references applies payload truncation to results."""
         from code_indexer.server.mcp.handlers import scip_references

@@ -388,12 +399,17 @@ async def test_scip_references_applies_truncation(self, mock_user, mock_payload_
         # Set up store() mock for truncation (AsyncMock imported at line 12)
         mock_payload_cache.store = AsyncMock(return_value="uuid-ref-123")

-        with patch(
-            "code_indexer.server.mcp.handlers.app_module.app.state"
-        ) as mock_state, patch(
-            "code_indexer.server.mcp.handlers._find_scip_files"
-        ) as mock_find_files, patch(
-            "code_indexer.scip.query.primitives.SCIPQueryEngine", return_value=mock_engine
+        with (
+            patch(
+                "code_indexer.server.mcp.handlers.app_module.app.state"
+            ) as mock_state,
+            patch(
+                "code_indexer.server.mcp.handlers._find_scip_files"
+            ) as mock_find_files,
+            patch(
+                "code_indexer.scip.query.primitives.SCIPQueryEngine",
+                return_value=mock_engine,
+            ),
         ):
             mock_state.payload_cache = mock_payload_cache
             mock_find_files.return_value = [Mock()]
@@ -415,7 +431,9 @@ class TestScipDependenciesPayloadTruncation:
     """Tests for SCIP dependencies handler with payload truncation."""

     @pytest.mark.asyncio
-    async def test_scip_dependencies_applies_truncation(self, mock_user, mock_payload_cache):
+    async def test_scip_dependencies_applies_truncation(
+        self, mock_user, mock_payload_cache
+    ):
         """Test that scip_dependencies applies payload truncation to results."""
         from code_indexer.server.mcp.handlers import scip_dependencies

@@ -437,12 +455,17 @@ async def test_scip_dependencies_applies_truncation(self, mock_user, mock_payloa
         # Set up store() mock for truncation (AsyncMock imported at line 12)
         mock_payload_cache.store = AsyncMock(return_value="uuid-dep-123")

-        with patch(
-            "code_indexer.server.mcp.handlers.app_module.app.state"
-        ) as mock_state, patch(
-            "code_indexer.server.mcp.handlers._find_scip_files"
-        ) as mock_find_files, patch(
-            "code_indexer.scip.query.primitives.SCIPQueryEngine", return_value=mock_engine
+        with (
+            patch(
+                "code_indexer.server.mcp.handlers.app_module.app.state"
+            ) as mock_state,
+            patch(
+                "code_indexer.server.mcp.handlers._find_scip_files"
+            ) as mock_find_files,
+            patch(
+                "code_indexer.scip.query.primitives.SCIPQueryEngine",
+                return_value=mock_engine,
+            ),
         ):
             mock_state.payload_cache = mock_payload_cache
             mock_find_files.return_value = [Mock()]
@@ -462,7 +485,9 @@ class TestScipDependentsPayloadTruncation:
     """Tests for SCIP dependents handler with payload truncation."""

     @pytest.mark.asyncio
-    async def test_scip_dependents_applies_truncation(self, mock_user, mock_payload_cache):
+    async def test_scip_dependents_applies_truncation(
+        self, mock_user, mock_payload_cache
+    ):
         """Test that scip_dependents applies payload truncation to results."""
         from code_indexer.server.mcp.handlers import scip_dependents

@@ -484,12 +509,17 @@ async def test_scip_dependents_applies_truncation(self, mock_user, mock_payload_
         # Set up store() mock for truncation (AsyncMock imported at line 12)
         mock_payload_cache.store = AsyncMock(return_value="uuid-dpt-123")

-        with patch(
-            "code_indexer.server.mcp.handlers.app_module.app.state"
-        ) as mock_state, patch(
-            "code_indexer.server.mcp.handlers._find_scip_files"
-        ) as mock_find_files, patch(
-            "code_indexer.scip.query.primitives.SCIPQueryEngine", return_value=mock_engine
+        with (
+            patch(
+                "code_indexer.server.mcp.handlers.app_module.app.state"
+            ) as mock_state,
+            patch(
+                "code_indexer.server.mcp.handlers._find_scip_files"
+            ) as mock_find_files,
+            patch(
+                "code_indexer.scip.query.primitives.SCIPQueryEngine",
+                return_value=mock_engine,
+            ),
         ):
             mock_state.payload_cache = mock_payload_cache
             mock_find_files.return_value = [Mock()]
diff --git a/tests/unit/server/mcp/test_temporal_payload_truncation.py b/tests/unit/server/mcp/test_temporal_payload_truncation.py
index c81a2aae..5adb0a82 100644
--- a/tests/unit/server/mcp/test_temporal_payload_truncation.py
+++ b/tests/unit/server/mcp/test_temporal_payload_truncation.py
@@ -192,7 +192,7 @@ async def test_code_snippet_unicode_truncated_at_char_boundary(self, cache):
         from code_indexer.server.mcp.handlers import _apply_temporal_payload_truncation

         # Unicode content - emojis are 1+ chars but multiple bytes
-        unicode_snippet = "\U0001F600" * (PREVIEW_SIZE + 1000)
+        unicode_snippet = "\U0001f600" * (PREVIEW_SIZE + 1000)

         results = [{"code_snippet": unicode_snippet, "temporal_context": {}}]

@@ -207,7 +207,7 @@ async def test_code_snippet_unicode_truncated_at_char_boundary(self, cache):

         # Should truncate at char boundary, not byte boundary
         assert len(result["code_snippet_preview"]) == PREVIEW_SIZE
-        assert result["code_snippet_preview"] == "\U0001F600" * PREVIEW_SIZE
+        assert result["code_snippet_preview"] == "\U0001f600" * PREVIEW_SIZE
         assert result["code_snippet_total_size"] == PREVIEW_SIZE + 1000

     @pytest.mark.asyncio
diff --git a/tests/unit/server/multi/test_scip_models.py b/tests/unit/server/multi/test_scip_models.py
index 960332a0..db03f112 100644
--- a/tests/unit/server/multi/test_scip_models.py
+++ b/tests/unit/server/multi/test_scip_models.py
@@ -22,8 +22,7 @@ def test_valid_request_definition(self):
         from code_indexer.server.multi.scip_models import SCIPMultiRequest

         request = SCIPMultiRequest(
-            repositories=["repo1", "repo2"],
-            symbol="UserService"
+            repositories=["repo1", "repo2"], symbol="UserService"
         )

         assert request.repositories == ["repo1", "repo2"]
@@ -39,7 +38,7 @@ def test_valid_request_callchain(self):
             repositories=["repo1"],
             symbol="",  # Not used for callchain
             from_symbol="api_handler",
-            to_symbol="database_query"
+            to_symbol="database_query",
         )

         assert request.repositories == ["repo1"]
@@ -51,10 +50,7 @@ def test_empty_repositories_rejected(self):
         from code_indexer.server.multi.scip_models import SCIPMultiRequest

         with pytest.raises(ValidationError):
-            SCIPMultiRequest(
-                repositories=[],
-                symbol="UserService"
-            )
+            SCIPMultiRequest(repositories=[], symbol="UserService")

     def test_missing_symbol_rejected(self):
         """Request without symbol is rejected."""
@@ -77,7 +73,7 @@ def test_valid_definition_result(self):
             line=42,
             column=4,
             symbol="UserService",
-            kind="definition"
+            kind="definition",
         )

         assert result.repository == "repo1"
@@ -99,7 +95,7 @@ def test_valid_reference_result_with_context(self):
             column=0,
             symbol="UserService",
             kind="reference",
-            context=" user = UserService()"
+            context=" user = UserService()",
         )

         assert result.kind == "reference"
@@ -115,7 +111,7 @@ def test_dependency_result(self):
             line=5,
             column=0,
             symbol="DatabaseConnection",
-            kind="dependency"
+            kind="dependency",
         )

         assert result.kind == "dependency"
@@ -130,7 +126,7 @@ def test_dependent_result(self):
             line=20,
             column=4,
             symbol="APIHandler",
-            kind="dependent"
+            kind="dependent",
         )

         assert result.kind == "dependent"
@@ -147,7 +143,7 @@ def test_valid_metadata(self):
             total_results=25,
             repos_searched=3,
             repos_with_results=2,
-            execution_time_ms=450
+            execution_time_ms=450,
         )

         assert metadata.total_results == 25
@@ -163,7 +159,7 @@ def test_zero_results_metadata(self):
             total_results=0,
             repos_searched=2,
             repos_with_results=0,
-            execution_time_ms=100
+            execution_time_ms=100,
         )

         assert metadata.total_results == 0
@@ -178,7 +174,7 @@ def test_valid_response_with_results(self):
         from code_indexer.server.multi.scip_models import (
             SCIPMultiResponse,
             SCIPResult,
-            SCIPMultiMetadata
+            SCIPMultiMetadata,
         )

         results = {
@@ -189,7 +185,7 @@ def test_valid_response_with_results(self):
                     line=42,
                     column=4,
                     symbol="UserService",
-                    kind="definition"
+                    kind="definition",
                 )
             ],
             "repo2": [
                 SCIPResult(
@@ -199,23 +195,19 @@ def test_valid_response_with_results(self):
                     line=10,
                     column=0,
                     symbol="UserService",
-                    kind="reference"
+                    kind="reference",
                 )
-            ]
+            ],
         }

         metadata = SCIPMultiMetadata(
             total_results=2,
             repos_searched=2,
             repos_with_results=2,
-            execution_time_ms=300
+            execution_time_ms=300,
         )

-        response = SCIPMultiResponse(
-            results=results,
-            metadata=metadata,
-            skipped={}
-        )
+        response = SCIPMultiResponse(results=results, metadata=metadata, skipped={})

         assert len(response.results) == 2
         assert "repo1" in response.results
@@ -228,7 +220,7 @@ def test_valid_response_with_skipped_repos(self):
         from code_indexer.server.multi.scip_models import (
             SCIPMultiResponse,
             SCIPResult,
-            SCIPMultiMetadata
+            SCIPMultiMetadata,
         )

         results = {
@@ -239,7 +231,7 @@ def test_valid_response_with_skipped_repos(self):
                     line=42,
                     column=4,
                     symbol="UserService",
-                    kind="definition"
+                    kind="definition",
                 )
             ]
         }
@@ -248,13 +240,13 @@ def test_valid_response_with_skipped_repos(self):
             total_results=1,
             repos_searched=1,
             repos_with_results=1,
-            execution_time_ms=200
+            execution_time_ms=200,
         )

         response = SCIPMultiResponse(
             results=results,
             metadata=metadata,
-            skipped={"repo2": "No SCIP index available"}
+            skipped={"repo2": "No SCIP index available"},
         )

         assert len(response.results) == 1
@@ -265,21 +257,21 @@ def test_valid_response_with_errors(self):
         """Valid response with errors from some repos."""
         from code_indexer.server.multi.scip_models import (
             SCIPMultiResponse,
-            SCIPMultiMetadata
+            SCIPMultiMetadata,
         )

         metadata = SCIPMultiMetadata(
             total_results=0,
             repos_searched=0,
             repos_with_results=0,
-            execution_time_ms=150
+            execution_time_ms=150,
         )

         response = SCIPMultiResponse(
             results={},
             metadata=metadata,
             skipped={},
-            errors={"repo1": "Database connection failed"}
+            errors={"repo1": "Database connection failed"},
         )

         assert len(response.results) == 0
@@ -290,21 +282,17 @@ def test_empty_response(self):
         """Valid empty response (no results, no errors)."""
         from code_indexer.server.multi.scip_models import (
             SCIPMultiResponse,
-            SCIPMultiMetadata
+            SCIPMultiMetadata,
         )

         metadata = SCIPMultiMetadata(
             total_results=0,
             repos_searched=1,
             repos_with_results=0,
-            execution_time_ms=50
+            execution_time_ms=50,
         )

-        response = SCIPMultiResponse(
-            results={},
-            metadata=metadata,
-            skipped={}
-        )
+        response = SCIPMultiResponse(results={}, metadata=metadata, skipped={})

         assert len(response.results) == 0
         assert len(response.skipped) == 0
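Note: the test_scip_truncation.py hunks above rewrite comma-chained `with patch(...) as a, patch(...) as b:` statements into parenthesized context-manager groups. That is a syntax feature, not just formatting: parenthesized context managers require Python 3.10 or newer (the project's minimum supported version is not visible in this diff). A self-contained sketch of the form, using nullcontext so it runs without any patch targets:

    from contextlib import nullcontext

    # Each manager gets its own line and a trailing comma, matching the
    # style produced in the hunks above (Python 3.10+ syntax).
    with (
        nullcontext("a") as first,
        nullcontext("b") as second,
    ):
        assert (first, second) == ("a", "b")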
a/tests/unit/server/multi/test_scip_multi_service.py
+++ b/tests/unit/server/multi/test_scip_multi_service.py
@@ -14,14 +14,10 @@
 AC8: SCIP Index Availability Handling
 """

-import asyncio
 import pytest
-from pathlib import Path
-from unittest.mock import Mock, patch, MagicMock
+from unittest.mock import patch

 from code_indexer.server.multi.scip_models import (
     SCIPMultiRequest,
-    SCIPMultiResponse,
-    SCIPResult,
 )
 from code_indexer.scip.query.primitives import QueryResult

@@ -36,14 +32,11 @@ async def test_definition_across_multiple_repos(self):
         service = SCIPMultiService()

         request = SCIPMultiRequest(
-            repositories=["repo1", "repo2"],
-            symbol="UserService"
+            repositories=["repo1", "repo2"], symbol="UserService"
         )

         # Mock the single-repo definition method
-        with patch.object(
-            service, "_find_definition_in_repo"
-        ) as mock_find:
+        with patch.object(service, "_find_definition_in_repo") as mock_find:
             # Repo1 has definition, repo2 doesn't
             mock_find.side_effect = [
                 [
@@ -53,10 +46,10 @@ async def test_definition_across_multiple_repos(self):
                         file_path="src/auth.py",
                         line=42,
                         column=4,
-                        kind="definition"
+                        kind="definition",
                     )
                 ],
-                []  # repo2 has no definition
+                [],  # repo2 has no definition
             ]

             response = await service.definition(request)
@@ -74,8 +67,7 @@ async def test_definition_no_scip_index(self):
         service = SCIPMultiService()

         request = SCIPMultiRequest(
-            repositories=["repo1", "repo_no_scip"],
-            symbol="UserService"
+            repositories=["repo1", "repo_no_scip"], symbol="UserService"
         )

         with patch.object(service, "_find_definition_in_repo") as mock_find:
@@ -88,10 +80,10 @@ async def test_definition_no_scip_index(self):
                         file_path="src/auth.py",
                         line=42,
                         column=4,
-                        kind="definition"
+                        kind="definition",
                     )
                 ],
-                None  # Indicates no SCIP index
+                None,  # Indicates no SCIP index
             ]

             response = await service.definition(request)
@@ -111,8 +103,7 @@ async def test_references_across_multiple_repos(self):
         service = SCIPMultiService()

         request = SCIPMultiRequest(
-            repositories=["repo1", "repo2"],
-            symbol="UserService"
+            repositories=["repo1", "repo2"], symbol="UserService"
         )

         with patch.object(service, "_find_references_in_repo") as mock_find:
@@ -125,7 +116,7 @@ async def test_references_across_multiple_repos(self):
                         file_path="tests/test_auth.py",
                         line=10,
                         column=0,
-                        kind="reference"
+                        kind="reference",
                     )
                 ],
                 [
@@ -135,9 +126,9 @@ async def test_references_across_multiple_repos(self):
                         file_path="lib/user.py",
                         line=5,
                         column=4,
-                        kind="reference"
+                        kind="reference",
                     )
-                ]
+                ],
             ]

             response = await service.references(request)
@@ -160,8 +151,7 @@ async def test_dependencies_across_multiple_repos(self):
         service = SCIPMultiService()

         request = SCIPMultiRequest(
-            repositories=["repo1", "repo2"],
-            symbol="UserService"
+            repositories=["repo1", "repo2"], symbol="UserService"
         )

         with patch.object(service, "_get_dependencies_in_repo") as mock_deps:
@@ -174,7 +164,7 @@ async def test_dependencies_across_multiple_repos(self):
                         file_path="src/auth.py",
                         line=5,
                         column=0,
-                        kind="dependency"
+                        kind="dependency",
                     )
                 ],
                 [
@@ -184,9 +174,9 @@ async def test_dependencies_across_multiple_repos(self):
                         file_path="lib/user.py",
                         line=2,
                         column=0,
-                        kind="dependency"
+                        kind="dependency",
                     )
-                ]
+                ],
             ]

             response = await service.dependencies(request)
@@ -207,8 +197,7 @@ async def test_dependents_across_multiple_repos(self):
         service = SCIPMultiService()

         request = SCIPMultiRequest(
-            repositories=["repo1", "repo2"],
-            symbol="UserService"
+            repositories=["repo1", "repo2"], symbol="UserService"
         )

         with patch.object(service, "_get_dependents_in_repo") as mock_deps:
@@ -221,7 +210,7 @@ async def test_dependents_across_multiple_repos(self):
                         file_path="src/api.py",
                         line=20,
                         column=4,
-                        kind="dependent"
+                        kind="dependent",
                     )
                 ],
                 [
@@ -231,9 +220,9 @@ async def test_dependents_across_multiple_repos(self):
                         file_path="controllers/user.py",
                         line=15,
                         column=0,
-                        kind="dependent"
+                        kind="dependent",
                     )
-                ]
+                ],
             ]

             response = await service.dependents(request)
@@ -257,7 +246,7 @@ async def test_callchain_per_repository_no_stitching(self):
             repositories=["repo1", "repo2"],
             symbol="",  # Not used for callchain
             from_symbol="api_handler",
-            to_symbol="database_query"
+            to_symbol="database_query",
         )

         with patch.object(service, "_trace_callchain_in_repo") as mock_chain:
@@ -271,7 +260,7 @@ async def test_callchain_per_repository_no_stitching(self):
                         line=0,
                         column=0,
                         kind="callchain",
-                        context="api_handler -> service -> database_query"
+                        context="api_handler -> service -> database_query",
                     )
                 ],
                 [
@@ -282,9 +271,9 @@ async def test_callchain_per_repository_no_stitching(self):
                         line=0,
                         column=0,
                         kind="callchain",
-                        context="api_handler -> database_query"
+                        context="api_handler -> database_query",
                     )
-                ]
+                ],
             ]

             response = await service.callchain(request)
@@ -300,7 +289,9 @@ async def test_callchain_per_repository_no_stitching(self):
         repo1_chain = response.results["repo1"][0].context
         repo2_chain = response.results["repo2"][0].context
         assert "service" in repo1_chain  # repo1 has intermediate symbol
-        assert "service" not in repo2_chain  # repo2 doesn't have intermediate symbol
+        assert (
+            "service" not in repo2_chain
+        )  # repo2 doesn't have intermediate symbol

         # Verify repository attribution is correct
         assert response.results["repo1"][0].repository == "repo1"
@@ -320,8 +311,7 @@ async def test_timeout_parameter_accepted(self):
         assert service.query_timeout_seconds == 1

         request = SCIPMultiRequest(
-            repositories=["repo1", "repo2"],
-            symbol="UserService"
+            repositories=["repo1", "repo2"], symbol="UserService"
         )

         with patch.object(service, "_find_definition_in_repo") as mock_find:
@@ -333,7 +323,7 @@ async def test_timeout_parameter_accepted(self):
                         file_path="src/auth.py",
                         line=42,
                         column=4,
-                        kind="definition"
+                        kind="definition",
                     )
                 ]

@@ -354,8 +344,7 @@ async def test_results_grouped_by_repository(self):
         service = SCIPMultiService()

         request = SCIPMultiRequest(
-            repositories=["repo1", "repo2", "repo3"],
-            symbol="UserService"
+            repositories=["repo1", "repo2", "repo3"], symbol="UserService"
         )

         with patch.object(service, "_find_definition_in_repo") as mock_find:
@@ -367,7 +356,7 @@ async def test_results_grouped_by_repository(self):
                         file_path="src/auth.py",
                         line=42,
                         column=4,
-                        kind="definition"
+                        kind="definition",
                     )
                 ],
                 [],  # repo2 has no results
@@ -378,9 +367,9 @@ async def test_results_grouped_by_repository(self):
                         file_path="lib/auth.py",
                         line=10,
                         column=0,
-                        kind="definition"
+                        kind="definition",
                     )
-                ]
+                ],
             ]

             response = await service.definition(request)
@@ -399,7 +388,9 @@ async def test_results_grouped_by_repository(self):

         # Verify metadata
         assert response.metadata.repos_searched == 3
-        assert response.metadata.repos_with_results == 2  # repo1 and repo3 have results
+        assert (
+            response.metadata.repos_with_results == 2
+        )  # repo1 and repo3 have results
         assert response.metadata.total_results == 2

@@ -413,11 +404,11 @@ async def test_partial_failure_continues_other_repos(self):
         service = SCIPMultiService()

         request = SCIPMultiRequest(
-            repositories=["repo1", "repo_error", "repo3"],
-            symbol="UserService"
+            repositories=["repo1", "repo_error", "repo3"], symbol="UserService"
         )

         with patch.object(service, "_find_definition_in_repo") as mock_find:
+
             def find_with_error(repo_id, symbol):
                 if repo_id == "repo_error":
                     raise RuntimeError("Database connection failed")
@@ -428,7 +419,7 @@ def find_with_error(repo_id, symbol):
                         file_path="src/auth.py",
                         line=42,
                         column=4,
-                        kind="definition"
+                        kind="definition",
                     )
                 ]
diff --git a/tests/unit/server/repositories/test_job_datetime_sorting.py b/tests/unit/server/repositories/test_job_datetime_sorting.py
index ecd99407..7f9d58c7 100644
--- a/tests/unit/server/repositories/test_job_datetime_sorting.py
+++ b/tests/unit/server/repositories/test_job_datetime_sorting.py
@@ -8,10 +8,9 @@

 import tempfile
 import time
-from datetime import datetime, timezone, timedelta
+from datetime import datetime
 from pathlib import Path

-import pytest

 from src.code_indexer.server.repositories.background_jobs import (
     BackgroundJobManager,
@@ -33,6 +32,7 @@ def teardown_method(self):
         self.manager.shutdown()
         import shutil
         import os
+
         if os.path.exists(self.temp_dir):
             shutil.rmtree(self.temp_dir)

@@ -43,6 +43,7 @@ def test_get_recent_jobs_sorts_by_iso_datetime_correctly(self):
         The bug was that int() was being called on ISO format datetime strings
         like '2025-12-09T18:42:39.792746+00:00', causing a ValueError.
         """
+
         def success_task():
             return {"status": "success"}

@@ -61,48 +62,50 @@ def success_task():
         # This should NOT raise ValueError when sorting by completed_at
         # Previously this would crash with:
         # ValueError: invalid literal for int() with base 10: '2025-12-09T18:42:39.792746+00:00'
-        recent_jobs = self.manager.get_recent_jobs_with_filter(
-            time_filter="24h"
-        )
+        recent_jobs = self.manager.get_recent_jobs_with_filter(time_filter="24h")

         # Verify we got jobs back
-        assert len(recent_jobs) >= 3, f"Expected at least 3 jobs, got {len(recent_jobs)}"
+        assert (
+            len(recent_jobs) >= 3
+        ), f"Expected at least 3 jobs, got {len(recent_jobs)}"

         # Verify they are sorted by completion time (newest first)
         for i in range(len(recent_jobs) - 1):
             current_time = recent_jobs[i]["completed_at"]
             next_time = recent_jobs[i + 1]["completed_at"]

             # Both should be ISO format strings
-            assert isinstance(current_time, str), f"Expected string, got {type(current_time)}"
+            assert isinstance(
+                current_time, str
+            ), f"Expected string, got {type(current_time)}"
             assert isinstance(next_time, str), f"Expected string, got {type(next_time)}"

             # Current should be >= next (descending order)
             current_dt = datetime.fromisoformat(current_time)
             next_dt = datetime.fromisoformat(next_time)
-            assert current_dt >= next_dt, f"Jobs not sorted correctly: {current_time} < {next_time}"
+            assert (
+                current_dt >= next_dt
+            ), f"Jobs not sorted correctly: {current_time} < {next_time}"

     def test_get_recent_jobs_handles_none_completed_at(self):
         """Test that sorting handles jobs with None completed_at gracefully."""
+
         def success_task():
             return {"status": "success"}

         # Submit a job
-        job_id = self.manager.submit_job(
-            "test_op", success_task, submitter_username="testuser"
-        )
+        self.manager.submit_job("test_op", success_task, submitter_username="testuser")

         # Wait for completion
         time.sleep(0.2)

         # This should work without error
-        recent_jobs = self.manager.get_recent_jobs_with_filter(
-            time_filter="24h"
-        )
+        recent_jobs = self.manager.get_recent_jobs_with_filter(time_filter="24h")

         # Should have at least the one job we submitted
         assert len(recent_jobs) >= 1

     def test_get_recent_jobs_with_various_time_filters(self):
         """Test get_recent_jobs_with_filter with different time filters."""
+
         def success_task():
             return {"status": "success"}

@@ -121,13 +124,12 @@ def success_task():

     def test_get_recent_jobs_empty_list_does_not_crash(self):
         """Test that an empty job list doesn't cause sorting issues."""
         # No jobs submitted - should return empty list without crashing
-        recent_jobs = self.manager.get_recent_jobs_with_filter(
-            time_filter="24h"
-        )
+        recent_jobs = self.manager.get_recent_jobs_with_filter(time_filter="24h")

         assert recent_jobs == []

     def test_datetime_sorting_with_timezone_aware_strings(self):
         """Test sorting handles timezone-aware ISO strings correctly."""
+
         def success_task():
             return {"status": "success"}

@@ -140,9 +142,7 @@ def success_task():

         time.sleep(0.3)

-        recent_jobs = self.manager.get_recent_jobs_with_filter(
-            time_filter="24h"
-        )
+        recent_jobs = self.manager.get_recent_jobs_with_filter(time_filter="24h")

         # Verify all completed_at values are valid ISO format with timezone
         for job in recent_jobs:
@@ -151,4 +151,6 @@ def success_task():
             # Should be parseable as ISO format
             dt = datetime.fromisoformat(completed_at)
             # Should be timezone aware
-            assert dt.tzinfo is not None, f"Expected timezone-aware datetime, got {completed_at}"
+            assert (
+                dt.tzinfo is not None
+            ), f"Expected timezone-aware datetime, got {completed_at}"
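The fix these tests lock in is sorting by parsed datetimes rather than calling int() on ISO strings. A None-safe sort key along these lines (a sketch, not the manager's actual code):

from datetime import datetime, timezone

def completed_at_key(job):
    # Jobs without a completion time sort last under reverse=True;
    # ISO strings parse directly with fromisoformat, +00:00 included.
    value = job.get("completed_at")
    if value is None:
        return datetime.min.replace(tzinfo=timezone.utc)
    return datetime.fromisoformat(value)

jobs = [
    {"completed_at": "2025-12-09T18:42:39.792746+00:00"},
    {"completed_at": None},
]
jobs.sort(key=completed_at_key, reverse=True)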
diff --git a/tests/unit/server/routes/test_scip_multi_routes.py b/tests/unit/server/routes/test_scip_multi_routes.py
index 7654f1c6..58b88aab 100644
--- a/tests/unit/server/routes/test_scip_multi_routes.py
+++ b/tests/unit/server/routes/test_scip_multi_routes.py
@@ -166,9 +166,7 @@ def test_references_partial_failure(self, mock_auth, mock_scip_multi_service):
 class TestSCIPMultiRoutesDependencies:
     """Test /api/scip/multi/dependencies endpoint (AC3: Multi-Repository Dependency Analysis)."""

-    def test_successful_dependencies_analysis(
-        self, mock_auth, mock_scip_multi_service
-    ):
+    def test_successful_dependencies_analysis(self, mock_auth, mock_scip_multi_service):
         """Successful dependency analysis across multiple repositories."""
         pytest.skip("Route not implemented yet - TDD RED phase")

@@ -212,8 +210,6 @@ def test_validation_error_returns_422(self, mock_auth, mock_scip_multi_service):
         """Service ValueError returns 422 Unprocessable Entity."""
         pytest.skip("Route not implemented yet - TDD RED phase")

-    def test_timeout_returns_partial_results(
-        self, mock_auth, mock_scip_multi_service
-    ):
+    def test_timeout_returns_partial_results(self, mock_auth, mock_scip_multi_service):
         """Timeout returns results from completed repos with error for timed out repos (AC7)."""
         pytest.skip("Route not implemented yet - TDD RED phase")
diff --git a/tests/unit/server/services/repository_providers/test_base_provider.py b/tests/unit/server/services/repository_providers/test_base_provider.py
index f4a8a184..f07308a2 100644
--- a/tests/unit/server/services/repository_providers/test_base_provider.py
+++ b/tests/unit/server/services/repository_providers/test_base_provider.py
@@ -8,7 +8,6 @@

 import pytest
 from abc import ABC
-from unittest.mock import AsyncMock


 class TestRepositoryProviderBase:
@@ -70,7 +69,6 @@ def test_concrete_implementation_can_be_created(self):
         )
         from code_indexer.server.models.auto_discovery import (
             RepositoryDiscoveryResult,
-            DiscoveredRepository,
         )

         class ConcreteProvider(RepositoryProviderBase):
diff --git a/tests/unit/server/services/repository_providers/test_github_provider.py b/tests/unit/server/services/repository_providers/test_github_provider.py
index 6a3b5bb5..524815b6 100644
--- a/tests/unit/server/services/repository_providers/test_github_provider.py
+++ b/tests/unit/server/services/repository_providers/test_github_provider.py
@@ -267,7 +267,9 @@ def test_parse_link_header_returns_1_when_no_last(self):
         )

         # Only prev, no last - means we're on the last page
-        link_header = '; rel="prev"'
+        link_header = (
+            '; rel="prev"'
+        )

         total_pages = provider._parse_link_header_for_last_page(link_header)
         assert total_pages == 1
@@ -505,10 +507,12 @@ def capture_request(endpoint, params=None):

         await provider.discover_repositories(page=1, page_size=50)

         # Verify sorting parameters are correct for last push descending
-        assert captured_params.get("sort") == "pushed", \
-            f"Expected sort='pushed', got '{captured_params.get('sort')}'"
-        assert captured_params.get("direction") == "desc", \
-            f"Expected direction='desc', got '{captured_params.get('direction')}'"
+        assert (
+            captured_params.get("sort") == "pushed"
+        ), f"Expected sort='pushed', got '{captured_params.get('sort')}'"
+        assert (
+            captured_params.get("direction") == "desc"
+        ), f"Expected direction='desc', got '{captured_params.get('direction')}'"


 class TestGitHubProviderErrorHandling:
@@ -569,7 +573,10 @@ async def test_handles_api_error(self):
         with pytest.raises(GitHubProviderError) as exc_info:
             await provider.discover_repositories(page=1, page_size=50)

-        assert "api" in str(exc_info.value).lower() or "error" in str(exc_info.value).lower()
+        assert (
+            "api" in str(exc_info.value).lower()
+            or "error" in str(exc_info.value).lower()
+        )

     @pytest.mark.asyncio
     async def test_handles_timeout(self):
@@ -630,7 +637,7 @@ async def test_handles_rate_limit(self):
         mock_response.status_code = 403
         mock_response.headers = {
             "X-RateLimit-Remaining": "0",
-            "X-RateLimit-Reset": "1704067200"
+            "X-RateLimit-Reset": "1704067200",
         }
         mock_response.raise_for_status.side_effect = httpx.HTTPStatusError(
             "rate limit exceeded", request=MagicMock(), response=mock_response
         )
@@ -641,4 +648,7 @@
             await provider.discover_repositories(page=1, page_size=50)

         # Should include rate limit info in error
-        assert "rate limit" in str(exc_info.value).lower() or "api" in str(exc_info.value).lower()
+        assert (
+            "rate limit" in str(exc_info.value).lower()
+            or "api" in str(exc_info.value).lower()
+        )
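The Link-header test above relies on GitHub's pagination convention: a rel="last" entry carries the final page number, and its absence (e.g. only rel="prev") means you are already on the last page. A rough equivalent of such a parser; the provider's real _parse_link_header_for_last_page may differ:

import re

def last_page_from_link_header(link_header: str) -> int:
    # GitHub emits e.g. '<https://api.github.com/...&page=9>; rel="last"'.
    # No rel="last" entry means the current page is the last one.
    match = re.search(r'[?&]page=(\d+)>;\s*rel="last"', link_header or "")
    return int(match.group(1)) if match else 1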
diff --git a/tests/unit/server/services/repository_providers/test_gitlab_provider.py b/tests/unit/server/services/repository_providers/test_gitlab_provider.py
index c036b707..ea6df695 100644
--- a/tests/unit/server/services/repository_providers/test_gitlab_provider.py
+++ b/tests/unit/server/services/repository_providers/test_gitlab_provider.py
@@ -6,8 +6,7 @@
 """

 import pytest
-from datetime import datetime, timezone
-from unittest.mock import AsyncMock, MagicMock, patch
+from unittest.mock import MagicMock, patch

 import httpx

@@ -408,10 +407,12 @@ def capture_request(endpoint, params=None):

         await provider.discover_repositories(page=1, page_size=50)

         # Verify sorting parameters are correct for last activity descending
-        assert captured_params.get("order_by") == "last_activity_at", \
-            f"Expected order_by='last_activity_at', got '{captured_params.get('order_by')}'"
-        assert captured_params.get("sort") == "desc", \
-            f"Expected sort='desc', got '{captured_params.get('sort')}'"
+        assert (
+            captured_params.get("order_by") == "last_activity_at"
+        ), f"Expected order_by='last_activity_at', got '{captured_params.get('order_by')}'"
+        assert (
+            captured_params.get("sort") == "desc"
+        ), f"Expected sort='desc', got '{captured_params.get('sort')}'"


 class TestGitLabProviderErrorHandling:
@@ -472,7 +473,10 @@ async def test_handles_api_error(self):
         with pytest.raises(GitLabProviderError) as exc_info:
             await provider.discover_repositories(page=1, page_size=50)

-        assert "api" in str(exc_info.value).lower() or "error" in str(exc_info.value).lower()
+        assert (
+            "api" in str(exc_info.value).lower()
+            or "error" in str(exc_info.value).lower()
+        )

     @pytest.mark.asyncio
     async def test_handles_timeout(self):
diff --git a/tests/unit/server/services/repository_providers/test_search_filter.py b/tests/unit/server/services/repository_providers/test_search_filter.py
index 0e89a988..515e9296 100644
--- a/tests/unit/server/services/repository_providers/test_search_filter.py
+++ b/tests/unit/server/services/repository_providers/test_search_filter.py
@@ -60,7 +60,9 @@ def gitlab_provider():
     )


-def create_mock_response(projects: list, total: Optional[int] = None, total_pages: int = 1):
+def create_mock_response(
+    projects: list, total: Optional[int] = None, total_pages: int = 1
+):
     """Create a mock HTTP response for GitLab API."""
     if total is None:
         total = len(projects)
@@ -80,12 +82,18 @@ async def test_search_by_name_matches(self, gitlab_provider):
         """Test that search matches repository name substring."""
         projects = [
             create_mock_gitlab_project("auth-service", "team/auth-service", "Auth"),
-            create_mock_gitlab_project("payment-service", "team/payment-service", "Pay"),
+            create_mock_gitlab_project(
+                "payment-service", "team/payment-service", "Pay"
+            ),
         ]
         mock_response = create_mock_response(projects)

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="auth")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search="auth"
+            )

         assert len(result.repositories) == 1
         assert result.repositories[0].name == "team/auth-service"
@@ -94,13 +102,19 @@ async def test_search_by_description_matches(self, gitlab_provider):
         """Test that search matches repository description substring."""
         projects = [
-            create_mock_gitlab_project("gateway", "team/gateway", "API with authentication"),
+            create_mock_gitlab_project(
+                "gateway", "team/gateway", "API with authentication"
+            ),
             create_mock_gitlab_project("utils", "team/utils", "Utility functions"),
         ]
         mock_response = create_mock_response(projects)

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="authentication")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search="authentication"
+            )

         assert len(result.repositories) == 1
         assert result.repositories[0].name == "team/gateway"
@@ -113,24 +127,38 @@ async def test_search_case_insensitive(self, gitlab_provider):
         ]
         mock_response = create_mock_response(projects)

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="myproject")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search="myproject"
+            )
         assert len(result.repositories) == 1

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="important")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search="important"
+            )
         assert len(result.repositories) == 1

     @pytest.mark.asyncio
     async def test_search_no_matches(self, gitlab_provider):
         """Test that search returns empty list when no matches."""
         projects = [
-            create_mock_gitlab_project("api-service", "team/api-service", "API backend"),
+            create_mock_gitlab_project(
+                "api-service", "team/api-service", "API backend"
+            ),
         ]
         mock_response = create_mock_response(projects)

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="nonexistent")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search="nonexistent"
+            )

         assert len(result.repositories) == 0

@@ -143,26 +171,40 @@ async def test_search_empty_string_returns_all(self, gitlab_provider):
         ]
         mock_response = create_mock_response(projects)

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search=""
+            )
         assert len(result.repositories) == 2

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search=None)
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search=None
+            )
         assert len(result.repositories) == 2

     @pytest.mark.asyncio
     async def test_search_special_characters_handled_safely(self, gitlab_provider):
         """Test that special characters in search are handled safely."""
         projects = [
-            create_mock_gitlab_project("test-project", "team/test-project", "Test (v1.0)"),
+            create_mock_gitlab_project(
+                "test-project", "team/test-project", "Test (v1.0)"
+            ),
         ]
         mock_response = create_mock_response(projects)

         special_searches = ["(v1.0)", "[test]", "test.*", "test/path"]
         for search_term in special_searches:
-            with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-                result = await gitlab_provider.discover_repositories(page=1, page_size=50, search=search_term)
+            with patch.object(
+                gitlab_provider, "_make_api_request", return_value=mock_response
+            ):
+                result = await gitlab_provider.discover_repositories(
+                    page=1, page_size=50, search=search_term
+                )
             assert isinstance(result.repositories, list)

     @pytest.mark.asyncio
@@ -174,8 +216,12 @@ async def test_search_with_null_description(self, gitlab_provider):
         ]
         mock_response = create_mock_response(projects)

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="target")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search="target"
+            )

         assert len(result.repositories) == 1
         assert result.repositories[0].name == "team/target"
@@ -189,12 +235,18 @@ async def test_search_applies_after_indexed_repo_exclusion(self, gitlab_provider

         projects = [
             create_mock_gitlab_project("auth-service", "team/auth-service", "Indexed"),
-            create_mock_gitlab_project("auth-middleware", "team/auth-middleware", "Not indexed"),
+            create_mock_gitlab_project(
+                "auth-middleware", "team/auth-middleware", "Not indexed"
+            ),
         ]
         mock_response = create_mock_response(projects)

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="auth")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search="auth"
+            )

         assert len(result.repositories) == 1
         assert result.repositories[0].name == "team/auth-middleware"
@@ -204,20 +256,28 @@ async def test_search_by_commit_hash_matches(self, gitlab_provider):
         """Search by commit hash should find matching repos."""
         projects = [
             create_mock_gitlab_project(
-                "project-a", "team/project-a", "Desc A",
+                "project-a",
+                "team/project-a",
+                "Desc A",
                 last_commit_hash="abc1234def5678",
-                last_commit_author="John Doe"
+                last_commit_author="John Doe",
             ),
             create_mock_gitlab_project(
-                "project-b", "team/project-b", "Desc B",
+                "project-b",
+                "team/project-b",
+                "Desc B",
                 last_commit_hash="xyz9999fff1111",
-                last_commit_author="Jane Smith"
+                last_commit_author="Jane Smith",
             ),
         ]
         mock_response = create_mock_response(projects)

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="abc1234")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search="abc1234"
+            )

         assert len(result.repositories) == 1
         assert result.repositories[0].name == "team/project-a"
@@ -227,20 +287,28 @@ async def test_search_by_committer_matches(self, gitlab_provider):
         """Search by committer name should find matching repos."""
         projects = [
             create_mock_gitlab_project(
-                "project-a", "team/project-a", "Desc A",
+                "project-a",
+                "team/project-a",
+                "Desc A",
                 last_commit_hash="abc1234def5678",
-                last_commit_author="John Doe"
+                last_commit_author="John Doe",
             ),
             create_mock_gitlab_project(
-                "project-b", "team/project-b", "Desc B",
+                "project-b",
+                "team/project-b",
+                "Desc B",
                 last_commit_hash="xyz9999fff1111",
-                last_commit_author="Jane Smith"
+                last_commit_author="Jane Smith",
             ),
         ]
         mock_response = create_mock_response(projects)

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="jane")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search="jane"
+            )

         assert len(result.repositories) == 1
         assert result.repositories[0].name == "team/project-b"
@@ -250,17 +318,23 @@ async def test_search_by_committer_case_insensitive(self, gitlab_provider):
         """Search by committer should be case insensitive."""
         projects = [
             create_mock_gitlab_project(
-                "project-a", "team/project-a", "Desc A",
+                "project-a",
+                "team/project-a",
+                "Desc A",
                 last_commit_hash="abc1234def5678",
-                last_commit_author="John Doe"
+                last_commit_author="John Doe",
             ),
         ]
         mock_response = create_mock_response(projects)

         # Search with different case variations
         for search_term in ["john", "JOHN", "John", "doe", "DOE"]:
-            with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-                result = await gitlab_provider.discover_repositories(page=1, page_size=50, search=search_term)
+            with patch.object(
+                gitlab_provider, "_make_api_request", return_value=mock_response
+            ):
+                result = await gitlab_provider.discover_repositories(
+                    page=1, page_size=50, search=search_term
+                )
             assert len(result.repositories) == 1, f"Search '{search_term}' should match"

     @pytest.mark.asyncio
@@ -268,18 +342,26 @@ async def test_search_with_null_commit_info(self, gitlab_provider):
         """Search should handle repos with null commit info gracefully."""
         projects = [
             create_mock_gitlab_project(
-                "no-commit", "team/no-commit", "No commit info",
+                "no-commit",
+                "team/no-commit",
+                "No commit info",
             ),
             create_mock_gitlab_project(
-                "has-commit", "team/has-commit", "Has commit info",
+                "has-commit",
+                "team/has-commit",
+                "Has commit info",
                 last_commit_hash="abc1234",
-                last_commit_author="Author"
+                last_commit_author="Author",
             ),
         ]
         mock_response = create_mock_response(projects)

-        with patch.object(gitlab_provider, "_make_api_request", return_value=mock_response):
-            result = await gitlab_provider.discover_repositories(page=1, page_size=50, search="abc1234")
+        with patch.object(
+            gitlab_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await gitlab_provider.discover_repositories(
+                page=1, page_size=50, search="abc1234"
+            )

         # Should only find the one with commit info, not crash on null
         assert len(result.repositories) == 1
@@ -358,8 +440,12 @@ async def test_search_by_name_matches(self, github_provider):
         ]
         mock_response = create_github_mock_response(repos)

-        with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-            result = await github_provider.discover_repositories(page=1, page_size=50, search="auth")
+        with patch.object(
+            github_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await github_provider.discover_repositories(
+                page=1, page_size=50, search="auth"
+            )

         assert len(result.repositories) == 1
         assert result.repositories[0].name == "owner/auth-lib"
@@ -368,13 +454,19 @@ async def test_search_by_description_matches(self, github_provider):
         """Test that search matches repository description substring."""
         repos = [
-            create_mock_github_repo("gateway", "owner/gateway", "API with authentication"),
+            create_mock_github_repo(
+                "gateway", "owner/gateway", "API with authentication"
+            ),
             create_mock_github_repo("utils", "owner/utils", "Utility functions"),
         ]
         mock_response = create_github_mock_response(repos)

-        with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-            result = await github_provider.discover_repositories(page=1, page_size=50, search="authentication")
+        with patch.object(
+            github_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await github_provider.discover_repositories(
+                page=1, page_size=50, search="authentication"
+            )

         assert len(result.repositories) == 1
         assert result.repositories[0].name == "owner/gateway"
@@ -387,20 +479,30 @@ async def test_search_case_insensitive(self, github_provider):
         ]
         mock_response = create_github_mock_response(repos)

-        with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-            result = await github_provider.discover_repositories(page=1, page_size=50, search="awesome")
+        with patch.object(
+            github_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await github_provider.discover_repositories(
+                page=1, page_size=50, search="awesome"
+            )
         assert len(result.repositories) == 1

     @pytest.mark.asyncio
     async def test_search_no_matches(self, github_provider):
         """Test that search returns empty list when no matches."""
         repos = [
-            create_mock_github_repo("my-service", "owner/my-service", "Service backend"),
+            create_mock_github_repo(
+                "my-service", "owner/my-service", "Service backend"
+            ),
         ]
         mock_response = create_github_mock_response(repos)

-        with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-            result = await github_provider.discover_repositories(page=1, page_size=50, search="nonexistent")
+        with patch.object(
+            github_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await github_provider.discover_repositories(
+                page=1, page_size=50, search="nonexistent"
+            )

         assert len(result.repositories) == 0

@@ -413,12 +515,20 @@ async def test_search_empty_string_returns_all(self, github_provider):
         ]
         mock_response = create_github_mock_response(repos)

-        with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-            result = await github_provider.discover_repositories(page=1, page_size=50, search="")
+        with patch.object(
+            github_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await github_provider.discover_repositories(
+                page=1, page_size=50, search=""
+            )
         assert len(result.repositories) == 2

-        with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-            result = await github_provider.discover_repositories(page=1, page_size=50, search=None)
+        with patch.object(
+            github_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await github_provider.discover_repositories(
+                page=1, page_size=50, search=None
+            )
         assert len(result.repositories) == 2

     @pytest.mark.asyncio
@@ -431,8 +541,12 @@ async def test_search_special_characters_handled_safely(self, github_provider):

         special_searches = ["(v2.0)", "[test]", "test.*", "test/path"]
         for search_term in special_searches:
-            with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-                result = await github_provider.discover_repositories(page=1, page_size=50, search=search_term)
+            with patch.object(
+                github_provider, "_make_api_request", return_value=mock_response
+            ):
+                result = await github_provider.discover_repositories(
+                    page=1, page_size=50, search=search_term
+                )
             assert isinstance(result.repositories, list)

     @pytest.mark.asyncio
@@ -440,20 +554,28 @@ async def test_search_by_commit_hash_matches(self, github_provider):
         """Search by commit hash should find matching repos."""
         repos = [
             create_mock_github_repo(
-                "repo-a", "owner/repo-a", "Desc A",
+                "repo-a",
+                "owner/repo-a",
+                "Desc A",
                 last_commit_hash="abc1234def5678",
-                last_commit_author="John Doe"
+                last_commit_author="John Doe",
             ),
             create_mock_github_repo(
-                "repo-b", "owner/repo-b", "Desc B",
+                "repo-b",
+                "owner/repo-b",
+                "Desc B",
                 last_commit_hash="xyz9999fff1111",
-                last_commit_author="Jane Smith"
+                last_commit_author="Jane Smith",
             ),
         ]
         mock_response = create_github_mock_response(repos)

-        with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-            result = await github_provider.discover_repositories(page=1, page_size=50, search="abc1234")
+        with patch.object(
+            github_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await github_provider.discover_repositories(
+                page=1, page_size=50, search="abc1234"
+            )

         assert len(result.repositories) == 1
         assert result.repositories[0].name == "owner/repo-a"
@@ -463,20 +585,28 @@ async def test_search_by_committer_matches(self, github_provider):
         """Search by committer name should find matching repos."""
         repos = [
             create_mock_github_repo(
-                "repo-a", "owner/repo-a", "Desc A",
+                "repo-a",
+                "owner/repo-a",
+                "Desc A",
                 last_commit_hash="abc1234def5678",
-                last_commit_author="John Doe"
+                last_commit_author="John Doe",
             ),
             create_mock_github_repo(
-                "repo-b", "owner/repo-b", "Desc B",
+                "repo-b",
+                "owner/repo-b",
+                "Desc B",
                 last_commit_hash="xyz9999fff1111",
-                last_commit_author="Jane Smith"
+                last_commit_author="Jane Smith",
             ),
         ]
         mock_response = create_github_mock_response(repos)

-        with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-            result = await github_provider.discover_repositories(page=1, page_size=50, search="jane")
+        with patch.object(
+            github_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await github_provider.discover_repositories(
+                page=1, page_size=50, search="jane"
+            )

         assert len(result.repositories) == 1
         assert result.repositories[0].name == "owner/repo-b"
@@ -486,17 +616,23 @@ async def test_search_by_committer_case_insensitive(self, github_provider):
         """Search by committer should be case insensitive."""
         repos = [
             create_mock_github_repo(
-                "repo-a", "owner/repo-a", "Desc A",
+                "repo-a",
+                "owner/repo-a",
+                "Desc A",
                 last_commit_hash="abc1234def5678",
-                last_commit_author="John Doe"
+                last_commit_author="John Doe",
             ),
         ]
         mock_response = create_github_mock_response(repos)

         # Search with different case variations
         for search_term in ["john", "JOHN", "John", "doe", "DOE"]:
-            with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-                result = await github_provider.discover_repositories(page=1, page_size=50, search=search_term)
+            with patch.object(
+                github_provider, "_make_api_request", return_value=mock_response
+            ):
+                result = await github_provider.discover_repositories(
+                    page=1, page_size=50, search=search_term
+                )
             assert len(result.repositories) == 1, f"Search '{search_term}' should match"

     @pytest.mark.asyncio
@@ -504,18 +640,26 @@ async def test_search_with_null_commit_info(self, github_provider):
         """Search should handle repos with null commit info gracefully."""
         repos = [
             create_mock_github_repo(
-                "no-commit", "owner/no-commit", "No commit info",
+                "no-commit",
+                "owner/no-commit",
+                "No commit info",
             ),
             create_mock_github_repo(
-                "has-commit", "owner/has-commit", "Has commit info",
+                "has-commit",
+                "owner/has-commit",
+                "Has commit info",
                 last_commit_hash="abc1234",
-                last_commit_author="Author"
+                last_commit_author="Author",
             ),
         ]
         mock_response = create_github_mock_response(repos)

-        with patch.object(github_provider, "_make_api_request", return_value=mock_response):
-            result = await github_provider.discover_repositories(page=1, page_size=50, search="abc1234")
+        with patch.object(
+            github_provider, "_make_api_request", return_value=mock_response
+        ):
+            result = await github_provider.discover_repositories(
+                page=1, page_size=50, search="abc1234"
+            )

         # Should only find the one with commit info, not crash on null
         assert len(result.repositories) == 1
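Taken together, the search-filter tests describe one behavior: a null-safe, case-insensitive substring match across name, description, and commit metadata, with special characters treated literally. A sketch of that matcher (dict-shaped input is an assumption for illustration; the providers filter their own model objects):

def matches(repo: dict, search: str | None) -> bool:
    if not search:
        return True  # empty or None search returns everything
    needle = search.lower()
    haystacks = (
        repo.get("name"),
        repo.get("description"),
        repo.get("last_commit_hash"),
        repo.get("last_commit_author"),
    )
    # Plain substring comparison: regex metacharacters like "(v1.0)"
    # or "test.*" are literal, so no escaping is needed, and None
    # fields are skipped rather than crashing.
    return any(needle in field.lower() for field in haystacks if field)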
diff --git a/tests/unit/server/services/test_ci_token_manager.py b/tests/unit/server/services/test_ci_token_manager.py
index 0fe05f9a..13abf7d7 100644
--- a/tests/unit/server/services/test_ci_token_manager.py
+++ b/tests/unit/server/services/test_ci_token_manager.py
@@ -283,7 +283,9 @@ def test_validate_gitlab_token_valid_versioned_format(self, token_manager):
         """Test validation accepts newer GitLab versioned token format with periods."""
         # Given a valid glpat- format token with versioned suffix (newer GitLab format)
         # Example: glpat-x5DbmTJCwT6wqLXX6DxdmG86MQp1OmN5dG5qCw.01.120qe28y8
-        valid_versioned_token = "glpat-x5DbmTJCwT6wqLXX6DxdmG86MQp1OmN5dG5qCw.01.120qe28y8"
+        valid_versioned_token = (
+            "glpat-x5DbmTJCwT6wqLXX6DxdmG86MQp1OmN5dG5qCw.01.120qe28y8"
+        )

         # When validating
         # Then no exception should be raised
diff --git a/tests/unit/server/services/test_config_service_payload_cache.py b/tests/unit/server/services/test_config_service_payload_cache.py
index 54bdbf4a..9baa5d79 100644
--- a/tests/unit/server/services/test_config_service_payload_cache.py
+++ b/tests/unit/server/services/test_config_service_payload_cache.py
@@ -6,8 +6,6 @@
 Tests that ConfigService properly exposes and updates payload cache settings.
 """

-import pytest
-
 from code_indexer.server.services.config_service import ConfigService
diff --git a/tests/unit/server/services/test_dashboard_collection_name.py b/tests/unit/server/services/test_dashboard_collection_name.py
index 9befcdb0..357bc577 100644
--- a/tests/unit/server/services/test_dashboard_collection_name.py
+++ b/tests/unit/server/services/test_dashboard_collection_name.py
@@ -6,12 +6,8 @@
 """

 import tempfile
-import os
-from pathlib import Path
 from unittest.mock import MagicMock, patch

-import pytest
-

 class TestDashboardServiceCollectionName:
     """Test dashboard service collection_name handling."""
@@ -45,8 +41,10 @@ def test_get_repo_counts_handles_missing_collection_name(self):
         ]

         # Mock the managers and store
-        with patch.object(service, '_get_golden_repo_manager') as mock_golden, \
-             patch.object(service, '_get_activated_repo_manager') as mock_activated:
+        with (
+            patch.object(service, "_get_golden_repo_manager") as mock_golden,
+            patch.object(service, "_get_activated_repo_manager") as mock_activated,
+        ):

             # Setup golden repo manager mock
             mock_golden_manager = MagicMock()
@@ -60,7 +58,9 @@ def test_get_repo_counts_handles_missing_collection_name(self):
             mock_activated.return_value = mock_activated_manager

             # Mock FilesystemVectorStore at its source module
-            with patch('code_indexer.storage.filesystem_vector_store.FilesystemVectorStore') as mock_store_class:
+            with patch(
+                "code_indexer.storage.filesystem_vector_store.FilesystemVectorStore"
+            ) as mock_store_class:
                 mock_store = MagicMock()
                 mock_store.get_indexed_file_count_fast.return_value = 100
                 mock_store_class.return_value = mock_store
@@ -71,8 +71,9 @@ def test_get_repo_counts_handles_missing_collection_name(self):
                 # Both repos should be processed (total_files = 200 if both counted)
                 # With the bug, only 1 repo would be counted (100)
                 # After the fix, both should be counted (200)
-                assert result.total_files == 200, \
-                    f"Expected 200 total files (both repos), got {result.total_files}"
+                assert (
+                    result.total_files == 200
+                ), f"Expected 200 total files (both repos), got {result.total_files}"

                 # Verify get_indexed_file_count_fast was called twice
                 assert mock_store.get_indexed_file_count_fast.call_count == 2
@@ -80,10 +81,12 @@ def test_get_repo_counts_handles_missing_collection_name(self):
                 # Verify the legacy repo was called with user_alias as collection_name
                 calls = mock_store.get_indexed_file_count_fast.call_args_list
                 collection_names = [call[0][0] for call in calls]
-                assert "legacy-repo-active" in collection_names, \
-                    "Legacy repo should use user_alias as collection_name"
-                assert "modern-repo-active" in collection_names, \
-                    "Modern repo should use its collection_name"
+                assert (
+                    "legacy-repo-active" in collection_names
+                ), "Legacy repo should use user_alias as collection_name"
+                assert (
+                    "modern-repo-active" in collection_names
+                ), "Modern repo should use its collection_name"

     def test_get_repo_counts_with_all_missing_collection_names(self):
         """
@@ -112,8 +115,10 @@ def test_get_repo_counts_with_all_missing_collection_names(self):
             },
         ]

-        with patch.object(service, '_get_golden_repo_manager') as mock_golden, \
-             patch.object(service, '_get_activated_repo_manager') as mock_activated:
+        with (
+            patch.object(service, "_get_golden_repo_manager") as mock_golden,
+            patch.object(service, "_get_activated_repo_manager") as mock_activated,
+        ):

             mock_golden_manager = MagicMock()
             mock_golden_manager.list_golden_repos.return_value = []
@@ -124,7 +129,9 @@ def test_get_repo_counts_with_all_missing_collection_names(self):
             mock_activated_manager.data_dir = tempfile.mkdtemp()
             mock_activated.return_value = mock_activated_manager

-            with patch('code_indexer.storage.filesystem_vector_store.FilesystemVectorStore') as mock_store_class:
+            with patch(
+                "code_indexer.storage.filesystem_vector_store.FilesystemVectorStore"
+            ) as mock_store_class:
                 mock_store = MagicMock()
                 mock_store.get_indexed_file_count_fast.return_value = 50
                 mock_store_class.return_value = mock_store
@@ -132,8 +139,9 @@ def test_get_repo_counts_with_all_missing_collection_names(self):
                 result = service._get_repo_counts("testuser")

                 # All 3 repos should be processed
-                assert result.total_files == 150, \
-                    f"Expected 150 total files (3 repos * 50), got {result.total_files}"
+                assert (
+                    result.total_files == 150
+                ), f"Expected 150 total files (3 repos * 50), got {result.total_files}"
                 assert mock_store.get_indexed_file_count_fast.call_count == 3

     def test_get_repo_counts_handles_repo_missing_user_alias_too(self):
@@ -159,8 +167,10 @@ def test_get_repo_counts_handles_repo_missing_user_alias_too(self):
             },
         ]

-        with patch.object(service, '_get_golden_repo_manager') as mock_golden, \
-             patch.object(service, '_get_activated_repo_manager') as mock_activated:
+        with (
+            patch.object(service, "_get_golden_repo_manager") as mock_golden,
+            patch.object(service, "_get_activated_repo_manager") as mock_activated,
+        ):

             mock_golden_manager = MagicMock()
             mock_golden_manager.list_golden_repos.return_value = []
@@ -171,7 +181,9 @@ def test_get_repo_counts_handles_repo_missing_user_alias_too(self):
             mock_activated_manager.data_dir = tempfile.mkdtemp()
             mock_activated.return_value = mock_activated_manager

-            with patch('code_indexer.storage.filesystem_vector_store.FilesystemVectorStore') as mock_store_class:
+            with patch(
+                "code_indexer.storage.filesystem_vector_store.FilesystemVectorStore"
+            ) as mock_store_class:
                 mock_store = MagicMock()
                 mock_store.get_indexed_file_count_fast.return_value = 100
                 mock_store_class.return_value = mock_store
@@ -180,6 +192,7 @@ def test_get_repo_counts_handles_repo_missing_user_alias_too(self):
                 result = service._get_repo_counts("testuser")

                 # Only the working repo should be counted
-                assert result.total_files == 100, \
-                    f"Expected 100 total files (1 working repo), got {result.total_files}"
+                assert (
+                    result.total_files == 100
+                ), f"Expected 100 total files (1 working repo), got {result.total_files}"
                 assert mock_store.get_indexed_file_count_fast.call_count == 1
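The three dashboard tests above pin a fallback order for legacy activation records: use collection_name when present, fall back to user_alias, and skip repos that have neither. A sketch of that lookup, using the field names from the test data (the service's real helper is not shown in this diff):

def resolve_collection_name(repo: dict) -> str | None:
    # Legacy records predate the collection_name field and are keyed
    # by user_alias instead; None means the repo cannot be counted.
    return repo.get("collection_name") or repo.get("user_alias")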
diff --git a/tests/unit/server/services/test_dashboard_temporal_status.py b/tests/unit/server/services/test_dashboard_temporal_status.py
index 38c8bdf8..3e4c297e 100644
--- a/tests/unit/server/services/test_dashboard_temporal_status.py
+++ b/tests/unit/server/services/test_dashboard_temporal_status.py
@@ -39,7 +39,9 @@ def test_get_temporal_status_v2_format(self):
         ) as MockPath:
             mock_temporal_path = MagicMock(spec=Path)
             mock_temporal_path.exists.return_value = True
-            MockPath.return_value.__truediv__.return_value.__truediv__.return_value = mock_temporal_path
+            MockPath.return_value.__truediv__.return_value.__truediv__.return_value = (
+                mock_temporal_path
+            )

             # Mock format detection at source module
             with patch(
@@ -94,7 +96,9 @@ def test_get_temporal_status_v1_format(self):
         ) as MockPath:
             mock_temporal_path = MagicMock(spec=Path)
             mock_temporal_path.exists.return_value = True
-            MockPath.return_value.__truediv__.return_value.__truediv__.return_value = mock_temporal_path
+            MockPath.return_value.__truediv__.return_value.__truediv__.return_value = (
+                mock_temporal_path
+            )

             # Mock format detection at source module
             with patch(
@@ -149,7 +153,9 @@ def test_get_temporal_status_no_index(self):
             # Mock temporal collection path does not exist
             mock_temporal_path = MagicMock(spec=Path)
             mock_temporal_path.exists.return_value = False
-            MockPath.return_value.__truediv__.return_value.__truediv__.return_value = mock_temporal_path
+            MockPath.return_value.__truediv__.return_value.__truediv__.return_value = (
+                mock_temporal_path
+            )

             # Act
             result = service.get_temporal_index_status(username, repo_alias)
@@ -211,7 +217,9 @@ def test_get_temporal_status_requires_username_parameter(self):
         ) as MockPath:
             mock_temporal_path = MagicMock(spec=Path)
             mock_temporal_path.exists.return_value = False
-            MockPath.return_value.__truediv__.return_value.__truediv__.return_value = mock_temporal_path
+            MockPath.return_value.__truediv__.return_value.__truediv__.return_value = (
+                mock_temporal_path
+            )

             # Act - Call with username parameter
             result = service.get_temporal_index_status(
diff --git a/tests/unit/server/services/test_dashboard_temporal_status_global_repos.py b/tests/unit/server/services/test_dashboard_temporal_status_global_repos.py
index 0c9c7be0..386d512c 100644
--- a/tests/unit/server/services/test_dashboard_temporal_status_global_repos.py
+++ b/tests/unit/server/services/test_dashboard_temporal_status_global_repos.py
@@ -59,10 +59,14 @@ def test_global_repo_uses_global_registry(self):

                 # Path(expected_index_path) / ".code-indexer" / "index"
                 mock_code_indexer_path = MagicMock()
-                mock_code_indexer_path.__truediv__ = MagicMock(return_value=mock_index_dir)
+                mock_code_indexer_path.__truediv__ = MagicMock(
+                    return_value=mock_index_dir
+                )

                 mock_main_path = MagicMock()
-                mock_main_path.__truediv__ = MagicMock(return_value=mock_code_indexer_path)
+                mock_main_path.__truediv__ = MagicMock(
+                    return_value=mock_code_indexer_path
+                )
                 MockPath.return_value = mock_main_path

                 # Act
@@ -70,7 +74,9 @@ def test_global_repo_uses_global_registry(self):

                 # Assert - GlobalRegistry was used to look up the repo
                 MockRegistry.assert_called_once()
-                mock_registry_instance.get_global_repo.assert_called_once_with(repo_alias)
+                mock_registry_instance.get_global_repo.assert_called_once_with(
+                    repo_alias
+                )

                 # Assert - Result is valid (no temporal index in this case)
                 assert result["format"] == "none"
@@ -131,10 +137,14 @@ def test_global_repo_with_temporal_v2_index(self):
                 mock_index_dir.__truediv__ = MagicMock(return_value=mock_temporal_path)

                 mock_code_indexer_path = MagicMock()
-                mock_code_indexer_path.__truediv__ = MagicMock(return_value=mock_index_dir)
+                mock_code_indexer_path.__truediv__ = MagicMock(
+                    return_value=mock_index_dir
+                )

                 mock_main_path = MagicMock()
-                mock_main_path.__truediv__ = MagicMock(return_value=mock_code_indexer_path)
+                mock_main_path.__truediv__ = MagicMock(
+                    return_value=mock_code_indexer_path
+                )
                 MockPath.return_value = mock_main_path

                 # Mock format detection
@@ -148,7 +158,9 @@ def test_global_repo_with_temporal_v2_index(self):
                         "code_indexer.storage.filesystem_vector_store.FilesystemVectorStore"
                     ) as MockVectorStore:
                         mock_store_instance = MockVectorStore.return_value
-                        mock_store_instance.get_indexed_file_count_fast.return_value = 200
+                        mock_store_instance.get_indexed_file_count_fast.return_value = (
+                            200
+                        )

                         # Act
                         result = service.get_temporal_index_status(username, repo_alias)
@@ -157,7 +169,10 @@ def test_global_repo_with_temporal_v2_index(self):
                         assert result["format"] == "v2"
                         assert result["file_count"] == 200
                         assert result["needs_reindex"] is False
-                        assert "active" in result["message"].lower() or "v2" in result["message"].lower()
+                        assert (
+                            "active" in result["message"].lower()
+                            or "v2" in result["message"].lower()
+                        )

     def test_activated_repo_still_uses_activated_manager(self):
         """Test that non-global repos still use activated_manager (regression test).
@@ -188,13 +203,17 @@ def test_activated_repo_still_uses_activated_manager(self):
         ) as MockPath:
             mock_temporal_path = MagicMock(spec=Path)
             mock_temporal_path.exists.return_value = False
-            MockPath.return_value.__truediv__.return_value.__truediv__.return_value = mock_temporal_path
+            MockPath.return_value.__truediv__.return_value.__truediv__.return_value = (
+                mock_temporal_path
+            )

             # Act
             result = service.get_temporal_index_status(username, repo_alias)

             # Assert - activated_manager was used (NOT GlobalRegistry)
-            mock_manager.get_repository.assert_called_once_with(username, repo_alias)
+            mock_manager.get_repository.assert_called_once_with(
+                username, repo_alias
+            )
             assert result["format"] == "none"

     def test_global_repo_golden_repos_dir_from_environment(self):
@@ -205,9 +224,7 @@ def test_global_repo_golden_repos_dir_from_environment(self):
         repo_alias = "test-repo-global"
         custom_server_dir = "/custom/cidx-server"

-        with patch.dict(
-            "os.environ", {"CIDX_SERVER_DATA_DIR": custom_server_dir}
-        ):
+        with patch.dict("os.environ", {"CIDX_SERVER_DATA_DIR": custom_server_dir}):
             # Mock GlobalRegistry at source module (lazy import in function)
             with patch(
                 "code_indexer.global_repos.global_registry.GlobalRegistry"
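The MockPath.__truediv__ chains in these two files work because Path(x) / "a" / "b" desugars to two __truediv__ calls, so each .return_value in the chain stands for one "/" application. A self-contained illustration of the trick (illustrative only, not code from the suite):

import pathlib
from unittest.mock import MagicMock, patch

with patch("pathlib.Path") as MockPath:
    leaf = MagicMock()
    leaf.exists.return_value = False
    # Path(root) / ".code-indexer" / "index" -> two divisions,
    # so two .__truediv__.return_value hops reach the leaf.
    MockPath.return_value.__truediv__.return_value.__truediv__.return_value = leaf
    p = pathlib.Path("/root") / ".code-indexer" / "index"
    assert p.exists() is False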
diff --git a/tests/unit/server/services/test_file_chunking_default_limits.py b/tests/unit/server/services/test_file_chunking_default_limits.py
index dff30e05..3ed61d1c 100644
--- a/tests/unit/server/services/test_file_chunking_default_limits.py
+++ b/tests/unit/server/services/test_file_chunking_default_limits.py
@@ -6,7 +6,6 @@
 """

 import pytest
-from pathlib import Path

 from code_indexer.server.services.file_service import FileListingService
 from code_indexer.server.models.file_content_limits_config import (
@@ -24,9 +23,7 @@ def high_token_service(tmp_path):
     repo_path = tmp_path / "test_repo"
     repo_path.mkdir(parents=True)

-    config_manager = FileContentLimitsConfigManager(
-        db_path=str(tmp_path / "config.db")
-    )
+    config_manager = FileContentLimitsConfigManager(db_path=str(tmp_path / "config.db"))
     # 20000 tokens * 4 chars = 80000 chars max (max allowed by config)
     # This is enough for 500 lines * 10 chars = 5000 chars for short lines
     config_manager.update_config(
diff --git a/tests/unit/server/services/test_file_service_pagination.py b/tests/unit/server/services/test_file_service_pagination.py
index 9ad5ee4c..a56b47c0 100644
--- a/tests/unit/server/services/test_file_service_pagination.py
+++ b/tests/unit/server/services/test_file_service_pagination.py
@@ -106,9 +106,7 @@ def test_default_behavior_returns_full_file(self, temp_repo, service):
             metadata["has_more"] is True
         ), "has_more should be True when more content exists"
         # Story #686: next_offset should be set for pagination
-        assert (
-            metadata["next_offset"] == 501
-        ), "next_offset should be 501 for next page"
+        assert metadata["next_offset"] == 501, "next_offset should be 501 for next page"

     # -------------------------------------------------------------------------
     # AC2: First Page (User Limit Respected Under MAX_ALLOWED_LIMIT - Story #686)
@@ -184,9 +182,7 @@ def test_subsequent_page_offset_2001_limit_2000(self, temp_repo, service):
         assert (
             metadata["requires_pagination"] is True
         ), "requires_pagination should be True"
-        assert (
-            metadata["has_more"] is True
-        ), "Should have more lines after 4000"
+        assert metadata["has_more"] is True, "Should have more lines after 4000"
         # Story #686: next_offset should be set
         assert metadata["next_offset"] == 4001, "next_offset should be 4001"
diff --git a/tests/unit/server/services/test_git_operations_pagination.py b/tests/unit/server/services/test_git_operations_pagination.py
index 368739f2..cba43597 100644
--- a/tests/unit/server/services/test_git_operations_pagination.py
+++ b/tests/unit/server/services/test_git_operations_pagination.py
@@ -207,9 +207,7 @@ def test_git_diff_with_offset_skips_lines(
         if "offset" in result:
             assert result["offset"] == 50

-    def test_git_diff_pagination_metadata(
-        self, git_repo_with_large_diff, git_service
-    ):
+    def test_git_diff_pagination_metadata(self, git_repo_with_large_diff, git_service):
         """git_diff should include pagination metadata when chunked."""
         result = git_service.git_diff(git_repo_with_large_diff, limit=100)
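The file-service and git-diff pagination tests share one contract: a 1-based offset/limit window plus has_more and next_offset metadata. A generic sketch of that windowing, with key names mirroring the asserted metadata (the services' real code is not shown here):

def paginate(lines: list[str], offset: int = 1, limit: int = 500) -> dict:
    # offset is 1-based, so offset=501 continues right after a
    # 500-line first page, matching the asserted next_offset values.
    window = lines[offset - 1 : offset - 1 + limit]
    has_more = offset - 1 + limit < len(lines)
    return {
        "content": window,
        "has_more": has_more,
        "next_offset": offset + limit if has_more else None,
    }

paginate([f"line {i}" for i in range(1, 2001)])["next_offset"]  # -> 501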
""" -import pytest - class TestTokenWhitespaceStripping: """Tests for Bug 2a: Whitespace should be stripped before token validation.""" diff --git a/tests/unit/server/services/test_issue_716_bug3_token_decryption.py b/tests/unit/server/services/test_issue_716_bug3_token_decryption.py index 8ef6c4fa..68fc68ff 100644 --- a/tests/unit/server/services/test_issue_716_bug3_token_decryption.py +++ b/tests/unit/server/services/test_issue_716_bug3_token_decryption.py @@ -8,7 +8,6 @@ """ import json -import pytest class TestTokenDecryptionFailureHandling: diff --git a/tests/unit/server/storage/test_background_jobs_sqlite_backend.py b/tests/unit/server/storage/test_background_jobs_sqlite_backend.py index 2ea4ec7c..7f84714d 100644 --- a/tests/unit/server/storage/test_background_jobs_sqlite_backend.py +++ b/tests/unit/server/storage/test_background_jobs_sqlite_backend.py @@ -58,7 +58,10 @@ def test_save_job_with_all_fields(self, backend) -> None: result_data = {"files_indexed": 100, "time_seconds": 60} claude_actions = ["Installed dependencies", "Built project"] extended_error = {"code": "SCIP_FAILED", "project": "backend"} - language_status = {"python": {"status": "completed"}, "java": {"status": "failed"}} + language_status = { + "python": {"status": "completed"}, + "java": {"status": "failed"}, + } backend.save_job( job_id="job-full", @@ -149,12 +152,20 @@ def test_update_job_modifies_record(self, backend) -> None: def test_list_jobs_returns_all_records(self, backend) -> None: """When list_jobs() is called, it returns all jobs.""" backend.save_job( - job_id="job-a", operation_type="add_golden_repo", status="pending", - created_at="2025-01-15T10:00:00+00:00", username="user1", progress=0, + job_id="job-a", + operation_type="add_golden_repo", + status="pending", + created_at="2025-01-15T10:00:00+00:00", + username="user1", + progress=0, ) backend.save_job( - job_id="job-b", operation_type="refresh_repo", status="running", - created_at="2025-01-15T10:01:00+00:00", username="user2", progress=50, + job_id="job-b", + operation_type="refresh_repo", + status="running", + created_at="2025-01-15T10:01:00+00:00", + username="user2", + progress=50, ) result = backend.list_jobs() @@ -167,16 +178,28 @@ def test_list_jobs_returns_all_records(self, backend) -> None: def test_list_jobs_by_username(self, backend) -> None: """When list_jobs() is called with username filter, it returns only that user's jobs.""" backend.save_job( - job_id="job-user1-a", operation_type="add_golden_repo", status="completed", - created_at="2025-01-15T10:00:00+00:00", username="user1", progress=100, + job_id="job-user1-a", + operation_type="add_golden_repo", + status="completed", + created_at="2025-01-15T10:00:00+00:00", + username="user1", + progress=100, ) backend.save_job( - job_id="job-user1-b", operation_type="refresh_repo", status="running", - created_at="2025-01-15T10:01:00+00:00", username="user1", progress=50, + job_id="job-user1-b", + operation_type="refresh_repo", + status="running", + created_at="2025-01-15T10:01:00+00:00", + username="user1", + progress=50, ) backend.save_job( - job_id="job-user2-a", operation_type="add_golden_repo", status="pending", - created_at="2025-01-15T10:02:00+00:00", username="user2", progress=0, + job_id="job-user2-a", + operation_type="add_golden_repo", + status="pending", + created_at="2025-01-15T10:02:00+00:00", + username="user2", + progress=0, ) result = backend.list_jobs(username="user1") @@ -187,12 +210,20 @@ def test_list_jobs_by_username(self, backend) -> None: def 
test_list_jobs_by_status(self, backend) -> None: """When list_jobs() is called with status filter, it returns only jobs with that status.""" backend.save_job( - job_id="job-pending", operation_type="add_golden_repo", status="pending", - created_at="2025-01-15T10:00:00+00:00", username="user1", progress=0, + job_id="job-pending", + operation_type="add_golden_repo", + status="pending", + created_at="2025-01-15T10:00:00+00:00", + username="user1", + progress=0, ) backend.save_job( - job_id="job-running", operation_type="refresh_repo", status="running", - created_at="2025-01-15T10:01:00+00:00", username="user1", progress=50, + job_id="job-running", + operation_type="refresh_repo", + status="running", + created_at="2025-01-15T10:01:00+00:00", + username="user1", + progress=50, ) result = backend.list_jobs(status="running") @@ -204,8 +235,12 @@ def test_list_jobs_with_pagination(self, backend) -> None: """When list_jobs() is called with limit and offset, it returns paginated results.""" for i in range(5): backend.save_job( - job_id=f"job-{i}", operation_type="add_golden_repo", status="completed", - created_at=f"2025-01-15T10:0{i}:00+00:00", username="user1", progress=100, + job_id=f"job-{i}", + operation_type="add_golden_repo", + status="completed", + created_at=f"2025-01-15T10:0{i}:00+00:00", + username="user1", + progress=100, ) page1 = backend.list_jobs(limit=2, offset=0) @@ -219,8 +254,12 @@ def test_list_jobs_with_pagination(self, backend) -> None: def test_delete_job_removes_record(self, backend) -> None: """When delete_job() is called, the record is removed.""" backend.save_job( - job_id="job-del", operation_type="add_golden_repo", status="completed", - created_at="2025-01-15T10:00:00+00:00", username="user1", progress=100, + job_id="job-del", + operation_type="add_golden_repo", + status="completed", + created_at="2025-01-15T10:00:00+00:00", + username="user1", + progress=100, ) assert backend.get_job("job-del") is not None @@ -239,18 +278,30 @@ def test_cleanup_old_jobs_removes_old_completed_jobs(self, backend) -> None: recent_time = datetime.now(timezone.utc) - timedelta(hours=1) backend.save_job( - job_id="old-completed", operation_type="add_golden_repo", status="completed", - created_at=old_time.isoformat(), completed_at=old_time.isoformat(), - username="user1", progress=100, + job_id="old-completed", + operation_type="add_golden_repo", + status="completed", + created_at=old_time.isoformat(), + completed_at=old_time.isoformat(), + username="user1", + progress=100, ) backend.save_job( - job_id="recent-completed", operation_type="add_golden_repo", status="completed", - created_at=recent_time.isoformat(), completed_at=recent_time.isoformat(), - username="user1", progress=100, + job_id="recent-completed", + operation_type="add_golden_repo", + status="completed", + created_at=recent_time.isoformat(), + completed_at=recent_time.isoformat(), + username="user1", + progress=100, ) backend.save_job( - job_id="running", operation_type="add_golden_repo", status="running", - created_at=old_time.isoformat(), username="user1", progress=50, + job_id="running", + operation_type="add_golden_repo", + status="running", + created_at=old_time.isoformat(), + username="user1", + progress=50, ) cleaned_count = backend.cleanup_old_jobs(max_age_hours=24) @@ -263,16 +314,28 @@ def test_cleanup_old_jobs_removes_old_completed_jobs(self, backend) -> None: def test_count_jobs_by_status(self, backend) -> None: """When count_jobs_by_status() is called, it returns counts for each status.""" backend.save_job( - 
job_id="job-pending", operation_type="add_golden_repo", status="pending", - created_at="2025-01-15T10:00:00+00:00", username="user1", progress=0, + job_id="job-pending", + operation_type="add_golden_repo", + status="pending", + created_at="2025-01-15T10:00:00+00:00", + username="user1", + progress=0, ) backend.save_job( - job_id="job-running-1", operation_type="refresh_repo", status="running", - created_at="2025-01-15T10:01:00+00:00", username="user1", progress=50, + job_id="job-running-1", + operation_type="refresh_repo", + status="running", + created_at="2025-01-15T10:01:00+00:00", + username="user1", + progress=50, ) backend.save_job( - job_id="job-running-2", operation_type="refresh_repo", status="running", - created_at="2025-01-15T10:02:00+00:00", username="user2", progress=30, + job_id="job-running-2", + operation_type="refresh_repo", + status="running", + created_at="2025-01-15T10:02:00+00:00", + username="user2", + progress=30, ) counts = backend.count_jobs_by_status() @@ -287,14 +350,23 @@ def test_get_job_stats_with_time_filter(self, backend) -> None: old_time = datetime.now(timezone.utc) - timedelta(days=3) backend.save_job( - job_id="recent-completed", operation_type="add_golden_repo", status="completed", - created_at=recent_time.isoformat(), completed_at=recent_time.isoformat(), - username="user1", progress=100, + job_id="recent-completed", + operation_type="add_golden_repo", + status="completed", + created_at=recent_time.isoformat(), + completed_at=recent_time.isoformat(), + username="user1", + progress=100, ) backend.save_job( - job_id="old-failed", operation_type="refresh_repo", status="failed", - created_at=old_time.isoformat(), completed_at=old_time.isoformat(), - error="Something went wrong", username="user1", progress=25, + job_id="old-failed", + operation_type="refresh_repo", + status="failed", + created_at=old_time.isoformat(), + completed_at=old_time.isoformat(), + error="Something went wrong", + username="user1", + progress=25, ) stats_24h = backend.get_job_stats(time_filter="24h") @@ -312,8 +384,12 @@ class TestBackgroundJobsSqliteBackendScipFields: def test_update_scip_resolution_status(self, backend) -> None: """When update_job() is called with language_resolution_status, it is updated correctly.""" backend.save_job( - job_id="scip-job", operation_type="scip_generate", status="running", - created_at="2025-01-15T10:00:00+00:00", username="admin", progress=25, + job_id="scip-job", + operation_type="scip_generate", + status="running", + created_at="2025-01-15T10:00:00+00:00", + username="admin", + progress=25, language_resolution_status={"python": {"status": "pending"}}, ) @@ -345,10 +421,16 @@ def test_save_job_with_extended_error(self, backend) -> None: } backend.save_job( - job_id="failed-scip", operation_type="scip_generate", status="failed", - created_at="2025-01-15T10:00:00+00:00", completed_at="2025-01-15T10:05:00+00:00", - error="SCIP indexer failed for java project", username="admin", progress=50, - extended_error=extended_error, failure_reason="Maven dependencies missing", + job_id="failed-scip", + operation_type="scip_generate", + status="failed", + created_at="2025-01-15T10:00:00+00:00", + completed_at="2025-01-15T10:05:00+00:00", + error="SCIP indexer failed for java project", + username="admin", + progress=50, + extended_error=extended_error, + failure_reason="Maven dependencies missing", ) job = backend.get_job("failed-scip") diff --git a/tests/unit/server/storage/test_database_manager.py b/tests/unit/server/storage/test_database_manager.py index 
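These backend tests treat structured fields (result, claude_actions, extended_error, language_resolution_status) as round-tripping through SQLite. One plausible way to store them is JSON-encoded TEXT columns; a sketch under that assumption (the actual schema and save_job implementation are not part of this diff, and a jobs table with matching columns is presumed to exist):

import json
import sqlite3

def save_job(conn: sqlite3.Connection, job_id: str, **fields) -> None:
    # Dict/list values are serialized to JSON text; scalars pass through.
    encoded = {
        k: json.dumps(v) if isinstance(v, (dict, list)) else v
        for k, v in fields.items()
    }
    columns = ", ".join(["job_id", *encoded])
    placeholders = ", ".join("?" * (len(encoded) + 1))
    conn.execute(
        f"INSERT OR REPLACE INTO jobs ({columns}) VALUES ({placeholders})",
        (job_id, *encoded.values()),
    )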
index 92bd203e..d048a4d9 100644
--- a/tests/unit/server/storage/test_database_manager.py
+++ b/tests/unit/server/storage/test_database_manager.py
@@ -6,7 +6,6 @@
 """

 import sqlite3
-from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path

 import pytest
@@ -50,9 +49,7 @@ def test_database_schema_creates_all_required_tables(self, tmp_path: Path) -> No
         ]
         assert sorted(tables) == sorted(expected_tables)

-    def test_database_schema_global_repos_table_structure(
-        self, tmp_path: Path
-    ) -> None:
+    def test_database_schema_global_repos_table_structure(self, tmp_path: Path) -> None:
         """
         Given an initialized database
         When we inspect global_repos table
diff --git a/tests/unit/server/storage/test_migration_service.py b/tests/unit/server/storage/test_migration_service.py
index 1f4bcd9c..2851d4e1 100644
--- a/tests/unit/server/storage/test_migration_service.py
+++ b/tests/unit/server/storage/test_migration_service.py
@@ -8,8 +8,6 @@
 import json
 from pathlib import Path

-import pytest
-

 class TestMigrationServiceInit:
     """Tests for MigrationService initialization."""
@@ -274,8 +272,8 @@ def test_migration_is_idempotent(self, tmp_path: Path) -> None:

         # Run migration twice
         service = MigrationService(str(source_dir), str(db_path))
-        result1 = service.migrate_global_repos()
-        result2 = service.migrate_global_repos()
+        service.migrate_global_repos()
+        service.migrate_global_repos()

         # Second run should report as already migrated or update-in-place
         backend = GlobalReposSqliteBackend(str(db_path))
@@ -347,7 +345,9 @@ def test_migrate_background_jobs_transfers_all_jobs(self, tmp_path: Path) -> Non
         """
         from code_indexer.server.storage.database_manager import DatabaseSchema
         from code_indexer.server.storage.migration_service import MigrationService
-        from code_indexer.server.storage.sqlite_backends import BackgroundJobsSqliteBackend
+        from code_indexer.server.storage.sqlite_backends import (
+            BackgroundJobsSqliteBackend,
+        )

         # Setup source JSON
         source_dir = tmp_path / "source"
@@ -488,7 +488,9 @@ def test_migrate_background_jobs_idempotent(self, tmp_path: Path) -> None:
         """
         from code_indexer.server.storage.database_manager import DatabaseSchema
         from code_indexer.server.storage.migration_service import MigrationService
-        from code_indexer.server.storage.sqlite_backends import BackgroundJobsSqliteBackend
+        from code_indexer.server.storage.sqlite_backends import (
+            BackgroundJobsSqliteBackend,
+        )

         # Setup source JSON
         source_dir = tmp_path / "source"
@@ -613,7 +615,9 @@ def test_migrate_background_jobs_skips_underscore_prefixed_keys(
         """
         from code_indexer.server.storage.database_manager import DatabaseSchema
         from code_indexer.server.storage.migration_service import MigrationService
-        from code_indexer.server.storage.sqlite_backends import BackgroundJobsSqliteBackend
+        from code_indexer.server.storage.sqlite_backends import (
+            BackgroundJobsSqliteBackend,
+        )

         # Setup source JSON with underscore-prefixed internal keys
         source_dir = tmp_path / "source"
diff --git a/tests/unit/server/storage/test_sqlite_backends.py b/tests/unit/server/storage/test_sqlite_backends.py
index fd29a97c..e5f9d3bd 100644
--- a/tests/unit/server/storage/test_sqlite_backends.py
+++ b/tests/unit/server/storage/test_sqlite_backends.py
@@ -525,7 +525,9 @@ def test_get_user_by_email_case_insensitive(self, tmp_path: Path) -> None:
         assert result is not None
         assert result["username"] == "caseuser"

-    def test_get_user_by_email_returns_none_when_not_found(self, tmp_path: Path) -> None:
+    def test_get_user_by_email_returns_none_when_not_found(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a database without a user with the specified email
         When get_user_by_email() is called
@@ -577,7 +579,9 @@ def test_get_user_by_email_strips_whitespace(self, tmp_path: Path) -> None:
         assert result is not None
         assert result["username"] == "trimuser"

-    def test_get_user_by_email_includes_api_keys_and_mcp_credentials(self, tmp_path: Path) -> None:
+    def test_get_user_by_email_includes_api_keys_and_mcp_credentials(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a user with api_keys and mcp_credentials
         When get_user_by_email() is called
@@ -659,7 +663,9 @@ def test_set_oidc_identity_updates_user(self, tmp_path: Path) -> None:
         assert user["oidc_identity"]["subject"] == "oidc-12345"
         assert user["oidc_identity"]["email"] == "oidc@example.com"

-    def test_set_oidc_identity_returns_false_for_nonexistent_user(self, tmp_path: Path) -> None:
+    def test_set_oidc_identity_returns_false_for_nonexistent_user(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a database without the specified user
         When set_oidc_identity() is called
@@ -679,7 +685,9 @@ def test_set_oidc_identity_returns_false_for_nonexistent_user(self, tmp_path: Pa

         assert result is False

-    def test_set_oidc_identity_overwrites_existing_identity(self, tmp_path: Path) -> None:
+    def test_set_oidc_identity_overwrites_existing_identity(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a user with existing oidc_identity
         When set_oidc_identity() is called with new identity
@@ -731,7 +739,9 @@ def test_delete_mcp_credential_removes_credential(self, tmp_path: Path) -> None:
         schema.initialize_database()

         backend = UsersSqliteBackend(str(db_path))
-        backend.create_user(username="mcpuser", password_hash="hash", role="normal_user")
+        backend.create_user(
+            username="mcpuser", password_hash="hash", role="normal_user"
+        )
         backend.add_mcp_credential(
             username="mcpuser",
             credential_id="cred-to-delete",
@@ -760,7 +770,9 @@ def test_delete_mcp_credential_removes_credential(self, tmp_path: Path) -> None:
         assert len(user["mcp_credentials"]) == 1
         assert user["mcp_credentials"][0]["credential_id"] == "cred-to-keep"

-    def test_delete_mcp_credential_returns_false_for_nonexistent(self, tmp_path: Path) -> None:
+    def test_delete_mcp_credential_returns_false_for_nonexistent(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a user without the specified credential
         When delete_mcp_credential() is called
@@ -780,7 +792,9 @@ def test_delete_mcp_credential_returns_false_for_nonexistent(self, tmp_path: Pat

         assert result is False

-    def test_update_mcp_credential_last_used_updates_timestamp(self, tmp_path: Path) -> None:
+    def test_update_mcp_credential_last_used_updates_timestamp(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a user with MCP credentials in SQLite
         When update_mcp_credential_last_used() is called
@@ -794,7 +808,9 @@ def test_update_mcp_credential_last_used_updates_timestamp(self, tmp_path: Path)
         schema.initialize_database()

         backend = UsersSqliteBackend(str(db_path))
-        backend.create_user(username="lastused", password_hash="hash", role="normal_user")
+        backend.create_user(
+            username="lastused", password_hash="hash", role="normal_user"
+        )
         backend.add_mcp_credential(
             username="lastused",
             credential_id="cred-update",
@@ -819,7 +835,9 @@ def test_update_mcp_credential_last_used_updates_timestamp(self, tmp_path: Path)
         assert user is not None
         assert user["mcp_credentials"][0]["last_used_at"] is not None

-    def test_update_mcp_credential_last_used_returns_false_for_nonexistent(self, tmp_path: Path) -> None:
+    def test_update_mcp_credential_last_used_returns_false_for_nonexistent(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a user without the specified credential
         When update_mcp_credential_last_used() is called
@@ -839,7 +857,9 @@ def test_update_mcp_credential_last_used_returns_false_for_nonexistent(self, tmp

         assert result is False

-    def test_list_all_mcp_credentials_returns_all_credentials(self, tmp_path: Path) -> None:
+    def test_list_all_mcp_credentials_returns_all_credentials(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given multiple users with MCP credentials
         When list_all_mcp_credentials() is called
@@ -929,8 +949,12 @@ def test_remove_oidc_identity_removes_identity(self, tmp_path: Path) -> None:
         schema.initialize_database()

         backend = UsersSqliteBackend(str(db_path))
-        backend.create_user(username="oidcremove", password_hash="hash", role="normal_user")
-        backend.set_oidc_identity("oidcremove", {"subject": "oidc-123", "email": "oidc@example.com"})
+        backend.create_user(
+            username="oidcremove", password_hash="hash", role="normal_user"
+        )
+        backend.set_oidc_identity(
+            "oidcremove", {"subject": "oidc-123", "email": "oidc@example.com"}
+        )

         # Verify identity exists
         user = backend.get_user("oidcremove")
@@ -947,7 +971,9 @@ def test_remove_oidc_identity_removes_identity(self, tmp_path: Path) -> None:
         assert user is not None
         assert user["oidc_identity"] is None

-    def test_remove_oidc_identity_returns_false_for_nonexistent_user(self, tmp_path: Path) -> None:
+    def test_remove_oidc_identity_returns_false_for_nonexistent_user(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a database without the specified user
         When remove_oidc_identity() is called
@@ -1105,8 +1131,20 @@ def test_list_jobs_returns_all_records(self, tmp_path: Path) -> None:
         schema.initialize_database()

         backend = SyncJobsSqliteBackend(str(db_path))
-        backend.create_job(job_id="job-a", username="user1", user_alias="U1", job_type="sync", status="pending")
-        backend.create_job(job_id="job-b", username="user2", user_alias="U2", job_type="refresh", status="running")
+        backend.create_job(
+            job_id="job-a",
+            username="user1",
+            user_alias="U1",
+            job_type="sync",
+            status="pending",
+        )
+        backend.create_job(
+            job_id="job-b",
+            username="user2",
+            user_alias="U2",
+            job_type="refresh",
+            status="running",
+        )

         result = backend.list_jobs()
@@ -1129,7 +1167,13 @@ def test_delete_job_removes_record(self, tmp_path: Path) -> None:
         schema.initialize_database()

         backend = SyncJobsSqliteBackend(str(db_path))
-        backend.create_job(job_id="job-del", username="user", user_alias="U", job_type="sync", status="pending")
+        backend.create_job(
+            job_id="job-del",
+            username="user",
+            user_alias="U",
+            job_type="sync",
+            status="pending",
+        )

         # Verify it exists
         assert backend.get_job("job-del") is not None
@@ -1162,7 +1206,10 @@ def test_update_job_with_json_blob_columns(self, tmp_path: Path) -> None:
             status="running",
         )

-        phases = {"clone": {"status": "completed", "progress": 100}, "index": {"status": "running", "progress": 50}}
+        phases = {
+            "clone": {"status": "completed", "progress": 100},
+            "index": {"status": "running", "progress": 50},
+        }
         phase_weights = {"clone": 0.3, "index": 0.7}
         analytics_data = {"files_processed": 100, "duration_seconds": 120}

@@ -1336,9 +1383,7 @@ def test_invalidate_session_inserts_record(self, tmp_path: Path) -> None:

         # Verify record was inserted
         conn = sqlite3.connect(str(db_path))
-        cursor = conn.execute(
-            "SELECT username, token_id FROM invalidated_sessions"
-        )
+        cursor = conn.execute("SELECT username, token_id FROM invalidated_sessions")
         row = cursor.fetchone()
         conn.close()
@@ -1346,7 +1391,9 @@ def test_invalidate_session_inserts_record(self, tmp_path: Path) -> None:
         assert row[0] == "testuser"
         assert row[1] == "token-abc-123"

-    def test_is_session_invalidated_returns_true_for_invalidated(self, tmp_path: Path) -> None:
+    def test_is_session_invalidated_returns_true_for_invalidated(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a database with an invalidated session
         When is_session_invalidated() is called
@@ -1366,7 +1413,9 @@ def test_is_session_invalidated_returns_true_for_invalidated(self, tmp_path: Pat

         assert result is True

-    def test_is_session_invalidated_returns_false_for_valid(self, tmp_path: Path) -> None:
+    def test_is_session_invalidated_returns_false_for_valid(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a database without the session invalidated
         When is_session_invalidated() is called
@@ -1432,7 +1481,9 @@ def test_get_password_change_timestamp_returns_value(self, tmp_path: Path) -> No

         assert result == "2025-01-20T14:00:00Z"

-    def test_get_password_change_timestamp_returns_none_for_nonexistent(self, tmp_path: Path) -> None:
+    def test_get_password_change_timestamp_returns_none_for_nonexistent(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a database without password change timestamp for user
         When get_password_change_timestamp() is called
@@ -1510,7 +1561,9 @@ def test_cleanup_old_data_removes_old_entries(self, tmp_path: Path) -> None:
         # recent_user should remain
         assert backend.get_password_change_timestamp("recent_user") is not None

-    def test_cleanup_old_data_returns_zero_when_nothing_to_clean(self, tmp_path: Path) -> None:
+    def test_cleanup_old_data_returns_zero_when_nothing_to_clean(
+        self, tmp_path: Path
+    ) -> None:
         """
         Given a database with only recent session data
         When cleanup_old_data() is called
@@ -1747,8 +1800,20 @@ def test_list_keys_returns_all_records(self, tmp_path: Path) -> None:
         schema.initialize_database()

         backend = SSHKeysSqliteBackend(str(db_path))
-        backend.create_key(name="key1", fingerprint="fp1", key_type="ed25519", private_path="/p1", public_path="/p1.pub")
-        backend.create_key(name="key2", fingerprint="fp2", key_type="rsa", private_path="/p2", public_path="/p2.pub")
+        backend.create_key(
+            name="key1",
+            fingerprint="fp1",
+            key_type="ed25519",
+            private_path="/p1",
+            public_path="/p1.pub",
+        )
+        backend.create_key(
+            name="key2",
+            fingerprint="fp2",
+            key_type="rsa",
+            private_path="/p2",
+            public_path="/p2.pub",
+        )
         backend.assign_host("key1", "github.com")

         result = backend.list_keys()
diff --git a/tests/unit/server/test_health_service_interval_metrics.py b/tests/unit/server/test_health_service_interval_metrics.py
index c51db377..27128abf 100644
--- a/tests/unit/server/test_health_service_interval_metrics.py
+++ b/tests/unit/server/test_health_service_interval_metrics.py
@@ -34,7 +34,7 @@ def test_system_health_info_has_disk_read_kb_s_field(self):
             net_rx_kb_s=2048.0,
             net_tx_kb_s=1024.0,
         )
-        assert hasattr(info, 'disk_read_kb_s')
+        assert hasattr(info, "disk_read_kb_s")
         assert info.disk_read_kb_s == 1024.0

     def test_system_health_info_has_disk_write_kb_s_field(self):
@@ -49,7 +49,7 @@
             net_rx_kb_s=2048.0,
             net_tx_kb_s=1024.0,
         )
-        assert hasattr(info, 'disk_write_kb_s')
+        assert hasattr(info, "disk_write_kb_s")
         assert info.disk_write_kb_s == 512.0

     def test_system_health_info_has_net_rx_kb_s_field(self):
@@ -64,7 +64,7 @@
             net_rx_kb_s=2048.0,
             net_tx_kb_s=1024.0,
         )
-        assert hasattr(info, 'net_rx_kb_s')
+        assert hasattr(info, "net_rx_kb_s")
         assert info.net_rx_kb_s == 2048.0

     def test_system_health_info_has_net_tx_kb_s_field(self):
@@ -79,7 +79,7 @@
             net_rx_kb_s=2048.0,
             net_tx_kb_s=1024.0,
         )
-        assert hasattr(info, 'net_tx_kb_s')
+        assert hasattr(info, "net_tx_kb_s")
         assert info.net_tx_kb_s == 1024.0

     def test_system_health_info_all_io_fields_are_floats(self):
@@ -112,14 +112,14 @@ def test_system_health_info_serialization_includes_io_fields(self):
             net_tx_kb_s=1024.125,
         )
         json_dict = info.model_dump()
-        assert 'disk_read_kb_s' in json_dict
-        assert 'disk_write_kb_s' in json_dict
-        assert 'net_rx_kb_s' in json_dict
-        assert 'net_tx_kb_s' in json_dict
-        assert json_dict['disk_read_kb_s'] == 1024.5
-        assert json_dict['disk_write_kb_s'] == 512.25
-        assert json_dict['net_rx_kb_s'] == 2048.75
-        assert json_dict['net_tx_kb_s'] == 1024.125
+        assert "disk_read_kb_s" in json_dict
+        assert "disk_write_kb_s" in json_dict
+        assert "net_rx_kb_s" in json_dict
+        assert "net_tx_kb_s" in json_dict
+        assert json_dict["disk_read_kb_s"] == 1024.5
+        assert json_dict["disk_write_kb_s"] == 512.25
+        assert json_dict["net_rx_kb_s"] == 2048.75
+        assert json_dict["net_tx_kb_s"] == 1024.125


 class TestHealthServiceCpuIntervalAveraged:
@@ -129,7 +129,9 @@ def test_cpu_percent_uses_interval_none(self):
         """AC1: CPU should use psutil.cpu_percent(interval=None) for interval-averaging."""
         # We need to verify that the service calls cpu_percent with interval=None
         # This test verifies the implementation uses interval=None, not interval=0.1
-        with patch('src.code_indexer.server.services.health_service.psutil') as mock_psutil:
+        with patch(
+            "src.code_indexer.server.services.health_service.psutil"
+        ) as mock_psutil:
             # Setup mock returns
             mock_psutil.virtual_memory.return_value = MagicMock(percent=50.0)
             mock_psutil.cpu_percent.return_value = 25.0
@@ -142,10 +144,12 @@ def test_cpu_percent_uses_interval_none(self):
             )

             # Import after patching
-            from src.code_indexer.server.services.health_service import HealthCheckService
+            from src.code_indexer.server.services.health_service import (
+                HealthCheckService,
+            )

             # Create service and call _get_system_info
-            with patch.object(HealthCheckService, '__init__', lambda self: None):
+            with patch.object(HealthCheckService, "__init__", lambda self: None):
                 service = HealthCheckService()
                 service._last_disk_counters = None
                 service._last_disk_time = None
@@ -164,7 +168,9 @@ class TestHealthServiceDiskIO:

     def test_first_call_returns_zero_disk_io(self):
         """AC5: First call should return 0.0 for disk I/O metrics."""
-        with patch('src.code_indexer.server.services.health_service.psutil') as mock_psutil:
+        with patch(
+            "src.code_indexer.server.services.health_service.psutil"
+        ) as mock_psutil:
             mock_psutil.virtual_memory.return_value = MagicMock(percent=50.0)
             mock_psutil.cpu_percent.return_value = 25.0
             mock_psutil.disk_usage.return_value = MagicMock(free=100 * 1024**3)
@@ -175,9 +181,11 @@ def test_first_call_returns_zero_disk_io(self):
                 bytes_recv=2000000, bytes_sent=1000000
             )

-            from src.code_indexer.server.services.health_service import HealthCheckService
+            from src.code_indexer.server.services.health_service import (
+                HealthCheckService,
+            )

-            with patch.object(HealthCheckService, '__init__', lambda self: None):
+            with patch.object(HealthCheckService, "__init__", lambda self: None):
                 service = HealthCheckService()
                 service._last_disk_counters = None
                 service._last_disk_time = None
@@ -192,9 +200,11 @@ def test_first_call_returns_zero_disk_io(self):

     def test_second_call_calculates_disk_io_rate(self):
         """AC2: Second call should calculate disk I/O in KB/s from counter diffs."""
-        DiskCounters = namedtuple('DiskCounters', ['read_bytes', 'write_bytes'])
+        DiskCounters = namedtuple("DiskCounters", ["read_bytes", "write_bytes"])

-        with patch('src.code_indexer.server.services.health_service.psutil') as mock_psutil:
+        with patch(
+            "src.code_indexer.server.services.health_service.psutil"
+        ) as mock_psutil:
             mock_psutil.virtual_memory.return_value = MagicMock(percent=50.0)
             mock_psutil.cpu_percent.return_value = 25.0
             mock_psutil.disk_usage.return_value = MagicMock(free=100 * 1024**3)
@@ -203,15 +213,21 @@ def test_second_call_calculates_disk_io_rate(self):
             )

             # First call: 1MB read, 500KB written
-            first_counters = DiskCounters(read_bytes=1024 * 1024, write_bytes=512 * 1024)
+            first_counters = DiskCounters(
+                read_bytes=1024 * 1024, write_bytes=512 * 1024
+            )
             # Second call: 2MB read (1MB more), 1MB written (512KB more), 1 second later
-            second_counters = DiskCounters(read_bytes=2 * 1024 * 1024, write_bytes=1024 * 1024)
+            second_counters = DiskCounters(
+                read_bytes=2 * 1024 * 1024, write_bytes=1024 * 1024
+            )

             mock_psutil.disk_io_counters.side_effect = [first_counters, second_counters]

-            from src.code_indexer.server.services.health_service import HealthCheckService
+            from src.code_indexer.server.services.health_service import (
+                HealthCheckService,
+            )

-            with patch.object(HealthCheckService, '__init__', lambda self: None):
+            with patch.object(HealthCheckService, "__init__", lambda self: None):
                 service = HealthCheckService()
                 service._last_disk_counters = None
                 service._last_disk_time = None
@@ -235,9 +251,11 @@ def test_second_call_calculates_disk_io_rate(self):

     def test_disk_io_calculation_formula_accuracy(self):
         """AC2: Verify KB/s formula: (bytes_diff / 1024) / elapsed_seconds."""
-        DiskCounters = namedtuple('DiskCounters', ['read_bytes', 'write_bytes'])
+        DiskCounters = namedtuple("DiskCounters", ["read_bytes", "write_bytes"])

-        with patch('src.code_indexer.server.services.health_service.psutil') as mock_psutil:
+        with patch(
+            "src.code_indexer.server.services.health_service.psutil"
+        ) as mock_psutil:
             mock_psutil.virtual_memory.return_value = MagicMock(percent=50.0)
             mock_psutil.cpu_percent.return_value = 25.0
             mock_psutil.disk_usage.return_value = MagicMock(free=100 * 1024**3)
@@ -253,9 +271,11 @@ def test_disk_io_calculation_formula_accuracy(self):

             mock_psutil.disk_io_counters.side_effect = [first_counters, second_counters]

-            from src.code_indexer.server.services.health_service import HealthCheckService
+            from src.code_indexer.server.services.health_service import (
+                HealthCheckService,
+            )

-            with patch.object(HealthCheckService, '__init__', lambda self: None):
+            with patch.object(HealthCheckService, "__init__", lambda self: None):
                 service = HealthCheckService()
                 service._last_disk_counters = None
                 service._last_disk_time = None
@@ -283,7 +303,9 @@ class TestHealthServiceNetworkIO:

     def test_first_call_returns_zero_network_io(self):
         """AC5: First call should return 0.0 for network I/O metrics."""
-        with patch('src.code_indexer.server.services.health_service.psutil') as mock_psutil:
+        with patch(
+            "src.code_indexer.server.services.health_service.psutil"
+        ) as mock_psutil:
             mock_psutil.virtual_memory.return_value = MagicMock(percent=50.0)
             mock_psutil.cpu_percent.return_value = 25.0
             mock_psutil.disk_usage.return_value = MagicMock(free=100 * 1024**3)
@@ -294,9 +316,11 @@ def test_first_call_returns_zero_network_io(self):
                 bytes_recv=2000000, bytes_sent=1000000
             )

-            from src.code_indexer.server.services.health_service import HealthCheckService
+            from src.code_indexer.server.services.health_service import (
+                HealthCheckService,
+            )

-            with patch.object(HealthCheckService, '__init__', lambda self: None):
+            with patch.object(HealthCheckService, "__init__", lambda self: None):
                 service = HealthCheckService()
                 service._last_disk_counters = None
                 service._last_disk_time = None
@@ -311,9 +335,11 @@ def test_first_call_returns_zero_network_io(self):

     def test_second_call_calculates_network_io_rate(self):
         """AC3: Second call should calculate network I/O in KB/s from counter diffs."""
-        NetCounters = namedtuple('NetCounters', ['bytes_recv', 'bytes_sent'])
+        NetCounters = namedtuple("NetCounters", ["bytes_recv", "bytes_sent"])

-        with patch('src.code_indexer.server.services.health_service.psutil') as mock_psutil:
+        with patch(
+            "src.code_indexer.server.services.health_service.psutil"
+        ) as mock_psutil:
             mock_psutil.virtual_memory.return_value = MagicMock(percent=50.0)
             mock_psutil.cpu_percent.return_value = 25.0
             mock_psutil.disk_usage.return_value = MagicMock(free=100 * 1024**3)
@@ -324,13 +350,17 @@ def test_second_call_calculates_network_io_rate(self):
             # First call: 1MB received, 500KB sent
             first_counters = NetCounters(bytes_recv=1024 * 1024, bytes_sent=512 * 1024)
             # Second call: 2MB received (1MB more), 1MB sent (512KB more), 1 second later
-            second_counters = NetCounters(bytes_recv=2 * 1024 * 1024, bytes_sent=1024 * 1024)
+            second_counters = NetCounters(
+                bytes_recv=2 * 1024 * 1024, bytes_sent=1024 * 1024
+            )

             mock_psutil.net_io_counters.side_effect = [first_counters, second_counters]

-            from src.code_indexer.server.services.health_service import HealthCheckService
+            from src.code_indexer.server.services.health_service import (
+                HealthCheckService,
+            )

-            with patch.object(HealthCheckService, '__init__', lambda self: None):
+            with patch.object(HealthCheckService, "__init__", lambda self: None):
                 service = HealthCheckService()
                 service._last_disk_counters = None
                 service._last_disk_time = None
@@ -354,9 +384,11 @@ def test_second_call_calculates_network_io_rate(self):

     def test_network_io_calculation_formula_accuracy(self):
         """AC3: Verify KB/s formula: (bytes_diff / 1024) / elapsed_seconds."""
-        NetCounters = namedtuple('NetCounters', ['bytes_recv', 'bytes_sent'])
+        NetCounters = namedtuple("NetCounters", ["bytes_recv", "bytes_sent"])

-        with patch('src.code_indexer.server.services.health_service.psutil') as mock_psutil:
+        with patch(
+            "src.code_indexer.server.services.health_service.psutil"
+        ) as mock_psutil:
             mock_psutil.virtual_memory.return_value = MagicMock(percent=50.0)
             mock_psutil.cpu_percent.return_value = 25.0
             mock_psutil.disk_usage.return_value = MagicMock(free=100 * 1024**3)
@@ -374,9 +406,11 @@ def test_network_io_calculation_formula_accuracy(self):

             mock_psutil.net_io_counters.side_effect = [first_counters, second_counters]

-            from src.code_indexer.server.services.health_service import HealthCheckService
+            from src.code_indexer.server.services.health_service import (
+                HealthCheckService,
+            )

-            with patch.object(HealthCheckService, '__init__', lambda self: None):
+            with patch.object(HealthCheckService, "__init__", lambda self: None):
                 service = HealthCheckService()
                 service._last_disk_counters = None
                 service._last_disk_time = None
@@ -405,18 +439,28 @@ def test_dashboard_refresh_interval_is_2_seconds(self):
         from pathlib import Path

         # Find the dashboard.html template
-        template_path = Path(__file__).parent.parent.parent.parent / "src" / "code_indexer" / "server" / "web" / "templates" / "dashboard.html"
+        template_path = (
+            Path(__file__).parent.parent.parent.parent
+            / "src"
+            / "code_indexer"
+            / "server"
+            / "web"
+            / "templates"
+            / "dashboard.html"
+        )

         if template_path.exists():
             content = template_path.read_text()

             # Verify the interval is 2000ms (2 seconds), not 5000ms
-            assert "setInterval(refreshAll, 2000)" in content, \
-                "Dashboard should use 2000ms (2 second) refresh interval"
+            assert (
+                "setInterval(refreshAll, 2000)" in content
+            ), "Dashboard should use 2000ms (2 second) refresh interval"

             # Verify 5000ms is NOT present (old interval)
-            assert "setInterval(refreshAll, 5000)" not in content, \
-                "Dashboard should NOT use 5000ms (5 second) refresh interval"
+            assert (
+                "setInterval(refreshAll, 5000)" not in content
+            ), "Dashboard should NOT use 5000ms (5 second) refresh interval"
         else:
             pytest.skip("Dashboard template not found at expected path")

@@ -428,16 +472,27 @@ def test_dashboard_displays_disk_io_metrics(self):
         """AC2: Dashboard should display disk read/write speeds."""
         from pathlib import Path

-        template_path = Path(__file__).parent.parent.parent.parent / "src" / "code_indexer" / "server" / "web" / "templates" / "partials" / "dashboard_health.html"
+        template_path = (
+            Path(__file__).parent.parent.parent.parent
+            / "src"
+            / "code_indexer"
+            / "server"
+            / "web"
+            / "templates"
+            / "partials"
+            / "dashboard_health.html"
+        )

         if template_path.exists():
             content = template_path.read_text()

             # Verify disk I/O metrics are displayed
-            assert "disk_read_kb_s" in content, \
-                "Dashboard should display disk read speed (disk_read_kb_s)"
-            assert "disk_write_kb_s" in content, \
-                "Dashboard should display disk write speed (disk_write_kb_s)"
+            assert (
+                "disk_read_kb_s" in content
+            ), "Dashboard should display disk read speed (disk_read_kb_s)"
+            assert (
+                "disk_write_kb_s" in content
+            ), "Dashboard should display disk write speed (disk_write_kb_s)"
         else:
             pytest.skip("Dashboard health template not found at expected path")

@@ -445,16 +500,27 @@ def test_dashboard_displays_network_io_metrics(self):
         """AC3: Dashboard should display network Rx/Tx speeds."""
         from pathlib import Path

-        template_path = Path(__file__).parent.parent.parent.parent / "src" / "code_indexer" / "server" / "web" / "templates" / "partials" / "dashboard_health.html"
+        template_path = (
+            Path(__file__).parent.parent.parent.parent
+            / "src"
+            / "code_indexer"
+            / "server"
+            / "web"
+            / "templates"
+            / "partials"
+            / "dashboard_health.html"
+        )

         if template_path.exists():
             content = template_path.read_text()

             # Verify network I/O metrics are displayed
-            assert "net_rx_kb_s" in content, \
-                "Dashboard should display network receive speed (net_rx_kb_s)"
-            assert "net_tx_kb_s" in content, \
-                "Dashboard should display network transmit speed (net_tx_kb_s)"
+            assert (
+                "net_rx_kb_s" in content
+            ), "Dashboard should display network receive speed (net_rx_kb_s)"
+            assert (
+                "net_tx_kb_s" in content
+            ), "Dashboard should display network transmit speed (net_tx_kb_s)"
         else:
             pytest.skip("Dashboard health template not found at expected path")

@@ -462,18 +528,26 @@ def test_dashboard_displays_io_labels(self):
         """Dashboard should have user-friendly labels for I/O metrics."""
         from pathlib import Path

-        template_path = Path(__file__).parent.parent.parent.parent / "src" / "code_indexer" / "server" / "web" / "templates" / "partials" / "dashboard_health.html"
+        template_path = (
+            Path(__file__).parent.parent.parent.parent
+            / "src"
+            / "code_indexer"
+            / "server"
+            / "web"
+            / "templates"
+            / "partials"
+            / "dashboard_health.html"
+        )

         if template_path.exists():
             content = template_path.read_text()

             # Verify user-friendly labels exist
-            assert "Disk" in content, \
-                "Dashboard should have Disk label"
-            assert "Network" in content or "Net" in content, \
-                "Dashboard should have Network label"
-            assert "KB/s" in content, \
-                "Dashboard should display KB/s units"
+            assert "Disk" in content, "Dashboard should have Disk label"
+            assert (
+                "Network" in content or "Net" in content
+            ), "Dashboard should have Network label"
+            assert "KB/s" in content, "Dashboard should display KB/s units"
         else:
             pytest.skip("Dashboard health template not found at expected path")

@@ -483,7 +557,9 @@ class TestHealthServiceStatePersistence:

     def test_state_variables_persist_across_calls(self):
         """State variables should persist across _get_system_info calls."""
-        with patch('src.code_indexer.server.services.health_service.psutil') as mock_psutil:
+        with patch(
+            "src.code_indexer.server.services.health_service.psutil"
+        ) as mock_psutil:
             mock_psutil.virtual_memory.return_value = MagicMock(percent=50.0)
             mock_psutil.cpu_percent.return_value = 25.0
             mock_psutil.disk_usage.return_value = MagicMock(free=100 * 1024**3)
@@ -494,9 +570,11 @@ def test_state_variables_persist_across_calls(self):
                 bytes_recv=2000000, bytes_sent=1000000
             )

-            from src.code_indexer.server.services.health_service import HealthCheckService
+            from src.code_indexer.server.services.health_service import (
+                HealthCheckService,
+            )

-            with patch.object(HealthCheckService, '__init__', lambda self: None):
+            with patch.object(HealthCheckService, "__init__", lambda self: None):
                 service = HealthCheckService()
                 service._last_disk_counters = None
                 service._last_disk_time = None
@@ -515,7 +593,9 @@ def test_state_variables_persist_across_calls(self):

     def test_zero_elapsed_time_no_division_error(self):
         """Edge case: Handle zero elapsed time without division by zero."""
-        with patch('src.code_indexer.server.services.health_service.psutil') as mock_psutil:
+        with patch(
+            "src.code_indexer.server.services.health_service.psutil"
+        ) as mock_psutil:
             mock_psutil.virtual_memory.return_value = MagicMock(percent=50.0)
             mock_psutil.cpu_percent.return_value = 25.0
             mock_psutil.disk_usage.return_value = MagicMock(free=100 * 1024**3)
@@ -526,9 +606,11 @@ def test_zero_elapsed_time_no_division_error(self):
                 bytes_recv=2000000, bytes_sent=1000000
             )

-            from src.code_indexer.server.services.health_service import HealthCheckService
+            from src.code_indexer.server.services.health_service import (
+                HealthCheckService,
+            )

-            with patch.object(HealthCheckService, '__init__', lambda self: None):
+            with patch.object(HealthCheckService, "__init__", lambda self: None):
                 service = HealthCheckService()
                 service._last_disk_counters = MagicMock(
                     read_bytes=500000, write_bytes=250000
diff --git a/tests/unit/server/utils/test_cache_config_payload_fields.py b/tests/unit/server/utils/test_cache_config_payload_fields.py
index 87f1512b..ced0346c 100644
--- a/tests/unit/server/utils/test_cache_config_payload_fields.py
+++ b/tests/unit/server/utils/test_cache_config_payload_fields.py
@@ -8,7 +8,6 @@
 """

 import json
-import pytest

 from code_indexer.server.utils.config_manager import (
     CacheConfig,
diff --git a/tests/unit/server/web/test_issue_716_bug1a_jobs_sorting.py b/tests/unit/server/web/test_issue_716_bug1a_jobs_sorting.py
index 49da3e05..11541323 100644
--- a/tests/unit/server/web/test_issue_716_bug1a_jobs_sorting.py
+++ b/tests/unit/server/web/test_issue_716_bug1a_jobs_sorting.py
@@ -7,7 +7,6 @@
 Tests are written FIRST following TDD methodology.
 """

-import pytest
 from unittest.mock import MagicMock, patch
 from datetime import datetime

@@ -59,7 +58,7 @@ def test_jobs_sorted_by_started_at_when_available(self):

         with patch(
             "src.code_indexer.server.web.routes._get_background_job_manager",
-            return_value=mock_job_manager
+            return_value=mock_job_manager,
         ):
             jobs, total, pages = _get_all_jobs()

@@ -112,7 +111,7 @@ def test_jobs_fallback_to_created_at_when_no_started_at(self):

         with patch(
             "src.code_indexer.server.web.routes._get_background_job_manager",
-            return_value=mock_job_manager
+            return_value=mock_job_manager,
         ):
             jobs, total, pages = _get_all_jobs()
diff --git a/tests/unit/server/web/test_routes_payload_cache_validation.py b/tests/unit/server/web/test_routes_payload_cache_validation.py
index 6b601018..a25fd59e 100644
--- a/tests/unit/server/web/test_routes_payload_cache_validation.py
+++ b/tests/unit/server/web/test_routes_payload_cache_validation.py
@@ -6,8 +6,6 @@
 Tests that _validate_config_section properly validates payload cache fields.
 """

-import pytest
-

 # Import the validation function
 from code_indexer.server.web.routes import _validate_config_section