From 97342d0dba8a9301a0cdb4a0d8d460bca6a9964e Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Sun, 3 Aug 2025 17:31:00 +0300
Subject: [PATCH 01/19] refactor(cli,core): migrate to click/rich-click CLI and
replace BrowserImpersonation with BrowserTypeLiteral
---
README.md | 3 +-
naminter/cli/config.py | 17 ++-
naminter/cli/main.py | 269 ++++++++++++++++++-------------------
naminter/core/constants.py | 6 +-
naminter/core/main.py | 31 +++--
naminter/core/models.py | 13 +-
pyproject.toml | 3 +-
7 files changed, 175 insertions(+), 167 deletions(-)
diff --git a/README.md b/README.md
index 733f27c..fccaac0 100644
--- a/README.md
+++ b/README.md
@@ -225,7 +225,6 @@ asyncio.run(main())
```python
import asyncio
from naminter import Naminter
-from naminter.core.models import BrowserImpersonation
async def main():
wmn_data, wmn_schema = await load_wmn_data()
@@ -236,7 +235,7 @@ async def main():
wmn_schema=wmn_schema,
max_tasks=100,
timeout=15,
- impersonate=BrowserImpersonation.CHROME,
+ impersonate="chrome",
verify_ssl=True,
proxy="http://proxy:8080"
) as naminter:
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index e4cb94b..0f23bfd 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -9,7 +9,8 @@
WMN_REMOTE_URL,
WMN_SCHEMA_URL,
)
-from ..core.models import BrowserImpersonation
+from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
+
@dataclass
class NaminterConfig:
@@ -44,7 +45,10 @@ class NaminterConfig:
proxy: Optional[str] = None
allow_redirects: bool = False
verify_ssl: bool = False
- impersonate: BrowserImpersonation = BrowserImpersonation.CHROME
+ impersonate: BrowserTypeLiteral = "chrome"
+ ja3: Optional[str] = None
+ akamai: Optional[str] = None
+ extra_fp: Optional[Union[ExtraFingerprints, Dict[str, Any]]] = None
browse: bool = False
fuzzy_mode: bool = False
self_check: bool = False
@@ -91,8 +95,8 @@ def __post_init__(self) -> None:
self.impersonate = self.get_impersonation()
def get_impersonation(self) -> Optional[str]:
- """Return impersonation string or None if impersonation is NONE."""
- return None if self.impersonate == BrowserImpersonation.NONE else self.impersonate.value
+ """Return impersonation string or None if impersonation is 'none'."""
+ return None if self.impersonate == "none" else self.impersonate
@property
def response_dir(self) -> Optional[Path]:
@@ -134,7 +138,10 @@ def to_dict(self) -> Dict[str, Any]:
"proxy": self.proxy,
"allow_redirects": self.allow_redirects,
"verify_ssl": self.verify_ssl,
- "impersonate": self.impersonate.value if isinstance(self.impersonate, BrowserImpersonation) else self.impersonate,
+ "impersonate": self.impersonate,
+ "ja3": self.ja3,
+ "akamai": self.akamai,
+ "extra_fp": self.extra_fp.to_dict() if isinstance(self.extra_fp, ExtraFingerprints) else self.extra_fp,
"browse": self.browse,
"fuzzy_mode": self.fuzzy_mode,
"self_check": self.self_check,
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index 8f3e987..b7a650c 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -3,15 +3,17 @@
import logging
import webbrowser
from pathlib import Path
-from typing import Annotated, Any, Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, List, Optional, Tuple, Union
+import typing
-import typer
+import rich_click as click
from curl_cffi import requests
from rich import box
from rich.panel import Panel
from rich.table import Table
-from ..cli.config import BrowserImpersonation, NaminterConfig
+from curl_cffi import BrowserTypeLiteral
+from ..cli.config import NaminterConfig
from ..cli.console import (
console,
display_error,
@@ -27,12 +29,6 @@
from ..core.exceptions import DataError, ConfigurationError
from .. import __description__, __version__
-app = typer.Typer(
- help=__description__,
- add_completion=False,
- rich_markup_mode="rich",
- no_args_is_help=True,
-)
class NaminterCLI:
"""Handles username availability checks."""
@@ -146,10 +142,13 @@ async def run(self) -> None:
wmn_schema=wmn_schema,
max_tasks=self.config.max_tasks,
timeout=self.config.timeout,
- impersonate=self.config.impersonate,
+ proxy=self.config.proxy,
verify_ssl=self.config.verify_ssl,
allow_redirects=self.config.allow_redirects,
- proxy=self.config.proxy,
+ impersonate=self.config.impersonate,
+ ja3=self.config.ja3,
+ akamai=self.config.akamai,
+ extra_fp=self.config.extra_fp,
) as naminter:
if self.config.self_check:
results = await self._run_self_check(naminter)
@@ -304,132 +303,128 @@ async def _process_result(self, result: SiteResult) -> Optional[Path]:
return response_file
-def version_callback(value: bool):
- """Callback to handle version display."""
- if value:
- display_version()
- raise typer.Exit()
-
-def main(
- usernames: Optional[List[str]] = typer.Option(None, "--username", "-u", help="Username(s) to search for across social media platforms", show_default=False),
- site_names: Optional[List[str]] = typer.Option(None, "--site", "-s", help="Specific site name(s) to check (e.g., 'GitHub', 'Twitter')", show_default=False),
- version: Annotated[Optional[bool], typer.Option("--version", help="Show version information and exit", callback=version_callback, is_eager=True)] = None,
- no_color: bool = typer.Option(False, "--no-color", help="Disable colored console output"),
- no_progressbar: bool = typer.Option(False, "--no-progressbar", help="Disable progress bar during execution"),
-
- # Input lists
- local_list: Optional[List[Path]] = typer.Option(
- None, "--local-list", help="Path(s) to local JSON file(s) containing WhatsMyName site data", show_default=False
- ),
- remote_list_url: Optional[List[str]] = typer.Option(
- None, "--remote-list", help="URL(s) to fetch remote WhatsMyName site data", show_default=False
- ),
- local_schema: Optional[Path] = typer.Option(
- None, "--local-schema", help="Path to local WhatsMyName JSON schema file for validation", show_default=False
- ),
- remote_schema_url: Optional[str] = typer.Option(
- WMN_SCHEMA_URL, "--remote-schema", help="URL to fetch custom WhatsMyName JSON schema for validation"
- ),
-
- skip_validation: bool = typer.Option(False, "--skip-validation", help="Skip JSON schema validation of WhatsMyName data"),
- # Self-check
- self_check: bool = typer.Option(False, "--self-check", help="Run self-check mode to validate site detection accuracy"),
-
- # Category filters
- include_categories: Optional[List[str]] = typer.Option(
- None, "--include-categories", show_default=False, help="Include only sites from specified categories (e.g., 'social', 'coding')"
- ),
- exclude_categories: Optional[List[str]] = typer.Option(
- None, "--exclude-categories", show_default=False, help="Exclude sites from specified categories (e.g., 'adult', 'gaming')"
- ),
-
- # Network
- proxy: Optional[str] = typer.Option(
- None, "--proxy", show_default=False, help="Proxy server to use for requests (e.g., http://proxy:port, socks5://proxy:port)"
- ),
- timeout: int = typer.Option(HTTP_REQUEST_TIMEOUT_SECONDS, "--timeout", help="Maximum time in seconds to wait for each HTTP request"),
- allow_redirects: bool = typer.Option(HTTP_ALLOW_REDIRECTS, "--allow-redirects", help="Whether to follow HTTP redirects automatically"),
- verify_ssl: bool = typer.Option(HTTP_SSL_VERIFY, "--verify-ssl", help="Whether to verify SSL/TLS certificates for HTTPS requests"),
- impersonate: BrowserImpersonation = typer.Option(
- BrowserImpersonation.CHROME, "--impersonate", "-i", help="Browser to impersonate in HTTP requests"
- ),
+@click.group(invoke_without_command=True, context_settings=dict(help_option_names=['-h', '--help']))
+@click.option('--version', is_flag=True, help='Show version information and exit')
+@click.option('--no-color', is_flag=True, help='Disable colored console output')
+@click.option('--no-progressbar', is_flag=True, help='Disable progress bar during execution')
+@click.option('--username', '-u', multiple=True, help='Username(s) to search for across social media platforms')
+@click.option('--site', '-s', multiple=True, help='Specific site name(s) to check (e.g., "GitHub", "Twitter")')
+@click.option('--local-list', type=click.Path(exists=True, path_type=Path), multiple=True, help='Path(s) to local JSON file(s) containing WhatsMyName site data')
+@click.option('--remote-list', multiple=True, help='URL(s) to fetch remote WhatsMyName site data')
+@click.option('--local-schema', type=click.Path(exists=True, path_type=Path), help='Path to local WhatsMyName JSON schema file for validation')
+@click.option('--remote-schema', default=WMN_SCHEMA_URL, help='URL to fetch custom WhatsMyName JSON schema for validation')
+@click.option('--skip-validation', is_flag=True, help='Skip JSON schema validation of WhatsMyName data')
+@click.option('--self-check', is_flag=True, help='Run self-check mode to validate site detection accuracy')
+@click.option('--include-categories', multiple=True, help='Include only sites from specified categories (e.g., "social", "coding")')
+@click.option('--exclude-categories', multiple=True, help='Exclude sites from specified categories (e.g., "adult", "gaming")')
+@click.option('--proxy', help='Proxy server to use for requests (e.g., http://proxy:port, socks5://proxy:port)')
+@click.option('--timeout', type=int, default=HTTP_REQUEST_TIMEOUT_SECONDS, help='Maximum time in seconds to wait for each HTTP request')
+@click.option('--allow-redirects', is_flag=True, default=HTTP_ALLOW_REDIRECTS, help='Whether to follow HTTP redirects automatically')
+@click.option('--verify-ssl', is_flag=True, default=HTTP_SSL_VERIFY, help='Whether to verify SSL/TLS certificates for HTTPS requests')
+@click.option('--impersonate', type=click.Choice(typing.get_args(BrowserTypeLiteral) + ("none",)), default="chrome", help='Browser to impersonate in HTTP requests (use "none" to disable impersonation)')
+@click.option('--no-impersonate', is_flag=True, help='Disable browser impersonation (equivalent to --impersonate none)')
+@click.option('--ja3', help='JA3 fingerprint string for TLS fingerprinting')
+@click.option('--akamai', help='Akamai fingerprint string for Akamai bot detection bypass')
+@click.option('--extra-fp', help='Extra fingerprinting options as JSON string (e.g., \'{"tls_grease": true, "tls_cert_compression": "brotli"}\')')
+@click.option('--max-tasks', type=int, default=MAX_CONCURRENT_TASKS, help='Maximum number of concurrent tasks')
+@click.option('--fuzzy', 'fuzzy_mode', is_flag=True, help='Enable fuzzy validation mode')
+@click.option('--log-level', type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']), help='Set logging level')
+@click.option('--log-file', help='Path to log file for debug output')
+@click.option('--show-details', is_flag=True, help='Show detailed information in console output')
+@click.option('--browse', is_flag=True, help='Open found profiles in web browser')
+@click.option('--save-response', is_flag=True, help='Save HTTP response content for each result to files')
+@click.option('--response-path', help='Custom directory path for saving response files')
+@click.option('--open-response', is_flag=True, help='Open saved response files in web browser')
+@click.option('--csv', 'csv_export', is_flag=True, help='Export results to CSV file')
+@click.option('--csv-path', help='Custom path for CSV export')
+@click.option('--pdf', 'pdf_export', is_flag=True, help='Export results to PDF file')
+@click.option('--pdf-path', help='Custom path for PDF export')
+@click.option('--html', 'html_export', is_flag=True, help='Export results to HTML file')
+@click.option('--html-path', help='Custom path for HTML export')
+@click.option('--json', 'json_export', is_flag=True, help='Export results to JSON file')
+@click.option('--json-path', help='Custom path for JSON export')
+@click.option('--filter-all', is_flag=True, help='Include all results in console output and exports')
+@click.option('--filter-errors', is_flag=True, help='Show only error results in console output and exports')
+@click.option('--filter-not-found', is_flag=True, help='Show only not found results in console output and exports')
+@click.option('--filter-unknown', is_flag=True, help='Show only unknown results in console output and exports')
+@click.option('--filter-ambiguous', is_flag=True, help='Show only ambiguous results in console output and exports')
+@click.pass_context
+def main(ctx: click.Context, version: bool, **kwargs) -> None:
+ """The most powerful and fast username availability checker that searches across hundreds of websites using WhatsMyName dataset."""
- # Concurrency & Debug
- max_tasks: int = typer.Option(MAX_CONCURRENT_TASKS, "--max-tasks", help="Maximum number of concurrent tasks"),
- fuzzy_mode: bool = typer.Option(False, "--fuzzy", help="Enable fuzzy validation mode"),
- log_level: Optional[str] = typer.Option(None, "--log-level", help="Set logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", show_default=False),
- log_file: Optional[str] = typer.Option(None, "--log-file", help="Path to log file for debug output", show_default=False),
- show_details: bool = typer.Option(False, "--show-details", help="Show detailed information in console output"),
- browse: bool = typer.Option(False, "--browse", help="Open found profiles in web browser"),
-
- # Response handling
- save_response: bool = typer.Option(False, "--save-response", help="Save HTTP response content for each result to files"),
- response_path: Optional[str] = typer.Option(None, "--response-path", help="Custom directory path for saving response files", show_default=False),
- open_response: bool = typer.Option(False, "--open-response", help="Open saved response files in web browser"),
-
- # Export
- csv_export: bool = typer.Option(False, "--csv", help="Export results to CSV file"),
- csv_path: Optional[str] = typer.Option(None, "--csv-path", help="Custom path for CSV export", show_default=False),
- pdf_export: bool = typer.Option(False, "--pdf", help="Export results to PDF file"),
- pdf_path: Optional[str] = typer.Option(None, "--pdf-path", help="Custom path for PDF export", show_default=False),
- html_export: bool = typer.Option(False, "--html", help="Export results to HTML file"),
- html_path: Optional[str] = typer.Option(None, "--html-path", help="Custom path for HTML export", show_default=False),
- json_export: bool = typer.Option(False, "--json", help="Export results to JSON file"),
- json_path: Optional[str] = typer.Option(None, "--json-path", help="Custom path for JSON export", show_default=False),
-
- # Result filters
- filter_all: bool = typer.Option(False, "--filter-all", help="Include all results in console output and exports"),
- filter_errors: bool = typer.Option(False, "--filter-errors", help="Show only error results in console output and exports"),
- filter_not_found: bool = typer.Option(False, "--filter-not-found", help="Show only not found results in console output and exports"),
- filter_unknown: bool = typer.Option(False, "--filter-unknown", help="Show only unknown results in console output and exports"),
- filter_ambiguous: bool = typer.Option(False, "--filter-ambiguous", help="Show only ambiguous results in console output and exports"),
-) -> None:
- """Main CLI entry point."""
+ if version:
+ display_version()
+ ctx.exit()
- if no_color:
+ if ctx.invoked_subcommand is not None:
+ return
+
+ # If no subcommand is invoked, run the main functionality
+ if not kwargs.get('username') and not kwargs.get('self_check'):
+ click.echo(ctx.get_help())
+ ctx.exit(1)
+
+ if kwargs.get('no_color'):
console.no_color = True
try:
+ # Handle --no-impersonate flag
+ impersonate_value = kwargs.get('impersonate')
+ if kwargs.get('no_impersonate'):
+ impersonate_value = "none"
+
+ # Parse extra fingerprinting options if provided
+ extra_fp = None
+ if kwargs.get('extra_fp'):
+ try:
+ extra_fp = json.loads(kwargs.get('extra_fp'))
+ except json.JSONDecodeError as e:
+ display_error(f"Invalid JSON in --extra-fp option: {e}")
+ ctx.exit(1)
+
config = NaminterConfig(
- usernames=usernames,
- site_names=site_names,
- local_list_paths=local_list,
- remote_list_urls=remote_list_url,
- local_schema_path=local_schema,
- remote_schema_url=remote_schema_url,
- skip_validation=skip_validation,
- include_categories=include_categories,
- exclude_categories=exclude_categories,
- max_tasks=max_tasks,
- timeout=timeout,
- proxy=proxy,
- allow_redirects=allow_redirects,
- verify_ssl=verify_ssl,
- impersonate=impersonate,
- fuzzy_mode=fuzzy_mode,
- self_check=self_check,
- log_level=log_level,
- log_file=log_file,
- show_details=show_details,
- browse=browse,
- save_response=save_response,
- response_path=response_path,
- open_response=open_response,
- csv_export=csv_export,
- csv_path=csv_path,
- pdf_export=pdf_export,
- pdf_path=pdf_path,
- html_export=html_export,
- html_path=html_path,
- json_export=json_export,
- json_path=json_path,
- filter_all=filter_all,
- filter_errors=filter_errors,
- filter_not_found=filter_not_found,
- filter_unknown=filter_unknown,
- filter_ambiguous=filter_ambiguous,
- no_progressbar=no_progressbar,
+ usernames=kwargs.get('username'),
+ site_names=kwargs.get('site'),
+ local_list_paths=kwargs.get('local_list'),
+ remote_list_urls=kwargs.get('remote_list'),
+ local_schema_path=kwargs.get('local_schema'),
+ remote_schema_url=kwargs.get('remote_schema'),
+ skip_validation=kwargs.get('skip_validation'),
+ include_categories=kwargs.get('include_categories'),
+ exclude_categories=kwargs.get('exclude_categories'),
+ max_tasks=kwargs.get('max_tasks'),
+ timeout=kwargs.get('timeout'),
+ proxy=kwargs.get('proxy'),
+ allow_redirects=kwargs.get('allow_redirects'),
+ verify_ssl=kwargs.get('verify_ssl'),
+ impersonate=impersonate_value,
+ ja3=kwargs.get('ja3'),
+ akamai=kwargs.get('akamai'),
+ extra_fp=extra_fp,
+ fuzzy_mode=kwargs.get('fuzzy_mode'),
+ self_check=kwargs.get('self_check'),
+ log_level=kwargs.get('log_level'),
+ log_file=kwargs.get('log_file'),
+ show_details=kwargs.get('show_details'),
+ browse=kwargs.get('browse'),
+ save_response=kwargs.get('save_response'),
+ response_path=kwargs.get('response_path'),
+ open_response=kwargs.get('open_response'),
+ csv_export=kwargs.get('csv_export'),
+ csv_path=kwargs.get('csv_path'),
+ pdf_export=kwargs.get('pdf_export'),
+ pdf_path=kwargs.get('pdf_path'),
+ html_export=kwargs.get('html_export'),
+ html_path=kwargs.get('html_path'),
+ json_export=kwargs.get('json_export'),
+ json_path=kwargs.get('json_path'),
+ filter_all=kwargs.get('filter_all'),
+ filter_errors=kwargs.get('filter_errors'),
+ filter_not_found=kwargs.get('filter_not_found'),
+ filter_unknown=kwargs.get('filter_unknown'),
+ filter_ambiguous=kwargs.get('filter_ambiguous'),
+ no_progressbar=kwargs.get('no_progressbar'),
)
if config.log_level and config.log_file:
@@ -446,23 +441,25 @@ def main(
asyncio.run(naminter_cli.run())
except KeyboardInterrupt:
display_warning("Operation interrupted")
- raise typer.Exit(1)
+ ctx.exit(1)
except asyncio.TimeoutError:
display_error("Operation timed out")
- raise typer.Exit(1)
+ ctx.exit(1)
except ConfigurationError as e:
display_error(f"Configuration error: {e}")
- raise typer.Exit(1)
+ ctx.exit(1)
except DataError as e:
display_error(f"Data error: {e}")
- raise typer.Exit(1)
+ ctx.exit(1)
except Exception as e:
display_error(f"Fatal error: {e}")
- raise typer.Exit(1)
+ ctx.exit(1)
+
def entry_point() -> None:
"""Entry point for the application."""
- typer.run(main)
+ main()
+
if __name__ == "__main__":
entry_point()
\ No newline at end of file
diff --git a/naminter/core/constants.py b/naminter/core/constants.py
index 633363a..532d488 100644
--- a/naminter/core/constants.py
+++ b/naminter/core/constants.py
@@ -1,6 +1,5 @@
from typing import Final
-
-from ..core.models import BrowserImpersonation
+from curl_cffi import BrowserTypeLiteral
# Remote data source configuration
WMN_REMOTE_URL: Final[str] = "https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data.json"
@@ -12,7 +11,8 @@
HTTP_ALLOW_REDIRECTS: Final[bool] = False
# Browser impersonation settings
-BROWSER_IMPERSONATE_AGENT: Final[str] = BrowserImpersonation.CHROME.value
+# To disable browser impersonation by default, change this to "none"
+BROWSER_IMPERSONATE_AGENT: Final[str] = "chrome"
# Concurrency settings
MAX_CONCURRENT_TASKS: Final[int] = 50
diff --git a/naminter/core/main.py b/naminter/core/main.py
index 0beb1e9..f19aa41 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -7,7 +7,8 @@
import jsonschema
from curl_cffi.requests import AsyncSession, RequestsError
-from ..core.models import BrowserImpersonation, ResultStatus, SiteResult, SelfCheckResult
+from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
+from ..core.models import ResultStatus, SiteResult, SelfCheckResult
from ..core.exceptions import (
ConfigurationError,
NetworkError,
@@ -52,18 +53,21 @@ def __init__(
wmn_schema: Optional[Dict[str, Any]] = None,
max_tasks: int = MAX_CONCURRENT_TASKS,
timeout: int = HTTP_REQUEST_TIMEOUT_SECONDS,
- impersonate: Optional[BrowserImpersonation] = BROWSER_IMPERSONATE_AGENT,
+ proxy: Optional[Union[str, Dict[str, str]]] = None,
verify_ssl: bool = HTTP_SSL_VERIFY,
allow_redirects: bool = HTTP_ALLOW_REDIRECTS,
- proxy: Optional[Union[str, Dict[str, str]]] = None,
+ impersonate: Optional[BrowserTypeLiteral] = BROWSER_IMPERSONATE_AGENT,
+ ja3: Optional[str] = None,
+ akamai: Optional[str] = None,
+ extra_fp: Optional[Union[ExtraFingerprints, Dict[str, Any]]] = None,
) -> None:
"""Initialize Naminter with configuration parameters."""
self._logger = logging.getLogger(__name__)
self._logger.addHandler(logging.NullHandler())
self._logger.info(
- "Initializing Naminter with configuration: max_tasks=%d, timeout=%ds, browser=%s, ssl_verify=%s, allow_redirects=%s, proxy=%s",
- max_tasks, timeout, impersonate, verify_ssl, allow_redirects, bool(proxy)
+ "Initializing Naminter with configuration: max_tasks=%d, timeout=%ds, browser=%s, ssl_verify=%s, allow_redirects=%s, proxy=%s, ja3=%s, akamai=%s",
+ max_tasks, timeout, impersonate, verify_ssl, allow_redirects, bool(proxy), ja3, akamai
)
self.max_tasks = max_tasks if max_tasks is not None else MAX_CONCURRENT_TASKS
@@ -72,6 +76,9 @@ def __init__(
self.verify_ssl = verify_ssl if verify_ssl is not None else HTTP_SSL_VERIFY
self.allow_redirects = allow_redirects if allow_redirects is not None else HTTP_ALLOW_REDIRECTS
self.proxy = configure_proxy(proxy)
+ self.ja3 = ja3
+ self.akamai = akamai
+ self.extra_fp = extra_fp
validate_numeric_values(self.max_tasks, self.timeout)
validate_wmn_data(wmn_data, wmn_schema)
@@ -83,18 +90,24 @@ def __init__(
sites_count = len(self._wmn_data.get("sites", [])) if self._wmn_data else 0
self._logger.info(
- "Naminter initialized successfully: loaded %d sites, max_tasks=%d, timeout=%ds, browser=%s, ssl_verify=%s, proxy=%s",
+ "Naminter initialized successfully: loaded %d sites, max_tasks=%d, timeout=%ds, browser=%s, ssl_verify=%s, proxy=%s, ja3=%s, akamai=%s",
sites_count, self.max_tasks, self.timeout,
- self.impersonate, self.verify_ssl, bool(self.proxy)
+ self.impersonate, self.verify_ssl, bool(self.proxy), self.ja3, self.akamai
)
async def __aenter__(self) -> "Naminter":
+ # Convert ExtraFingerprints to dict if needed
+ extra_fp_value = self.extra_fp.to_dict() if isinstance(self.extra_fp, ExtraFingerprints) else self.extra_fp
+
self._session = AsyncSession(
- impersonate=self.impersonate,
+ proxies=self.proxy,
verify=self.verify_ssl,
timeout=self.timeout,
allow_redirects=self.allow_redirects,
- proxies=self.proxy,
+ impersonate=self.impersonate,
+ ja3=self.ja3,
+ akamai=self.akamai,
+ extra_fp=extra_fp_value,
)
return self
diff --git a/naminter/core/models.py b/naminter/core/models.py
index a347027..4349ad8 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -1,7 +1,8 @@
from dataclasses import dataclass, asdict, field
from enum import Enum
-from typing import Optional, Dict, Any, List, Union, Set
+from typing import Optional, Dict, Any, List, Union, Set, Literal, TypedDict
from datetime import datetime
+from curl_cffi import BrowserTypeLiteral, CurlSslVersion, ExtraFingerprints
class ResultStatus(Enum):
"""Status of username search results."""
@@ -12,16 +13,6 @@ class ResultStatus(Enum):
AMBIGUOUS = "ambiguous"
NOT_VALID = "not_valid"
-class BrowserImpersonation(str, Enum):
- """Browser impersonation options."""
- NONE = "none"
- CHROME = "chrome"
- CHROME_ANDROID = "chrome_android"
- SAFARI = "safari"
- SAFARI_IOS = "safari_ios"
- EDGE = "edge"
- FIREFOX = "firefox"
-
@dataclass
class SiteResult:
"""Result of testing a username on a site."""
diff --git a/pyproject.toml b/pyproject.toml
index b70276a..cd822e0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,8 +29,9 @@ classifiers = [
]
dependencies = [
- "typer>=0.16.0",
+ "click>=8.0.0",
"rich>=14.0.0",
+ "rich-click>=1.8.0",
"curl-cffi>=0.11.4",
"jsonschema>=4.24.0",
"jinja2>=3.1.6",
From 0326f692b0db02b8ae96e04b780785ba88f4f05c Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Sun, 3 Aug 2025 18:15:56 +0300
Subject: [PATCH 02/19] chore(deps): update dependencies to latest versions,
sort alphabetically
---
pyproject.toml | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index cd822e0..6efd148 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,13 +29,13 @@ classifiers = [
]
dependencies = [
- "click>=8.0.0",
- "rich>=14.0.0",
- "rich-click>=1.8.0",
- "curl-cffi>=0.11.4",
- "jsonschema>=4.24.0",
+ "click>=8.2.1",
+ "curl-cffi>=0.12.0",
"jinja2>=3.1.6",
- "weasyprint>=65.1"
+ "jsonschema>=4.25.0",
+ "rich>=14.1.0",
+ "rich-click>=1.8.9",
+ "weasyprint>=66.0"
]
maintainers = [{ name = "3xp0rt" }]
From 8fb311922728babc736145f06350835b8c98ed2d Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Sun, 3 Aug 2025 18:25:08 +0300
Subject: [PATCH 03/19] refactor: rename overall_status to result_status for
consistency
---
naminter/cli/console.py | 4 ++--
naminter/cli/main.py | 2 +-
naminter/core/models.py | 12 ++++++------
3 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/naminter/cli/console.py b/naminter/cli/console.py
index 5aba788..9907b63 100644
--- a/naminter/cli/console.py
+++ b/naminter/cli/console.py
@@ -97,10 +97,10 @@ def format_self_check(self, self_check_result: SelfCheckResult, response_files:
site_name = self_check_result.site_name
test_results = self_check_result.results
- overall_status = self_check_result.overall_status
+ result_status = self_check_result.result_status
root_label = Text()
- root_label.append(_STATUS_SYMBOLS.get(overall_status, "?"), style=_STATUS_STYLES.get(overall_status, Style()))
+ root_label.append(_STATUS_SYMBOLS.get(result_status, "?"), style=_STATUS_STYLES.get(result_status, Style()))
root_label.append(" [", style=THEME["muted"])
root_label.append(site_name, style=THEME["info"])
root_label.append("]", style=THEME["muted"])
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index b7a650c..eb9599c 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -249,7 +249,7 @@ async def _run_self_check(self, naminter: Naminter) -> List[SelfCheckResult]:
def _should_include_result(self, result: Union[SiteResult, SelfCheckResult]) -> bool:
"""Determine if a result should be included in output based on filter settings."""
if isinstance(result, SelfCheckResult):
- status = result.overall_status
+ status = result.result_status
else:
status = result.result_status
diff --git a/naminter/core/models.py b/naminter/core/models.py
index 4349ad8..536a02a 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -89,16 +89,16 @@ class SelfCheckResult:
site_name: str
category: str
results: List[SiteResult]
- overall_status: ResultStatus = field(init=False)
+ result_status: ResultStatus = field(init=False)
error: Optional[str] = None
created_at: datetime = field(default_factory=datetime.now)
def __post_init__(self) -> None:
- """Calculate overall status from results."""
- self.overall_status = self._get_overall_status()
+ """Calculate result status from results."""
+ self.result_status = self.get_result_status()
- def _get_overall_status(self) -> ResultStatus:
- """Determine overall status from results."""
+ def get_result_status(self) -> ResultStatus:
+ """Determine result status from results."""
if self.error:
return ResultStatus.ERROR
@@ -123,7 +123,7 @@ def to_dict(self, exclude_response_text: bool = False) -> Dict[str, Any]:
return {
'site_name': self.site_name,
'category': self.category,
- 'overall_status': self.overall_status.value,
+ 'result_status': self.result_status.value,
'results': [result.to_dict(exclude_response_text=exclude_response_text) for result in self.results],
'created_at': self.created_at.isoformat(),
'error': self.error,
From 279abb157ab1d22a209c6f9f875c67e749b83fce Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Mon, 4 Aug 2025 20:14:23 +0300
Subject: [PATCH 04/19] refactor(cli): move WMN list loading logic to utils
module
---
naminter/cli/main.py | 78 ++++-------------------------------------
naminter/cli/utils.py | 81 +++++++++++++++++++++++++++++++++++++++++++
2 files changed, 88 insertions(+), 71 deletions(-)
create mode 100644 naminter/cli/utils.py
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index eb9599c..cfe79de 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -1,13 +1,11 @@
import asyncio
-import json
import logging
import webbrowser
from pathlib import Path
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, List, Optional, Union
import typing
import rich_click as click
-from curl_cffi import requests
from rich import box
from rich.panel import Panel
from rich.table import Table
@@ -23,9 +21,10 @@
)
from ..cli.exporters import Exporter
from ..cli.progress import ProgressManager, ResultsTracker
+from ..cli.utils import load_wmn_lists
from ..core.models import ResultStatus, SiteResult, SelfCheckResult
from ..core.main import Naminter
-from ..core.constants import MAX_CONCURRENT_TASKS, HTTP_REQUEST_TIMEOUT_SECONDS, HTTP_ALLOW_REDIRECTS, HTTP_SSL_VERIFY, WMN_REMOTE_URL, WMN_SCHEMA_URL
+from ..core.constants import MAX_CONCURRENT_TASKS, HTTP_REQUEST_TIMEOUT_SECONDS, HTTP_ALLOW_REDIRECTS, HTTP_SSL_VERIFY, WMN_SCHEMA_URL
from ..core.exceptions import DataError, ConfigurationError
from .. import __description__, __version__
@@ -62,79 +61,16 @@ def _sanitize_filename(self, filename: str) -> str:
sanitized = sanitized.strip(' .')[:200] if sanitized.strip(' .') else 'unnamed'
return sanitized
- def _load_wmn_lists(self, local_list_paths: Optional[List[Path]] = None, remote_list_urls: Optional[List[str]] = None, skip_validation: bool = False) -> Tuple[Dict[str, Any], Optional[Dict[str, Any]]]:
- """Load and merge WMN lists from local and remote sources."""
- wmn_data = {"sites": [], "categories": [], "authors": [], "license": []}
- wmn_schema = None
-
- def _fetch_json(url: str, timeout: int = 30) -> Dict[str, Any]:
- """Helper to fetch and parse JSON from URL."""
- if not url or not isinstance(url, str) or not url.strip():
- raise ValueError(f"Invalid URL: {url}")
-
- try:
- response = requests.get(url, timeout=timeout)
- response.raise_for_status()
- return response.json()
- except requests.exceptions.RequestException as e:
- raise DataError(f"Failed to fetch from {url}: {e}") from e
- except json.JSONDecodeError as e:
- raise DataError(f"Failed to parse JSON from {url}: {e}") from e
- def _merge_data(data: Dict[str, Any]) -> None:
- """Helper to merge data into wmn_data."""
- if isinstance(data, dict):
- for key in ["sites", "categories", "authors", "license"]:
- if key in data and isinstance(data[key], list):
- wmn_data[key].extend(data[key])
-
- if not skip_validation:
- try:
- if self.config.local_schema_path:
- wmn_schema = json.loads(Path(self.config.local_schema_path).read_text())
- elif self.config.remote_schema_url:
- wmn_schema = _fetch_json(self.config.remote_schema_url)
- except Exception:
- pass
-
- sources = []
- if remote_list_urls:
- sources.extend([(url, True) for url in remote_list_urls])
- if local_list_paths:
- sources.extend([(path, False) for path in local_list_paths])
-
- if not sources:
- sources = [(WMN_REMOTE_URL, True)]
-
- for source, is_remote in sources:
- try:
- if is_remote:
- data = _fetch_json(source)
- else:
- data = json.loads(Path(source).read_text())
- _merge_data(data)
- except Exception as e:
- if not sources or source == WMN_REMOTE_URL:
- raise DataError(f"Failed to load WMN data from {source}: {e}") from e
-
- if not wmn_data["sites"]:
- raise DataError("No sites loaded from any source")
-
- unique_sites = {site["name"]: site for site in wmn_data["sites"]
- if isinstance(site, dict) and site.get("name")}
- wmn_data["sites"] = list(unique_sites.values())
- wmn_data["categories"] = sorted(set(wmn_data["categories"]))
- wmn_data["authors"] = sorted(set(wmn_data["authors"]))
- wmn_data["license"] = list(dict.fromkeys(wmn_data["license"]))
-
- return wmn_data, wmn_schema
async def run(self) -> None:
"""Main execution method with progress tracking."""
- wmn_data, wmn_schema = self._load_wmn_lists(
+ wmn_data, wmn_schema = load_wmn_lists(
local_list_paths=self.config.local_list_paths,
remote_list_urls=self.config.remote_list_urls,
- skip_validation=self.config.skip_validation
+ skip_validation=self.config.skip_validation,
+ local_schema_path=self.config.local_schema_path,
+ remote_schema_url=self.config.remote_schema_url
)
async with Naminter(
diff --git a/naminter/cli/utils.py b/naminter/cli/utils.py
new file mode 100644
index 0000000..905dc07
--- /dev/null
+++ b/naminter/cli/utils.py
@@ -0,0 +1,81 @@
+import json
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple
+
+from curl_cffi import requests
+from ..core.constants import WMN_REMOTE_URL
+from ..core.exceptions import DataError
+
+
+def load_wmn_lists(
+ local_list_paths: Optional[List[Path]] = None,
+ remote_list_urls: Optional[List[str]] = None,
+ skip_validation: bool = False,
+ local_schema_path: Optional[Path] = None,
+ remote_schema_url: Optional[str] = None
+) -> Tuple[Dict[str, Any], Optional[Dict[str, Any]]]:
+ """Load and merge WMN lists from local and remote sources."""
+ wmn_data = {"sites": [], "categories": [], "authors": [], "license": []}
+ wmn_schema = None
+
+ def _fetch_json(url: str, timeout: int = 30) -> Dict[str, Any]:
+ """Helper to fetch and parse JSON from URL."""
+ if not url or not isinstance(url, str) or not url.strip():
+ raise ValueError(f"Invalid URL: {url}")
+
+ try:
+ response = requests.get(url, timeout=timeout)
+ response.raise_for_status()
+ return response.json()
+ except requests.exceptions.RequestException as e:
+ raise DataError(f"Failed to fetch from {url}: {e}") from e
+ except json.JSONDecodeError as e:
+ raise DataError(f"Failed to parse JSON from {url}: {e}") from e
+
+ def _merge_data(data: Dict[str, Any]) -> None:
+ """Helper to merge data into wmn_data."""
+ if isinstance(data, dict):
+ for key in ["sites", "categories", "authors", "license"]:
+ if key in data and isinstance(data[key], list):
+ wmn_data[key].extend(data[key])
+
+ if not skip_validation:
+ try:
+ if local_schema_path:
+ wmn_schema = json.loads(Path(local_schema_path).read_text())
+ elif remote_schema_url:
+ wmn_schema = _fetch_json(remote_schema_url)
+ except Exception:
+ pass
+
+ sources = []
+ if remote_list_urls:
+ sources.extend([(url, True) for url in remote_list_urls])
+ if local_list_paths:
+ sources.extend([(path, False) for path in local_list_paths])
+
+ if not sources:
+ sources = [(WMN_REMOTE_URL, True)]
+
+ for source, is_remote in sources:
+ try:
+ if is_remote:
+ data = _fetch_json(source)
+ else:
+ data = json.loads(Path(source).read_text())
+ _merge_data(data)
+ except Exception as e:
+ if not sources or source == WMN_REMOTE_URL:
+ raise DataError(f"Failed to load WMN data from {source}: {e}") from e
+
+ if not wmn_data["sites"]:
+ raise DataError("No sites loaded from any source")
+
+ unique_sites = {site["name"]: site for site in wmn_data["sites"]
+ if isinstance(site, dict) and site.get("name")}
+ wmn_data["sites"] = list(unique_sites.values())
+ wmn_data["categories"] = sorted(set(wmn_data["categories"]))
+ wmn_data["authors"] = sorted(set(wmn_data["authors"]))
+ wmn_data["license"] = list(dict.fromkeys(wmn_data["license"]))
+
+ return wmn_data, wmn_schema
\ No newline at end of file
From 4e7ee9d95281ceff1bee1abba4c66d0822d05d2c Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Mon, 4 Aug 2025 20:18:39 +0300
Subject: [PATCH 05/19] refactor(cli): move sanitize_filename to utils module
---
naminter/cli/main.py | 16 +++-------------
naminter/cli/utils.py | 12 +++++++++++-
2 files changed, 14 insertions(+), 14 deletions(-)
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index cfe79de..4c66a21 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -21,7 +21,7 @@
)
from ..cli.exporters import Exporter
from ..cli.progress import ProgressManager, ResultsTracker
-from ..cli.utils import load_wmn_lists
+from ..cli.utils import load_wmn_lists, sanitize_filename
from ..core.models import ResultStatus, SiteResult, SelfCheckResult
from ..core.main import Naminter
from ..core.constants import MAX_CONCURRENT_TASKS, HTTP_REQUEST_TIMEOUT_SECONDS, HTTP_ALLOW_REDIRECTS, HTTP_SSL_VERIFY, WMN_SCHEMA_URL
@@ -51,16 +51,6 @@ def _setup_response_dir(self) -> Optional[Path]:
display_error(f"Cannot create/access response directory: {e}")
return None
- def _sanitize_filename(self, filename: str) -> str:
- """Sanitize filename for cross-platform compatibility."""
- if not filename or not str(filename).strip():
- return "unnamed"
-
- invalid_chars = '<>:"|?*\\/\0'
- sanitized = ''.join('_' if c in invalid_chars or ord(c) < 32 else c for c in str(filename))
- sanitized = sanitized.strip(' .')[:200] if sanitized.strip(' .') else 'unnamed'
- return sanitized
-
async def run(self) -> None:
@@ -218,8 +208,8 @@ async def _process_result(self, result: SiteResult) -> Optional[Path]:
if self.config.save_response and result.response_text and self._response_dir:
try:
- safe_site_name = self._sanitize_filename(result.site_name)
- safe_username = self._sanitize_filename(result.username)
+ safe_site_name = sanitize_filename(result.site_name)
+ safe_username = sanitize_filename(result.username)
status_str = result.result_status.value
created_at_str = result.created_at.strftime('%Y%m%d_%H%M%S')
diff --git a/naminter/cli/utils.py b/naminter/cli/utils.py
index 905dc07..3ad7f12 100644
--- a/naminter/cli/utils.py
+++ b/naminter/cli/utils.py
@@ -78,4 +78,14 @@ def _merge_data(data: Dict[str, Any]) -> None:
wmn_data["authors"] = sorted(set(wmn_data["authors"]))
wmn_data["license"] = list(dict.fromkeys(wmn_data["license"]))
- return wmn_data, wmn_schema
\ No newline at end of file
+ return wmn_data, wmn_schema
+
+def sanitize_filename(filename: str) -> str:
+ """Sanitize filename for cross-platform compatibility."""
+ if not filename or not str(filename).strip():
+ return "unnamed"
+
+ invalid_chars = '<>:"|?*\\/\0'
+ sanitized = ''.join('_' if c in invalid_chars or ord(c) < 32 else c for c in str(filename))
+ sanitized = sanitized.strip(' .')[:200] if sanitized.strip(' .') else 'unnamed'
+ return sanitized
\ No newline at end of file
From 0b791b7b4a61a7652cb91f9d421d26befaffedb0 Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Thu, 14 Aug 2025 16:15:42 +0300
Subject: [PATCH 06/19] refactor(cli,core): breaking refactor and fixes
---
README.md | 17 +--
naminter/__init__.py | 2 +-
naminter/cli/config.py | 55 +++++---
naminter/cli/console.py | 21 ++-
naminter/cli/constants.py | 3 +
naminter/cli/exporters.py | 2 +-
naminter/cli/main.py | 274 +++++++++++++++++++------------------
naminter/cli/progress.py | 7 +-
naminter/core/constants.py | 4 +-
naminter/core/main.py | 269 ++++++++++++++++++++++++------------
naminter/core/models.py | 31 +++--
naminter/core/utils.py | 21 ---
pyproject.toml | 13 +-
13 files changed, 406 insertions(+), 313 deletions(-)
create mode 100644 naminter/cli/constants.py
diff --git a/README.md b/README.md
index fccaac0..e111992 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
# 🔍 Naminter
-[](https://www.python.org/downloads/)
+[](https://www.python.org/downloads/)
[](LICENSE)
[](https://github.com/3xp0rt/naminter)
[](https://pypi.org/project/naminter/)
@@ -156,7 +156,7 @@ async def load_wmn_data():
wmn_data = await response.json()
# Optionally load the schema for validation
- async with session.get("https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn_schema.json") as response:
+ async with session.get("https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data-schema.json") as response:
wmn_schema = await response.json()
return wmn_data, wmn_schema
@@ -167,7 +167,7 @@ def load_local_wmn_data():
with open("wmn-data.json", "r") as f:
wmn_data = json.load(f)
- with open("wmn_schema.json", "r") as f:
+ with open("wmn-data-schema.json", "r") as f:
wmn_schema = json.load(f)
return wmn_data, wmn_schema
@@ -288,17 +288,12 @@ async def main():
async with Naminter(wmn_data, wmn_schema) as naminter:
# Get information about the loaded WMN data
- info = await naminter.get_wmn_info()
+ info = await naminter.get_wmn_summary()
print(f"Total sites: {info['sites_count']}")
print(f"Categories: {', '.join(info['categories'])}")
- # List all available sites
- sites = naminter.list_sites()
- print(f"Available sites: {sites[:10]}...") # Show first 10
-
- # List all categories
- categories = naminter.list_categories()
- print(f"All categories: {categories}")
+ # Summaries include sites_count, categories and categories_count
+ # Use this data to derive lists as needed.
asyncio.run(main())
```
diff --git a/naminter/__init__.py b/naminter/__init__.py
index 711a0de..ad02e70 100644
--- a/naminter/__init__.py
+++ b/naminter/__init__.py
@@ -1,6 +1,6 @@
from .core.main import Naminter
-__version__ = "1.0.6"
+__version__ = "1.0.7"
__author__ = "3xp0rt"
__description__ = "WhatsMyName Enumeration Tool"
__license__ = "MIT"
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index 0f23bfd..f5906e5 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -1,14 +1,16 @@
from dataclasses import dataclass, field
from pathlib import Path
from typing import List, Optional, Union, Dict, Any
+import json
-from ..cli.console import display_error, display_warning
+from ..cli.console import display_warning
from ..core.constants import (
HTTP_REQUEST_TIMEOUT_SECONDS,
MAX_CONCURRENT_TASKS,
WMN_REMOTE_URL,
WMN_SCHEMA_URL,
)
+from ..core.exceptions import ConfigurationError
from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
@@ -38,6 +40,7 @@ class NaminterConfig:
filter_not_found: bool = False
filter_unknown: bool = False
filter_ambiguous: bool = False
+ filter_not_valid: bool = False
# Network and concurrency
max_tasks: int = MAX_CONCURRENT_TASKS
@@ -45,10 +48,10 @@ class NaminterConfig:
proxy: Optional[str] = None
allow_redirects: bool = False
verify_ssl: bool = False
- impersonate: BrowserTypeLiteral = "chrome"
+ impersonate: Optional[BrowserTypeLiteral] = "chrome"
ja3: Optional[str] = None
akamai: Optional[str] = None
- extra_fp: Optional[Union[ExtraFingerprints, Dict[str, Any]]] = None
+ extra_fp: Optional[Union[ExtraFingerprints, Dict[str, Any], str]] = None
browse: bool = False
fuzzy_mode: bool = False
self_check: bool = False
@@ -81,8 +84,10 @@ def __post_init__(self) -> None:
"Self-check mode enabled: provided usernames will be ignored, "
"using known usernames from site configurations instead."
)
+
if not self.self_check and not self.usernames:
- raise ValueError("No usernames provided and self-check not enabled.")
+ raise ValueError("At least one username is required")
+
try:
if self.local_list_paths:
self.local_list_paths = [str(p) for p in self.local_list_paths]
@@ -92,34 +97,45 @@ def __post_init__(self) -> None:
self.remote_list_urls = [WMN_REMOTE_URL]
except Exception as e:
raise ValueError(f"Configuration validation failed: {e}") from e
- self.impersonate = self.get_impersonation()
- def get_impersonation(self) -> Optional[str]:
- """Return impersonation string or None if impersonation is 'none'."""
- return None if self.impersonate == "none" else self.impersonate
+ if isinstance(self.impersonate, str) and self.impersonate.lower() == "none":
+ self.impersonate = None
+
+ if self.extra_fp is not None and isinstance(self.extra_fp, str):
+ try:
+ self.extra_fp = json.loads(self.extra_fp)
+ except json.JSONDecodeError as e:
+ raise ConfigurationError(f"Invalid JSON in extra_fp: {e}") from e
+ except TypeError as e:
+ raise ConfigurationError(f"Invalid data type in extra_fp: {e}") from e
+
@property
def response_dir(self) -> Optional[Path]:
"""Return response directory Path if save_response is enabled."""
if not self.save_response:
return None
+
if self.response_path:
return Path(self.response_path)
- return Path.cwd()
+
+ return Path.cwd() / "responses"
@property
def export_formats(self) -> Dict[str, Optional[str]]:
"""Return enabled export formats with their custom paths."""
- formats: Dict[str, Optional[str]] = {}
- if self.csv_export:
- formats["csv"] = self.csv_path
- if self.pdf_export:
- formats["pdf"] = self.pdf_path
- if self.html_export:
- formats["html"] = self.html_path
- if self.json_export:
- formats["json"] = self.json_path
- return formats
+ export_configs = [
+ ("csv", self.csv_export, self.csv_path),
+ ("pdf", self.pdf_export, self.pdf_path),
+ ("html", self.html_export, self.html_path),
+ ("json", self.json_export, self.json_path),
+ ]
+
+ return {
+ format_name: path
+ for format_name, is_enabled, path in export_configs
+ if is_enabled
+ }
def to_dict(self) -> Dict[str, Any]:
"""Convert configuration to a dictionary."""
@@ -164,5 +180,6 @@ def to_dict(self) -> Dict[str, Any]:
"filter_not_found": self.filter_not_found,
"filter_unknown": self.filter_unknown,
"filter_ambiguous": self.filter_ambiguous,
+ "filter_not_valid": self.filter_not_valid,
"no_progressbar": self.no_progressbar,
}
diff --git a/naminter/cli/console.py b/naminter/cli/console.py
index 9907b63..512acf2 100644
--- a/naminter/cli/console.py
+++ b/naminter/cli/console.py
@@ -27,18 +27,18 @@
ResultStatus.FOUND: "+",
ResultStatus.NOT_FOUND: "-",
ResultStatus.UNKNOWN: "?",
+ ResultStatus.AMBIGUOUS: "*",
ResultStatus.ERROR: "!",
ResultStatus.NOT_VALID: "X",
- ResultStatus.AMBIGUOUS: "*",
}
_STATUS_STYLES: Dict[ResultStatus, Style] = {
ResultStatus.FOUND: Style(color=THEME['success'], bold=True),
ResultStatus.NOT_FOUND: Style(color=THEME['error']),
ResultStatus.UNKNOWN: Style(color=THEME['warning']),
+ ResultStatus.AMBIGUOUS: Style(color=THEME['warning'], bold=True),
ResultStatus.ERROR: Style(color=THEME['error'], bold=True),
ResultStatus.NOT_VALID: Style(color=THEME['error']),
- ResultStatus.AMBIGUOUS: Style(color=THEME['warning'], bold=True),
}
class ResultFormatter:
@@ -54,7 +54,7 @@ def format_result(self, site_result: SiteResult, response_file_path: Optional[Pa
if site_result is None:
raise ValueError("SiteResult cannot be None")
- if not hasattr(site_result, 'result_status') or site_result.result_status not in ResultStatus:
+ if not hasattr(site_result, 'result_status') or not isinstance(site_result.result_status, ResultStatus):
raise ValueError("SiteResult must have a valid result_status")
root_label = Text()
@@ -82,13 +82,13 @@ def format_result(self, site_result: SiteResult, response_file_path: Optional[Pa
def format_self_check(self, self_check_result: SelfCheckResult, response_files: Optional[List[Optional[Path]]] = None) -> Tree:
"""Format self-check results into a tree structure."""
-
+
if not self_check_result:
raise ValueError("SelfCheckResult cannot be None or empty")
-
+
if not isinstance(self_check_result, SelfCheckResult):
raise ValueError("Parameter must be a SelfCheckResult instance")
-
+
if not self_check_result.site_name or not self_check_result.site_name.strip():
raise ValueError("SelfCheckResult must have a valid site_name")
@@ -110,16 +110,15 @@ def format_self_check(self, self_check_result: SelfCheckResult, response_files:
for i, test in enumerate(test_results):
if test is None:
continue
-
+
url_text = Text()
- url_text.append(_STATUS_SYMBOLS.get(test.result_status, "?"),
- style=_STATUS_STYLES.get(test.result_status, Style()))
+ url_text.append(_STATUS_SYMBOLS.get(test.result_status, "?"), style=_STATUS_STYLES.get(test.result_status, Style()))
url_text.append(" ", style=THEME["muted"])
url_text.append(f"{test.username}: ", style=THEME["info"])
url_text.append(test.result_url or "No URL", style=THEME["primary"])
-
+
test_node = tree.add(url_text)
-
+
if self.show_details:
response_file = response_files[i] if response_files and i < len(response_files) else None
self._add_debug_info(
diff --git a/naminter/cli/constants.py b/naminter/cli/constants.py
new file mode 100644
index 0000000..b231f66
--- /dev/null
+++ b/naminter/cli/constants.py
@@ -0,0 +1,3 @@
+# Constants for file operations
+RESPONSE_FILE_DATE_FORMAT = '%Y%m%d_%H%M%S'
+RESPONSE_FILE_EXTENSION = '.html'
diff --git a/naminter/cli/exporters.py b/naminter/cli/exporters.py
index 3c3ecdd..ef74206 100644
--- a/naminter/cli/exporters.py
+++ b/naminter/cli/exporters.py
@@ -10,7 +10,7 @@
from ..core.models import SiteResult
from ..core.exceptions import ConfigurationError
-type FormatName = Literal['csv', 'json', 'html', 'pdf']
+FormatName = Literal['csv', 'json', 'html', 'pdf']
ResultDict = Dict[str, Any]
class ExportMethod(Protocol):
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index 4c66a21..90aa3ca 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -1,14 +1,13 @@
import asyncio
+import json
import logging
import webbrowser
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
import typing
+import aiofiles
import rich_click as click
-from rich import box
-from rich.panel import Panel
-from rich.table import Table
from curl_cffi import BrowserTypeLiteral
from ..cli.config import NaminterConfig
@@ -21,20 +20,29 @@
)
from ..cli.exporters import Exporter
from ..cli.progress import ProgressManager, ResultsTracker
+from ..cli.constants import RESPONSE_FILE_DATE_FORMAT, RESPONSE_FILE_EXTENSION
from ..cli.utils import load_wmn_lists, sanitize_filename
from ..core.models import ResultStatus, SiteResult, SelfCheckResult
from ..core.main import Naminter
-from ..core.constants import MAX_CONCURRENT_TASKS, HTTP_REQUEST_TIMEOUT_SECONDS, HTTP_ALLOW_REDIRECTS, HTTP_SSL_VERIFY, WMN_SCHEMA_URL
+from ..core.constants import MAX_CONCURRENT_TASKS, HTTP_REQUEST_TIMEOUT_SECONDS, HTTP_ALLOW_REDIRECTS, HTTP_SSL_VERIFY, WMN_SCHEMA_URL, LOGGING_FORMAT
+
from ..core.exceptions import DataError, ConfigurationError
from .. import __description__, __version__
+def _version_callback(ctx: click.Context, param: click.Option, value: bool) -> None:
+ """Eager callback to display version and exit."""
+ if not value or ctx.resilient_parsing:
+ return
+ display_version()
+ ctx.exit()
+
+
class NaminterCLI:
"""Handles username availability checks."""
def __init__(self, config: NaminterConfig) -> None:
self.config: NaminterConfig = config
- self._found_results: List[SiteResult] = []
self._formatter: ResultFormatter = ResultFormatter(show_details=config.show_details)
self._response_dir: Optional[Path] = self._setup_response_dir()
@@ -44,15 +52,21 @@ def _setup_response_dir(self) -> Optional[Path]:
return None
try:
- response_dir = Path(self.config.response_path) if self.config.response_path else Path.cwd() / "responses"
+ response_dir = self.config.response_dir
+ if response_dir is None:
+ return None
response_dir.mkdir(parents=True, exist_ok=True)
return response_dir
+ except PermissionError as e:
+ display_error(f"Permission denied creating/accessing response directory: {e}")
+ return None
+ except OSError as e:
+ display_error(f"OS error creating/accessing response directory: {e}")
+ return None
except Exception as e:
- display_error(f"Cannot create/access response directory: {e}")
+ display_error(f"Unexpected error setting up response directory: {e}")
return None
-
-
async def run(self) -> None:
"""Main execution method with progress tracking."""
wmn_data, wmn_schema = load_wmn_lists(
@@ -80,158 +94,182 @@ async def run(self) -> None:
results = await self._run_self_check(naminter)
else:
results = await self._run_check(naminter)
-
- filtered_results = [r for r in results if self._should_include_result(r)]
-
- if self.config.export_formats:
+
+ if self.config.export_formats and results:
export_manager = Exporter(self.config.usernames or [], __version__)
- export_manager.export(filtered_results, self.config.export_formats)
+ export_manager.export(results, self.config.export_formats)
async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
"""Run the username check functionality."""
- if not self.config.usernames:
- raise ValueError("At least one username is required")
-
- if self.config.site_names:
- available_sites = naminter.list_sites()
- actual_site_count = len([s for s in self.config.site_names if s in available_sites])
- else:
- actual_site_count = len(naminter._wmn_data.get("sites", []))
-
+ summary = await naminter.get_wmn_summary(
+ site_names=self.config.site_names,
+ include_categories=self.config.include_categories,
+ exclude_categories=self.config.exclude_categories,
+ )
+ actual_site_count = int(summary.get("sites_count", 0))
total_sites = actual_site_count * len(self.config.usernames)
tracker = ResultsTracker(total_sites)
- all_results = []
+ results: List[SiteResult] = []
with ProgressManager(console, disabled=self.config.no_progressbar) as progress_mgr:
progress_mgr.start(total_sites, "Checking usernames...")
- results = await naminter.check_usernames(
+ result_stream = await naminter.check_usernames(
usernames=self.config.usernames,
site_names=self.config.site_names,
+ include_categories=self.config.include_categories,
+ exclude_categories=self.config.exclude_categories,
fuzzy_mode=self.config.fuzzy_mode,
as_generator=True
)
- async for result in results:
+
+ async for result in result_stream:
tracker.add_result(result)
if self._should_include_result(result):
- response_file_path = await self._process_result(result)
+ response_file_path = await self._process_result(result)
formatted_output = self._formatter.format_result(result, response_file_path)
console.print(formatted_output)
-
- all_results.append(result)
- progress_mgr.update(description=tracker.get_progress_text())
+ results.append(result)
+
+ progress_mgr.update(advance=1, description=tracker.get_progress_text())
- return all_results
+ return results
async def _run_self_check(self, naminter: Naminter) -> List[SelfCheckResult]:
"""Run the self-check functionality."""
- sites_data = naminter._wmn_data.get("sites", [])
-
- if self.config.site_names:
- available_sites = [site.get("name") for site in sites_data if site.get("name")]
- filtered_sites = [site for site in sites_data if site.get("name") in self.config.site_names]
- site_count = len(filtered_sites)
- else:
- site_count = len(sites_data)
-
- total_tests = 0
- for site in sites_data:
- if isinstance(site, dict):
- known_accounts = site.get("known", [])
- if isinstance(known_accounts, list) and known_accounts:
- total_tests += len(known_accounts)
+ summary = await naminter.get_wmn_summary(
+ site_names=self.config.site_names,
+ include_categories=self.config.include_categories,
+ exclude_categories=self.config.exclude_categories,
+ )
+ total_tests = int(summary.get("known_accounts_total", 0))
tracker = ResultsTracker(total_tests)
- all_results = []
+ results: List[SelfCheckResult] = []
with ProgressManager(console, disabled=self.config.no_progressbar) as progress_mgr:
- progress_mgr.start(site_count, "Running self-check...")
+ progress_mgr.start(total_tests, "Running self-check...")
- results = await naminter.self_check(
+ result_stream = await naminter.self_check(
site_names=self.config.site_names,
+ include_categories=self.config.include_categories,
+ exclude_categories=self.config.exclude_categories,
fuzzy_mode=self.config.fuzzy_mode,
as_generator=True
)
- async for result in results:
+
+ async for result in result_stream:
for site_result in result.results:
tracker.add_result(site_result)
-
+ progress_mgr.update(advance=1, description=tracker.get_progress_text())
+
if self._should_include_result(result):
- response_files = []
+ response_files: List[Optional[Path]] = []
for site_result in result.results:
response_file_path = await self._process_result(site_result)
if response_file_path:
response_files.append(response_file_path)
-
+ else:
+ response_files.append(None)
formatted_output = self._formatter.format_self_check(result, response_files)
console.print(formatted_output)
-
- all_results.append(result)
- progress_mgr.update(description=tracker.get_progress_text())
+ results.append(result)
- return all_results
+ return results
+
def _should_include_result(self, result: Union[SiteResult, SelfCheckResult]) -> bool:
"""Determine if a result should be included in output based on filter settings."""
- if isinstance(result, SelfCheckResult):
- status = result.result_status
- else:
- status = result.result_status
+ status = result.result_status
if self.config.filter_all:
return True
- elif self.config.filter_errors and status == ResultStatus.ERROR:
- return True
- elif self.config.filter_not_found and status == ResultStatus.NOT_FOUND:
- return True
- elif self.config.filter_unknown and status == ResultStatus.UNKNOWN:
- return True
- elif self.config.filter_ambiguous and status == ResultStatus.AMBIGUOUS:
- return True
- elif not any([self.config.filter_errors, self.config.filter_not_found, self.config.filter_unknown, self.config.filter_ambiguous]):
+
+ filter_conditions = [
+ (self.config.filter_ambiguous, ResultStatus.AMBIGUOUS),
+ (self.config.filter_unknown, ResultStatus.UNKNOWN),
+ (self.config.filter_not_found, ResultStatus.NOT_FOUND),
+ (self.config.filter_not_valid, ResultStatus.NOT_VALID),
+ (self.config.filter_errors, ResultStatus.ERROR),
+ ]
+
+ for filter_enabled, expected_status in filter_conditions:
+ if filter_enabled and status == expected_status:
+ return True
+
+ if not any(filter_enabled for filter_enabled, _ in filter_conditions):
return status == ResultStatus.FOUND
return False
+ async def _open_browser(self, url: str) -> None:
+ """Open a URL in the browser with error handling."""
+ try:
+ await asyncio.to_thread(webbrowser.open, url)
+ except Exception as e:
+ display_error(f"Error opening browser for {url}: {e}")
+
+ async def _write_file(self, file_path: Path, content: str) -> None:
+ """Write content to a file with error handling."""
+ try:
+ async with aiofiles.open(file_path, "w", encoding="utf-8") as file:
+ await file.write(content)
+ except PermissionError as e:
+ display_error(f"Permission denied writing to {file_path}: {e}")
+ except OSError as e:
+ display_error(f"OS error writing to {file_path}: {e}")
+ except Exception as e:
+ display_error(f"Failed to write to {file_path}: {e}")
+
async def _process_result(self, result: SiteResult) -> Optional[Path]:
"""Process a single result: handle browser opening, response saving, and console output."""
response_file = None
- if result.result_url:
- self._found_results.append(result)
- if self.config.browse:
- try:
- await asyncio.to_thread(webbrowser.open, result.result_url)
- except Exception as e:
- display_error(f"Error opening browser for {result.result_url}: {e}")
-
+ if result.result_url and self.config.browse:
+ await self._open_browser(result.result_url)
+
if self.config.save_response and result.response_text and self._response_dir:
try:
safe_site_name = sanitize_filename(result.site_name)
safe_username = sanitize_filename(result.username)
status_str = result.result_status.value
- created_at_str = result.created_at.strftime('%Y%m%d_%H%M%S')
-
- base_filename = f"{status_str}_{result.response_code}_{safe_site_name}_{safe_username}_{created_at_str}.html"
+ created_at_str = result.created_at.strftime(RESPONSE_FILE_DATE_FORMAT)
+
+ base_filename = f"{status_str}_{result.response_code}_{safe_site_name}_{safe_username}_{created_at_str}{RESPONSE_FILE_EXTENSION}"
response_file = self._response_dir / base_filename
-
- await asyncio.to_thread(response_file.write_text, result.response_text, encoding="utf-8")
-
+
+ await self._write_file(response_file, result.response_text)
+
if self.config.open_response:
- try:
- file_uri = response_file.resolve().as_uri()
- await asyncio.to_thread(webbrowser.open, file_uri)
- except Exception as e:
- display_error(f"Error opening response file {response_file}: {e}")
+ file_uri = response_file.resolve().as_uri()
+ await self._open_browser(file_uri)
+ except PermissionError as e:
+ display_error(f"Permission denied saving response to file: {e}")
+ except OSError as e:
+ display_error(f"OS error saving response to file: {e}")
except Exception as e:
display_error(f"Failed to save response to file: {e}")
-
+
return response_file
+ @staticmethod
+ def _setup_logging(config: NaminterConfig) -> None:
+ """Setup logging configuration if log level and file are specified."""
+ if config.log_level and config.log_file:
+ log_path = Path(config.log_file)
+ log_path.parent.mkdir(parents=True, exist_ok=True)
+ level_value = getattr(logging, str(config.log_level).upper(), logging.INFO)
+ logging.basicConfig(
+ level=level_value,
+ format=LOGGING_FORMAT,
+ filename=str(log_path),
+ filemode="a",
+ )
-@click.group(invoke_without_command=True, context_settings=dict(help_option_names=['-h', '--help']))
-@click.option('--version', is_flag=True, help='Show version information and exit')
+
+@click.group(invoke_without_command=True, no_args_is_help=True, context_settings=dict(help_option_names=['-h', '--help']))
+@click.option('--version', is_flag=True, is_eager=True, expose_value=False, callback=_version_callback, help='Show version information and exit')
@click.option('--no-color', is_flag=True, help='Disable colored console output')
@click.option('--no-progressbar', is_flag=True, help='Disable progress bar during execution')
@click.option('--username', '-u', multiple=True, help='Username(s) to search for across social media platforms')
@@ -248,8 +286,7 @@ async def _process_result(self, result: SiteResult) -> Optional[Path]:
@click.option('--timeout', type=int, default=HTTP_REQUEST_TIMEOUT_SECONDS, help='Maximum time in seconds to wait for each HTTP request')
@click.option('--allow-redirects', is_flag=True, default=HTTP_ALLOW_REDIRECTS, help='Whether to follow HTTP redirects automatically')
@click.option('--verify-ssl', is_flag=True, default=HTTP_SSL_VERIFY, help='Whether to verify SSL/TLS certificates for HTTPS requests')
-@click.option('--impersonate', type=click.Choice(typing.get_args(BrowserTypeLiteral) + ("none",)), default="chrome", help='Browser to impersonate in HTTP requests (use "none" to disable impersonation)')
-@click.option('--no-impersonate', is_flag=True, help='Disable browser impersonation (equivalent to --impersonate none)')
+@click.option('--impersonate', type=click.Choice(["none", *typing.get_args(BrowserTypeLiteral)]), default="chrome", help='Browser to impersonate in HTTP requests (use "none" to disable)')
@click.option('--ja3', help='JA3 fingerprint string for TLS fingerprinting')
@click.option('--akamai', help='Akamai fingerprint string for Akamai bot detection bypass')
@click.option('--extra-fp', help='Extra fingerprinting options as JSON string (e.g., \'{"tls_grease": true, "tls_cert_compression": "brotli"}\')')
@@ -271,44 +308,22 @@ async def _process_result(self, result: SiteResult) -> Optional[Path]:
@click.option('--json', 'json_export', is_flag=True, help='Export results to JSON file')
@click.option('--json-path', help='Custom path for JSON export')
@click.option('--filter-all', is_flag=True, help='Include all results in console output and exports')
-@click.option('--filter-errors', is_flag=True, help='Show only error results in console output and exports')
-@click.option('--filter-not-found', is_flag=True, help='Show only not found results in console output and exports')
-@click.option('--filter-unknown', is_flag=True, help='Show only unknown results in console output and exports')
@click.option('--filter-ambiguous', is_flag=True, help='Show only ambiguous results in console output and exports')
+@click.option('--filter-unknown', is_flag=True, help='Show only unknown results in console output and exports')
+@click.option('--filter-not-found', is_flag=True, help='Show only not found results in console output and exports')
+@click.option('--filter-not-valid', is_flag=True, help='Show only not valid results in console output and exports')
+@click.option('--filter-errors', is_flag=True, help='Show only error results in console output and exports')
@click.pass_context
-def main(ctx: click.Context, version: bool, **kwargs) -> None:
+def main(ctx: click.Context, **kwargs: Any) -> None:
"""The most powerful and fast username availability checker that searches across hundreds of websites using WhatsMyName dataset."""
-
- if version:
- display_version()
- ctx.exit()
-
+
if ctx.invoked_subcommand is not None:
return
- # If no subcommand is invoked, run the main functionality
- if not kwargs.get('username') and not kwargs.get('self_check'):
- click.echo(ctx.get_help())
- ctx.exit(1)
-
if kwargs.get('no_color'):
console.no_color = True
try:
- # Handle --no-impersonate flag
- impersonate_value = kwargs.get('impersonate')
- if kwargs.get('no_impersonate'):
- impersonate_value = "none"
-
- # Parse extra fingerprinting options if provided
- extra_fp = None
- if kwargs.get('extra_fp'):
- try:
- extra_fp = json.loads(kwargs.get('extra_fp'))
- except json.JSONDecodeError as e:
- display_error(f"Invalid JSON in --extra-fp option: {e}")
- ctx.exit(1)
-
config = NaminterConfig(
usernames=kwargs.get('username'),
site_names=kwargs.get('site'),
@@ -324,10 +339,10 @@ def main(ctx: click.Context, version: bool, **kwargs) -> None:
proxy=kwargs.get('proxy'),
allow_redirects=kwargs.get('allow_redirects'),
verify_ssl=kwargs.get('verify_ssl'),
- impersonate=impersonate_value,
+ impersonate=kwargs.get('impersonate'),
ja3=kwargs.get('ja3'),
akamai=kwargs.get('akamai'),
- extra_fp=extra_fp,
+ extra_fp=kwargs.get('extra_fp'),
fuzzy_mode=kwargs.get('fuzzy_mode'),
self_check=kwargs.get('self_check'),
log_level=kwargs.get('log_level'),
@@ -350,18 +365,11 @@ def main(ctx: click.Context, version: bool, **kwargs) -> None:
filter_not_found=kwargs.get('filter_not_found'),
filter_unknown=kwargs.get('filter_unknown'),
filter_ambiguous=kwargs.get('filter_ambiguous'),
+ filter_not_valid=kwargs.get('filter_not_valid'),
no_progressbar=kwargs.get('no_progressbar'),
)
- if config.log_level and config.log_file:
- log_path = Path(config.log_file)
- log_path.parent.mkdir(parents=True, exist_ok=True)
- logging.basicConfig(
- level=config.log_level,
- format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
- filename=str(log_path),
- filemode="a"
- )
+ NaminterCLI._setup_logging(config)
naminter_cli = NaminterCLI(config)
asyncio.run(naminter_cli.run())
diff --git a/naminter/cli/progress.py b/naminter/cli/progress.py
index 71d648c..f3d0a76 100644
--- a/naminter/cli/progress.py
+++ b/naminter/cli/progress.py
@@ -35,10 +35,8 @@ def add_result(self, result: SiteResult) -> None:
raise ValueError("Result cannot be None")
if not hasattr(result, 'result_status'):
raise ValueError("Result must have a result_status attribute")
-
- if result.result_status not in (ResultStatus.ERROR, ResultStatus.NOT_VALID):
- self.results_count += 1
-
+
+ self.results_count += 1
self.status_counts[result.result_status] += 1
def get_progress_text(self) -> str:
@@ -100,7 +98,6 @@ def create_progress_bar(self) -> Progress:
TimeElapsedColumn(),
TextColumn("•"),
TimeRemainingColumn(),
- TextColumn(""),
console=self.console,
)
diff --git a/naminter/core/constants.py b/naminter/core/constants.py
index 532d488..e41b694 100644
--- a/naminter/core/constants.py
+++ b/naminter/core/constants.py
@@ -1,5 +1,4 @@
from typing import Final
-from curl_cffi import BrowserTypeLiteral
# Remote data source configuration
WMN_REMOTE_URL: Final[str] = "https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data.json"
@@ -11,7 +10,6 @@
HTTP_ALLOW_REDIRECTS: Final[bool] = False
# Browser impersonation settings
-# To disable browser impersonation by default, change this to "none"
BROWSER_IMPERSONATE_AGENT: Final[str] = "chrome"
# Concurrency settings
@@ -35,4 +33,4 @@
LOGGING_FORMAT: Final[str] = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
# Placeholder for account name substitution in uri_check or post_body
-ACCOUNT_PLACEHOLDER: Final[str] = "{account}"
\ No newline at end of file
+ACCOUNT_PLACEHOLDER: Final[str] = "{account}"
diff --git a/naminter/core/main.py b/naminter/core/main.py
index f19aa41..cc9ae16 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -1,29 +1,21 @@
import asyncio
-import jsonschema
import logging
import time
-from typing import Any, AsyncGenerator, Coroutine, Dict, List, Optional, Union
+from typing import Any, AsyncGenerator, Dict, List, Optional, Union, Set
-import jsonschema
from curl_cffi.requests import AsyncSession, RequestsError
from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
-from ..core.models import ResultStatus, SiteResult, SelfCheckResult
+from ..core.models import ResultStatus, SiteResult, SelfCheckResult, ValidationMode
from ..core.exceptions import (
- ConfigurationError,
- NetworkError,
DataError,
- SessionError,
- SchemaValidationError,
ValidationError,
- ConcurrencyError,
)
from ..core.utils import (
validate_wmn_data,
validate_numeric_values,
configure_proxy,
validate_usernames,
- filter_sites,
)
from ..core.constants import (
HTTP_REQUEST_TIMEOUT_SECONDS,
@@ -31,16 +23,6 @@
HTTP_ALLOW_REDIRECTS,
BROWSER_IMPERSONATE_AGENT,
MAX_CONCURRENT_TASKS,
- MIN_TASKS,
- MAX_TASKS_LIMIT,
- MIN_TIMEOUT,
- MAX_TIMEOUT,
- HIGH_CONCURRENCY_THRESHOLD,
- HIGH_CONCURRENCY_MIN_TIMEOUT,
- VERY_HIGH_CONCURRENCY_THRESHOLD,
- VERY_HIGH_CONCURRENCY_MIN_TIMEOUT,
- EXTREME_CONCURRENCY_THRESHOLD,
- LOW_TIMEOUT_WARNING_THRESHOLD,
ACCOUNT_PLACEHOLDER,
)
@@ -70,15 +52,15 @@ def __init__(
max_tasks, timeout, impersonate, verify_ssl, allow_redirects, bool(proxy), ja3, akamai
)
- self.max_tasks = max_tasks if max_tasks is not None else MAX_CONCURRENT_TASKS
- self.timeout = timeout if timeout is not None else HTTP_REQUEST_TIMEOUT_SECONDS
- self.impersonate = impersonate if impersonate is not None else BROWSER_IMPERSONATE_AGENT
- self.verify_ssl = verify_ssl if verify_ssl is not None else HTTP_SSL_VERIFY
- self.allow_redirects = allow_redirects if allow_redirects is not None else HTTP_ALLOW_REDIRECTS
+ self.max_tasks = max_tasks
+ self.timeout = timeout
+ self.impersonate = impersonate
+ self.verify_ssl = verify_ssl
+ self.allow_redirects = allow_redirects
self.proxy = configure_proxy(proxy)
self.ja3 = ja3
self.akamai = akamai
- self.extra_fp = extra_fp
+ self.extra_fp = extra_fp.to_dict() if isinstance(extra_fp, ExtraFingerprints) else extra_fp
validate_numeric_values(self.max_tasks, self.timeout)
validate_wmn_data(wmn_data, wmn_schema)
@@ -86,20 +68,18 @@ def __init__(
self._wmn_data = wmn_data
self._wmn_schema = wmn_schema
self._semaphore = asyncio.Semaphore(self.max_tasks)
+ self._session_lock = asyncio.Lock()
self._session: Optional[AsyncSession] = None
- sites_count = len(self._wmn_data.get("sites", [])) if self._wmn_data else 0
self._logger.info(
- "Naminter initialized successfully: loaded %d sites, max_tasks=%d, timeout=%ds, browser=%s, ssl_verify=%s, proxy=%s, ja3=%s, akamai=%s",
- sites_count, self.max_tasks, self.timeout,
+ "Naminter initialized successfully: max_tasks=%d, timeout=%ds, browser=%s, ssl_verify=%s, proxy=%s, ja3=%s, akamai=%s",
+ self.max_tasks, self.timeout,
self.impersonate, self.verify_ssl, bool(self.proxy), self.ja3, self.akamai
)
- async def __aenter__(self) -> "Naminter":
- # Convert ExtraFingerprints to dict if needed
- extra_fp_value = self.extra_fp.to_dict() if isinstance(self.extra_fp, ExtraFingerprints) else self.extra_fp
-
- self._session = AsyncSession(
+ def _create_async_session(self) -> AsyncSession:
+ """Create and configure the underlying HTTP session."""
+ return AsyncSession(
proxies=self.proxy,
verify=self.verify_ssl,
timeout=self.timeout,
@@ -107,12 +87,27 @@ async def __aenter__(self) -> "Naminter":
impersonate=self.impersonate,
ja3=self.ja3,
akamai=self.akamai,
- extra_fp=extra_fp_value,
+ extra_fp=self.extra_fp,
)
- return self
-
- async def __aexit__(self, exc_type: Optional[type], exc_val: Optional[BaseException], exc_tb: Optional[Any]) -> None:
- """Async context manager exit."""
+
+ def open_session(self) -> None:
+ """Open the HTTP session for manual (non-context) usage."""
+ if self._session is None:
+ self._session = self._create_async_session()
+ self._logger.info("HTTP session opened successfully.")
+
+ async def ensure_session(self) -> None:
+ """Ensure the HTTP session is initialized (safe for concurrent calls)."""
+ if self._session is not None:
+ return
+
+ async with self._session_lock:
+ if self._session is None:
+ self._session = self._create_async_session()
+ self._logger.info("HTTP session opened successfully.")
+
+ async def close_session(self) -> None:
+ """Close the HTTP session if it is open."""
if self._session:
try:
await self._session.close()
@@ -122,33 +117,111 @@ async def __aexit__(self, exc_type: Optional[type], exc_val: Optional[BaseExcept
finally:
self._session = None
- async def get_wmn_info(self) -> Dict[str, Any]:
- """Get WMN metadata information."""
+ async def __aenter__(self) -> "Naminter":
+ await self.ensure_session()
+ return self
+
+ async def __aexit__(self, exc_type: Optional[type], exc_val: Optional[BaseException], exc_tb: Optional[Any]) -> None:
+ """Async context manager exit."""
+ await self.close_session()
+
+ async def get_wmn_summary(
+ self,
+ site_names: Optional[List[str]] = None,
+ include_categories: Optional[List[str]] = None,
+ exclude_categories: Optional[List[str]] = None,
+ ) -> Dict[str, Any]:
+ """Get enriched WMN metadata information for diagnostics and UI.
+
+ Filters can be applied to compute statistics on a subset of sites.
+ """
try:
- info = {
- "license": self._wmn_data.get("license", []),
- "authors": self._wmn_data.get("authors", []),
- "categories": list(set(site.get("cat", "") for site in self._wmn_data.get("sites", []))),
- "sites_count": len(self._wmn_data.get("sites", []))
+ sites: List[Dict[str, Any]] = self._filter_sites(
+ site_names,
+ include_categories=include_categories,
+ exclude_categories=exclude_categories,
+ )
+
+ category_list: List[str] = [site.get("cat") for site in sites if site.get("cat")]
+ site_name_list: List[str] = [site.get("name") for site in sites if site.get("name")]
+
+ total_known_accounts: int = 0
+
+ for site in sites:
+ known_list = site.get("known")
+ if isinstance(known_list, list) and len(known_list) > 0:
+ total_known_accounts += len(known_list)
+
+ info: Dict[str, Any] = {
+ "license": list(dict.fromkeys(self._wmn_data.get("license", []))),
+ "authors": list(dict.fromkeys(self._wmn_data.get("authors", []))),
+ "site_names": list(dict.fromkeys(site_name_list)),
+ "sites_count": len(sites),
+ "categories": list(dict.fromkeys(category_list)),
+ "categories_count": len(set(category_list)),
+ "known_accounts_total": total_known_accounts,
}
- self._logger.info("Retrieved WMN metadata: %d sites across %d categories",
- info["sites_count"], len(info["categories"]))
+
+ self._logger.info(
+ "WMN info: %d sites, %d categories (filters - names: %s, include: %s, exclude: %s)",
+ info["sites_count"],
+ info["categories_count"],
+ bool(site_names),
+ bool(include_categories),
+ bool(exclude_categories),
+ )
return info
except Exception as e:
self._logger.error("Error retrieving WMN metadata: %s", e, exc_info=True)
return {"error": f"Failed to retrieve metadata: {e}"}
- def list_sites(self) -> List[str]:
- """List all site names."""
- sites = [site.get("name", "") for site in self._wmn_data.get("sites", [])]
- self._logger.info("Retrieved %d site names from WMN data", len(sites))
- return sites
- def list_categories(self) -> List[str]:
- """List all unique categories."""
- category_list = sorted({site.get("cat") for site in self._wmn_data.get("sites", []) if site.get("cat")})
- self._logger.info("Retrieved %d unique categories from WMN data", len(category_list))
- return category_list
+ def _filter_sites(
+ self,
+ site_names: Optional[List[str]],
+ include_categories: Optional[List[str]] = None,
+ exclude_categories: Optional[List[str]] = None,
+ ) -> List[Dict[str, Any]]:
+ """Filter sites by names and categories for the current WMN dataset."""
+ sites: List[Dict[str, Any]] = self._wmn_data.get("sites", [])
+
+ if site_names:
+ requested_site_names: Set[str] = set(site_names)
+ available_names: Set[str] = {site.get("name") for site in sites}
+ missing_names = requested_site_names - available_names
+ if missing_names:
+ raise DataError(f"Unknown site names: {missing_names}")
+ else:
+ requested_site_names = set()
+
+ filtered_sites: List[Dict[str, Any]] = sites
+
+ if requested_site_names:
+ filtered_sites = [
+ site for site in filtered_sites if site.get("name") in requested_site_names
+ ]
+
+ if include_categories:
+ include_set: Set[str] = set(include_categories)
+ filtered_sites = [
+ site for site in filtered_sites if site.get("cat") in include_set
+ ]
+
+ if exclude_categories:
+ exclude_set: Set[str] = set(exclude_categories)
+ filtered_sites = [
+ site for site in filtered_sites if site.get("cat") not in exclude_set
+ ]
+
+ self._logger.info(
+ "Filtered to %d sites from %d total (names: %s, include: %s, exclude: %s)",
+ len(filtered_sites),
+ len(sites),
+ bool(site_names),
+ bool(include_categories),
+ bool(exclude_categories),
+ )
+ return filtered_sites
async def check_site(
self,
@@ -157,6 +230,8 @@ async def check_site(
fuzzy_mode: bool = False,
) -> SiteResult:
"""Check a single site for the given username."""
+ await self.ensure_session()
+
site_name = site.get("name")
category = site.get("cat")
uri_check_template = site.get("uri_check")
@@ -208,32 +283,34 @@ async def check_site(
if fuzzy_mode:
if all(val is None for val in matchers.values()):
self._logger.error(
- "Site '%s' must define at least one matcher (e_code, e_string, m_code, or m_string) for fuzzy mode",
- site_name
+ "Site '%s' must define at least one matcher (e_code, e_string, m_code, or m_string) for %s mode",
+ site_name,
+ ValidationMode.FUZZY,
)
return SiteResult(
site_name=site_name,
category=category,
username=username,
result_status=ResultStatus.ERROR,
- error="Site must define at least one matcher for fuzzy mode",
+ error=f"Site must define at least one matcher for {ValidationMode.FUZZY} mode",
)
else:
missing = [name for name, val in matchers.items() if val is None]
if missing:
self._logger.error(
- "Site '%s' missing required matchers for strict mode: %s",
- site_name, missing
+ "Site '%s' missing required matchers for %s mode: %s",
+ site_name, ValidationMode.STRICT, missing
)
return SiteResult(
site_name=site_name,
category=category,
username=username,
result_status=ResultStatus.ERROR,
- error=f"Site missing required matchers: {missing}",
+ error=f"Site missing required matchers for {ValidationMode.STRICT} mode: {missing}",
)
-
- clean_username = username.translate(str.maketrans("", "", site.get("strip_bad_char", "")))
+
+ strip_bad_char = site.get("strip_bad_char", "")
+ clean_username = username.translate(str.maketrans("", "", strip_bad_char))
if not clean_username:
return SiteResult(site_name, category, username, ResultStatus.ERROR, error=f"Username '{username}' became empty after character stripping")
@@ -241,7 +318,7 @@ async def check_site(
uri_pretty = site.get("uri_pretty", uri_check_template).replace(ACCOUNT_PLACEHOLDER, clean_username)
self._logger.info("Checking site '%s' (category: %s) for username '%s' in %s mode",
- site_name, category, username, "fuzzy" if fuzzy_mode else "strict")
+ site_name, category, username, ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
try:
async with self._semaphore:
@@ -302,7 +379,7 @@ async def check_site(
result_status.name,
response_code,
elapsed,
- "fuzzy" if fuzzy_mode else "strict",
+ ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT,
)
return SiteResult(
@@ -320,41 +397,57 @@ async def check_usernames(
self,
usernames: List[str],
site_names: Optional[List[str]] = None,
+ include_categories: Optional[List[str]] = None,
+ exclude_categories: Optional[List[str]] = None,
fuzzy_mode: bool = False,
as_generator: bool = False,
) -> Union[List[SiteResult], AsyncGenerator[SiteResult, None]]:
"""Check one or multiple usernames across all loaded sites."""
+ await self.ensure_session()
+
usernames = validate_usernames(usernames)
self._logger.info("Starting username enumeration for %d username(s): %s", len(usernames), usernames)
- sites = await filter_sites(site_names, self._wmn_data.get("sites", []))
- self._logger.info("Will check against %d sites in %s mode", len(sites), "fuzzy" if fuzzy_mode else "strict")
+ sites = self._filter_sites(
+ site_names,
+ include_categories=include_categories,
+ exclude_categories=exclude_categories,
+ )
+ self._logger.info("Will check against %d sites in %s mode", len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
- tasks: List[Coroutine[Any, Any, SiteResult]] = [
+ coroutines = [
self.check_site(site, username, fuzzy_mode)
for site in sites for username in usernames
]
- async def generate_results() -> AsyncGenerator[SiteResult, None]:
- for task in asyncio.as_completed(tasks):
- yield await task
+ async def iterate_results() -> AsyncGenerator[SiteResult, None]:
+ for completed_task in asyncio.as_completed(coroutines):
+ yield await completed_task
if as_generator:
- return generate_results()
+ return iterate_results()
- results = await asyncio.gather(*tasks)
+ results = await asyncio.gather(*coroutines)
return results
async def self_check(
self,
site_names: Optional[List[str]] = None,
+ include_categories: Optional[List[str]] = None,
+ exclude_categories: Optional[List[str]] = None,
fuzzy_mode: bool = False,
- as_generator: bool = False,
+ as_generator: bool = False
) -> Union[List[SelfCheckResult], AsyncGenerator[SelfCheckResult, None]]:
"""Run self-checks using known accounts for each site."""
- sites = await filter_sites(site_names, self._wmn_data.get("sites", []))
+ await self.ensure_session()
+
+ sites = self._filter_sites(
+ site_names,
+ include_categories=include_categories,
+ exclude_categories=exclude_categories,
+ )
- self._logger.info("Starting self-check validation for %d sites in %s mode", len(sites), "fuzzy" if fuzzy_mode else "strict")
+ self._logger.info("Starting self-check validation for %d sites in %s mode", len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
async def _check_known(site: Dict[str, Any]) -> SelfCheckResult:
"""Helper function to check a site with all its known users."""
@@ -392,13 +485,13 @@ async def _check_known(site: Dict[str, Any]) -> SelfCheckResult:
self._logger.info("Self-checking site '%s' (category: %s) with %d known accounts", site_name, category, len(known))
try:
- tasks = [self.check_site(site, username, fuzzy_mode) for username in known]
- site_results = await asyncio.gather(*tasks)
+ coroutines = [self.check_site(site, username, fuzzy_mode) for username in known]
+ results = await asyncio.gather(*coroutines)
return SelfCheckResult(
site_name=site_name,
category=category,
- results=site_results
+ results=results
)
except Exception as e:
self._logger.error("Unexpected error during self-check for site '%s': %s", site_name, e, exc_info=True)
@@ -409,16 +502,16 @@ async def _check_known(site: Dict[str, Any]) -> SelfCheckResult:
error=f"Unexpected error during self-check: {e}"
)
- tasks: List[Coroutine[Any, Any, SelfCheckResult]] = [
+ coroutines = [
_check_known(site) for site in sites if isinstance(site, dict)
]
- async def generate_results() -> AsyncGenerator[SelfCheckResult, None]:
- for task in asyncio.as_completed(tasks):
- yield await task
+ async def iterate_results() -> AsyncGenerator[SelfCheckResult, None]:
+ for completed_task in asyncio.as_completed(coroutines):
+ yield await completed_task
if as_generator:
- return generate_results()
+ return iterate_results()
- results = await asyncio.gather(*tasks)
+ results = await asyncio.gather(*coroutines)
return results
\ No newline at end of file
diff --git a/naminter/core/models.py b/naminter/core/models.py
index 536a02a..294071d 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -1,19 +1,22 @@
from dataclasses import dataclass, asdict, field
-from enum import Enum
-from typing import Optional, Dict, Any, List, Union, Set, Literal, TypedDict
+from enum import StrEnum, auto
+from typing import Optional, Dict, Any, List, Set
from datetime import datetime
-from curl_cffi import BrowserTypeLiteral, CurlSslVersion, ExtraFingerprints
-class ResultStatus(Enum):
+class ValidationMode(StrEnum):
+ FUZZY = "fuzzy"
+ STRICT = "strict"
+
+class ResultStatus(StrEnum):
"""Status of username search results."""
- FOUND = "found"
- NOT_FOUND = "not_found"
- ERROR = "error"
- UNKNOWN = "unknown"
- AMBIGUOUS = "ambiguous"
- NOT_VALID = "not_valid"
+ FOUND = auto()
+ NOT_FOUND = auto()
+ ERROR = auto()
+ UNKNOWN = auto()
+ AMBIGUOUS = auto()
+ NOT_VALID = auto()
-@dataclass
+@dataclass(slots=True, frozen=True)
class SiteResult:
"""Result of testing a username on a site."""
site_name: str
@@ -83,7 +86,7 @@ def to_dict(self, exclude_response_text: bool = False) -> Dict[str, Any]:
result.pop('response_text', None)
return result
-@dataclass
+@dataclass(slots=True, frozen=True)
class SelfCheckResult:
"""Result of a self-check for a username."""
site_name: str
@@ -95,9 +98,9 @@ class SelfCheckResult:
def __post_init__(self) -> None:
"""Calculate result status from results."""
- self.result_status = self.get_result_status()
+ object.__setattr__(self, 'result_status', self._get_result_status())
- def get_result_status(self) -> ResultStatus:
+ def _get_result_status(self) -> ResultStatus:
"""Determine result status from results."""
if self.error:
return ResultStatus.ERROR
diff --git a/naminter/core/utils.py b/naminter/core/utils.py
index 2f32241..8196fec 100644
--- a/naminter/core/utils.py
+++ b/naminter/core/utils.py
@@ -132,24 +132,3 @@ def validate_usernames(usernames: List[str]) -> List[str]:
logger.info(f"Validated {len(unique_usernames)} unique usernames")
return unique_usernames
-
-
-async def filter_sites(
- site_names: Optional[List[str]],
- sites: List[Dict[str, Any]],
-) -> List[Dict[str, Any]]:
- """Filter the list of sites by the provided site names."""
- if not site_names:
- return sites
-
- # Convert to set for O(1) lookup performance
- site_names_set = set(site_names)
- available = {site.get("name") for site in sites}
- missing = site_names_set - available
-
- if missing:
- raise DataError(f"Unknown site names: {missing}")
-
- filtered_sites = [site for site in sites if site.get("name") in site_names_set]
- logger.info(f"Filtered to {len(filtered_sites)} sites from {len(sites)} total")
- return filtered_sites
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 6efd148..4394dd1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,12 +4,12 @@ build-backend = "setuptools.build_meta"
[project]
name = "naminter"
-version = "1.0.6"
+dynamic = ["version"]
description = "The most powerful and fast username availability checker that searches across hundreds of websites using WhatsMyName dataset"
readme = "README.md"
authors = [{ name = "3xp0rt", email = "contact@3xp0rt.com" }]
license = { text = "MIT" }
-requires-python = ">=3.8"
+requires-python = ">=3.11"
classifiers = [
"Development Status :: 5 - Production/Stable",
@@ -18,9 +18,6 @@ classifiers = [
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
@@ -30,7 +27,8 @@ classifiers = [
dependencies = [
"click>=8.2.1",
- "curl-cffi>=0.12.0",
+ "curl-cffi>=0.13.0",
+ "aiofiles>=24.1.0",
"jinja2>=3.1.6",
"jsonschema>=4.25.0",
"rich>=14.1.0",
@@ -62,6 +60,9 @@ Source = "https://github.com/3xp0rt/naminter"
[project.scripts]
naminter = "naminter.cli.main:entry_point"
+[tool.setuptools.dynamic]
+version = {attr = "naminter.__version__"}
+
[tool.setuptools]
package-dir = {"" = "."}
include-package-data = true
From dcff8c1a60c93226b7f6a0afb07f75cffd629917 Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Fri, 15 Aug 2025 16:00:29 +0300
Subject: [PATCH 07/19] feat(core, cli): add filter-found option and improve
filtering behavior
---
README.md | 7 +++++--
naminter/cli/config.py | 25 ++++++++++++++++++------
naminter/cli/console.py | 12 ++++++------
naminter/cli/main.py | 42 ++++++++++++++++++++--------------------
naminter/cli/progress.py | 6 +++---
naminter/core/models.py | 6 +++---
6 files changed, 57 insertions(+), 41 deletions(-)
diff --git a/README.md b/README.md
index e111992..7946b60 100644
--- a/README.md
+++ b/README.md
@@ -371,9 +371,12 @@ asyncio.run(main())
| Option | Description |
|-----------------------------|------------------------------------------------------------|
| `--filter-all` | Include all results in console and exports |
-| `--filter-errors` | Show only error results in console and exports |
-| `--filter-not-found` | Show only not found results in console and exports |
+| `--filter-found` | Show only found results in console and exports |
+| `--filter-ambiguous` | Show only ambiguous results in console and exports |
| `--filter-unknown` | Show only unknown results in console and exports |
+| `--filter-not-found` | Show only not found results in console and exports |
+| `--filter-not-valid` | Show only not valid results in console and exports |
+| `--filter-errors` | Show only error results in console and exports |
## Contributing
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index f5906e5..6009168 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -36,11 +36,12 @@ class NaminterConfig:
include_categories: List[str] = field(default_factory=list)
exclude_categories: List[str] = field(default_factory=list)
filter_all: bool = False
- filter_errors: bool = False
- filter_not_found: bool = False
- filter_unknown: bool = False
+ filter_found: bool = False
filter_ambiguous: bool = False
+ filter_unknown: bool = False
+ filter_not_found: bool = False
filter_not_valid: bool = False
+ filter_errors: bool = False
# Network and concurrency
max_tasks: int = MAX_CONCURRENT_TASKS
@@ -98,6 +99,17 @@ def __post_init__(self) -> None:
except Exception as e:
raise ValueError(f"Configuration validation failed: {e}") from e
+ filter_fields = [
+ self.filter_all,
+ self.filter_ambiguous,
+ self.filter_unknown,
+ self.filter_not_found,
+ self.filter_not_valid,
+ self.filter_errors
+ ]
+ if not any(filter_fields):
+ self.filter_found = True
+
if isinstance(self.impersonate, str) and self.impersonate.lower() == "none":
self.impersonate = None
@@ -176,10 +188,11 @@ def to_dict(self) -> Dict[str, Any]:
"json_export": self.json_export,
"json_path": self.json_path,
"filter_all": self.filter_all,
- "filter_errors": self.filter_errors,
- "filter_not_found": self.filter_not_found,
- "filter_unknown": self.filter_unknown,
+ "filter_found": self.filter_found,
"filter_ambiguous": self.filter_ambiguous,
+ "filter_unknown": self.filter_unknown,
+ "filter_not_found": self.filter_not_found,
"filter_not_valid": self.filter_not_valid,
+ "filter_errors": self.filter_errors,
"no_progressbar": self.no_progressbar,
}
diff --git a/naminter/cli/console.py b/naminter/cli/console.py
index 512acf2..ba77b01 100644
--- a/naminter/cli/console.py
+++ b/naminter/cli/console.py
@@ -25,20 +25,20 @@
_STATUS_SYMBOLS: Dict[ResultStatus, str] = {
ResultStatus.FOUND: "+",
- ResultStatus.NOT_FOUND: "-",
- ResultStatus.UNKNOWN: "?",
ResultStatus.AMBIGUOUS: "*",
- ResultStatus.ERROR: "!",
+ ResultStatus.UNKNOWN: "?",
+ ResultStatus.NOT_FOUND: "-",
ResultStatus.NOT_VALID: "X",
+ ResultStatus.ERROR: "!",
}
_STATUS_STYLES: Dict[ResultStatus, Style] = {
ResultStatus.FOUND: Style(color=THEME['success'], bold=True),
- ResultStatus.NOT_FOUND: Style(color=THEME['error']),
- ResultStatus.UNKNOWN: Style(color=THEME['warning']),
ResultStatus.AMBIGUOUS: Style(color=THEME['warning'], bold=True),
- ResultStatus.ERROR: Style(color=THEME['error'], bold=True),
+ ResultStatus.UNKNOWN: Style(color=THEME['warning']),
+ ResultStatus.NOT_FOUND: Style(color=THEME['error']),
ResultStatus.NOT_VALID: Style(color=THEME['error']),
+ ResultStatus.ERROR: Style(color=THEME['error'], bold=True),
}
class ResultFormatter:
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index 90aa3ca..77bf959 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -108,6 +108,7 @@ async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
)
actual_site_count = int(summary.get("sites_count", 0))
total_sites = actual_site_count * len(self.config.usernames)
+
tracker = ResultsTracker(total_sites)
results: List[SiteResult] = []
@@ -126,7 +127,7 @@ async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
async for result in result_stream:
tracker.add_result(result)
- if self._should_include_result(result):
+ if self._filter_result(result):
response_file_path = await self._process_result(result)
formatted_output = self._formatter.format_result(result, response_file_path)
console.print(formatted_output)
@@ -164,7 +165,7 @@ async def _run_self_check(self, naminter: Naminter) -> List[SelfCheckResult]:
tracker.add_result(site_result)
progress_mgr.update(advance=1, description=tracker.get_progress_text())
- if self._should_include_result(result):
+ if self._filter_result(result):
response_files: List[Optional[Path]] = []
for site_result in result.results:
response_file_path = await self._process_result(site_result)
@@ -179,29 +180,26 @@ async def _run_self_check(self, naminter: Naminter) -> List[SelfCheckResult]:
return results
- def _should_include_result(self, result: Union[SiteResult, SelfCheckResult]) -> bool:
+ def _filter_result(self, result: Union[SiteResult, SelfCheckResult]) -> bool:
"""Determine if a result should be included in output based on filter settings."""
status = result.result_status
if self.config.filter_all:
return True
- filter_conditions = [
- (self.config.filter_ambiguous, ResultStatus.AMBIGUOUS),
- (self.config.filter_unknown, ResultStatus.UNKNOWN),
- (self.config.filter_not_found, ResultStatus.NOT_FOUND),
- (self.config.filter_not_valid, ResultStatus.NOT_VALID),
- (self.config.filter_errors, ResultStatus.ERROR),
- ]
-
- for filter_enabled, expected_status in filter_conditions:
- if filter_enabled and status == expected_status:
- return True
-
- if not any(filter_enabled for filter_enabled, _ in filter_conditions):
- return status == ResultStatus.FOUND
+        filter_map = [
+            (self.config.filter_found, ResultStatus.FOUND),
+            (self.config.filter_ambiguous, ResultStatus.AMBIGUOUS),
+            (self.config.filter_unknown, ResultStatus.UNKNOWN),
+            (self.config.filter_not_found, ResultStatus.NOT_FOUND),
+            (self.config.filter_not_valid, ResultStatus.NOT_VALID),
+            (self.config.filter_errors, ResultStatus.ERROR),
+        ]
-        return False
+        return any(
+            filter_enabled and status == expected_status
+            for filter_enabled, expected_status in filter_map
+        ) or not any(filter_enabled for filter_enabled, _ in filter_map)
async def _open_browser(self, url: str) -> None:
"""Open a URL in the browser with error handling."""
@@ -308,6 +306,7 @@ def _setup_logging(config: NaminterConfig) -> None:
@click.option('--json', 'json_export', is_flag=True, help='Export results to JSON file')
@click.option('--json-path', help='Custom path for JSON export')
@click.option('--filter-all', is_flag=True, help='Include all results in console output and exports')
+@click.option('--filter-found', is_flag=True, help='Show only found results in console output and exports')
@click.option('--filter-ambiguous', is_flag=True, help='Show only ambiguous results in console output and exports')
@click.option('--filter-unknown', is_flag=True, help='Show only unknown results in console output and exports')
@click.option('--filter-not-found', is_flag=True, help='Show only not found results in console output and exports')
@@ -361,11 +360,12 @@ def main(ctx: click.Context, **kwargs: Any) -> None:
json_export=kwargs.get('json_export'),
json_path=kwargs.get('json_path'),
filter_all=kwargs.get('filter_all'),
- filter_errors=kwargs.get('filter_errors'),
- filter_not_found=kwargs.get('filter_not_found'),
- filter_unknown=kwargs.get('filter_unknown'),
+ filter_found=kwargs.get('filter_found'),
filter_ambiguous=kwargs.get('filter_ambiguous'),
+ filter_unknown=kwargs.get('filter_unknown'),
+ filter_not_found=kwargs.get('filter_not_found'),
filter_not_valid=kwargs.get('filter_not_valid'),
+ filter_errors=kwargs.get('filter_errors'),
no_progressbar=kwargs.get('no_progressbar'),
)
diff --git a/naminter/cli/progress.py b/naminter/cli/progress.py
index f3d0a76..e2ed39e 100644
--- a/naminter/cli/progress.py
+++ b/naminter/cli/progress.py
@@ -45,11 +45,11 @@ def get_progress_text(self) -> str:
rate = self.results_count / elapsed if elapsed > 0 else 0.0
found = self.status_counts[ResultStatus.FOUND]
- not_found = self.status_counts[ResultStatus.NOT_FOUND]
+ ambiguous = self.status_counts[ResultStatus.AMBIGUOUS]
unknown = self.status_counts[ResultStatus.UNKNOWN]
- errors = self.status_counts[ResultStatus.ERROR]
+ not_found = self.status_counts[ResultStatus.NOT_FOUND]
not_valid = self.status_counts[ResultStatus.NOT_VALID]
- ambiguous = self.status_counts[ResultStatus.AMBIGUOUS]
+ errors = self.status_counts[ResultStatus.ERROR]
sections = [
f"[{THEME['primary']}]{rate:.1f} req/s[/]",
diff --git a/naminter/core/models.py b/naminter/core/models.py
index 294071d..ca46e36 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -10,11 +10,11 @@ class ValidationMode(StrEnum):
class ResultStatus(StrEnum):
"""Status of username search results."""
FOUND = auto()
- NOT_FOUND = auto()
- ERROR = auto()
- UNKNOWN = auto()
AMBIGUOUS = auto()
+ UNKNOWN = auto()
+ NOT_FOUND = auto()
NOT_VALID = auto()
+ ERROR = auto()
@dataclass(slots=True, frozen=True)
class SiteResult:
From 5b45d10f639f201146ca992a6c180015e5a7f4d2 Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Fri, 15 Aug 2025 16:18:53 +0300
Subject: [PATCH 08/19] refactor(cli): reorganize NaminterCLI methods by
logical grouping
---
naminter/cli/main.py | 65 ++++++++++++++++++++++----------------------
1 file changed, 32 insertions(+), 33 deletions(-)
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index 77bf959..37a6e0c 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -67,6 +67,20 @@ def _setup_response_dir(self) -> Optional[Path]:
display_error(f"Unexpected error setting up response directory: {e}")
return None
+ @staticmethod
+ def _setup_logging(config: NaminterConfig) -> None:
+ """Setup logging configuration if log level and file are specified."""
+ if config.log_level and config.log_file:
+ log_path = Path(config.log_file)
+ log_path.parent.mkdir(parents=True, exist_ok=True)
+ level_value = getattr(logging, str(config.log_level).upper(), logging.INFO)
+ logging.basicConfig(
+ level=level_value,
+ format=LOGGING_FORMAT,
+ filename=str(log_path),
+ filemode="a",
+ )
+
async def run(self) -> None:
"""Main execution method with progress tracking."""
wmn_data, wmn_schema = load_wmn_lists(
@@ -178,7 +192,6 @@ async def _run_self_check(self, naminter: Naminter) -> List[SelfCheckResult]:
results.append(result)
return results
-
def _filter_result(self, result: Union[SiteResult, SelfCheckResult]) -> bool:
"""Determine if a result should be included in output based on filter settings."""
@@ -201,25 +214,6 @@ def _filter_result(self, result: Union[SiteResult, SelfCheckResult]) -> bool:
for filter_enabled, expected_status in filter_map.items()
) or not any(filter_map.keys())
- async def _open_browser(self, url: str) -> None:
- """Open a URL in the browser with error handling."""
- try:
- await asyncio.to_thread(webbrowser.open, url)
- except Exception as e:
- display_error(f"Error opening browser for {url}: {e}")
-
- async def _write_file(self, file_path: Path, content: str) -> None:
- """Write content to a file with error handling."""
- try:
- async with aiofiles.open(file_path, "w", encoding="utf-8") as file:
- await file.write(content)
- except PermissionError as e:
- display_error(f"Permission denied writing to {file_path}: {e}")
- except OSError as e:
- display_error(f"OS error writing to {file_path}: {e}")
- except Exception as e:
- display_error(f"Failed to write to {file_path}: {e}")
-
async def _process_result(self, result: SiteResult) -> Optional[Path]:
"""Process a single result: handle browser opening, response saving, and console output."""
response_file = None
@@ -251,19 +245,24 @@ async def _process_result(self, result: SiteResult) -> Optional[Path]:
return response_file
- @staticmethod
- def _setup_logging(config: NaminterConfig) -> None:
- """Setup logging configuration if log level and file are specified."""
- if config.log_level and config.log_file:
- log_path = Path(config.log_file)
- log_path.parent.mkdir(parents=True, exist_ok=True)
- level_value = getattr(logging, str(config.log_level).upper(), logging.INFO)
- logging.basicConfig(
- level=level_value,
- format=LOGGING_FORMAT,
- filename=str(log_path),
- filemode="a",
- )
+ async def _open_browser(self, url: str) -> None:
+ """Open a URL in the browser with error handling."""
+ try:
+ await asyncio.to_thread(webbrowser.open, url)
+ except Exception as e:
+ display_error(f"Error opening browser for {url}: {e}")
+
+ async def _write_file(self, file_path: Path, content: str) -> None:
+ """Write content to a file with error handling."""
+ try:
+ async with aiofiles.open(file_path, "w", encoding="utf-8") as file:
+ await file.write(content)
+ except PermissionError as e:
+ display_error(f"Permission denied writing to {file_path}: {e}")
+ except OSError as e:
+ display_error(f"OS error writing to {file_path}: {e}")
+ except Exception as e:
+ display_error(f"Failed to write to {file_path}: {e}")
@click.group(invoke_without_command=True, no_args_is_help=True, context_settings=dict(help_option_names=['-h', '--help']))
From 030501c0be1fc9e018d300291a0d8d94dd9d470c Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Fri, 15 Aug 2025 18:19:09 +0300
Subject: [PATCH 09/19] refactor(core,cli): make session management methods
private and improve encapsulation
---
naminter/cli/main.py | 9 +++++----
naminter/core/main.py | 18 +++++++++---------
naminter/core/models.py | 4 ++--
3 files changed, 16 insertions(+), 15 deletions(-)
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index 37a6e0c..5de18ca 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -52,11 +52,12 @@ def _setup_response_dir(self) -> Optional[Path]:
return None
try:
- response_dir = self.config.response_dir
- if response_dir is None:
+ response_path = self.config.response_path
+ if response_path is None:
return None
- response_dir.mkdir(parents=True, exist_ok=True)
- return response_dir
+
+ response_path.mkdir(parents=True, exist_ok=True)
+ return response_path
except PermissionError as e:
display_error(f"Permission denied creating/accessing response directory: {e}")
return None
diff --git a/naminter/core/main.py b/naminter/core/main.py
index cc9ae16..a61a7c4 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -38,7 +38,7 @@ def __init__(
proxy: Optional[Union[str, Dict[str, str]]] = None,
verify_ssl: bool = HTTP_SSL_VERIFY,
allow_redirects: bool = HTTP_ALLOW_REDIRECTS,
- impersonate: Optional[BrowserTypeLiteral] = BROWSER_IMPERSONATE_AGENT,
+ impersonate: BrowserTypeLiteral = BROWSER_IMPERSONATE_AGENT,
ja3: Optional[str] = None,
akamai: Optional[str] = None,
extra_fp: Optional[Union[ExtraFingerprints, Dict[str, Any]]] = None,
@@ -90,13 +90,13 @@ def _create_async_session(self) -> AsyncSession:
extra_fp=self.extra_fp,
)
- def open_session(self) -> None:
+ async def _open_session(self) -> None:
"""Open the HTTP session for manual (non-context) usage."""
if self._session is None:
self._session = self._create_async_session()
self._logger.info("HTTP session opened successfully.")
- async def ensure_session(self) -> None:
+ async def _ensure_session(self) -> None:
"""Ensure the HTTP session is initialized (safe for concurrent calls)."""
if self._session is not None:
return
@@ -106,7 +106,7 @@ async def ensure_session(self) -> None:
self._session = self._create_async_session()
self._logger.info("HTTP session opened successfully.")
- async def close_session(self) -> None:
+ async def _close_session(self) -> None:
"""Close the HTTP session if it is open."""
if self._session:
try:
@@ -118,12 +118,12 @@ async def close_session(self) -> None:
self._session = None
async def __aenter__(self) -> "Naminter":
- await self.ensure_session()
+ await self._ensure_session()
return self
async def __aexit__(self, exc_type: Optional[type], exc_val: Optional[BaseException], exc_tb: Optional[Any]) -> None:
"""Async context manager exit."""
- await self.close_session()
+ await self._close_session()
async def get_wmn_summary(
self,
@@ -230,7 +230,7 @@ async def check_site(
fuzzy_mode: bool = False,
) -> SiteResult:
"""Check a single site for the given username."""
- await self.ensure_session()
+ await self._ensure_session()
site_name = site.get("name")
category = site.get("cat")
@@ -403,7 +403,7 @@ async def check_usernames(
as_generator: bool = False,
) -> Union[List[SiteResult], AsyncGenerator[SiteResult, None]]:
"""Check one or multiple usernames across all loaded sites."""
- await self.ensure_session()
+ await self._ensure_session()
usernames = validate_usernames(usernames)
self._logger.info("Starting username enumeration for %d username(s): %s", len(usernames), usernames)
@@ -439,7 +439,7 @@ async def self_check(
as_generator: bool = False
) -> Union[List[SelfCheckResult], AsyncGenerator[SelfCheckResult, None]]:
"""Run self-checks using known accounts for each site."""
- await self.ensure_session()
+ await self._ensure_session()
sites = self._filter_sites(
site_names,
diff --git a/naminter/core/models.py b/naminter/core/models.py
index ca46e36..dd411ae 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -4,8 +4,8 @@
from datetime import datetime
class ValidationMode(StrEnum):
- FUZZY = "fuzzy"
- STRICT = "strict"
+ FUZZY = auto()
+ STRICT = auto()
class ResultStatus(StrEnum):
"""Status of username search results."""
From 8740cf768891d908f2f2166e821410a011c01613 Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Fri, 15 Aug 2025 21:31:22 +0300
Subject: [PATCH 10/19] refactor(core,cli): rename check to enumeration
throughout codebase
---
.github/workflows/docker-publish.yml | 2 +-
README.md | 42 +++++++++----------
naminter/__init__.py | 4 +-
naminter/cli/config.py | 12 +++---
naminter/cli/console.py | 28 ++++++-------
naminter/cli/main.py | 36 ++++++++---------
naminter/cli/progress.py | 2 +-
naminter/core/main.py | 60 ++++++++++++++--------------
naminter/core/models.py | 6 +--
pyproject.toml | 10 +++--
10 files changed, 103 insertions(+), 99 deletions(-)
diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml
index b6f49ef..ed11bee 100644
--- a/.github/workflows/docker-publish.yml
+++ b/.github/workflows/docker-publish.yml
@@ -68,7 +68,7 @@ jobs:
type=sha,prefix=sha-
labels: |
org.opencontainers.image.title=Naminter
- org.opencontainers.image.description=The most powerful and fast username availability checker
+ org.opencontainers.image.description=Asynchronous OSINT username enumeration tool that searches hundreds of websites using the WhatsMyName dataset
org.opencontainers.image.vendor=3xp0rt
org.opencontainers.image.licenses=MIT
org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }}
diff --git a/README.md b/README.md
index 7946b60..745a8eb 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@
[](https://pypi.org/project/naminter/)
[](https://pypi.org/project/naminter/)
-Naminter is a powerful, fast, and flexible username enumeration tool and Python package. Leveraging the comprehensive [WhatsMyName](https://github.com/WebBreacher/WhatsMyName) list, Naminter efficiently enumerates usernames across hundreds of websites. With advanced features like browser impersonation, concurrent checking, and customizable filtering, it can be used both as a command-line tool and as a library in your Python projects.
+Naminter is an asynchronous OSINT username enumeration tool and Python package. Leveraging the comprehensive [WhatsMyName](https://github.com/WebBreacher/WhatsMyName) list, Naminter enumerates usernames across hundreds of websites. With advanced features like browser impersonation, asynchronous enumeration, and customizable filtering, it can be used both as a command-line tool and as a library in your Python projects.
@@ -37,7 +37,7 @@ Naminter is a powerful, fast, and flexible username enumeration tool and Python
- **Category Filters:** Include or exclude sites by category
- **Custom Site Lists:** Use your own or remote WhatsMyName-format lists and schemas
- **Proxy & Network Options:** Full proxy support, SSL verification, and redirect control
-- **Self-Check Mode:** Validate detection methods for reliability
+- **Self-Enum Mode:** Validate detection methods for reliability
- **Export Results:** Output to CSV, JSON, HTML, and PDF
- **Response Handling:** Save/open HTTP responses for analysis
- **Flexible Filtering:** Filter results by found, not found, errors, or unknown
@@ -80,13 +80,13 @@ docker compose run --rm naminter --username john_doe
### Basic CLI Usage
-Check a single username:
+Enumerate a single username:
```bash
naminter --username john_doe
```
-Check multiple usernames:
+Enumerate multiple usernames:
```bash
naminter --username user1 --username user2 --username user3
@@ -94,7 +94,7 @@ naminter --username user1 --username user2 --username user3
### Advanced CLI Options
-Customize the checker with various command-line arguments:
+Customize the enumerator with various command-line arguments:
```bash
# Basic username enumeration with custom settings
@@ -127,8 +127,8 @@ naminter --username alice_bob \
--html \
--filter-all
-# Self-check with detailed output
-naminter --self-check \
+# Self-enum with detailed output
+naminter --self-enum \
--show-details \
--log-level DEBUG \
--log-file debug.log
@@ -136,7 +136,7 @@ naminter --self-check \
### Using as a Python Package
-Naminter can be used programmatically in Python projects to check the availability of usernames across various platforms. The Naminter class requires WhatsMyName (WMN) data to operate. You can either load this data from local files or fetch it from remote sources.
+Naminter can be used programmatically in Python projects to enumerate usernames across various platforms. The Naminter class requires WhatsMyName (WMN) data to operate. You can either load this data from local files or fetch it from remote sources.
#### Getting Started - Loading WMN Data
@@ -185,14 +185,14 @@ async def main():
# Initialize Naminter with the WMN data
async with Naminter(wmn_data, wmn_schema) as naminter:
- results = await naminter.check_usernames(["example_username"])
+ results = await naminter.enumerate_usernames(["example_username"])
for result in results:
if result.result_status.value == "found":
print(f"✅ {result.username} found on {result.site_name}: {result.result_url}")
elif result.result_status.value == "not_found":
print(f"❌ {result.username} not found on {result.site_name}")
elif result.result_status.value == "error":
- print(f"⚠️ Error checking {result.username} on {result.site_name}: {result.error}")
+ print(f"⚠️ Error enumerating {result.username} on {result.site_name}: {result.error}")
asyncio.run(main())
```
@@ -210,7 +210,7 @@ async def main():
async with Naminter(wmn_data, wmn_schema) as naminter:
# Use as_generator=True for streaming results
- results = await naminter.check_usernames(["example_username"], as_generator=True)
+ results = await naminter.enumerate_usernames(["example_username"], as_generator=True)
async for result in results:
if result.result_status.value == "found":
print(f"✅ {result.username} found on {result.site_name}: {result.result_url}")
@@ -240,7 +240,7 @@ async def main():
proxy="http://proxy:8080"
) as naminter:
usernames = ["user1", "user2", "user3"]
- results = await naminter.check_usernames(usernames, fuzzy_mode=True)
+ results = await naminter.enumerate_usernames(usernames, fuzzy_mode=True)
for result in results:
if result.result_status.value == "found":
@@ -253,7 +253,7 @@ async def main():
asyncio.run(main())
```
-#### Self-Check and Validation
+#### Self-Enum and Validation
```python
import asyncio
@@ -263,10 +263,10 @@ async def main():
wmn_data, wmn_schema = await load_wmn_data()
async with Naminter(wmn_data, wmn_schema) as naminter:
- # Perform self-check to validate site configurations
- self_check_results = await naminter.self_check()
+ # Perform self-enum to validate site configurations
+ self_enum_results = await naminter.self_enum()
- for site_result in self_check_results:
+ for site_result in self_enum_results:
if site_result.error:
print(f"❌ {site_result.site_name}: {site_result.error}")
else:
@@ -304,7 +304,7 @@ asyncio.run(main())
| Option | Description |
|-----------------------------|------------------------------------------------------------|
| `--username, -u` | Username(s) to search |
-| `--site, -s` | Specific site name(s) to check |
+| `--site, -s` | Specific site name(s) to enumerate |
| `--version` | Show version information |
| `--no-color` | Disable colored output |
| `--no-progressbar` | Disable progress bar display |
@@ -312,16 +312,16 @@ asyncio.run(main())
### Input Lists
| Option | Description |
|-----------------------------|------------------------------------------------------------|
-| `--local-list` | Path(s) to local file(s) containing list of sites to check |
-| `--remote-list` | URL(s) to fetch remote list(s) of sites to check |
+| `--local-list` | Path(s) to local file(s) containing list of sites to enumerate |
+| `--remote-list` | URL(s) to fetch remote list(s) of sites to enumerate |
| `--skip-validation` | Skip WhatsMyName schema validation for lists |
| `--local-schema` | Path to local WhatsMyName schema file |
| `--remote-schema` | URL to fetch custom WhatsMyName schema |
-### Self-Check
+### Self-Enum
| Option | Description |
|-----------------------------|------------------------------------------------------------|
-| `--self-check` | Perform self-check of the application |
+| `--self-enum` | Run self-enum mode to validate site detection accuracy |
### Category Filters
| Option | Description |
diff --git a/naminter/__init__.py b/naminter/__init__.py
index ad02e70..7f07e6d 100644
--- a/naminter/__init__.py
+++ b/naminter/__init__.py
@@ -1,8 +1,10 @@
from .core.main import Naminter
__version__ = "1.0.7"
+__name__ = "naminter"
__author__ = "3xp0rt"
-__description__ = "WhatsMyName Enumeration Tool"
+__author_email__ = "contact@3xp0rt.com"
+__description__ = "Asynchronous OSINT username enumeration tool that searches hundreds of websites using the WhatsMyName dataset"
__license__ = "MIT"
__email__ = "contact@3xp0rt.com"
__url__ = "https://github.com/3xp0rt/Naminter"
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index 6009168..b9df857 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -18,7 +18,7 @@
class NaminterConfig:
"""Configuration for Naminter CLI tool.
- Holds all configuration parameters for username checking operations, including network settings, export options, filtering, and validation parameters.
+ Holds all configuration parameters for username enumeration operations, including network settings, export options, filtering, and validation parameters.
"""
# Required parameters
usernames: List[str]
@@ -55,7 +55,7 @@ class NaminterConfig:
extra_fp: Optional[Union[ExtraFingerprints, Dict[str, Any], str]] = None
browse: bool = False
fuzzy_mode: bool = False
- self_check: bool = False
+ self_enum: bool = False
no_progressbar: bool = False
# Logging
@@ -80,13 +80,13 @@ class NaminterConfig:
def __post_init__(self) -> None:
"""Validate and normalize configuration after initialization."""
- if self.self_check and self.usernames:
+ if self.self_enum and self.usernames:
display_warning(
- "Self-check mode enabled: provided usernames will be ignored, "
+ "Self-enum mode enabled: provided usernames will be ignored, "
"using known usernames from site configurations instead."
)
- if not self.self_check and not self.usernames:
+ if not self.self_enum and not self.usernames:
raise ValueError("At least one username is required")
try:
@@ -172,7 +172,7 @@ def to_dict(self) -> Dict[str, Any]:
"extra_fp": self.extra_fp.to_dict() if isinstance(self.extra_fp, ExtraFingerprints) else self.extra_fp,
"browse": self.browse,
"fuzzy_mode": self.fuzzy_mode,
- "self_check": self.self_check,
+ "self_enum": self.self_enum,
"log_level": self.log_level,
"log_file": self.log_file,
"show_details": self.show_details,
diff --git a/naminter/cli/console.py b/naminter/cli/console.py
index ba77b01..982bd54 100644
--- a/naminter/cli/console.py
+++ b/naminter/cli/console.py
@@ -9,7 +9,7 @@
from rich.text import Text
from rich.tree import Tree
-from ..core.models import ResultStatus, SiteResult, SelfCheckResult
+from ..core.models import ResultStatus, SiteResult, SelfEnumResult
from .. import __description__, __version__, __author__, __license__, __email__, __url__
console: Console = Console()
@@ -80,24 +80,24 @@ def format_result(self, site_result: SiteResult, response_file_path: Optional[Pa
return tree
- def format_self_check(self, self_check_result: SelfCheckResult, response_files: Optional[List[Optional[Path]]] = None) -> Tree:
- """Format self-check results into a tree structure."""
+ def format_self_enum(self, self_enum_result: SelfEnumResult, response_files: Optional[List[Optional[Path]]] = None) -> Tree:
+ """Format self-enum results into a tree structure."""
- if not self_check_result:
- raise ValueError("SelfCheckResult cannot be None or empty")
+ if not self_enum_result:
+ raise ValueError("SelfEnumResult cannot be None or empty")
- if not isinstance(self_check_result, SelfCheckResult):
- raise ValueError("Parameter must be a SelfCheckResult instance")
+ if not isinstance(self_enum_result, SelfEnumResult):
+ raise ValueError("Parameter must be a SelfEnumResult instance")
- if not self_check_result.site_name or not self_check_result.site_name.strip():
- raise ValueError("SelfCheckResult must have a valid site_name")
+ if not self_enum_result.site_name or not self_enum_result.site_name.strip():
+ raise ValueError("SelfEnumResult must have a valid site_name")
- if not self_check_result.results:
- raise ValueError("SelfCheckResult must have test results")
+ if not self_enum_result.results:
+ raise ValueError("SelfEnumResult must have test results")
- site_name = self_check_result.site_name
- test_results = self_check_result.results
- result_status = self_check_result.result_status
+ site_name = self_enum_result.site_name
+ test_results = self_enum_result.results
+ result_status = self_enum_result.result_status
root_label = Text()
root_label.append(_STATUS_SYMBOLS.get(result_status, "?"), style=_STATUS_STYLES.get(result_status, Style()))
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index 5de18ca..fdccebd 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -22,7 +22,7 @@
from ..cli.progress import ProgressManager, ResultsTracker
from ..cli.constants import RESPONSE_FILE_DATE_FORMAT, RESPONSE_FILE_EXTENSION
from ..cli.utils import load_wmn_lists, sanitize_filename
-from ..core.models import ResultStatus, SiteResult, SelfCheckResult
+from ..core.models import ResultStatus, SiteResult, SelfEnumResult
from ..core.main import Naminter
from ..core.constants import MAX_CONCURRENT_TASKS, HTTP_REQUEST_TIMEOUT_SECONDS, HTTP_ALLOW_REDIRECTS, HTTP_SSL_VERIFY, WMN_SCHEMA_URL, LOGGING_FORMAT
@@ -39,7 +39,7 @@ def _version_callback(ctx: click.Context, param: click.Option, value: bool) -> N
class NaminterCLI:
- """Handles username availability checks."""
+ """Handles username enumeration operations."""
def __init__(self, config: NaminterConfig) -> None:
self.config: NaminterConfig = config
@@ -105,8 +105,8 @@ async def run(self) -> None:
akamai=self.config.akamai,
extra_fp=self.config.extra_fp,
) as naminter:
- if self.config.self_check:
- results = await self._run_self_check(naminter)
+ if self.config.self_enum:
+ results = await self._run_self_enum(naminter)
else:
results = await self._run_check(naminter)
@@ -115,7 +115,7 @@ async def run(self) -> None:
export_manager.export(results, self.config.export_formats)
async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
- """Run the username check functionality."""
+ """Run the username enumeration functionality."""
summary = await naminter.get_wmn_summary(
site_names=self.config.site_names,
include_categories=self.config.include_categories,
@@ -128,9 +128,9 @@ async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
results: List[SiteResult] = []
with ProgressManager(console, disabled=self.config.no_progressbar) as progress_mgr:
- progress_mgr.start(total_sites, "Checking usernames...")
+ progress_mgr.start(total_sites, "Enumerating usernames...")
- result_stream = await naminter.check_usernames(
+ result_stream = await naminter.enumerate_usernames(
usernames=self.config.usernames,
site_names=self.config.site_names,
include_categories=self.config.include_categories,
@@ -152,8 +152,8 @@ async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
return results
- async def _run_self_check(self, naminter: Naminter) -> List[SelfCheckResult]:
- """Run the self-check functionality."""
+ async def _run_self_enum(self, naminter: Naminter) -> List[SelfEnumResult]:
+ """Run the self-enum functionality."""
summary = await naminter.get_wmn_summary(
site_names=self.config.site_names,
include_categories=self.config.include_categories,
@@ -162,12 +162,12 @@ async def _run_self_check(self, naminter: Naminter) -> List[SelfCheckResult]:
total_tests = int(summary.get("known_accounts_total", 0))
tracker = ResultsTracker(total_tests)
- results: List[SelfCheckResult] = []
+ results: List[SelfEnumResult] = []
with ProgressManager(console, disabled=self.config.no_progressbar) as progress_mgr:
- progress_mgr.start(total_tests, "Running self-check...")
+ progress_mgr.start(total_tests, "Running self-enum...")
- result_stream = await naminter.self_check(
+ result_stream = await naminter.self_enum(
site_names=self.config.site_names,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
@@ -188,13 +188,13 @@ async def _run_self_check(self, naminter: Naminter) -> List[SelfCheckResult]:
response_files.append(response_file_path)
else:
response_files.append(None)
- formatted_output = self._formatter.format_self_check(result, response_files)
+ formatted_output = self._formatter.format_self_enum(result, response_files)
console.print(formatted_output)
results.append(result)
return results
- def _filter_result(self, result: Union[SiteResult, SelfCheckResult]) -> bool:
+ def _filter_result(self, result: Union[SiteResult, SelfEnumResult]) -> bool:
"""Determine if a result should be included in output based on filter settings."""
status = result.result_status
@@ -271,13 +271,13 @@ async def _write_file(self, file_path: Path, content: str) -> None:
@click.option('--no-color', is_flag=True, help='Disable colored console output')
@click.option('--no-progressbar', is_flag=True, help='Disable progress bar during execution')
@click.option('--username', '-u', multiple=True, help='Username(s) to search for across social media platforms')
-@click.option('--site', '-s', multiple=True, help='Specific site name(s) to check (e.g., "GitHub", "Twitter")')
+@click.option('--site', '-s', multiple=True, help='Specific site name(s) to enumerate (e.g., "GitHub", "Twitter")')
@click.option('--local-list', type=click.Path(exists=True, path_type=Path), multiple=True, help='Path(s) to local JSON file(s) containing WhatsMyName site data')
@click.option('--remote-list', multiple=True, help='URL(s) to fetch remote WhatsMyName site data')
@click.option('--local-schema', type=click.Path(exists=True, path_type=Path), help='Path to local WhatsMyName JSON schema file for validation')
@click.option('--remote-schema', default=WMN_SCHEMA_URL, help='URL to fetch custom WhatsMyName JSON schema for validation')
@click.option('--skip-validation', is_flag=True, help='Skip JSON schema validation of WhatsMyName data')
-@click.option('--self-check', is_flag=True, help='Run self-check mode to validate site detection accuracy')
+@click.option('--self-enum', is_flag=True, help='Run self-enum mode to validate site detection accuracy')
@click.option('--include-categories', multiple=True, help='Include only sites from specified categories (e.g., "social", "coding")')
@click.option('--exclude-categories', multiple=True, help='Exclude sites from specified categories (e.g., "adult", "gaming")')
@click.option('--proxy', help='Proxy server to use for requests (e.g., http://proxy:port, socks5://proxy:port)')
@@ -314,7 +314,7 @@ async def _write_file(self, file_path: Path, content: str) -> None:
@click.option('--filter-errors', is_flag=True, help='Show only error results in console output and exports')
@click.pass_context
def main(ctx: click.Context, **kwargs: Any) -> None:
- """The most powerful and fast username availability checker that searches across hundreds of websites using WhatsMyName dataset."""
+ """Asynchronous OSINT username enumeration tool that searches hundreds of websites using the WhatsMyName dataset."""
if ctx.invoked_subcommand is not None:
return
@@ -343,7 +343,7 @@ def main(ctx: click.Context, **kwargs: Any) -> None:
akamai=kwargs.get('akamai'),
extra_fp=kwargs.get('extra_fp'),
fuzzy_mode=kwargs.get('fuzzy_mode'),
- self_check=kwargs.get('self_check'),
+ self_enum=kwargs.get('self_enum'),
log_level=kwargs.get('log_level'),
log_file=kwargs.get('log_file'),
show_details=kwargs.get('show_details'),
diff --git a/naminter/cli/progress.py b/naminter/cli/progress.py
index e2ed39e..23ad187 100644
--- a/naminter/cli/progress.py
+++ b/naminter/cli/progress.py
@@ -17,7 +17,7 @@
from ..core.models import ResultStatus, SiteResult
class ResultsTracker:
- """Tracks results for the username availability checks."""
+ """Tracks results for the username enumeration operations."""
def __init__(self, total_sites: int) -> None:
"""Initialize the results tracker."""
diff --git a/naminter/core/main.py b/naminter/core/main.py
index a61a7c4..f637e3e 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -6,7 +6,7 @@
from curl_cffi.requests import AsyncSession, RequestsError
from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
-from ..core.models import ResultStatus, SiteResult, SelfCheckResult, ValidationMode
+from ..core.models import ResultStatus, SiteResult, SelfEnumResult, ValidationMode
from ..core.exceptions import (
DataError,
ValidationError,
@@ -223,13 +223,13 @@ def _filter_sites(
)
return filtered_sites
- async def check_site(
+ async def enumerate_site(
self,
site: Dict[str, Any],
username: str,
fuzzy_mode: bool = False,
) -> SiteResult:
- """Check a single site for the given username."""
+ """Enumerate a single site for the given username."""
await self._ensure_session()
site_name = site.get("name")
@@ -317,7 +317,7 @@ async def check_site(
uri_check = uri_check_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
uri_pretty = site.get("uri_pretty", uri_check_template).replace(ACCOUNT_PLACEHOLDER, clean_username)
- self._logger.info("Checking site '%s' (category: %s) for username '%s' in %s mode",
+ self._logger.info("Enumerating site '%s' (category: %s) for username '%s' in %s mode",
site_name, category, username, ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
try:
@@ -340,7 +340,7 @@ async def check_site(
self._logger.warning("Request to '%s' was cancelled", site_name)
raise
except RequestsError as e:
- self._logger.warning("Network error while checking '%s': %s", site_name, e, exc_info=True)
+ self._logger.warning("Network error while enumerating '%s': %s", site_name, e, exc_info=True)
return SiteResult(
site_name=site_name,
category=category,
@@ -350,7 +350,7 @@ async def check_site(
error=f"Network error: {e}",
)
except Exception as e:
- self._logger.error("Unexpected error while checking '%s': %s", site_name, e, exc_info=True)
+ self._logger.error("Unexpected error while enumerating '%s': %s", site_name, e, exc_info=True)
return SiteResult(
site_name=site_name,
category=category,
@@ -393,7 +393,7 @@ async def check_site(
response_text=response_text,
)
- async def check_usernames(
+ async def enumerate_usernames(
self,
usernames: List[str],
site_names: Optional[List[str]] = None,
@@ -402,7 +402,7 @@ async def check_usernames(
fuzzy_mode: bool = False,
as_generator: bool = False,
) -> Union[List[SiteResult], AsyncGenerator[SiteResult, None]]:
- """Check one or multiple usernames across all loaded sites."""
+ """Enumerate one or multiple usernames across all loaded sites."""
await self._ensure_session()
usernames = validate_usernames(usernames)
@@ -413,10 +413,10 @@ async def check_usernames(
include_categories=include_categories,
exclude_categories=exclude_categories,
)
- self._logger.info("Will check against %d sites in %s mode", len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
+ self._logger.info("Will enumerate against %d sites in %s mode", len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
coroutines = [
- self.check_site(site, username, fuzzy_mode)
+ self.enumerate_site(site, username, fuzzy_mode)
for site in sites for username in usernames
]
@@ -430,15 +430,15 @@ async def iterate_results() -> AsyncGenerator[SiteResult, None]:
results = await asyncio.gather(*coroutines)
return results
- async def self_check(
+ async def self_enum(
self,
site_names: Optional[List[str]] = None,
include_categories: Optional[List[str]] = None,
exclude_categories: Optional[List[str]] = None,
fuzzy_mode: bool = False,
as_generator: bool = False
- ) -> Union[List[SelfCheckResult], AsyncGenerator[SelfCheckResult, None]]:
- """Run self-checks using known accounts for each site."""
+ ) -> Union[List[SelfEnumResult], AsyncGenerator[SelfEnumResult, None]]:
+ """Run self-enum using known accounts for each site."""
await self._ensure_session()
sites = self._filter_sites(
@@ -447,17 +447,17 @@ async def self_check(
exclude_categories=exclude_categories,
)
- self._logger.info("Starting self-check validation for %d sites in %s mode", len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
+ self._logger.info("Starting self-enum validation for %d sites in %s mode", len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
- async def _check_known(site: Dict[str, Any]) -> SelfCheckResult:
- """Helper function to check a site with all its known users."""
+ async def _enumerate_known(site: Dict[str, Any]) -> SelfEnumResult:
+ """Helper function to enumerate a site with all its known users."""
site_name = site.get("name")
category = site.get("cat")
known = site.get("known")
if not site_name:
- self._logger.error("Site configuration missing required 'name' field for self-check: %r", site)
- return SelfCheckResult(
+ self._logger.error("Site configuration missing required 'name' field for self-enum: %r", site)
+ return SelfEnumResult(
site_name=site_name,
category=category,
results=[],
@@ -465,8 +465,8 @@ async def _check_known(site: Dict[str, Any]) -> SelfCheckResult:
)
if not category:
- self._logger.error("Site '%s' missing required 'cat' field for self-check", site_name)
- return SelfCheckResult(
+ self._logger.error("Site '%s' missing required 'cat' field for self-enum", site_name)
+ return SelfEnumResult(
site_name=site_name,
category=category,
results=[],
@@ -474,39 +474,39 @@ async def _check_known(site: Dict[str, Any]) -> SelfCheckResult:
)
if known is None:
- self._logger.error("Site '%s' missing required 'known' field for self-check", site_name)
- return SelfCheckResult(
+ self._logger.error("Site '%s' missing required 'known' field for self-enum", site_name)
+ return SelfEnumResult(
site_name=site_name,
category=category,
results=[],
error=f"Site '{site_name}' missing required field: known"
)
- self._logger.info("Self-checking site '%s' (category: %s) with %d known accounts", site_name, category, len(known))
+ self._logger.info("Self-enuming site '%s' (category: %s) with %d known accounts", site_name, category, len(known))
try:
- coroutines = [self.check_site(site, username, fuzzy_mode) for username in known]
+ coroutines = [self.enumerate_site(site, username, fuzzy_mode) for username in known]
results = await asyncio.gather(*coroutines)
- return SelfCheckResult(
+ return SelfEnumResult(
site_name=site_name,
category=category,
results=results
)
except Exception as e:
- self._logger.error("Unexpected error during self-check for site '%s': %s", site_name, e, exc_info=True)
- return SelfCheckResult(
+ self._logger.error("Unexpected error during self-enum for site '%s': %s", site_name, e, exc_info=True)
+ return SelfEnumResult(
site_name=site_name,
category=category,
results=[],
- error=f"Unexpected error during self-check: {e}"
+ error=f"Unexpected error during self-enum: {e}"
)
coroutines = [
- _check_known(site) for site in sites if isinstance(site, dict)
+ _enumerate_known(site) for site in sites if isinstance(site, dict)
]
- async def iterate_results() -> AsyncGenerator[SelfCheckResult, None]:
+ async def iterate_results() -> AsyncGenerator[SelfEnumResult, None]:
for completed_task in asyncio.as_completed(coroutines):
yield await completed_task
diff --git a/naminter/core/models.py b/naminter/core/models.py
index dd411ae..b85d235 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -87,8 +87,8 @@ def to_dict(self, exclude_response_text: bool = False) -> Dict[str, Any]:
return result
@dataclass(slots=True, frozen=True)
-class SelfCheckResult:
- """Result of a self-check for a username."""
+class SelfEnumResult:
+ """Result of a self-enum for a username."""
site_name: str
category: str
results: List[SiteResult]
@@ -122,7 +122,7 @@ def _get_result_status(self) -> ResultStatus:
return next(iter(statuses))
def to_dict(self, exclude_response_text: bool = False) -> Dict[str, Any]:
- """Convert SelfCheckResult to dict."""
+ """Convert SelfEnumResult to dict."""
return {
'site_name': self.site_name,
'category': self.category,
diff --git a/pyproject.toml b/pyproject.toml
index 4394dd1..302ffe4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,11 +4,8 @@ build-backend = "setuptools.build_meta"
[project]
name = "naminter"
-dynamic = ["version"]
-description = "The most powerful and fast username availability checker that searches across hundreds of websites using WhatsMyName dataset"
+dynamic = ["version", "description", "authors", "license"]  # FIXME(review): setuptools only resolves `version` dynamically via `attr:`; `authors` and `license` have no dynamic directive at all — this will fail at build time
readme = "README.md"
-authors = [{ name = "3xp0rt", email = "contact@3xp0rt.com" }]
-license = { text = "MIT" }
requires-python = ">=3.11"
classifiers = [
@@ -62,6 +59,11 @@ naminter = "naminter.cli.main:entry_point"
[tool.setuptools.dynamic]
version = {attr = "naminter.__version__"}
+description = {attr = "naminter.__description__"}  # FIXME(review): `attr:` is only supported for `version` in [tool.setuptools.dynamic]
+authors = [  # FIXME(review): setuptools has no dynamic `authors` key --
+    {name = "naminter.__author__", email = "naminter.__author_email__"}  # these strings would be used literally, never resolved as attributes
+]
+license = {text = "naminter.__license__"}  # FIXME(review): nor dynamic `license`; keep authors/license static in [project]
[tool.setuptools]
package-dir = {"" = "."}
From ac4c4cba1e2a4f6a708e68cb5b036a0507448946 Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Sat, 16 Aug 2025 14:19:47 +0300
Subject: [PATCH 11/19] refactor(cli,core): standardize naming and simplify
code structure
---
README.md | 50 ++++++++--------
naminter/cli/config.py | 15 ++---
naminter/cli/console.py | 65 +++++++--------------
naminter/cli/exporters.py | 6 +-
naminter/cli/main.py | 42 ++++++-------
naminter/cli/progress.py | 18 +-----
naminter/core/main.py | 120 +++++++++++++++++++-------------------
naminter/core/models.py | 32 ++++------
8 files changed, 150 insertions(+), 198 deletions(-)
diff --git a/README.md b/README.md
index 745a8eb..53ee33e 100644
--- a/README.md
+++ b/README.md
@@ -37,7 +37,7 @@ Naminter is an asynchronous OSINT username enumeration tool and Python package.
- **Category Filters:** Include or exclude sites by category
- **Custom Site Lists:** Use your own or remote WhatsMyName-format lists and schemas
- **Proxy & Network Options:** Full proxy support, SSL verification, and redirect control
-- **Self-Enum Mode:** Validate detection methods for reliability
+- **Self-Enumeration Mode:** Validate detection methods for reliability
- **Export Results:** Output to CSV, JSON, HTML, and PDF
- **Response Handling:** Save/open HTTP responses for analysis
- **Flexible Filtering:** Filter results by found, not found, errors, or unknown
@@ -127,8 +127,8 @@ naminter --username alice_bob \
--html \
--filter-all
-# Self-enum with detailed output
-naminter --self-enum \
+# Self-enumeration with detailed output
+naminter --self-enumeration \
--show-details \
--log-level DEBUG \
--log-file debug.log
@@ -187,12 +187,12 @@ async def main():
async with Naminter(wmn_data, wmn_schema) as naminter:
results = await naminter.enumerate_usernames(["example_username"])
for result in results:
- if result.result_status.value == "found":
- print(f"✅ {result.username} found on {result.site_name}: {result.result_url}")
- elif result.result_status.value == "not_found":
- print(f"❌ {result.username} not found on {result.site_name}")
- elif result.result_status.value == "error":
- print(f"⚠️ Error enumerating {result.username} on {result.site_name}: {result.error}")
+ if result.status.value == "found":
+ print(f"✅ {result.username} found on {result.name}: {result.result_url}")
+ elif result.status.value == "not_found":
+ print(f"❌ {result.username} not found on {result.name}")
+ elif result.status.value == "error":
+ print(f"⚠️ Error enumerating {result.username} on {result.name}: {result.error}")
asyncio.run(main())
```
@@ -212,10 +212,10 @@ async def main():
# Use as_generator=True for streaming results
results = await naminter.enumerate_usernames(["example_username"], as_generator=True)
async for result in results:
- if result.result_status.value == "found":
- print(f"✅ {result.username} found on {result.site_name}: {result.result_url}")
- elif result.result_status.value == "not_found":
- print(f"❌ {result.username} not found on {result.site_name}")
+ if result.status.value == "found":
+ print(f"✅ {result.username} found on {result.name}: {result.result_url}")
+ elif result.status.value == "not_found":
+ print(f"❌ {result.username} not found on {result.name}")
asyncio.run(main())
```
@@ -243,17 +243,17 @@ async def main():
results = await naminter.enumerate_usernames(usernames, fuzzy_mode=True)
for result in results:
- if result.result_status.value == "found":
- print(f"✅ Found: {result.username} on {result.site_name}")
+ if result.status.value == "found":
+ print(f"✅ Found: {result.username} on {result.name}")
print(f" URL: {result.result_url}")
print(f" Response time: {result.elapsed:.2f}s")
else:
- print(f"❌ Not found: {result.username} on {result.site_name}")
+ print(f"❌ Not found: {result.username} on {result.name}")
asyncio.run(main())
```
-#### Self-Enum and Validation
+#### Self-Enumeration and Validation
```python
import asyncio
@@ -263,16 +263,16 @@ async def main():
wmn_data, wmn_schema = await load_wmn_data()
async with Naminter(wmn_data, wmn_schema) as naminter:
- # Perform self-enum to validate site configurations
- self_enum_results = await naminter.self_enum()
+ # Perform self-enumeration to validate site configurations
+ self_enumeration_results = await naminter.self_enumeration()
- for site_result in self_enum_results:
+ for site_result in self_enumeration_results:
if site_result.error:
- print(f"❌ {site_result.site_name}: {site_result.error}")
+ print(f"❌ {site_result.name}: {site_result.error}")
else:
- found_count = sum(1 for r in site_result.results if r.result_status.value == "found")
+ found_count = sum(1 for r in site_result.results if r.status.value == "found")
total_count = len(site_result.results)
- print(f"✅ {site_result.site_name}: {found_count}/{total_count} known accounts found")
+ print(f"✅ {site_result.name}: {found_count}/{total_count} known accounts found")
asyncio.run(main())
```
@@ -318,10 +318,10 @@ asyncio.run(main())
| `--local-schema` | Path to local WhatsMyName schema file |
| `--remote-schema` | URL to fetch custom WhatsMyName schema |
-### Self-Enum
+### Self-Enumeration
| Option | Description |
|-----------------------------|------------------------------------------------------------|
-| `--self-enum` | Perform self-enum of the application |
+| `--self-enumeration` | Run self-enumeration mode to validate site detection accuracy |
### Category Filters
| Option | Description |
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index b9df857..e0cfb95 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -22,7 +22,7 @@ class NaminterConfig:
"""
# Required parameters
usernames: List[str]
- site_names: Optional[List[str]] = None
+ sites: Optional[List[str]] = None
logger: Optional[object] = None
# List and schema sources
@@ -55,7 +55,7 @@ class NaminterConfig:
extra_fp: Optional[Union[ExtraFingerprints, Dict[str, Any], str]] = None
browse: bool = False
fuzzy_mode: bool = False
- self_enum: bool = False
+ self_enumeration: bool = False
no_progressbar: bool = False
# Logging
@@ -80,15 +80,12 @@ class NaminterConfig:
def __post_init__(self) -> None:
"""Validate and normalize configuration after initialization."""
- if self.self_enum and self.usernames:
+ if self.self_enumeration and self.usernames:
display_warning(
- "Self-enum mode enabled: provided usernames will be ignored, "
+ "Self-enumeration mode enabled: provided usernames will be ignored, "
"using known usernames from site configurations instead."
)
- if not self.self_enum and not self.usernames:
- raise ValueError("At least one username is required")
-
try:
if self.local_list_paths:
self.local_list_paths = [str(p) for p in self.local_list_paths]
@@ -153,7 +150,7 @@ def to_dict(self) -> Dict[str, Any]:
"""Convert configuration to a dictionary."""
return {
"usernames": self.usernames,
- "site_names": self.site_names,
+ "sites": self.sites,
"local_list_paths": self.local_list_paths,
"remote_list_urls": self.remote_list_urls,
"local_schema_path": self.local_schema_path,
@@ -172,7 +169,7 @@ def to_dict(self) -> Dict[str, Any]:
"extra_fp": self.extra_fp.to_dict() if isinstance(self.extra_fp, ExtraFingerprints) else self.extra_fp,
"browse": self.browse,
"fuzzy_mode": self.fuzzy_mode,
- "self_enum": self.self_enum,
+ "self_enumeration": self.self_enumeration,
"log_level": self.log_level,
"log_file": self.log_file,
"show_details": self.show_details,
diff --git a/naminter/cli/console.py b/naminter/cli/console.py
index 982bd54..ddd603b 100644
--- a/naminter/cli/console.py
+++ b/naminter/cli/console.py
@@ -9,7 +9,7 @@
from rich.text import Text
from rich.tree import Tree
-from ..core.models import ResultStatus, SiteResult, SelfEnumResult
+from ..core.models import ResultStatus, SiteResult, SelfEnumerationResult
from .. import __description__, __version__, __author__, __license__, __email__, __url__
console: Console = Console()
@@ -50,20 +50,14 @@ def __init__(self, show_details: bool = False) -> None:
def format_result(self, site_result: SiteResult, response_file_path: Optional[Path] = None) -> Tree:
"""Format a single result as a tree-style output."""
-
- if site_result is None:
- raise ValueError("SiteResult cannot be None")
-
- if not hasattr(site_result, 'result_status') or not isinstance(site_result.result_status, ResultStatus):
- raise ValueError("SiteResult must have a valid result_status")
root_label = Text()
- status_symbol = _STATUS_SYMBOLS.get(site_result.result_status, "?")
- status_style = _STATUS_STYLES.get(site_result.result_status, Style())
+ status_symbol = _STATUS_SYMBOLS.get(site_result.status, "?")
+ status_style = _STATUS_STYLES.get(site_result.status, Style())
root_label.append(status_symbol, style=status_style)
root_label.append(" [", style=THEME['muted'])
- root_label.append(site_result.site_name or "Unknown", style=THEME['info'])
+ root_label.append(site_result.name or "Unknown", style=THEME['info'])
root_label.append("] ", style=THEME['muted'])
root_label.append(site_result.result_url or "No URL", style=THEME['primary'])
@@ -80,39 +74,23 @@ def format_result(self, site_result: SiteResult, response_file_path: Optional[Pa
return tree
- def format_self_enum(self, self_enum_result: SelfEnumResult, response_files: Optional[List[Optional[Path]]] = None) -> Tree:
- """Format self-enum results into a tree structure."""
-
- if not self_enum_result:
- raise ValueError("SelfEnumResult cannot be None or empty")
-
- if not isinstance(self_enum_result, SelfEnumResult):
- raise ValueError("Parameter must be a SelfEnumResult instance")
-
- if not self_enum_result.site_name or not self_enum_result.site_name.strip():
- raise ValueError("SelfEnumResult must have a valid site_name")
-
- if not self_enum_result.results:
- raise ValueError("SelfEnumResult must have test results")
-
- site_name = self_enum_result.site_name
- test_results = self_enum_result.results
- result_status = self_enum_result.result_status
+ def format_self_enumeration(self, self_enumeration_result: SelfEnumerationResult, response_files: Optional[List[Optional[Path]]] = None) -> Tree:
+ """Format self-enumeration results into a tree structure."""
root_label = Text()
- root_label.append(_STATUS_SYMBOLS.get(result_status, "?"), style=_STATUS_STYLES.get(result_status, Style()))
+ root_label.append(_STATUS_SYMBOLS.get(self_enumeration_result.status, "?"), style=_STATUS_STYLES.get(self_enumeration_result.status, Style()))
root_label.append(" [", style=THEME["muted"])
- root_label.append(site_name, style=THEME["info"])
+ root_label.append(self_enumeration_result.name, style=THEME["info"])
root_label.append("]", style=THEME["muted"])
tree = Tree(root_label, guide_style=THEME["muted"], expanded=True)
- for i, test in enumerate(test_results):
+ for i, test in enumerate(self_enumeration_result.results):
if test is None:
continue
url_text = Text()
- url_text.append(_STATUS_SYMBOLS.get(test.result_status, "?"), style=_STATUS_STYLES.get(test.result_status, Style()))
+ url_text.append(_STATUS_SYMBOLS.get(test.status, "?"), style=_STATUS_STYLES.get(test.status, Style()))
url_text.append(" ", style=THEME["muted"])
url_text.append(f"{test.username}: ", style=THEME["info"])
url_text.append(test.result_url or "No URL", style=THEME["primary"])
@@ -134,18 +112,19 @@ def format_self_enum(self, self_enum_result: SelfEnumResult, response_files: Opt
def _add_debug_info(self, node: Tree, response_code: Optional[int] = None, elapsed: Optional[float] = None,
error: Optional[str] = None, response_file: Optional[Path] = None) -> None:
"""Add debug information to a tree node."""
-
+
if response_code is not None:
node.add(Text(f"Response Code: {response_code}", style=THEME['info']))
- if response_file:
+ if response_file is not None:
node.add(Text(f"Response File: {response_file}", style=THEME['info']))
if elapsed is not None:
node.add(Text(f"Elapsed: {elapsed:.2f}s", style=THEME['info']))
- if error:
+ if error is not None:
node.add(Text(f"Error: {error}", style=THEME['error']))
def display_version() -> None:
"""Display version and metadata of the application."""
+
version_table = Table.grid(padding=(0, 2))
version_table.add_column(style=THEME['info'])
version_table.add_column(style="bold")
@@ -168,37 +147,33 @@ def display_version() -> None:
def _display_message(message: str, style: str, symbol: str, label: str) -> None:
"""Display a styled message with symbol and label."""
-
- if not all([message and message.strip(), style and style.strip(), symbol and symbol.strip(), label and label.strip()]):
- raise ValueError("Message, style, symbol, and label must be non-empty strings")
-
+
formatted_message = Text()
formatted_message.append(symbol, style=style)
formatted_message.append(f" [{label}] ", style=style)
formatted_message.append(message)
console.print(formatted_message)
- if hasattr(console.file, 'flush'):
- console.file.flush()
+ console.file.flush()
def display_error(message: str, show_traceback: bool = False) -> None:
"""Display an error message."""
-
+
_display_message(message, THEME['error'], "!", "ERROR")
if show_traceback:
console.print_exception()
def display_warning(message: str) -> None:
"""Display a warning message."""
-
+
_display_message(message, THEME['warning'], "?", "WARNING")
def display_info(message: str) -> None:
"""Display an info message."""
-
+
_display_message(message, THEME['info'], "*", "INFO")
def display_success(message: str) -> None:
"""Display a success message."""
-
+
_display_message(message, THEME['success'], "+", "SUCCESS")
diff --git a/naminter/cli/exporters.py b/naminter/cli/exporters.py
index ef74206..e86d277 100644
--- a/naminter/cli/exporters.py
+++ b/naminter/cli/exporters.py
@@ -60,7 +60,9 @@ def export(self,
def _export_csv(self, results: List[ResultDict], output_path: Path) -> None:
if not results:
return
+
fieldnames = list(results[0].keys())
+
try:
with output_path.open('w', newline='', encoding='utf-8') as f:
writer = csv.DictWriter(f, fieldnames=fieldnames)
@@ -81,7 +83,7 @@ def _generate_html(self, results: List[ResultDict]) -> str:
cat = item.get('category', 'uncategorized')
grouped.setdefault(cat, []).append(item)
- default_fields = ['site_name', 'result_url', 'elapsed']
+ default_fields = ['name', 'result_url', 'elapsed']
display_fields = [f for f in default_fields if any(f in r for r in results)]
try:
@@ -112,6 +114,7 @@ def _export_html(self, results: List[ResultDict], output_path: Path) -> None:
def _export_pdf(self, results: List[ResultDict], output_path: Path) -> None:
if not results:
raise ValueError('No results to export to PDF')
+
try:
html = self._generate_html(results)
HTML(string=html).write_pdf(str(output_path))
@@ -121,6 +124,7 @@ def _export_pdf(self, results: List[ResultDict], output_path: Path) -> None:
def _resolve_path(self, fmt: FormatName, custom: Optional[str | Path]) -> Path:
if custom:
return Path(custom)
+
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
filename = f"results_{timestamp}.{fmt}"
return Path.cwd() / filename
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index fdccebd..4405791 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -22,7 +22,7 @@
from ..cli.progress import ProgressManager, ResultsTracker
from ..cli.constants import RESPONSE_FILE_DATE_FORMAT, RESPONSE_FILE_EXTENSION
from ..cli.utils import load_wmn_lists, sanitize_filename
-from ..core.models import ResultStatus, SiteResult, SelfEnumResult
+from ..core.models import ResultStatus, SiteResult, SelfEnumerationResult
from ..core.main import Naminter
from ..core.constants import MAX_CONCURRENT_TASKS, HTTP_REQUEST_TIMEOUT_SECONDS, HTTP_ALLOW_REDIRECTS, HTTP_SSL_VERIFY, WMN_SCHEMA_URL, LOGGING_FORMAT
@@ -105,8 +105,8 @@ async def run(self) -> None:
akamai=self.config.akamai,
extra_fp=self.config.extra_fp,
) as naminter:
- if self.config.self_enum:
- results = await self._run_self_enum(naminter)
+ if self.config.self_enumeration:
+ results = await self._run_self_enumeration(naminter)
else:
results = await self._run_check(naminter)
@@ -117,7 +117,7 @@ async def run(self) -> None:
async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
"""Run the username enumeration functionality."""
summary = await naminter.get_wmn_summary(
- site_names=self.config.site_names,
+ site_names=self.config.sites,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
)
@@ -128,11 +128,11 @@ async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
results: List[SiteResult] = []
with ProgressManager(console, disabled=self.config.no_progressbar) as progress_mgr:
- progress_mgr.start(total_sites, "Enumerating usernames...")
+ progress_mgr.start(total_sites, "[bright_cyan]Enumerating usernames...[/bright_cyan]")
result_stream = await naminter.enumerate_usernames(
usernames=self.config.usernames,
- site_names=self.config.site_names,
+ site_names=self.config.sites,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
fuzzy_mode=self.config.fuzzy_mode,
@@ -152,23 +152,23 @@ async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
return results
- async def _run_self_enum(self, naminter: Naminter) -> List[SelfEnumResult]:
- """Run the self-enum functionality."""
+ async def _run_self_enumeration(self, naminter: Naminter) -> List[SelfEnumerationResult]:
+ """Run the self-enumeration functionality."""
summary = await naminter.get_wmn_summary(
- site_names=self.config.site_names,
+ site_names=self.config.sites,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
)
total_tests = int(summary.get("known_accounts_total", 0))
tracker = ResultsTracker(total_tests)
- results: List[SelfEnumResult] = []
+ results: List[SelfEnumerationResult] = []
with ProgressManager(console, disabled=self.config.no_progressbar) as progress_mgr:
- progress_mgr.start(total_tests, "Running self-enum...")
+ progress_mgr.start(total_tests, "[bright_cyan]Running self-enumeration...[/bright_cyan]")
- result_stream = await naminter.self_enum(
- site_names=self.config.site_names,
+ result_stream = await naminter.self_enumeration(
+ site_names=self.config.sites,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
fuzzy_mode=self.config.fuzzy_mode,
@@ -188,15 +188,15 @@ async def _run_self_enum(self, naminter: Naminter) -> List[SelfEnumResult]:
response_files.append(response_file_path)
else:
response_files.append(None)
- formatted_output = self._formatter.format_self_enum(result, response_files)
+ formatted_output = self._formatter.format_self_enumeration(result, response_files)
console.print(formatted_output)
results.append(result)
return results
- def _filter_result(self, result: Union[SiteResult, SelfEnumResult]) -> bool:
+ def _filter_result(self, result: Union[SiteResult, SelfEnumerationResult]) -> bool:
"""Determine if a result should be included in output based on filter settings."""
- status = result.result_status
+ status = result.status
if self.config.filter_all:
return True
@@ -224,9 +224,9 @@ async def _process_result(self, result: SiteResult) -> Optional[Path]:
if self.config.save_response and result.response_text and self._response_dir:
try:
- safe_site_name = sanitize_filename(result.site_name)
+ safe_site_name = sanitize_filename(result.name)
safe_username = sanitize_filename(result.username)
- status_str = result.result_status.value
+ status_str = result.status.value
created_at_str = result.created_at.strftime(RESPONSE_FILE_DATE_FORMAT)
base_filename = f"{status_str}_{result.response_code}_{safe_site_name}_{safe_username}_{created_at_str}{RESPONSE_FILE_EXTENSION}"
@@ -277,7 +277,7 @@ async def _write_file(self, file_path: Path, content: str) -> None:
@click.option('--local-schema', type=click.Path(exists=True, path_type=Path), help='Path to local WhatsMyName JSON schema file for validation')
@click.option('--remote-schema', default=WMN_SCHEMA_URL, help='URL to fetch custom WhatsMyName JSON schema for validation')
@click.option('--skip-validation', is_flag=True, help='Skip JSON schema validation of WhatsMyName data')
-@click.option('--self-enum', is_flag=True, help='Run self-enum mode to validate site detection accuracy')
+@click.option('--self-enumeration', is_flag=True, help='Run self-enumeration mode to validate site detection accuracy')
@click.option('--include-categories', multiple=True, help='Include only sites from specified categories (e.g., "social", "coding")')
@click.option('--exclude-categories', multiple=True, help='Exclude sites from specified categories (e.g., "adult", "gaming")')
@click.option('--proxy', help='Proxy server to use for requests (e.g., http://proxy:port, socks5://proxy:port)')
@@ -325,7 +325,7 @@ def main(ctx: click.Context, **kwargs: Any) -> None:
try:
config = NaminterConfig(
usernames=kwargs.get('username'),
- site_names=kwargs.get('site'),
+ sites=kwargs.get('site'),
local_list_paths=kwargs.get('local_list'),
remote_list_urls=kwargs.get('remote_list'),
local_schema_path=kwargs.get('local_schema'),
@@ -343,7 +343,7 @@ def main(ctx: click.Context, **kwargs: Any) -> None:
akamai=kwargs.get('akamai'),
extra_fp=kwargs.get('extra_fp'),
fuzzy_mode=kwargs.get('fuzzy_mode'),
- self_enum=kwargs.get('self_enum'),
+ self_enumeration=kwargs.get('self_enumeration'),
log_level=kwargs.get('log_level'),
log_file=kwargs.get('log_file'),
show_details=kwargs.get('show_details'),
diff --git a/naminter/cli/progress.py b/naminter/cli/progress.py
index 23ad187..cf56a82 100644
--- a/naminter/cli/progress.py
+++ b/naminter/cli/progress.py
@@ -21,9 +21,6 @@ class ResultsTracker:
def __init__(self, total_sites: int) -> None:
"""Initialize the results tracker."""
- if total_sites < 0:
- raise ValueError("total_sites must be non-negative")
-
self.total_sites = total_sites
self.results_count = 0
self.start_time = time.time()
@@ -31,13 +28,8 @@ def __init__(self, total_sites: int) -> None:
def add_result(self, result: SiteResult) -> None:
"""Update counters with a new result."""
- if result is None:
- raise ValueError("Result cannot be None")
- if not hasattr(result, 'result_status'):
- raise ValueError("Result must have a result_status attribute")
-
self.results_count += 1
- self.status_counts[result.result_status] += 1
+ self.status_counts[result.status] += 1
def get_progress_text(self) -> str:
"""Get formatted progress text with request speed and statistics."""
@@ -103,11 +95,6 @@ def create_progress_bar(self) -> Progress:
def start(self, total: int, description: str) -> None:
"""Start the progress bar."""
- if total < 0:
- raise ValueError("Total must be non-negative")
- if not description or not description.strip():
- raise ValueError("Description cannot be empty")
-
if not self.disabled:
self.progress = self.create_progress_bar()
self.progress.start()
@@ -115,9 +102,6 @@ def start(self, total: int, description: str) -> None:
def update(self, advance: int = 1, description: Optional[str] = None) -> None:
"""Update the progress bar."""
- if advance < 0:
- raise ValueError("Advance must be non-negative")
-
if self.progress and self.task_id is not None:
update_kwargs: Dict[str, Any] = {"advance": advance}
if description is not None:
diff --git a/naminter/core/main.py b/naminter/core/main.py
index f637e3e..8bb6f4c 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -6,7 +6,7 @@
from curl_cffi.requests import AsyncSession, RequestsError
from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
-from ..core.models import ResultStatus, SiteResult, SelfEnumResult, ValidationMode
+from ..core.models import ResultStatus, SiteResult, SelfEnumerationResult, ValidationMode
from ..core.exceptions import (
DataError,
ValidationError,
@@ -232,46 +232,46 @@ async def enumerate_site(
"""Enumerate a single site for the given username."""
await self._ensure_session()
- site_name = site.get("name")
+ name = site.get("name")
category = site.get("cat")
uri_check_template = site.get("uri_check")
post_body_template = site.get("post_body")
e_code, e_string = site.get("e_code"), site.get("e_string")
m_code, m_string = site.get("m_code"), site.get("m_string")
- if not site_name:
+ if not name:
self._logger.error("Site configuration missing required 'name' field: %r", site)
return SiteResult(
- site_name="",
+ name="",
category=category,
username=username,
- result_status=ResultStatus.ERROR,
+ status=ResultStatus.ERROR,
error="Site missing required field: name",
)
if not category:
- self._logger.error("Site '%s' missing required 'cat' field", site_name)
+ self._logger.error("Site '%s' missing required 'cat' field", name)
return SiteResult(
- site_name=site_name,
- category=category,
+ name=name,
+ category="",
username=username,
- result_status=ResultStatus.ERROR,
+ status=ResultStatus.ERROR,
error="Site missing required field: cat",
)
if not uri_check_template:
- self._logger.error("Site '%s' missing required 'uri_check' field", site_name)
+ self._logger.error("Site '%s' missing required 'uri_check' field", name)
return SiteResult(
- site_name=site_name,
+ name=name,
category=category,
username=username,
- result_status=ResultStatus.ERROR,
+ status=ResultStatus.ERROR,
error="Site missing required field: uri_check",
)
has_placeholder = ACCOUNT_PLACEHOLDER in uri_check_template or (post_body_template and ACCOUNT_PLACEHOLDER in post_body_template)
if not has_placeholder:
- return SiteResult(site_name, category, username, ResultStatus.ERROR, error=f"Site '{site_name}' missing {ACCOUNT_PLACEHOLDER} placeholder")
+ return SiteResult(name, category, username, ResultStatus.ERROR, error=f"Site '{name}' missing {ACCOUNT_PLACEHOLDER} placeholder")
matchers = {
'e_code': e_code,
@@ -284,14 +284,14 @@ async def enumerate_site(
if all(val is None for val in matchers.values()):
self._logger.error(
"Site '%s' must define at least one matcher (e_code, e_string, m_code, or m_string) for %s mode",
- site_name,
+ name,
ValidationMode.FUZZY,
)
return SiteResult(
- site_name=site_name,
+ name=name,
category=category,
username=username,
- result_status=ResultStatus.ERROR,
+ status=ResultStatus.ERROR,
error=f"Site must define at least one matcher for {ValidationMode.FUZZY} mode",
)
else:
@@ -299,26 +299,26 @@ async def enumerate_site(
if missing:
self._logger.error(
"Site '%s' missing required matchers for %s mode: %s",
- site_name, ValidationMode.STRICT, missing
+ name, ValidationMode.STRICT, missing
)
return SiteResult(
- site_name=site_name,
+ name=name,
category=category,
username=username,
- result_status=ResultStatus.ERROR,
+ status=ResultStatus.ERROR,
error=f"Site missing required matchers for {ValidationMode.STRICT} mode: {missing}",
)
strip_bad_char = site.get("strip_bad_char", "")
clean_username = username.translate(str.maketrans("", "", strip_bad_char))
if not clean_username:
- return SiteResult(site_name, category, username, ResultStatus.ERROR, error=f"Username '{username}' became empty after character stripping")
+ return SiteResult(name, category, username, ResultStatus.ERROR, error=f"Username '{username}' became empty after character stripping")
uri_check = uri_check_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
uri_pretty = site.get("uri_pretty", uri_check_template).replace(ACCOUNT_PLACEHOLDER, clean_username)
self._logger.info("Enumerating site '%s' (category: %s) for username '%s' in %s mode",
- site_name, category, username, ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
+ name, category, username, ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
try:
async with self._semaphore:
@@ -335,28 +335,28 @@ async def enumerate_site(
response = await self._session.get(uri_check, headers=headers)
elapsed = time.monotonic() - start_time
- self._logger.info("Request to '%s' completed in %.2fs with status %d", site_name, elapsed, response.status_code)
+ self._logger.info("Request to '%s' completed in %.2fs with status %d", name, elapsed, response.status_code)
except asyncio.CancelledError:
- self._logger.warning("Request to '%s' was cancelled", site_name)
+ self._logger.warning("Request to '%s' was cancelled", name)
raise
except RequestsError as e:
- self._logger.warning("Network error while enumerating '%s': %s", site_name, e, exc_info=True)
+ self._logger.warning("Network error while enumerating '%s': %s", name, e, exc_info=True)
return SiteResult(
- site_name=site_name,
+ name=name,
category=category,
username=username,
result_url=uri_pretty,
- result_status=ResultStatus.ERROR,
+ status=ResultStatus.ERROR,
error=f"Network error: {e}",
)
except Exception as e:
- self._logger.error("Unexpected error while enumerating '%s': %s", site_name, e, exc_info=True)
+ self._logger.error("Unexpected error while enumerating '%s': %s", name, e, exc_info=True)
return SiteResult(
- site_name=site_name,
+ name=name,
category=category,
username=username,
result_url=uri_pretty,
- result_status=ResultStatus.ERROR,
+ status=ResultStatus.ERROR,
error=f"Unexpected error: {e}",
)
@@ -375,7 +375,7 @@ async def enumerate_site(
self._logger.debug(
"Site '%s' result: %s (HTTP %d) in %.2fs (%s mode)",
- site_name,
+ name,
result_status.name,
response_code,
elapsed,
@@ -383,11 +383,11 @@ async def enumerate_site(
)
return SiteResult(
- site_name=site_name,
+ name=name,
category=category,
username=username,
result_url=uri_pretty,
- result_status=result_status,
+ status=result_status,
response_code=response_code,
elapsed=elapsed,
response_text=response_text,
@@ -430,15 +430,15 @@ async def iterate_results() -> AsyncGenerator[SiteResult, None]:
results = await asyncio.gather(*coroutines)
return results
- async def self_enum(
+ async def self_enumeration(
self,
site_names: Optional[List[str]] = None,
include_categories: Optional[List[str]] = None,
exclude_categories: Optional[List[str]] = None,
fuzzy_mode: bool = False,
as_generator: bool = False
- ) -> Union[List[SelfEnumResult], AsyncGenerator[SelfEnumResult, None]]:
- """Run self-enum using known accounts for each site."""
+ ) -> Union[List[SelfEnumerationResult], AsyncGenerator[SelfEnumerationResult, None]]:
+ """Run self-enumeration using known accounts for each site."""
await self._ensure_session()
sites = self._filter_sites(
@@ -447,66 +447,66 @@ async def self_enum(
exclude_categories=exclude_categories,
)
- self._logger.info("Starting self-enum validation for %d sites in %s mode", len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
+ self._logger.info("Starting self-enumeration validation for %d sites in %s mode", len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
- async def _enumerate_known(site: Dict[str, Any]) -> SelfEnumResult:
+ async def _enumerate_known(site: Dict[str, Any]) -> SelfEnumerationResult:
"""Helper function to enumerate a site with all its known users."""
- site_name = site.get("name")
+ name = site.get("name")
category = site.get("cat")
known = site.get("known")
- if not site_name:
- self._logger.error("Site configuration missing required 'name' field for self-enum: %r", site)
- return SelfEnumResult(
- site_name=site_name,
- category=category,
+ if not name:
+ self._logger.error("Site configuration missing required 'name' field for self-enumeration: %r", site)
+ return SelfEnumerationResult(
+ name="",
+ category=category or "",
results=[],
error=f"Site missing required field: name"
)
if not category:
- self._logger.error("Site '%s' missing required 'cat' field for self-enum", site_name)
- return SelfEnumResult(
- site_name=site_name,
- category=category,
+ self._logger.error("Site '%s' missing required 'cat' field for self-enumeration", name)
+ return SelfEnumerationResult(
+ name=name,
+ category="",
results=[],
- error=f"Site '{site_name}' missing required field: cat"
+ error=f"Site missing required field: cat"
)
if known is None:
- self._logger.error("Site '%s' missing required 'known' field for self-enum", site_name)
- return SelfEnumResult(
- site_name=site_name,
+ self._logger.error("Site '%s' missing required 'known' field for self-enumeration", name)
+ return SelfEnumerationResult(
+ name=name,
category=category,
results=[],
- error=f"Site '{site_name}' missing required field: known"
+ error=f"Site '{name}' missing required field: known"
)
- self._logger.info("Self-enuming site '%s' (category: %s) with %d known accounts", site_name, category, len(known))
+ self._logger.info("Self-enumerating site '%s' (category: %s) with %d known accounts", name, category, len(known))
try:
coroutines = [self.enumerate_site(site, username, fuzzy_mode) for username in known]
results = await asyncio.gather(*coroutines)
- return SelfEnumResult(
- site_name=site_name,
+ return SelfEnumerationResult(
+ name=name,
category=category,
results=results
)
except Exception as e:
- self._logger.error("Unexpected error during self-enum for site '%s': %s", site_name, e, exc_info=True)
- return SelfEnumResult(
- site_name=site_name,
+ self._logger.error("Unexpected error during self-enumeration for site '%s': %s", name, e, exc_info=True)
+ return SelfEnumerationResult(
+ name=name,
category=category,
results=[],
- error=f"Unexpected error during self-enum: {e}"
+ error=f"Unexpected error during self-enumeration: {e}"
)
coroutines = [
_enumerate_known(site) for site in sites if isinstance(site, dict)
]
- async def iterate_results() -> AsyncGenerator[SelfEnumResult, None]:
+ async def iterate_results() -> AsyncGenerator[SelfEnumerationResult, None]:
for completed_task in asyncio.as_completed(coroutines):
yield await completed_task
diff --git a/naminter/core/models.py b/naminter/core/models.py
index b85d235..16c3029 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -19,10 +19,10 @@ class ResultStatus(StrEnum):
@dataclass(slots=True, frozen=True)
class SiteResult:
"""Result of testing a username on a site."""
- site_name: str
+ name: str
category: str
username: str
- result_status: ResultStatus
+ status: ResultStatus
result_url: Optional[str] = None
response_code: Optional[int] = None
response_text: Optional[str] = None
@@ -30,14 +30,6 @@ class SiteResult:
error: Optional[str] = None
created_at: datetime = field(default_factory=datetime.now)
- def __post_init__(self) -> None:
- """Validate numeric fields after initialization."""
- if self.response_code is not None and self.response_code < 0:
- raise ValueError("response_code must be non-negative")
-
- if self.elapsed is not None and self.elapsed < 0:
- raise ValueError("elapsed must be non-negative")
-
@classmethod
def get_result_status(
cls,
@@ -80,25 +72,25 @@ def get_result_status(
def to_dict(self, exclude_response_text: bool = False) -> Dict[str, Any]:
"""Convert SiteResult to dict."""
result = asdict(self)
- result['result_status'] = self.result_status.value
+ result['status'] = self.status.value
result['created_at'] = self.created_at.isoformat()
if exclude_response_text:
result.pop('response_text', None)
return result
@dataclass(slots=True, frozen=True)
-class SelfEnumResult:
- """Result of a self-enum for a username."""
- site_name: str
+class SelfEnumerationResult:
+ """Result of a self-enumeration for a username."""
+ name: str
category: str
results: List[SiteResult]
- result_status: ResultStatus = field(init=False)
+ status: ResultStatus = field(init=False)
error: Optional[str] = None
created_at: datetime = field(default_factory=datetime.now)
def __post_init__(self) -> None:
"""Calculate result status from results."""
- object.__setattr__(self, 'result_status', self._get_result_status())
+ object.__setattr__(self, 'status', self._get_result_status())
def _get_result_status(self) -> ResultStatus:
"""Determine result status from results."""
@@ -108,7 +100,7 @@ def _get_result_status(self) -> ResultStatus:
if not self.results:
return ResultStatus.UNKNOWN
- statuses: Set[ResultStatus] = {result.result_status for result in self.results if result}
+ statuses: Set[ResultStatus] = {result.status for result in self.results if result}
if not statuses:
return ResultStatus.UNKNOWN
@@ -122,12 +114,12 @@ def _get_result_status(self) -> ResultStatus:
return next(iter(statuses))
def to_dict(self, exclude_response_text: bool = False) -> Dict[str, Any]:
- """Convert SelfEnumResult to dict."""
+ """Convert SelfEnumerationResult to dict."""
return {
- 'site_name': self.site_name,
+ 'name': self.name,
'category': self.category,
- 'result_status': self.result_status.value,
'results': [result.to_dict(exclude_response_text=exclude_response_text) for result in self.results],
+ 'status': self.status.value,
'created_at': self.created_at.isoformat(),
'error': self.error,
}
From be77a836f358a8bbc660e333611274de4338100e Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Sun, 28 Sep 2025 15:46:05 +0300
Subject: [PATCH 12/19] refactor(cli,core): implement network layer and miscellaneous cleanups
---
naminter/cli/config.py | 2 +-
naminter/cli/exporters.py | 76 +++--
naminter/cli/main.py | 97 ++++--
naminter/cli/progress.py | 11 +-
naminter/cli/utils.py | 85 +----
naminter/core/constants.py | 33 ++
naminter/core/exceptions.py | 36 +-
naminter/core/main.py | 651 ++++++++++++++++++++----------------
naminter/core/models.py | 42 ++-
naminter/core/network.py | 171 ++++++++++
naminter/core/utils.py | 117 +++----
11 files changed, 784 insertions(+), 537 deletions(-)
create mode 100644 naminter/core/network.py
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index e0cfb95..3243eb4 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -94,7 +94,7 @@ def __post_init__(self) -> None:
if not self.local_list_paths and not self.remote_list_urls:
self.remote_list_urls = [WMN_REMOTE_URL]
except Exception as e:
- raise ValueError(f"Configuration validation failed: {e}") from e
+ raise ConfigurationError(f"Configuration validation failed: {e}") from e
filter_fields = [
self.filter_all,
diff --git a/naminter/cli/exporters.py b/naminter/cli/exporters.py
index e86d277..a8c6d05 100644
--- a/naminter/cli/exporters.py
+++ b/naminter/cli/exporters.py
@@ -7,8 +7,8 @@
import jinja2
from weasyprint import HTML
-from ..core.models import SiteResult
-from ..core.exceptions import ConfigurationError
+from ..core.models import SiteResult, SelfEnumerationResult
+from ..core.exceptions import ConfigurationError, ExportError, FileAccessError
FormatName = Literal['csv', 'json', 'html', 'pdf']
ResultDict = Dict[str, Any]
@@ -33,7 +33,7 @@ def __init__(self, usernames: Optional[List[str]] = None, version: Optional[str]
}
def export(self,
- results: List[SiteResult | ResultDict],
+ results: List[SiteResult | SelfEnumerationResult],
formats: Dict[FormatName, Optional[str | Path]]) -> None:
"""
Export results in the given formats.
@@ -42,20 +42,22 @@ def export(self,
return
dict_results = [
- (r.to_dict(exclude_response_text=True) if isinstance(r, SiteResult) else r)
- for r in results if isinstance(r, (SiteResult, dict))
+ result.to_dict(exclude_response_text=True)
+ for result in results
]
- for fmt, path in formats.items():
- if fmt not in self.SUPPORTED_FORMATS:
- raise ValueError(f"Skipping unsupported format '{fmt}'")
- out_path = self._resolve_path(fmt, path)
- out_path.parent.mkdir(parents=True, exist_ok=True)
-
+ for format_name, path in formats.items():
+ if format_name not in self.SUPPORTED_FORMATS:
+ raise ExportError(f"Unsupported export format: {format_name}")
+
try:
- self.export_methods[fmt](dict_results, out_path)
- except Exception as exc:
- raise RuntimeError(f"Failed to export {fmt}: {exc}") from exc
+ out_path = self._resolve_path(format_name, path)
+ out_path.parent.mkdir(parents=True, exist_ok=True)
+ self.export_methods[format_name](dict_results, out_path)
+ except FileAccessError as e:
+ raise ExportError(f"File access error during {format_name} export: {e}") from e
+ except Exception as e:
+ raise ExportError(f"Failed to export {format_name}: {e}") from e
def _export_csv(self, results: List[ResultDict], output_path: Path) -> None:
if not results:
@@ -68,14 +70,24 @@ def _export_csv(self, results: List[ResultDict], output_path: Path) -> None:
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(results)
- except Exception as exc:
- raise RuntimeError(f"CSV export error: {exc}") from exc
+ except PermissionError as e:
+ raise FileAccessError(f"Permission denied writing CSV file: {e}") from e
+ except OSError as e:
+ raise FileAccessError(f"OS error writing CSV file: {e}") from e
+ except Exception as e:
+ raise ExportError(f"CSV export error: {e}") from e
def _export_json(self, results: List[ResultDict], output_path: Path) -> None:
try:
output_path.write_text(json.dumps(results, ensure_ascii=False, indent=2), encoding='utf-8')
- except Exception as exc:
- raise RuntimeError(f"JSON export error: {exc}") from exc
+ except PermissionError as e:
+ raise FileAccessError(f"Permission denied writing JSON file: {e}") from e
+ except OSError as e:
+ raise FileAccessError(f"OS error writing JSON file: {e}") from e
+ except (TypeError, ValueError) as e:
+ raise ExportError(f"JSON serialization error: {e}") from e
+ except Exception as e:
+ raise ExportError(f"JSON export error: {e}") from e
def _generate_html(self, results: List[ResultDict]) -> str:
grouped: Dict[str, List[ResultDict]] = {}
@@ -89,8 +101,12 @@ def _generate_html(self, results: List[ResultDict]) -> str:
try:
with importlib.resources.files('naminter.cli.templates').joinpath('report.html').open('r', encoding='utf-8') as f:
template_source = f.read()
+ except FileNotFoundError as e:
+ raise ConfigurationError(f'HTML template not found: {e}') from e
+ except PermissionError as e:
+ raise FileAccessError(f'Permission denied reading HTML template: {e}') from e
except Exception as e:
- raise ConfigurationError(f'Could not load HTML template: {e}')
+ raise ConfigurationError(f'Could not load HTML template: {e}') from e
template = jinja2.Template(template_source, autoescape=True)
@@ -108,23 +124,31 @@ def _export_html(self, results: List[ResultDict], output_path: Path) -> None:
try:
html = self._generate_html(results)
output_path.write_text(html, encoding='utf-8')
- except Exception as exc:
- raise RuntimeError(f"HTML export error: {exc}") from exc
+ except PermissionError as e:
+ raise FileAccessError(f"Permission denied writing HTML file: {e}") from e
+ except OSError as e:
+ raise FileAccessError(f"OS error writing HTML file: {e}") from e
+ except Exception as e:
+ raise ExportError(f"HTML export error: {e}") from e
def _export_pdf(self, results: List[ResultDict], output_path: Path) -> None:
if not results:
- raise ValueError('No results to export to PDF')
+ raise ExportError('No results to export to PDF')
try:
html = self._generate_html(results)
HTML(string=html).write_pdf(str(output_path))
- except Exception as exc:
- raise RuntimeError(f"PDF export error: {exc}") from exc
+ except PermissionError as e:
+ raise FileAccessError(f"Permission denied writing PDF file: {e}") from e
+ except OSError as e:
+ raise FileAccessError(f"OS error writing PDF file: {e}") from e
+ except Exception as e:
+ raise ExportError(f"PDF export error: {e}") from e
- def _resolve_path(self, fmt: FormatName, custom: Optional[str | Path]) -> Path:
+ def _resolve_path(self, format_name: FormatName, custom: Optional[str | Path]) -> Path:
if custom:
return Path(custom)
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
- filename = f"results_{timestamp}.{fmt}"
+ filename = f"results_{timestamp}.{format_name}"
return Path.cwd() / filename
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index 4405791..c13136f 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -21,12 +21,14 @@
from ..cli.exporters import Exporter
from ..cli.progress import ProgressManager, ResultsTracker
from ..cli.constants import RESPONSE_FILE_DATE_FORMAT, RESPONSE_FILE_EXTENSION
-from ..cli.utils import load_wmn_lists, sanitize_filename
+from ..cli.utils import sanitize_filename
from ..core.models import ResultStatus, SiteResult, SelfEnumerationResult
from ..core.main import Naminter
+from ..core.network import CurlCFFISession
from ..core.constants import MAX_CONCURRENT_TASKS, HTTP_REQUEST_TIMEOUT_SECONDS, HTTP_ALLOW_REDIRECTS, HTTP_SSL_VERIFY, WMN_SCHEMA_URL, LOGGING_FORMAT
+from ..core.utils import validate_numeric_values
-from ..core.exceptions import DataError, ConfigurationError
+from ..core.exceptions import DataError, ConfigurationError, ExportError
from .. import __description__, __version__
@@ -52,12 +54,12 @@ def _setup_response_dir(self) -> Optional[Path]:
return None
try:
- response_path = self.config.response_path
- if response_path is None:
+ dir_path = self.config.response_dir
+ if dir_path is None:
return None
-
- response_path.mkdir(parents=True, exist_ok=True)
- return response_path
+
+ dir_path.mkdir(parents=True, exist_ok=True)
+ return dir_path
except PermissionError as e:
display_error(f"Permission denied creating/accessing response directory: {e}")
return None
@@ -70,40 +72,56 @@ def _setup_response_dir(self) -> Optional[Path]:
@staticmethod
def _setup_logging(config: NaminterConfig) -> None:
- """Setup logging configuration if log level and file are specified."""
- if config.log_level and config.log_file:
- log_path = Path(config.log_file)
- log_path.parent.mkdir(parents=True, exist_ok=True)
- level_value = getattr(logging, str(config.log_level).upper(), logging.INFO)
- logging.basicConfig(
- level=level_value,
- format=LOGGING_FORMAT,
- filename=str(log_path),
- filemode="a",
- )
+ """Configure project logging."""
+ if not config.log_file:
+ return
+
+ log_path = Path(config.log_file)
+ log_path.parent.mkdir(parents=True, exist_ok=True)
+
+ level_value = getattr(logging, str(config.log_level or "INFO").upper(), logging.INFO)
+
+ logger = logging.getLogger("naminter")
+ logger.setLevel(level_value)
+ logger.propagate = False
+
+ has_file_handler = any(isinstance(handler, logging.FileHandler) for handler in logger.handlers)
+ if not has_file_handler:
+ file_handler = logging.FileHandler(str(log_path), mode="a", encoding="utf-8")
+ formatter = logging.Formatter(LOGGING_FORMAT)
+ file_handler.setFormatter(formatter)
+ file_handler.setLevel(level_value)
+ logger.addHandler(file_handler)
async def run(self) -> None:
"""Main execution method with progress tracking."""
- wmn_data, wmn_schema = load_wmn_lists(
- local_list_paths=self.config.local_list_paths,
- remote_list_urls=self.config.remote_list_urls,
- skip_validation=self.config.skip_validation,
- local_schema_path=self.config.local_schema_path,
- remote_schema_url=self.config.remote_schema_url
- )
-
- async with Naminter(
- wmn_data=wmn_data,
- wmn_schema=wmn_schema,
- max_tasks=self.config.max_tasks,
+ try:
+ warnings = validate_numeric_values(self.config.max_tasks, self.config.timeout)
+ for message in warnings:
+ display_warning(message)
+ except ConfigurationError as e:
+ display_error(f"Configuration error: {e}")
+ return
+
+ http_client = CurlCFFISession(
+ proxies=self.config.proxy,
+ verify=self.config.verify_ssl,
timeout=self.config.timeout,
- proxy=self.config.proxy,
- verify_ssl=self.config.verify_ssl,
allow_redirects=self.config.allow_redirects,
impersonate=self.config.impersonate,
ja3=self.config.ja3,
akamai=self.config.akamai,
extra_fp=self.config.extra_fp,
+ )
+
+ async with Naminter(
+ http_client=http_client,
+ max_tasks=self.config.max_tasks,
+ local_list_paths=self.config.local_list_paths,
+ remote_list_urls=self.config.remote_list_urls,
+ skip_validation=self.config.skip_validation,
+ local_schema_path=self.config.local_schema_path,
+ remote_schema_url=self.config.remote_schema_url,
) as naminter:
if self.config.self_enumeration:
results = await self._run_self_enumeration(naminter)
@@ -111,8 +129,12 @@ async def run(self) -> None:
results = await self._run_check(naminter)
if self.config.export_formats and results:
- export_manager = Exporter(self.config.usernames or [], __version__)
- export_manager.export(results, self.config.export_formats)
+ try:
+ export_manager = Exporter(self.config.usernames or [], __version__)
+ export_manager.export(results, self.config.export_formats)
+ except ExportError as e:
+ display_error(f"Export error: {e}")
+ return
async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
"""Run the username enumeration functionality."""
@@ -121,7 +143,7 @@ async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
)
- actual_site_count = int(summary.get("sites_count", 0))
+ actual_site_count = int(summary.sites_count)
total_sites = actual_site_count * len(self.config.usernames)
tracker = ResultsTracker(total_sites)
@@ -159,7 +181,7 @@ async def _run_self_enumeration(self, naminter: Naminter) -> List[SelfEnumeratio
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
)
- total_tests = int(summary.get("known_accounts_total", 0))
+ total_tests = int(summary.known_accounts_total)
tracker = ResultsTracker(total_tests)
results: List[SelfEnumerationResult] = []
@@ -385,6 +407,9 @@ def main(ctx: click.Context, **kwargs: Any) -> None:
except DataError as e:
display_error(f"Data error: {e}")
ctx.exit(1)
+ except ExportError as e:
+ display_error(f"Export error: {e}")
+ ctx.exit(1)
except Exception as e:
display_error(f"Fatal error: {e}")
ctx.exit(1)
diff --git a/naminter/cli/progress.py b/naminter/cli/progress.py
index cf56a82..822acb3 100644
--- a/naminter/cli/progress.py
+++ b/naminter/cli/progress.py
@@ -34,7 +34,6 @@ def add_result(self, result: SiteResult) -> None:
def get_progress_text(self) -> str:
"""Get formatted progress text with request speed and statistics."""
elapsed = time.time() - self.start_time
- rate = self.results_count / elapsed if elapsed > 0 else 0.0
found = self.status_counts[ResultStatus.FOUND]
ambiguous = self.status_counts[ResultStatus.AMBIGUOUS]
@@ -43,6 +42,11 @@ def get_progress_text(self) -> str:
not_valid = self.status_counts[ResultStatus.NOT_VALID]
errors = self.status_counts[ResultStatus.ERROR]
+ valid_count = self.results_count - errors - not_valid
+ if valid_count < 0:
+ valid_count = 0
+ rate = valid_count / elapsed if elapsed > 0 else 0.0
+
sections = [
f"[{THEME['primary']}]{rate:.1f} req/s[/]",
f"[{THEME['success']}]+ {found}[/]",
@@ -60,11 +64,6 @@ def get_progress_text(self) -> str:
sections.append(f"[{THEME['primary']}]{self.results_count}/{self.total_sites}[/]")
return " │ ".join(sections)
-
- @property
- def completion_percentage(self) -> float:
- """Get the completion percentage as a float between 0 and 100."""
- return (self.results_count / self.total_sites) * 100 if self.total_sites > 0 else 0.0
class ProgressManager:
diff --git a/naminter/cli/utils.py b/naminter/cli/utils.py
index 3ad7f12..4da5184 100644
--- a/naminter/cli/utils.py
+++ b/naminter/cli/utils.py
@@ -1,85 +1,3 @@
-import json
-from pathlib import Path
-from typing import Any, Dict, List, Optional, Tuple
-
-from curl_cffi import requests
-from ..core.constants import WMN_REMOTE_URL
-from ..core.exceptions import DataError
-
-
-def load_wmn_lists(
- local_list_paths: Optional[List[Path]] = None,
- remote_list_urls: Optional[List[str]] = None,
- skip_validation: bool = False,
- local_schema_path: Optional[Path] = None,
- remote_schema_url: Optional[str] = None
-) -> Tuple[Dict[str, Any], Optional[Dict[str, Any]]]:
- """Load and merge WMN lists from local and remote sources."""
- wmn_data = {"sites": [], "categories": [], "authors": [], "license": []}
- wmn_schema = None
-
- def _fetch_json(url: str, timeout: int = 30) -> Dict[str, Any]:
- """Helper to fetch and parse JSON from URL."""
- if not url or not isinstance(url, str) or not url.strip():
- raise ValueError(f"Invalid URL: {url}")
-
- try:
- response = requests.get(url, timeout=timeout)
- response.raise_for_status()
- return response.json()
- except requests.exceptions.RequestException as e:
- raise DataError(f"Failed to fetch from {url}: {e}") from e
- except json.JSONDecodeError as e:
- raise DataError(f"Failed to parse JSON from {url}: {e}") from e
-
- def _merge_data(data: Dict[str, Any]) -> None:
- """Helper to merge data into wmn_data."""
- if isinstance(data, dict):
- for key in ["sites", "categories", "authors", "license"]:
- if key in data and isinstance(data[key], list):
- wmn_data[key].extend(data[key])
-
- if not skip_validation:
- try:
- if local_schema_path:
- wmn_schema = json.loads(Path(local_schema_path).read_text())
- elif remote_schema_url:
- wmn_schema = _fetch_json(remote_schema_url)
- except Exception:
- pass
-
- sources = []
- if remote_list_urls:
- sources.extend([(url, True) for url in remote_list_urls])
- if local_list_paths:
- sources.extend([(path, False) for path in local_list_paths])
-
- if not sources:
- sources = [(WMN_REMOTE_URL, True)]
-
- for source, is_remote in sources:
- try:
- if is_remote:
- data = _fetch_json(source)
- else:
- data = json.loads(Path(source).read_text())
- _merge_data(data)
- except Exception as e:
- if not sources or source == WMN_REMOTE_URL:
- raise DataError(f"Failed to load WMN data from {source}: {e}") from e
-
- if not wmn_data["sites"]:
- raise DataError("No sites loaded from any source")
-
- unique_sites = {site["name"]: site for site in wmn_data["sites"]
- if isinstance(site, dict) and site.get("name")}
- wmn_data["sites"] = list(unique_sites.values())
- wmn_data["categories"] = sorted(set(wmn_data["categories"]))
- wmn_data["authors"] = sorted(set(wmn_data["authors"]))
- wmn_data["license"] = list(dict.fromkeys(wmn_data["license"]))
-
- return wmn_data, wmn_schema
-
def sanitize_filename(filename: str) -> str:
"""Sanitize filename for cross-platform compatibility."""
if not filename or not str(filename).strip():
@@ -88,4 +6,5 @@ def sanitize_filename(filename: str) -> str:
invalid_chars = '<>:"|?*\\/\0'
sanitized = ''.join('_' if c in invalid_chars or ord(c) < 32 else c for c in str(filename))
sanitized = sanitized.strip(' .')[:200] if sanitized.strip(' .') else 'unnamed'
- return sanitized
\ No newline at end of file
+ return sanitized
+
\ No newline at end of file
diff --git a/naminter/core/constants.py b/naminter/core/constants.py
index e41b694..933d965 100644
--- a/naminter/core/constants.py
+++ b/naminter/core/constants.py
@@ -34,3 +34,36 @@
# Placeholder for account name substitution in uri_check or post_body
ACCOUNT_PLACEHOLDER: Final[str] = "{account}"
+
+# Required key sets for validations
+REQUIRED_KEYS_ENUMERATE: Final[tuple[str, ...]] = (
+ "name",
+ "uri_check",
+ "e_code",
+ "e_string",
+ "m_string",
+ "m_code",
+ "cat",
+)
+
+REQUIRED_KEYS_SELF_ENUM: Final[tuple[str, ...]] = (
+ "name",
+ "cat",
+ "known",
+)
+
+# WMN dataset keys
+WMN_KEY_SITES: Final[str] = "sites"
+WMN_KEY_CATEGORIES: Final[str] = "categories"
+WMN_KEY_AUTHORS: Final[str] = "authors"
+WMN_KEY_LICENSE: Final[str] = "license"
+WMN_KEY_NAME: Final[str] = "name"
+
+# Collection of list fields present in WMN payloads
+WMN_LIST_FIELDS: Final[tuple[str, ...]] = (
+ WMN_KEY_SITES,
+ WMN_KEY_CATEGORIES,
+ WMN_KEY_AUTHORS,
+ WMN_KEY_LICENSE,
+)
+
diff --git a/naminter/core/exceptions.py b/naminter/core/exceptions.py
index b8ad269..61e9149 100644
--- a/naminter/core/exceptions.py
+++ b/naminter/core/exceptions.py
@@ -50,11 +50,11 @@ class SessionError(NetworkError):
pass
-class SchemaValidationError(DataError):
+class SchemaError(DataError):
"""Raised when WMN schema validation fails.
This occurs when the WhatsMyName list format doesn't match
- the expected schema structure.
+ the expected schema structure, or when the schema itself is invalid.
"""
pass
@@ -77,15 +77,6 @@ class FileAccessError(DataError):
pass
-class LoggingError(ConfigurationError):
- """Raised when logging configuration fails.
-
- This includes logger setup errors, handler configuration issues,
- and log file access problems.
- """
- pass
-
-
class ValidationError(DataError):
"""Raised when input validation fails.
@@ -95,20 +86,11 @@ class ValidationError(DataError):
pass
-class WMNListError(DataError):
- """Raised when WhatsMyName list loading or processing fails.
-
- This includes download errors, parsing failures,
- and list update issues.
- """
- pass
-
-
-class ConcurrencyError(NaminterError):
- """Raised when concurrency-related errors occur.
+class ExportError(NaminterError):
+ """Raised when export operations fail.
- This includes semaphore acquisition failures, task management errors,
- and thread/async coordination issues.
+ This includes file writing errors, format conversion errors,
+ and other export-related issues.
"""
pass
@@ -119,11 +101,9 @@ class ConcurrencyError(NaminterError):
"NetworkError",
"DataError",
"SessionError",
- "SchemaValidationError",
+ "SchemaError",
"TimeoutError",
"FileAccessError",
- "LoggingError",
"ValidationError",
- "WMNListError",
- "ConcurrencyError",
+ "ExportError",
]
\ No newline at end of file
diff --git a/naminter/core/main.py b/naminter/core/main.py
index 8bb6f4c..8290b90 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -1,124 +1,249 @@
import asyncio
+import json
import logging
-import time
-from typing import Any, AsyncGenerator, Dict, List, Optional, Union, Set
-
-from curl_cffi.requests import AsyncSession, RequestsError
-
-from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
-from ..core.models import ResultStatus, SiteResult, SelfEnumerationResult, ValidationMode
+from pathlib import Path
+from typing import Any, AsyncGenerator, Dict, List, Optional, Union, Set, Sequence, Tuple
+from ..core.models import ResultStatus, SiteResult, SelfEnumerationResult, ValidationMode, Summary
from ..core.exceptions import (
DataError,
ValidationError,
+ SchemaError,
+ FileAccessError,
+ NetworkError,
+ TimeoutError,
+ SessionError,
)
from ..core.utils import (
- validate_wmn_data,
- validate_numeric_values,
- configure_proxy,
validate_usernames,
+ deduplicate_strings,
+ merge_lists,
)
from ..core.constants import (
- HTTP_REQUEST_TIMEOUT_SECONDS,
- HTTP_SSL_VERIFY,
- HTTP_ALLOW_REDIRECTS,
- BROWSER_IMPERSONATE_AGENT,
MAX_CONCURRENT_TASKS,
ACCOUNT_PLACEHOLDER,
+ REQUIRED_KEYS_ENUMERATE,
+ REQUIRED_KEYS_SELF_ENUM,
+ WMN_REMOTE_URL,
+ WMN_KEY_SITES,
+ WMN_KEY_CATEGORIES,
+ WMN_KEY_AUTHORS,
+ WMN_KEY_LICENSE,
+ WMN_KEY_NAME,
)
+from ..core.network import BaseSession
+import jsonschema
+import aiofiles
class Naminter:
"""Main class for Naminter username enumeration."""
def __init__(
self,
- wmn_data: Dict[str, Any],
+ http_client: BaseSession,
+ wmn_data: Optional[Dict[str, Any]] = None,
wmn_schema: Optional[Dict[str, Any]] = None,
+ local_list_paths: Optional[List[Path]] = None,
+ remote_list_urls: Optional[List[str]] = None,
+ skip_validation: bool = False,
+ local_schema_path: Optional[Path] = None,
+ remote_schema_url: Optional[str] = None,
max_tasks: int = MAX_CONCURRENT_TASKS,
- timeout: int = HTTP_REQUEST_TIMEOUT_SECONDS,
- proxy: Optional[Union[str, Dict[str, str]]] = None,
- verify_ssl: bool = HTTP_SSL_VERIFY,
- allow_redirects: bool = HTTP_ALLOW_REDIRECTS,
- impersonate: BrowserTypeLiteral = BROWSER_IMPERSONATE_AGENT,
- ja3: Optional[str] = None,
- akamai: Optional[str] = None,
- extra_fp: Optional[Union[ExtraFingerprints, Dict[str, Any]]] = None,
) -> None:
"""Initialize Naminter with configuration parameters."""
self._logger = logging.getLogger(__name__)
self._logger.addHandler(logging.NullHandler())
- self._logger.info(
- "Initializing Naminter with configuration: max_tasks=%d, timeout=%ds, browser=%s, ssl_verify=%s, allow_redirects=%s, proxy=%s, ja3=%s, akamai=%s",
- max_tasks, timeout, impersonate, verify_ssl, allow_redirects, bool(proxy), ja3, akamai
- )
+ self._logger.debug("Initializing Naminter (max_tasks=%d)", max_tasks)
self.max_tasks = max_tasks
- self.timeout = timeout
- self.impersonate = impersonate
- self.verify_ssl = verify_ssl
- self.allow_redirects = allow_redirects
- self.proxy = configure_proxy(proxy)
- self.ja3 = ja3
- self.akamai = akamai
- self.extra_fp = extra_fp.to_dict() if isinstance(extra_fp, ExtraFingerprints) else extra_fp
-
- validate_numeric_values(self.max_tasks, self.timeout)
- validate_wmn_data(wmn_data, wmn_schema)
- self._wmn_data = wmn_data
- self._wmn_schema = wmn_schema
+ self._local_list_paths = local_list_paths
+ self._remote_list_urls = remote_list_urls
+ self._skip_validation = skip_validation
+ self._local_schema_path = local_schema_path
+ self._remote_schema_url = remote_schema_url
+
+ self._wmn_data: Optional[Dict[str, Any]] = wmn_data
+ self._wmn_schema: Optional[Dict[str, Any]] = wmn_schema
self._semaphore = asyncio.Semaphore(self.max_tasks)
self._session_lock = asyncio.Lock()
- self._session: Optional[AsyncSession] = None
+ self._http: BaseSession = http_client
+
+ async def _open_session(self) -> None:
+ """Open the HTTP session (idempotent, safe under concurrency)."""
+ async with self._session_lock:
+ try:
+ await self._http.open()
+ self._logger.info("HTTP client opened")
+ except SessionError as e:
+ self._logger.error("Failed to open HTTP session: %s", e)
+ raise DataError(f"HTTP session initialization failed: {e}") from e
+
+ async def _fetch_json(self, url: str) -> Dict[str, Any]:
+ """Fetch and parse JSON from a URL."""
+ if not url.strip():
+ raise ValidationError(f"Invalid URL: {url}")
+
+ try:
+ response = await self._http.get(url)
+ except TimeoutError as e:
+ raise DataError(f"Timeout while fetching from {url}: {e}") from e
+ except SessionError as e:
+ raise DataError(f"Session error while fetching from {url}: {e}") from e
+ except NetworkError as e:
+ raise DataError(f"Network error while fetching from {url}: {e}") from e
+
+ if response.status_code < 200 or response.status_code >= 300:
+ raise DataError(f"Failed to fetch from {url}: HTTP {response.status_code}")
- self._logger.info(
- "Naminter initialized successfully: max_tasks=%d, timeout=%ds, browser=%s, ssl_verify=%s, proxy=%s, ja3=%s, akamai=%s",
- self.max_tasks, self.timeout,
- self.impersonate, self.verify_ssl, bool(self.proxy), self.ja3, self.akamai
- )
+ try:
+ return response.json()
+ except (ValueError, json.JSONDecodeError) as e:
+ raise DataError(f"Failed to parse JSON from {url}: {e}") from e
- def _create_async_session(self) -> AsyncSession:
- """Create and configure the underlying HTTP session."""
- return AsyncSession(
- proxies=self.proxy,
- verify=self.verify_ssl,
- timeout=self.timeout,
- allow_redirects=self.allow_redirects,
- impersonate=self.impersonate,
- ja3=self.ja3,
- akamai=self.akamai,
- extra_fp=self.extra_fp,
+ async def _read_json_file(self, path: Union[str, Path]) -> Dict[str, Any]:
+ """Read JSON from a local file without blocking the event loop."""
+ try:
+ async with aiofiles.open(path, mode="r", encoding="utf-8") as file:
+ content = await file.read()
+ except FileNotFoundError as e:
+ raise FileAccessError(f"File not found: {path}") from e
+ except PermissionError as e:
+ raise FileAccessError(f"Permission denied accessing file: {path}") from e
+ except OSError as e:
+ raise FileAccessError(f"Error reading file {path}: {e}") from e
+
+ try:
+ return json.loads(content)
+ except json.JSONDecodeError as e:
+ raise DataError(f"Invalid JSON in file {path}: {e}") from e
+
+ async def _load_schema(self) -> Dict[str, Any]:
+ """Load WMN schema from local or remote source."""
+ if self._skip_validation:
+ return {}
+
+ try:
+ if self._local_schema_path:
+ return await self._read_json_file(self._local_schema_path)
+ elif self._remote_schema_url:
+ return await self._fetch_json(self._remote_schema_url)
+ else:
+ raise DataError("No schema source provided - either local_schema_path or remote_schema_url is required")
+ except (OSError, json.JSONDecodeError) as e:
+ raise DataError(f"Failed to load required WMN schema from local file: {e}") from e
+ except NetworkError as e:
+ raise DataError(f"Failed to load required WMN schema from {self._remote_schema_url}: {e}") from e
+
+ async def _load_dataset(self) -> Dict[str, Any]:
+ """Load WMN data from configured sources."""
+ dataset: Dict[str, Any] = {WMN_KEY_SITES: [], WMN_KEY_CATEGORIES: [], WMN_KEY_AUTHORS: [], WMN_KEY_LICENSE: []}
+
+ sources: List[Tuple[Union[str, Path], bool]] = []
+ if self._remote_list_urls:
+ sources.extend([(url, True) for url in self._remote_list_urls])
+ if self._local_list_paths:
+ sources.extend([(path, False) for path in self._local_list_paths])
+ if not sources:
+ sources = [(WMN_REMOTE_URL, True)]
+
+ coroutines = []
+ for source, is_remote in sources:
+ if is_remote:
+ coroutines.append(self._fetch_json(str(source)))
+ else:
+ coroutines.append(self._read_json_file(source))
+
+ results = await asyncio.gather(*coroutines, return_exceptions=True)
+
+ failures: List[str] = []
+ for src, res in zip(sources, results):
+ if isinstance(res, Exception):
+ source, is_remote = src
+ failures.append(f"{source} ({'remote' if is_remote else 'local'}): {res}")
+ self._logger.warning("Failed to load WMN data from %s: %s", source, res)
+ else:
+ merge_lists(res, dataset)
+
+ if not dataset[WMN_KEY_SITES]:
+ detail = "; ".join(failures) if failures else "no sources produced any sites"
+ raise DataError(f"No sites loaded from any source; details: {detail}")
+
+ return dataset
+
+ def _deduplicate_data(self, data: Dict[str, Any]) -> None:
+ """Deduplicate and clean the WMN data in place."""
+ unique_sites = {site[WMN_KEY_NAME]: site for site in data[WMN_KEY_SITES] if isinstance(site, dict) and site.get(WMN_KEY_NAME)}
+ data[WMN_KEY_SITES] = list(unique_sites.values())
+ data[WMN_KEY_CATEGORIES] = list(dict.fromkeys(data[WMN_KEY_CATEGORIES]))
+ data[WMN_KEY_AUTHORS] = list(dict.fromkeys(data[WMN_KEY_AUTHORS]))
+ data[WMN_KEY_LICENSE] = list(dict.fromkeys(data[WMN_KEY_LICENSE]))
+
+ async def _load_wmn_lists(self) -> Tuple[Dict[str, Any], Dict[str, Any]]:
+ """Unified async loader for WMN data and schema.
+
+        Returns a tuple of (dataset dict, schema dict).
+ """
+ if self._wmn_data and self._wmn_schema:
+ return (self._wmn_data, self._wmn_schema)
+
+ dataset, dataset_schema = await asyncio.gather(
+ self._load_dataset(),
+ self._load_schema(),
)
+ self._deduplicate_data(dataset)
- async def _open_session(self) -> None:
- """Open the HTTP session for manual (non-context) usage."""
- if self._session is None:
- self._session = self._create_async_session()
- self._logger.info("HTTP session opened successfully.")
-
- async def _ensure_session(self) -> None:
- """Ensure the HTTP session is initialized (safe for concurrent calls)."""
- if self._session is not None:
+ return (dataset, dataset_schema)
+
+ @staticmethod
+ def _validate_data(data: Dict[str, Any], schema: Dict[str, Any]) -> None:
+ """Validate WMN data against schema. Raises on failure."""
+ if not schema:
+ return
+ try:
+ jsonschema.Draft7Validator.check_schema(schema)
+ jsonschema.Draft7Validator(schema).validate(data)
+ except jsonschema.ValidationError as e:
+ raise SchemaError(f"WMN data does not match schema: {e.message}") from e
+ except jsonschema.SchemaError as e:
+ raise SchemaError(f"Invalid WMN schema: {e.message}") from e
+
+ async def _ensure_dataset(self) -> None:
+ """Load and validate the WMN dataset and schema if not already loaded."""
+ if self._wmn_data and self._wmn_schema:
return
- async with self._session_lock:
- if self._session is None:
- self._session = self._create_async_session()
- self._logger.info("HTTP session opened successfully.")
+ try:
+ data, schema = await self._load_wmn_lists()
+ if not self._skip_validation:
+ self._validate_data(data, schema)
+ self._wmn_data = data
+ self._wmn_schema = schema
+ self._logger.info("WMN dataset loaded (sites=%d)", len(self._wmn_data.get(WMN_KEY_SITES, [])))
+ except SchemaError as e:
+ raise DataError(f"WMN validation failed: {e}") from e
+ except Exception as e:
+ raise DataError(f"WMN load failed: {e}") from e
async def _close_session(self) -> None:
- """Close the HTTP session if it is open."""
- if self._session:
+ """Close the HTTP session if open."""
+ async with self._session_lock:
try:
- await self._session.close()
- self._logger.info("HTTP session closed successfully.")
- except Exception as e:
- self._logger.warning("Error closing session during cleanup: %s", e, exc_info=True)
- finally:
- self._session = None
+ await self._http.close()
+ self._logger.info("HTTP client closed")
+ except asyncio.CancelledError:
+ self._logger.warning("HTTP client close cancelled")
+ raise
+ except Exception as error:
+ self._logger.warning("Error during HTTP client close: %s", error)
async def __aenter__(self) -> "Naminter":
- await self._ensure_session()
+ await self._open_session()
+ try:
+ await self._ensure_dataset()
+ except DataError as e:
+ self._logger.error("Dataset load failed")
+ raise
return self
async def __aexit__(self, exc_type: Optional[type], exc_val: Optional[BaseException], exc_tb: Optional[Any]) -> None:
@@ -130,17 +255,26 @@ async def get_wmn_summary(
site_names: Optional[List[str]] = None,
include_categories: Optional[List[str]] = None,
exclude_categories: Optional[List[str]] = None,
- ) -> Dict[str, Any]:
+ ) -> Summary:
"""Get enriched WMN metadata information for diagnostics and UI.
Filters can be applied to compute statistics on a subset of sites.
"""
+ try:
+ await self._ensure_dataset()
+ except DataError as e:
+ self._logger.error("Dataset load failed")
+ raise
try:
sites: List[Dict[str, Any]] = self._filter_sites(
site_names,
include_categories=include_categories,
exclude_categories=exclude_categories,
)
+ except DataError as e:
+ self._logger.error("Site filtering failed: %s", e)
+ raise
+ try:
category_list: List[str] = [site.get("cat") for site in sites if site.get("cat")]
site_name_list: List[str] = [site.get("name") for site in sites if site.get("name")]
@@ -152,28 +286,24 @@ async def get_wmn_summary(
if isinstance(known_list, list) and len(known_list) > 0:
total_known_accounts += len(known_list)
- info: Dict[str, Any] = {
- "license": list(dict.fromkeys(self._wmn_data.get("license", []))),
- "authors": list(dict.fromkeys(self._wmn_data.get("authors", []))),
- "site_names": list(dict.fromkeys(site_name_list)),
- "sites_count": len(sites),
- "categories": list(dict.fromkeys(category_list)),
- "categories_count": len(set(category_list)),
- "known_accounts_total": total_known_accounts,
- }
-
- self._logger.info(
- "WMN info: %d sites, %d categories (filters - names: %s, include: %s, exclude: %s)",
- info["sites_count"],
- info["categories_count"],
- bool(site_names),
- bool(include_categories),
- bool(exclude_categories),
+ wmn_summary = Summary(
+ license=list(dict.fromkeys(self._wmn_data.get("license", []))),
+ authors=list(dict.fromkeys(self._wmn_data.get("authors", []))),
+ site_names=list(dict.fromkeys(site_name_list)),
+ sites_count=len(sites),
+ categories=list(dict.fromkeys(category_list)),
+ categories_count=len(set(category_list)),
+ known_accounts_total=total_known_accounts,
)
- return info
+
+ self._logger.info("WMN summary computed (sites=%d, categories=%d)",
+ wmn_summary.sites_count, wmn_summary.categories_count)
+ return wmn_summary
+ except DataError:
+ raise
except Exception as e:
- self._logger.error("Error retrieving WMN metadata: %s", e, exc_info=True)
- return {"error": f"Failed to retrieve metadata: {e}"}
+ self._logger.exception("Failed to compute WMN summary")
+ raise DataError(f"Failed to retrieve metadata: {e}") from e
def _filter_sites(
@@ -184,45 +314,45 @@ def _filter_sites(
) -> List[Dict[str, Any]]:
"""Filter sites by names and categories for the current WMN dataset."""
sites: List[Dict[str, Any]] = self._wmn_data.get("sites", [])
-
if site_names:
- requested_site_names: Set[str] = set(site_names)
+ filtered_site_names: Set[str] = set(deduplicate_strings(site_names))
available_names: Set[str] = {site.get("name") for site in sites}
- missing_names = requested_site_names - available_names
+ missing_names = filtered_site_names - available_names
if missing_names:
- raise DataError(f"Unknown site names: {missing_names}")
+ raise DataError(f"Unknown site names: {sorted(missing_names)}")
else:
- requested_site_names = set()
+ filtered_site_names = set()
filtered_sites: List[Dict[str, Any]] = sites
- if requested_site_names:
+ if filtered_site_names:
filtered_sites = [
- site for site in filtered_sites if site.get("name") in requested_site_names
+ site for site in filtered_sites if site.get("name") in filtered_site_names
]
if include_categories:
- include_set: Set[str] = set(include_categories)
+ include_set: Set[str] = set(deduplicate_strings(include_categories))
filtered_sites = [
site for site in filtered_sites if site.get("cat") in include_set
]
if exclude_categories:
- exclude_set: Set[str] = set(exclude_categories)
+ exclude_set: Set[str] = set(deduplicate_strings(exclude_categories))
filtered_sites = [
site for site in filtered_sites if site.get("cat") not in exclude_set
]
- self._logger.info(
- "Filtered to %d sites from %d total (names: %s, include: %s, exclude: %s)",
- len(filtered_sites),
- len(sites),
- bool(site_names),
- bool(include_categories),
- bool(exclude_categories),
+ self._logger.debug(
+ "Filter result %d/%d (names=%s include=%s exclude=%s)",
+ len(filtered_sites), len(sites),
+ bool(site_names), bool(include_categories), bool(exclude_categories),
)
return filtered_sites
+ def _get_missing_keys(self, site: Dict[str, Any], required_keys: Sequence[str]) -> List[str]:
+ """Return a list of required keys missing from a site mapping."""
+ return [key for key in required_keys if key not in site]
+
async def enumerate_site(
self,
site: Dict[str, Any],
@@ -230,157 +360,94 @@ async def enumerate_site(
fuzzy_mode: bool = False,
) -> SiteResult:
"""Enumerate a single site for the given username."""
- await self._ensure_session()
-
- name = site.get("name")
- category = site.get("cat")
- uri_check_template = site.get("uri_check")
- post_body_template = site.get("post_body")
- e_code, e_string = site.get("e_code"), site.get("e_string")
- m_code, m_string = site.get("m_code"), site.get("m_string")
-
- if not name:
- self._logger.error("Site configuration missing required 'name' field: %r", site)
- return SiteResult(
- name="",
- category=category,
- username=username,
- status=ResultStatus.ERROR,
- error="Site missing required field: name",
- )
-
- if not category:
- self._logger.error("Site '%s' missing required 'cat' field", name)
- return SiteResult(
- name=name,
- category="",
- username=username,
- status=ResultStatus.ERROR,
- error="Site missing required field: cat",
- )
-
- if not uri_check_template:
- self._logger.error("Site '%s' missing required 'uri_check' field", name)
+ await self._open_session()
+ try:
+ await self._ensure_dataset()
+ except DataError as e:
+ self._logger.error("Dataset load failed")
+ raise
+
+ missing_keys = self._get_missing_keys(site, REQUIRED_KEYS_ENUMERATE)
+ if missing_keys:
return SiteResult(
- name=name,
- category=category,
+ name=site.get("name", "unknown"),
+ category=site.get("cat", "unknown"),
username=username,
status=ResultStatus.ERROR,
- error="Site missing required field: uri_check",
+ error=f"Site entry missing required keys: {missing_keys}"
)
-
- has_placeholder = ACCOUNT_PLACEHOLDER in uri_check_template or (post_body_template and ACCOUNT_PLACEHOLDER in post_body_template)
- if not has_placeholder:
- return SiteResult(name, category, username, ResultStatus.ERROR, error=f"Site '{name}' missing {ACCOUNT_PLACEHOLDER} placeholder")
-
- matchers = {
- 'e_code': e_code,
- 'e_string': e_string,
- 'm_code': m_code,
- 'm_string': m_string,
- }
-
- if fuzzy_mode:
- if all(val is None for val in matchers.values()):
- self._logger.error(
- "Site '%s' must define at least one matcher (e_code, e_string, m_code, or m_string) for %s mode",
- name,
- ValidationMode.FUZZY,
- )
- return SiteResult(
- name=name,
- category=category,
- username=username,
- status=ResultStatus.ERROR,
- error=f"Site must define at least one matcher for {ValidationMode.FUZZY} mode",
- )
- else:
- missing = [name for name, val in matchers.items() if val is None]
- if missing:
- self._logger.error(
- "Site '%s' missing required matchers for %s mode: %s",
- name, ValidationMode.STRICT, missing
- )
- return SiteResult(
- name=name,
- category=category,
- username=username,
- status=ResultStatus.ERROR,
- error=f"Site missing required matchers for {ValidationMode.STRICT} mode: {missing}",
- )
+ name = site["name"]
+ category = site["cat"]
+
+ uri_check_template = site["uri_check"]
strip_bad_char = site.get("strip_bad_char", "")
clean_username = username.translate(str.maketrans("", "", strip_bad_char))
if not clean_username:
- return SiteResult(name, category, username, ResultStatus.ERROR, error=f"Username '{username}' became empty after character stripping")
+ return SiteResult(name, category, username, ResultStatus.ERROR,
+ error=f"Username became empty after stripping")
uri_check = uri_check_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
uri_pretty = site.get("uri_pretty", uri_check_template).replace(ACCOUNT_PLACEHOLDER, clean_username)
- self._logger.info("Enumerating site '%s' (category: %s) for username '%s' in %s mode",
- name, category, username, ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
+ self._logger.debug("Enumerating site=%s user=%s mode=%s", name, username,
+ "FUZZY" if fuzzy_mode else "STRICT")
+
+ headers = site.get("headers", {})
+ post_body = site.get("post_body")
+ if post_body:
+ post_body = post_body.replace(ACCOUNT_PLACEHOLDER, clean_username)
+ self._logger.debug("POST %s (body_present=%s)", uri_check, True)
+ else:
+ self._logger.debug("GET %s", uri_check)
try:
async with self._semaphore:
- start_time = time.monotonic()
- headers = site.get("headers", {})
- post_body = site.get("post_body")
-
if post_body:
- post_body = post_body.replace(ACCOUNT_PLACEHOLDER, clean_username)
- self._logger.debug("Making POST request to %s with body: %.100s", uri_check, post_body)
- response = await self._session.post(uri_check, headers=headers, data=post_body)
+ response = await self._http.post(uri_check, headers=headers, data=post_body)
else:
- self._logger.debug("Making GET request to %s", uri_check)
- response = await self._session.get(uri_check, headers=headers)
-
- elapsed = time.monotonic() - start_time
- self._logger.info("Request to '%s' completed in %.2fs with status %d", name, elapsed, response.status_code)
+ response = await self._http.get(uri_check, headers=headers)
+ elapsed = response.elapsed
+ self._logger.debug("Request ok (status=%d, elapsed=%.2fs)", response.status_code, elapsed)
except asyncio.CancelledError:
- self._logger.warning("Request to '%s' was cancelled", name)
+ self._logger.warning("Request cancelled")
raise
- except RequestsError as e:
- self._logger.warning("Network error while enumerating '%s': %s", name, e, exc_info=True)
+ except TimeoutError as e:
+ self._logger.warning("Request timeout for %s: %s", name, e)
return SiteResult(
- name=name,
- category=category,
- username=username,
- result_url=uri_pretty,
- status=ResultStatus.ERROR,
- error=f"Network error: {e}",
+ name=name, category=category, username=username, result_url=uri_pretty,
+ status=ResultStatus.ERROR, error=f"Request timeout: {e}"
+ )
+ except SessionError as e:
+ self._logger.warning("Session error for %s: %s", name, e)
+ return SiteResult(
+ name=name, category=category, username=username, result_url=uri_pretty,
+ status=ResultStatus.ERROR, error=f"Session error: {e}"
+ )
+ except NetworkError as e:
+ self._logger.warning("Network error for %s: %s", name, e)
+ return SiteResult(
+ name=name, category=category, username=username, result_url=uri_pretty,
+ status=ResultStatus.ERROR, error=f"Network error: {e}"
)
except Exception as e:
- self._logger.error("Unexpected error while enumerating '%s': %s", name, e, exc_info=True)
+ self._logger.exception("Unexpected error during request for %s", name)
return SiteResult(
- name=name,
- category=category,
- username=username,
- result_url=uri_pretty,
- status=ResultStatus.ERROR,
- error=f"Unexpected error: {e}",
+ name=name, category=category, username=username, result_url=uri_pretty,
+ status=ResultStatus.ERROR, error=f"Unexpected error: {e}"
)
- response_text = response.text
- response_code = response.status_code
-
result_status = SiteResult.get_result_status(
- response_code=response_code,
- response_text=response_text,
- e_code=e_code,
- e_string=e_string,
- m_code=m_code,
- m_string=m_string,
+ response_code=response.status_code,
+ response_text=response.text,
+ e_code=site["e_code"],
+ e_string=site["e_string"],
+ m_code=site["m_code"],
+ m_string=site["m_string"],
fuzzy_mode=fuzzy_mode,
)
- self._logger.debug(
- "Site '%s' result: %s (HTTP %d) in %.2fs (%s mode)",
- name,
- result_status.name,
- response_code,
- elapsed,
- ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT,
- )
+ self._logger.debug("Result=%s (HTTP %d)", result_status.name, response.status_code)
return SiteResult(
name=name,
@@ -388,9 +455,9 @@ async def enumerate_site(
username=username,
result_url=uri_pretty,
status=result_status,
- response_code=response_code,
+ response_code=response.status_code,
elapsed=elapsed,
- response_text=response_text,
+ response_text=response.text,
)
async def enumerate_usernames(
@@ -403,17 +470,30 @@ async def enumerate_usernames(
as_generator: bool = False,
) -> Union[List[SiteResult], AsyncGenerator[SiteResult, None]]:
"""Enumerate one or multiple usernames across all loaded sites."""
- await self._ensure_session()
+ await self._open_session()
+ try:
+ await self._ensure_dataset()
+ except DataError as e:
+ self._logger.exception("Dataset load failed")
+ raise
- usernames = validate_usernames(usernames)
- self._logger.info("Starting username enumeration for %d username(s): %s", len(usernames), usernames)
+ try:
+ usernames = validate_usernames(usernames)
+ except ValidationError as e:
+ self._logger.error("Invalid usernames: %s", e)
+ raise DataError("Invalid usernames") from e
+ else:
+ self._logger.info("Usernames validated (count=%d)", len(usernames))
- sites = self._filter_sites(
- site_names,
- include_categories=include_categories,
- exclude_categories=exclude_categories,
- )
- self._logger.info("Will enumerate against %d sites in %s mode", len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
+ try:
+ sites = self._filter_sites(
+ site_names,
+ include_categories=include_categories,
+ exclude_categories=exclude_categories,
+ )
+ except DataError as e:
+ self._logger.error("Site filtering failed: %s", e)
+ raise
coroutines = [
self.enumerate_site(site, username, fuzzy_mode)
@@ -426,9 +506,8 @@ async def iterate_results() -> AsyncGenerator[SiteResult, None]:
if as_generator:
return iterate_results()
-
- results = await asyncio.gather(*coroutines)
- return results
+
+ return await asyncio.gather(*coroutines)
async def self_enumeration(
self,
@@ -439,50 +518,42 @@ async def self_enumeration(
as_generator: bool = False
) -> Union[List[SelfEnumerationResult], AsyncGenerator[SelfEnumerationResult, None]]:
"""Run self-enumeration using known accounts for each site."""
- await self._ensure_session()
+ await self._open_session()
+ try:
+ await self._ensure_dataset()
+ except DataError as e:
+ self._logger.exception("Dataset load failed")
+ raise
- sites = self._filter_sites(
- site_names,
- include_categories=include_categories,
- exclude_categories=exclude_categories,
- )
+ try:
+ sites = self._filter_sites(
+ site_names,
+ include_categories=include_categories,
+ exclude_categories=exclude_categories,
+ )
+ except DataError as e:
+ self._logger.error("Site filtering failed: %s", e)
+ raise
- self._logger.info("Starting self-enumeration validation for %d sites in %s mode", len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
+ self._logger.info("Starting self-enumeration (sites=%d, mode=%s)",
+ len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
async def _enumerate_known(site: Dict[str, Any]) -> SelfEnumerationResult:
"""Helper function to enumerate a site with all its known users."""
- name = site.get("name")
- category = site.get("cat")
- known = site.get("known")
-
- if not name:
- self._logger.error("Site configuration missing required 'name' field for self-enumeration: %r", site)
- return SelfEnumerationResult(
- name="",
- category=category or "",
- results=[],
- error=f"Site missing required field: name"
- )
-
- if not category:
- self._logger.error("Site '%s' missing required 'cat' field for self-enumeration", name)
+ missing_keys = self._get_missing_keys(site, REQUIRED_KEYS_SELF_ENUM)
+ if missing_keys:
return SelfEnumerationResult(
- name=name,
- category="",
- results=[],
- error=f"Site missing required field: cat"
+ name=site.get("name", "unknown"),
+ category=site.get("cat", "unknown"),
+ error=f"Site data missing required keys: {missing_keys}"
)
- if known is None:
- self._logger.error("Site '%s' missing required 'known' field for self-enumeration", name)
- return SelfEnumerationResult(
- name=name,
- category=category,
- results=[],
- error=f"Site '{name}' missing required field: known"
- )
+ name = site["name"]
+ category = site["cat"]
+ known = site["known"]
- self._logger.info("Self-enumerating site '%s' (category: %s) with %d known accounts", name, category, len(known))
+ self._logger.debug("Self-enumerating site=%s category=%s known_count=%d",
+ name, category, len(known))
try:
coroutines = [self.enumerate_site(site, username, fuzzy_mode) for username in known]
@@ -494,11 +565,10 @@ async def _enumerate_known(site: Dict[str, Any]) -> SelfEnumerationResult:
results=results
)
except Exception as e:
- self._logger.error("Unexpected error during self-enumeration for site '%s': %s", name, e, exc_info=True)
+ self._logger.exception("Self-enumeration failed for site=%s", name)
return SelfEnumerationResult(
name=name,
category=category,
- results=[],
error=f"Unexpected error during self-enumeration: {e}"
)
@@ -513,5 +583,4 @@ async def iterate_results() -> AsyncGenerator[SelfEnumerationResult, None]:
if as_generator:
return iterate_results()
- results = await asyncio.gather(*coroutines)
- return results
\ No newline at end of file
+ return await asyncio.gather(*coroutines)
diff --git a/naminter/core/models.py b/naminter/core/models.py
index 16c3029..c2714eb 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -2,6 +2,7 @@
from enum import StrEnum, auto
from typing import Optional, Dict, Any, List, Set
from datetime import datetime
+import json
class ValidationMode(StrEnum):
FUZZY = auto()
@@ -83,7 +84,7 @@ class SelfEnumerationResult:
"""Result of a self-enumeration for a username."""
name: str
category: str
- results: List[SiteResult]
+ results: Optional[List[SiteResult]] = None
status: ResultStatus = field(init=False)
error: Optional[str] = None
created_at: datetime = field(default_factory=datetime.now)
@@ -118,9 +119,46 @@ def to_dict(self, exclude_response_text: bool = False) -> Dict[str, Any]:
return {
'name': self.name,
'category': self.category,
- 'results': [result.to_dict(exclude_response_text=exclude_response_text) for result in self.results],
+ 'results': [result.to_dict(exclude_response_text=exclude_response_text) for result in self.results] if self.results else [],
'status': self.status.value,
'created_at': self.created_at.isoformat(),
'error': self.error,
}
+@dataclass(slots=True, frozen=True)
+class Summary:
+ """Summary of the loaded WhatsMyName dataset and filters applied."""
+ license: List[str]
+ authors: List[str]
+ site_names: List[str]
+ sites_count: int
+ categories: List[str]
+ categories_count: int
+ known_accounts_total: int
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Convert Summary to a plain dictionary for serialization/legacy callers."""
+ return {
+ 'license': list(self.license),
+ 'authors': list(self.authors),
+ 'site_names': list(self.site_names),
+ 'sites_count': int(self.sites_count),
+ 'categories': list(self.categories),
+ 'categories_count': int(self.categories_count),
+ 'known_accounts_total': int(self.known_accounts_total),
+ }
+
+@dataclass(slots=True, frozen=True)
+class Response:
+ """HTTP response abstraction used by session adapters."""
+ status_code: int
+ text: str
+ elapsed: float
+
+ def json(self) -> Any:
+ """Parse the response body as JSON and return the resulting object.
+
+ Raises:
+ ValueError: If the response text is not valid JSON.
+ """
+ return json.loads(self.text)
\ No newline at end of file
diff --git a/naminter/core/network.py b/naminter/core/network.py
new file mode 100644
index 0000000..df69576
--- /dev/null
+++ b/naminter/core/network.py
@@ -0,0 +1,171 @@
+import asyncio
+import logging
+import json
+from typing import Any, Dict, Optional, Union, Protocol, Mapping, runtime_checkable
+
+from curl_cffi.requests import AsyncSession
+from curl_cffi.requests.exceptions import Timeout as CurlTimeout, RequestException as CurlRequestException
+from curl_cffi import BrowserTypeLiteral
+
+from .exceptions import NetworkError, TimeoutError, SessionError
+from .models import Response
+
+
+@runtime_checkable
+class BaseSession(Protocol):
+ """Async HTTP client protocol for Naminter adapters."""
+
+ async def open(self) -> None:
+ """Open the underlying HTTP session."""
+ ...
+
+ async def close(self) -> None:
+ """Close the underlying HTTP session."""
+ ...
+
+ async def get(self, url: str, headers: Optional[Mapping[str, str]] = None) -> Response:
+ """HTTP GET request (see class docstring for error contract)."""
+ ...
+
+ async def post(
+ self, url: str, headers: Optional[Mapping[str, str]] = None, data: Optional[Union[str, bytes]] = None
+ ) -> Response:
+ """HTTP POST request (see class docstring for error contract)."""
+ ...
+
+ async def request(
+ self,
+ method: str,
+ url: str,
+ headers: Optional[Mapping[str, str]] = None,
+ data: Optional[Union[str, bytes]] = None,
+ ) -> Response:
+ """Generic HTTP request (see class docstring for error contract)."""
+ ...
+
+
+class CurlCFFISession:
+ def __init__(
+ self,
+ *,
+ proxies: Optional[Union[str, Dict[str, str]]] = None,
+ verify: bool = True,
+ timeout: int = 30,
+ allow_redirects: bool = True,
+ impersonate: Optional[BrowserTypeLiteral] = None,
+ ja3: Optional[str] = None,
+ akamai: Optional[str] = None,
+ extra_fp: Optional[Dict[str, Any]] = None,
+ ) -> None:
+ self._logger = logging.getLogger(__name__)
+ self._session: Optional[AsyncSession] = None
+
+ if isinstance(proxies, str):
+ proxies = {"http": proxies, "https": proxies}
+
+ self._proxies: Optional[Union[str, Dict[str, str]]] = proxies
+ self._verify: bool = verify
+ self._timeout: int = timeout
+ self._allow_redirects: bool = allow_redirects
+ self._impersonate: Optional[BrowserTypeLiteral] = impersonate
+ self._ja3: Optional[str] = ja3
+ self._akamai: Optional[str] = akamai
+ self._extra_fp: Optional[Dict[str, Any]] = extra_fp
+
+ self._lock = asyncio.Lock()
+
+ async def open(self) -> None:
+ if self._session is not None:
+ return
+ async with self._lock:
+ if self._session is None:
+ try:
+ self._session = AsyncSession(
+ proxies=self._proxies,
+ verify=self._verify,
+ timeout=self._timeout,
+ allow_redirects=self._allow_redirects,
+ impersonate=self._impersonate,
+ ja3=self._ja3,
+ akamai=self._akamai,
+ extra_fp=self._extra_fp,
+ )
+ except Exception as e:
+ raise SessionError("Failed to open curl-cffi session", cause=e) from e
+
+ async def close(self) -> None:
+ if not self._session:
+ return
+ try:
+ await self._session.close()
+ except Exception as e:
+ self._logger.warning("Error closing curl-cffi session: %s", e)
+ finally:
+ self._session = None
+
+ async def get(self, url: str, headers: Optional[Mapping[str, str]] = None) -> Response:
+ await self.open()
+ if self._session is None:
+ raise SessionError("Session not initialized")
+
+ try:
+ response = await self._session.get(url, headers=dict(headers) if headers else None)
+ elapsed = response.elapsed
+ return Response(status_code=response.status_code, text=response.text, elapsed=elapsed)
+ except CurlTimeout as e:
+ raise TimeoutError(f"GET timeout for {url}", cause=e) from e
+ except CurlRequestException as e:
+ raise NetworkError(f"GET failed for {url}: {e}", cause=e) from e
+ except Exception as e:
+ raise NetworkError(f"GET failed for {url}: {e}", cause=e) from e
+
+ async def post(
+ self, url: str, headers: Optional[Mapping[str, str]] = None, data: Optional[Union[str, bytes]] = None
+ ) -> Response:
+ await self.open()
+ if self._session is None:
+ raise SessionError("Session not initialized")
+
+ try:
+ response = await self._session.post(url, headers=dict(headers) if headers else None, data=data)
+ elapsed = response.elapsed
+ return Response(status_code=response.status_code, text=response.text, elapsed=elapsed)
+ except CurlTimeout as e:
+ raise TimeoutError(f"POST timeout for {url}", cause=e) from e
+ except CurlRequestException as e:
+ raise NetworkError(f"POST failed for {url}: {e}", cause=e) from e
+ except Exception as e:
+ raise NetworkError(f"POST failed for {url}: {e}", cause=e) from e
+
+ async def request(
+ self,
+ method: str,
+ url: str,
+ headers: Optional[Mapping[str, str]] = None,
+ data: Optional[Union[str, bytes]] = None,
+ ) -> Response:
+ await self.open()
+ if self._session is None:
+ raise SessionError("Session not initialized")
+
+ try:
+ response = await self._session.request(method=method, url=url, headers=dict(headers) if headers else None, data=data)
+
+ elapsed = response.elapsed
+ return Response(status_code=response.status_code, text=response.text, elapsed=elapsed)
+ except CurlTimeout as e:
+ raise TimeoutError(f"{method} timeout for {url}", cause=e) from e
+ except CurlRequestException as e:
+ raise NetworkError(f"{method} failed for {url}: {e}", cause=e) from e
+ except Exception as e:
+ raise NetworkError(f"{method} failed for {url}: {e}", cause=e) from e
+
+
+
+__all__ = [
+ "Response",
+ "BaseSession",
+ "CurlCFFISession",
+]
+
+
diff --git a/naminter/core/utils.py b/naminter/core/utils.py
index 8196fec..bcf3560 100644
--- a/naminter/core/utils.py
+++ b/naminter/core/utils.py
@@ -1,15 +1,18 @@
import logging
-from typing import Any, Dict, List, Optional, Union, Set
-
-import jsonschema
+import json
+import asyncio
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Union, Set, Tuple
from .exceptions import (
ConfigurationError,
DataError,
- SchemaValidationError,
+ SchemaError,
ValidationError,
)
+
from .constants import (
+ WMN_REMOTE_URL,
MIN_TASKS,
MAX_TASKS_LIMIT,
MIN_TIMEOUT,
@@ -20,62 +23,61 @@
VERY_HIGH_CONCURRENCY_MIN_TIMEOUT,
EXTREME_CONCURRENCY_THRESHOLD,
LOW_TIMEOUT_WARNING_THRESHOLD,
+ WMN_LIST_FIELDS,
)
+from .network import BaseSession
logger = logging.getLogger(__name__)
-def validate_wmn_data(data: Dict[str, Any], schema: Optional[Dict[str, Any]]) -> None:
- """Validate WMN data against schema."""
- if not data:
- logger.error("No WMN data provided during initialization.")
- raise DataError("No WMN data provided during initialization.")
-
- if schema:
- try:
- jsonschema.validate(instance=data, schema=schema)
- logger.info("WMN data validation successful")
- except jsonschema.ValidationError as e:
- logger.error(f"WMN data does not match schema: {e.message}")
- raise SchemaValidationError(f"WMN data does not match schema: {e.message}") from e
- except jsonschema.SchemaError as e:
- logger.error(f"Invalid WMN schema: {e.message}")
- raise SchemaValidationError(f"Invalid WMN schema: {e.message}") from e
- else:
- logger.warning("No schema provided - skipping WMN data validation")
-
-
-def validate_numeric_values(max_tasks: int, timeout: int) -> None:
- """Validate numeric configuration values for max_tasks and timeout."""
- logger.debug(f"Validating numeric values: max_tasks={max_tasks}, timeout={timeout}")
-
+def deduplicate_strings(values: Optional[List[str]]) -> List[str]:
+ """Return a list of unique, non-empty strings preserving original order."""
+ if not values:
+ return []
+
+ seen: Set[str] = set()
+ unique_values: List[str] = []
+
+ for item in values:
+ if isinstance(item, str):
+ normalized = item.strip()
+ if normalized and normalized not in seen:
+ seen.add(normalized)
+ unique_values.append(normalized)
+
+ return unique_values
+
+def validate_numeric_values(max_tasks: int, timeout: int) -> List[str]:
+ """Validate numeric configuration values and return warnings.
+ """
+ warnings: List[str] = []
+
if not (MIN_TASKS <= max_tasks <= MAX_TASKS_LIMIT):
- logger.error(f"max_tasks out of range: {max_tasks} not in [{MIN_TASKS}-{MAX_TASKS_LIMIT}]")
raise ConfigurationError(f"Invalid max_tasks: {max_tasks} must be between {MIN_TASKS} and {MAX_TASKS_LIMIT}")
if not (MIN_TIMEOUT <= timeout <= MAX_TIMEOUT):
- logger.error(f"timeout out of range: {timeout} not in [{MIN_TIMEOUT}-{MAX_TIMEOUT}]")
raise ConfigurationError(f"Invalid timeout: {timeout} must be between {MIN_TIMEOUT} and {MAX_TIMEOUT} seconds")
if max_tasks > HIGH_CONCURRENCY_THRESHOLD and timeout < HIGH_CONCURRENCY_MIN_TIMEOUT:
- logger.warning(
- f"High concurrency ({max_tasks} tasks) with low timeout ({timeout}s) may cause failures - consider increasing timeout or reducing max_tasks"
+ warnings.append(
+ f"High concurrency ({max_tasks}) with low timeout ({timeout}s) may cause failures; consider increasing timeout or reducing max_tasks."
)
elif max_tasks > VERY_HIGH_CONCURRENCY_THRESHOLD and timeout < VERY_HIGH_CONCURRENCY_MIN_TIMEOUT:
- logger.warning(
- f"Very high concurrency ({max_tasks} tasks) with very low timeout ({timeout}s) may cause connection issues - recommend timeout >= {HIGH_CONCURRENCY_MIN_TIMEOUT}s for max_tasks > {VERY_HIGH_CONCURRENCY_THRESHOLD}"
+ warnings.append(
+ f"Very high concurrency ({max_tasks}) with very low timeout ({timeout}s) may cause connection issues; recommend timeout >= {HIGH_CONCURRENCY_MIN_TIMEOUT}s for max_tasks > {VERY_HIGH_CONCURRENCY_THRESHOLD}."
)
if max_tasks > EXTREME_CONCURRENCY_THRESHOLD:
- logger.warning(
- f"Extremely high concurrency ({max_tasks} tasks) may overwhelm servers or cause rate limiting - lower value recommended"
+ warnings.append(
+ f"Extremely high concurrency ({max_tasks}) may overwhelm servers or cause rate limiting; lowering value is recommended."
)
if timeout < LOW_TIMEOUT_WARNING_THRESHOLD:
- logger.warning(
- f"Very low timeout ({timeout}s) may cause legitimate requests to fail - increase timeout for better accuracy"
+ warnings.append(
+ f"Very low timeout ({timeout}s) may cause legitimate requests to fail; increase timeout for better accuracy."
)
+ return warnings
def configure_proxy(proxy: Optional[Union[str, Dict[str, str]]]) -> Optional[Dict[str, str]]:
"""Validate and configure proxy settings."""
@@ -84,51 +86,38 @@ def configure_proxy(proxy: Optional[Union[str, Dict[str, str]]]) -> Optional[Dic
if isinstance(proxy, str):
if not proxy.strip():
- logger.error("Proxy validation failed: empty string.")
raise ConfigurationError("Invalid proxy: proxy string cannot be empty")
if not (proxy.startswith('http://') or proxy.startswith('https://') or proxy.startswith('socks5://')):
- logger.error(f"Proxy validation failed: invalid protocol in '{proxy}'")
raise ConfigurationError("Invalid proxy: must be http://, https://, or socks5:// URL")
- logger.info("Proxy configuration validated successfully")
+ logger.debug("Proxy configuration validated")
return {"http": proxy, "https": proxy}
elif isinstance(proxy, dict):
for protocol, proxy_url in proxy.items():
if protocol not in ['http', 'https']:
- logger.error(f"Proxy validation failed: invalid protocol '{protocol}' in dict.")
raise ConfigurationError(f"Invalid proxy protocol: {protocol}")
if not isinstance(proxy_url, str) or not proxy_url.strip():
- logger.error(f"Proxy validation failed: empty or invalid URL for protocol '{protocol}'.")
raise ConfigurationError(f"Invalid proxy URL for {protocol}: must be non-empty string")
- logger.info("Proxy dictionary configuration validated successfully")
+ logger.debug("Proxy dictionary configuration validated")
return proxy
-
- else:
- logger.error(f"Proxy validation failed: not a string or dict. Value: {proxy!r}")
- raise ConfigurationError("Invalid proxy: must be string or dict")
-
def validate_usernames(usernames: List[str]) -> List[str]:
"""Validate and deduplicate usernames, preserving order."""
- logger.debug(f"Validating and deduplicating usernames: {usernames!r}")
-
- seen: Set[str] = set()
- unique_usernames: List[str] = []
-
- for u in usernames:
- if isinstance(u, str):
- name = u.strip()
- if name and name not in seen:
- seen.add(name)
- unique_usernames.append(name)
-
+
+ unique_usernames: List[str] = deduplicate_strings(usernames)
+
if not unique_usernames:
- logger.error("No valid usernames provided after validation.")
raise ValidationError("No valid usernames provided")
-
- logger.info(f"Validated {len(unique_usernames)} unique usernames")
+
return unique_usernames
+
+def merge_lists(data: Dict[str, Any], accumulator: Dict[str, Any]) -> None:
+ """Merge list fields from data into the accumulator dictionary."""
+ if isinstance(data, dict):
+ for key in WMN_LIST_FIELDS:
+ if key in data and isinstance(data[key], list):
+ accumulator[key].extend(data[key])
From 17bfd907d1c6e76e4c5ff49de34d489debc7a509 Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Sun, 28 Sep 2025 18:51:02 +0300
Subject: [PATCH 13/19] feat: migrate to poetry and add ruff
---
pyproject.toml | 158 ++++++++++++++++++++++++++++++++++++-------------
1 file changed, 116 insertions(+), 42 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 302ffe4..6d48962 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,12 +1,11 @@
-[build-system]
-requires = ["setuptools>=61.0", "wheel"]
-build-backend = "setuptools.build_meta"
-
-[project]
+[tool.poetry]
name = "naminter"
-dynamic = ["version", "description", "authors", "license"]
+version = "1.0.7"
+description = "Asynchronous OSINT username enumeration tool that searches hundreds of websites using the WhatsMyName dataset"
+authors = ["3xp0rt "]
readme = "README.md"
-requires-python = ">=3.11"
+packages = [{include = "naminter"}]
+include = ["naminter/cli/templates/*.html"]
classifiers = [
"Development Status :: 5 - Production/Stable",
@@ -22,19 +21,6 @@ classifiers = [
"Topic :: Utilities"
]
-dependencies = [
- "click>=8.2.1",
- "curl-cffi>=0.13.0",
- "aiofiles>=24.1.0",
- "jinja2>=3.1.6",
- "jsonschema>=4.25.0",
- "rich>=14.1.0",
- "rich-click>=1.8.9",
- "weasyprint>=66.0"
-]
-
-maintainers = [{ name = "3xp0rt" }]
-
keywords = [
"osint",
"username",
@@ -47,32 +33,120 @@ keywords = [
"naminter"
]
-[project.urls]
-Homepage = "https://github.com/3xp0rt/naminter"
-Repository = "https://github.com/3xp0rt/naminter"
-BugTracker = "https://github.com/3xp0rt/naminter/issues"
-Documentation= "https://github.com/3xp0rt/naminter#readme"
-Source = "https://github.com/3xp0rt/naminter"
+[tool.poetry.urls]
+Homepage = "https://github.com/3xp0rt/naminter"
+Repository = "https://github.com/3xp0rt/naminter"
+BugTracker = "https://github.com/3xp0rt/naminter/issues"
+Documentation = "https://github.com/3xp0rt/naminter#readme"
+Source = "https://github.com/3xp0rt/naminter"
+
+[tool.poetry.dependencies]
+python = "^3.11"
+click = "^8.3.0"
+curl-cffi = "^0.13.0"
+aiofiles = "^24.1.0"
+jinja2 = "^3.1.6"
+jsonschema = "^4.25.1"
+rich = "^14.1.0"
+rich-click = "^1.9.1"
+weasyprint = "^66.0"
-[project.scripts]
+[tool.poetry.group.dev.dependencies]
+ruff = "^0.13.2"
+pytest = "^8.4.2"
+pytest-cov = "^7.0.0"
+coverage = "^7.10.7"
+taskipy = "^1.14.1"
+
+[tool.poetry.scripts]
naminter = "naminter.cli.main:entry_point"
-[tool.setuptools.dynamic]
-version = {attr = "naminter.__version__"}
-description = {attr = "naminter.__description__"}
-authors = [
- {name = "naminter.__author__", email = "naminter.__author_email__"}
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+# Pytest configuration
+[tool.pytest.ini_options]
+pythonpath = "."
+addopts = "-p no:warnings"
+
+# Taskipy tasks
+[tool.taskipy.tasks]
+lint = "ruff check .; ruff check . --diff"
+format = "ruff check . --fix; ruff format ."
+test = "pytest -s -x --cov=naminter -vv"
+post_test = "coverage html"
+
+# Ruff configuration
+[tool.ruff]
+line-length = 88
+target-version = "py311"
+
+[tool.ruff.lint]
+preview = true
+select = [
+ 'I', # isort
+ 'F', # pyflakes
+ 'E', # pycodestyle errors
+ 'W', # pycodestyle warnings
+ 'PL', # pylint
+ 'PT', # flake8-pytest-style
+ 'B', # flake8-bugbear
+ 'C4', # flake8-comprehensions
+ 'UP', # pyupgrade
+ 'ARG001', # unused-function-args
+ 'SIM', # flake8-simplify
+ 'TCH', # flake8-type-checking
+ 'TID', # flake8-tidy-imports
+ 'Q', # flake8-quotes
+ 'RUF', # ruff-specific rules
+ 'A', # flake8-builtins
+ 'COM', # flake8-commas
+ 'DTZ', # flake8-datetimez
+ 'EM', # flake8-errmsg
+ 'G', # flake8-logging-format
+ 'ICN', # flake8-import-conventions
+ 'N', # pep8-naming
+ 'PIE', # flake8-pie
+ 'T20', # flake8-print
+ 'YTT', # flake8-2020
]
-license = {text = "naminter.__license__"}
+ignore = [
+ 'PLR0913', # too-many-arguments
+ 'PLR0917', # too-many-positional-arguments
+ 'PLR0904', # too-many-public-methods
+ 'E701', # multiple-statements-on-one-line
+ 'B008', # do-not-perform-function-calls-in-argument-defaults
+ 'C901', # too-complex
+ 'PLR2004', # magic-value-comparison
+ 'PLR0912', # too-many-branches
+ 'PLR0915', # too-many-statements
+ 'PLR0916', # too-many-boolean-expressions
+ 'COM812', # missing-trailing-comma (conflicts with formatter)
+]
+exclude = ['tests', 'tests/*']
+
+[tool.ruff.lint.per-file-ignores]
+"__init__.py" = ["F401"] # unused imports
+
+[tool.ruff.lint.isort]
+known-first-party = ["naminter"]
+
+[tool.ruff.lint.flake8-quotes]
+docstring-quotes = "double"
-[tool.setuptools]
-package-dir = {"" = "."}
-include-package-data = true
+[tool.ruff.lint.mccabe]
+max-complexity = 10
-[tool.setuptools.packages.find]
-where = ["."]
-include = ["naminter*"]
+[tool.ruff.lint.pylint]
+max-args = 8
+max-branches = 12
+max-returns = 6
+max-statements = 50
-[tool.setuptools.package-data]
-"naminter.cli.templates" = ["*.html"]
-"naminter" = ["cli/templates/*.html"]
+[tool.ruff.format]
+preview = true
+quote-style = "double"
+docstring-code-format = true
+docstring-code-line-length = 79
+exclude = ['tests', 'tests/*']
From bf16feed94988e0f7f135a2d4b8db75e06220c05 Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Mon, 29 Sep 2025 22:05:01 +0300
Subject: [PATCH 14/19] chore: adjust packaging config in pyproject.toml
---
pyproject.toml | 15 ++-------------
1 file changed, 2 insertions(+), 13 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 6d48962..c269c6b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,6 +3,7 @@ name = "naminter"
version = "1.0.7"
description = "Asynchronous OSINT username enumeration tool that searches hundreds of websites using the WhatsMyName dataset"
authors = ["3xp0rt "]
+license = "MIT"
readme = "README.md"
packages = [{include = "naminter"}]
include = ["naminter/cli/templates/*.html"]
@@ -53,9 +54,6 @@ weasyprint = "^66.0"
[tool.poetry.group.dev.dependencies]
ruff = "^0.13.2"
-pytest = "^8.4.2"
-pytest-cov = "^7.0.0"
-coverage = "^7.10.7"
taskipy = "^1.14.1"
[tool.poetry.scripts]
@@ -65,22 +63,16 @@ naminter = "naminter.cli.main:entry_point"
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
-# Pytest configuration
-[tool.pytest.ini_options]
-pythonpath = "."
-addopts = "-p no:warnings"
-
# Taskipy tasks
[tool.taskipy.tasks]
lint = "ruff check .; ruff check . --diff"
format = "ruff check . --fix; ruff format ."
-test = "pytest -s -x --cov=naminter -vv"
-post_test = "coverage html"
# Ruff configuration
[tool.ruff]
line-length = 88
target-version = "py311"
+exclude = []
[tool.ruff.lint]
preview = true
@@ -90,7 +82,6 @@ select = [
'E', # pycodestyle errors
'W', # pycodestyle warnings
'PL', # pylint
- 'PT', # flake8-pytest-style
'B', # flake8-bugbear
'C4', # flake8-comprehensions
'UP', # pyupgrade
@@ -124,7 +115,6 @@ ignore = [
'PLR0916', # too-many-boolean-expressions
'COM812', # missing-trailing-comma (conflicts with formatter)
]
-exclude = ['tests', 'tests/*']
[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"] # unused imports
@@ -149,4 +139,3 @@ preview = true
quote-style = "double"
docstring-code-format = true
docstring-code-line-length = 79
-exclude = ['tests', 'tests/*']
From bcd9b38c951de63fbb38f47a82e3d5b2f1912739 Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Mon, 29 Sep 2025 22:14:36 +0300
Subject: [PATCH 15/19] style: apply ruff formatting
---
naminter/__init__.py | 2 +-
naminter/cli/config.py | 75 +-
naminter/cli/console.py | 118 +--
naminter/cli/constants.py | 4 +-
naminter/cli/exporters.py | 101 +--
naminter/cli/main.py | 454 ++++++++----
naminter/cli/progress.py | 56 +-
naminter/cli/utils.py | 11 +-
naminter/core/constants.py | 9 +-
naminter/core/exceptions.py | 48 +-
naminter/core/main.py | 360 +++++----
naminter/core/models.py | 134 ++--
naminter/core/network.py | 95 ++-
naminter/core/utils.py | 103 +--
poetry.lock | 1390 +++++++++++++++++++++++++++++++++++
15 files changed, 2352 insertions(+), 608 deletions(-)
create mode 100644 poetry.lock
diff --git a/naminter/__init__.py b/naminter/__init__.py
index 7f07e6d..790747e 100644
--- a/naminter/__init__.py
+++ b/naminter/__init__.py
@@ -8,4 +8,4 @@
__license__ = "MIT"
__email__ = "contact@3xp0rt.com"
__url__ = "https://github.com/3xp0rt/Naminter"
-__all__ = ['Naminter']
\ No newline at end of file
+__all__ = ["Naminter"]
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index 3243eb4..c3941b6 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -1,7 +1,9 @@
+import json
from dataclasses import dataclass, field
from pathlib import Path
-from typing import List, Optional, Union, Dict, Any
-import json
+from typing import Any
+
+from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
from ..cli.console import display_warning
from ..core.constants import (
@@ -11,30 +13,30 @@
WMN_SCHEMA_URL,
)
from ..core.exceptions import ConfigurationError
-from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
@dataclass
class NaminterConfig:
"""Configuration for Naminter CLI tool.
-
+
Holds all configuration parameters for username enumeration operations, including network settings, export options, filtering, and validation parameters.
"""
+
# Required parameters
- usernames: List[str]
- sites: Optional[List[str]] = None
- logger: Optional[object] = None
+ usernames: list[str]
+ sites: list[str] | None = None
+ logger: object | None = None
# List and schema sources
- local_list_paths: Optional[List[Union[Path, str]]] = None
- remote_list_urls: Optional[List[str]] = None
- local_schema_path: Optional[Union[Path, str]] = None
- remote_schema_url: Optional[str] = WMN_SCHEMA_URL
+ local_list_paths: list[Path | str] | None = None
+ remote_list_urls: list[str] | None = None
+ local_schema_path: Path | str | None = None
+ remote_schema_url: str | None = WMN_SCHEMA_URL
# Validation and filtering
skip_validation: bool = False
- include_categories: List[str] = field(default_factory=list)
- exclude_categories: List[str] = field(default_factory=list)
+ include_categories: list[str] = field(default_factory=list)
+ exclude_categories: list[str] = field(default_factory=list)
filter_all: bool = False
filter_found: bool = False
filter_ambiguous: bool = False
@@ -46,37 +48,37 @@ class NaminterConfig:
# Network and concurrency
max_tasks: int = MAX_CONCURRENT_TASKS
timeout: int = HTTP_REQUEST_TIMEOUT_SECONDS
- proxy: Optional[str] = None
+ proxy: str | None = None
allow_redirects: bool = False
verify_ssl: bool = False
- impersonate: Optional[BrowserTypeLiteral] = "chrome"
- ja3: Optional[str] = None
- akamai: Optional[str] = None
- extra_fp: Optional[Union[ExtraFingerprints, Dict[str, Any], str]] = None
+ impersonate: BrowserTypeLiteral | None = "chrome"
+ ja3: str | None = None
+ akamai: str | None = None
+ extra_fp: ExtraFingerprints | dict[str, Any] | str | None = None
browse: bool = False
fuzzy_mode: bool = False
self_enumeration: bool = False
no_progressbar: bool = False
# Logging
- log_level: Optional[str] = None
- log_file: Optional[str] = None
+ log_level: str | None = None
+ log_file: str | None = None
show_details: bool = False
# Response saving
save_response: bool = False
- response_path: Optional[str] = None
+ response_path: str | None = None
open_response: bool = False
# Export options
csv_export: bool = False
- csv_path: Optional[str] = None
+ csv_path: str | None = None
pdf_export: bool = False
- pdf_path: Optional[str] = None
+ pdf_path: str | None = None
html_export: bool = False
- html_path: Optional[str] = None
+ html_path: str | None = None
json_export: bool = False
- json_path: Optional[str] = None
+ json_path: str | None = None
def __post_init__(self) -> None:
"""Validate and normalize configuration after initialization."""
@@ -102,11 +104,11 @@ def __post_init__(self) -> None:
self.filter_unknown,
self.filter_not_found,
self.filter_not_valid,
- self.filter_errors
+ self.filter_errors,
]
if not any(filter_fields):
self.filter_found = True
-
+
if isinstance(self.impersonate, str) and self.impersonate.lower() == "none":
self.impersonate = None
@@ -118,20 +120,19 @@ def __post_init__(self) -> None:
except TypeError as e:
raise ConfigurationError(f"Invalid data type in extra_fp: {e}") from e
-
@property
- def response_dir(self) -> Optional[Path]:
+ def response_dir(self) -> Path | None:
"""Return response directory Path if save_response is enabled."""
if not self.save_response:
return None
if self.response_path:
return Path(self.response_path)
-
+
return Path.cwd() / "responses"
@property
- def export_formats(self) -> Dict[str, Optional[str]]:
+ def export_formats(self) -> dict[str, str | None]:
"""Return enabled export formats with their custom paths."""
export_configs = [
("csv", self.csv_export, self.csv_path),
@@ -139,14 +140,14 @@ def export_formats(self) -> Dict[str, Optional[str]]:
("html", self.html_export, self.html_path),
("json", self.json_export, self.json_path),
]
-
+
return {
- format_name: path
- for format_name, is_enabled, path in export_configs
+ format_name: path
+ for format_name, is_enabled, path in export_configs
if is_enabled
}
- def to_dict(self) -> Dict[str, Any]:
+ def to_dict(self) -> dict[str, Any]:
"""Convert configuration to a dictionary."""
return {
"usernames": self.usernames,
@@ -166,7 +167,9 @@ def to_dict(self) -> Dict[str, Any]:
"impersonate": self.impersonate,
"ja3": self.ja3,
"akamai": self.akamai,
- "extra_fp": self.extra_fp.to_dict() if isinstance(self.extra_fp, ExtraFingerprints) else self.extra_fp,
+ "extra_fp": self.extra_fp.to_dict()
+ if isinstance(self.extra_fp, ExtraFingerprints)
+ else self.extra_fp,
"browse": self.browse,
"fuzzy_mode": self.fuzzy_mode,
"self_enumeration": self.self_enumeration,
diff --git a/naminter/cli/console.py b/naminter/cli/console.py
index ddd603b..7585779 100644
--- a/naminter/cli/console.py
+++ b/naminter/cli/console.py
@@ -1,5 +1,4 @@
from pathlib import Path
-from typing import Dict, List, Optional
from rich import box
from rich.console import Console
@@ -9,21 +8,21 @@
from rich.text import Text
from rich.tree import Tree
-from ..core.models import ResultStatus, SiteResult, SelfEnumerationResult
-from .. import __description__, __version__, __author__, __license__, __email__, __url__
+from .. import __author__, __description__, __email__, __license__, __url__, __version__
+from ..core.models import ResultStatus, SelfEnumerationResult, SiteResult
console: Console = Console()
-THEME: Dict[str, str] = {
- 'primary': 'bright_blue',
- 'success': 'bright_green',
- 'error': 'bright_red',
- 'warning': 'bright_yellow',
- 'info': 'bright_cyan',
- 'muted': 'bright_black',
+THEME: dict[str, str] = {
+ "primary": "bright_blue",
+ "success": "bright_green",
+ "error": "bright_red",
+ "warning": "bright_yellow",
+ "info": "bright_cyan",
+ "muted": "bright_black",
}
-_STATUS_SYMBOLS: Dict[ResultStatus, str] = {
+_STATUS_SYMBOLS: dict[ResultStatus, str] = {
ResultStatus.FOUND: "+",
ResultStatus.AMBIGUOUS: "*",
ResultStatus.UNKNOWN: "?",
@@ -32,23 +31,26 @@
ResultStatus.ERROR: "!",
}
-_STATUS_STYLES: Dict[ResultStatus, Style] = {
- ResultStatus.FOUND: Style(color=THEME['success'], bold=True),
- ResultStatus.AMBIGUOUS: Style(color=THEME['warning'], bold=True),
- ResultStatus.UNKNOWN: Style(color=THEME['warning']),
- ResultStatus.NOT_FOUND: Style(color=THEME['error']),
- ResultStatus.NOT_VALID: Style(color=THEME['error']),
- ResultStatus.ERROR: Style(color=THEME['error'], bold=True),
+_STATUS_STYLES: dict[ResultStatus, Style] = {
+ ResultStatus.FOUND: Style(color=THEME["success"], bold=True),
+ ResultStatus.AMBIGUOUS: Style(color=THEME["warning"], bold=True),
+ ResultStatus.UNKNOWN: Style(color=THEME["warning"]),
+ ResultStatus.NOT_FOUND: Style(color=THEME["error"]),
+ ResultStatus.NOT_VALID: Style(color=THEME["error"]),
+ ResultStatus.ERROR: Style(color=THEME["error"], bold=True),
}
+
class ResultFormatter:
"""Formats test results for console output."""
-
+
def __init__(self, show_details: bool = False) -> None:
"""Initialize the result formatter."""
self.show_details = show_details
- def format_result(self, site_result: SiteResult, response_file_path: Optional[Path] = None) -> Tree:
+ def format_result(
+ self, site_result: SiteResult, response_file_path: Path | None = None
+ ) -> Tree:
"""Format a single result as a tree-style output."""
root_label = Text()
@@ -56,10 +58,10 @@ def format_result(self, site_result: SiteResult, response_file_path: Optional[Pa
status_style = _STATUS_STYLES.get(site_result.status, Style())
root_label.append(status_symbol, style=status_style)
- root_label.append(" [", style=THEME['muted'])
- root_label.append(site_result.name or "Unknown", style=THEME['info'])
- root_label.append("] ", style=THEME['muted'])
- root_label.append(site_result.result_url or "No URL", style=THEME['primary'])
+ root_label.append(" [", style=THEME["muted"])
+ root_label.append(site_result.name or "Unknown", style=THEME["info"])
+ root_label.append("] ", style=THEME["muted"])
+ root_label.append(site_result.result_url or "No URL", style=THEME["primary"])
tree = Tree(root_label, guide_style=THEME["muted"])
@@ -69,64 +71,85 @@ def format_result(self, site_result: SiteResult, response_file_path: Optional[Pa
site_result.response_code,
site_result.elapsed,
site_result.error,
- response_file_path
+ response_file_path,
)
return tree
- def format_self_enumeration(self, self_enumeration_result: SelfEnumerationResult, response_files: Optional[List[Optional[Path]]] = None) -> Tree:
+ def format_self_enumeration(
+ self,
+ self_enumeration_result: SelfEnumerationResult,
+ response_files: list[Path | None] | None = None,
+ ) -> Tree:
"""Format self-enumeration results into a tree structure."""
root_label = Text()
- root_label.append(_STATUS_SYMBOLS.get(self_enumeration_result.status, "?"), style=_STATUS_STYLES.get(self_enumeration_result.status, Style()))
+ root_label.append(
+ _STATUS_SYMBOLS.get(self_enumeration_result.status, "?"),
+ style=_STATUS_STYLES.get(self_enumeration_result.status, Style()),
+ )
root_label.append(" [", style=THEME["muted"])
root_label.append(self_enumeration_result.name, style=THEME["info"])
root_label.append("]", style=THEME["muted"])
tree = Tree(root_label, guide_style=THEME["muted"], expanded=True)
-
+
for i, test in enumerate(self_enumeration_result.results):
if test is None:
continue
url_text = Text()
- url_text.append(_STATUS_SYMBOLS.get(test.status, "?"), style=_STATUS_STYLES.get(test.status, Style()))
+ url_text.append(
+ _STATUS_SYMBOLS.get(test.status, "?"),
+ style=_STATUS_STYLES.get(test.status, Style()),
+ )
url_text.append(" ", style=THEME["muted"])
- url_text.append(f"{test.username}: ", style=THEME["info"])
+ url_text.append(f"{test.username}: ", style=THEME["info"])
url_text.append(test.result_url or "No URL", style=THEME["primary"])
test_node = tree.add(url_text)
if self.show_details:
- response_file = response_files[i] if response_files and i < len(response_files) else None
+ response_file = (
+ response_files[i]
+ if response_files and i < len(response_files)
+ else None
+ )
self._add_debug_info(
test_node,
test.response_code,
test.elapsed,
test.error,
- response_file
+ response_file,
)
return tree
- def _add_debug_info(self, node: Tree, response_code: Optional[int] = None, elapsed: Optional[float] = None,
- error: Optional[str] = None, response_file: Optional[Path] = None) -> None:
+ def _add_debug_info(
+ self,
+ node: Tree,
+ response_code: int | None = None,
+ elapsed: float | None = None,
+ error: str | None = None,
+ response_file: Path | None = None,
+ ) -> None:
"""Add debug information to a tree node."""
if response_code is not None:
- node.add(Text(f"Response Code: {response_code}", style=THEME['info']))
+ node.add(Text(f"Response Code: {response_code}", style=THEME["info"]))
if response_file is not None:
- node.add(Text(f"Response File: {response_file}", style=THEME['info']))
+ node.add(Text(f"Response File: {response_file}", style=THEME["info"]))
if elapsed is not None:
- node.add(Text(f"Elapsed: {elapsed:.2f}s", style=THEME['info']))
+ node.add(Text(f"Elapsed: {elapsed:.2f}s", style=THEME["info"]))
if error is not None:
- node.add(Text(f"Error: {error}", style=THEME['error']))
+ node.add(Text(f"Error: {error}", style=THEME["error"]))
+
def display_version() -> None:
"""Display version and metadata of the application."""
version_table = Table.grid(padding=(0, 2))
- version_table.add_column(style=THEME['info'])
+ version_table.add_column(style=THEME["info"])
version_table.add_column(style="bold")
version_table.add_row("Version:", __version__)
@@ -139,12 +162,13 @@ def display_version() -> None:
panel = Panel(
version_table,
title="[bold]:mag: Naminter[/]",
- border_style=THEME['muted'],
+ border_style=THEME["muted"],
box=box.ROUNDED,
)
console.print(panel)
+
def _display_message(message: str, style: str, symbol: str, label: str) -> None:
"""Display a styled message with symbol and label."""
@@ -152,28 +176,32 @@ def _display_message(message: str, style: str, symbol: str, label: str) -> None:
formatted_message.append(symbol, style=style)
formatted_message.append(f" [{label}] ", style=style)
formatted_message.append(message)
-
+
console.print(formatted_message)
console.file.flush()
+
def display_error(message: str, show_traceback: bool = False) -> None:
"""Display an error message."""
- _display_message(message, THEME['error'], "!", "ERROR")
+ _display_message(message, THEME["error"], "!", "ERROR")
if show_traceback:
console.print_exception()
+
def display_warning(message: str) -> None:
"""Display a warning message."""
- _display_message(message, THEME['warning'], "?", "WARNING")
+ _display_message(message, THEME["warning"], "?", "WARNING")
+
def display_info(message: str) -> None:
"""Display an info message."""
- _display_message(message, THEME['info'], "*", "INFO")
+ _display_message(message, THEME["info"], "*", "INFO")
+
def display_success(message: str) -> None:
"""Display a success message."""
- _display_message(message, THEME['success'], "+", "SUCCESS")
+ _display_message(message, THEME["success"], "+", "SUCCESS")
diff --git a/naminter/cli/constants.py b/naminter/cli/constants.py
index b231f66..27274e1 100644
--- a/naminter/cli/constants.py
+++ b/naminter/cli/constants.py
@@ -1,3 +1,3 @@
# Constants for file operations
-RESPONSE_FILE_DATE_FORMAT = '%Y%m%d_%H%M%S'
-RESPONSE_FILE_EXTENSION = '.html'
+RESPONSE_FILE_DATE_FORMAT = "%Y%m%d_%H%M%S"
+RESPONSE_FILE_EXTENSION = ".html"
diff --git a/naminter/cli/exporters.py b/naminter/cli/exporters.py
index a8c6d05..85243a0 100644
--- a/naminter/cli/exporters.py
+++ b/naminter/cli/exporters.py
@@ -1,40 +1,48 @@
import csv
+import importlib.resources
import json
from datetime import datetime
from pathlib import Path
-from typing import Any, Dict, List, Optional, Protocol, Literal
-import importlib.resources
+from typing import Any, Literal, Protocol
+
import jinja2
from weasyprint import HTML
-from ..core.models import SiteResult, SelfEnumerationResult
from ..core.exceptions import ConfigurationError, ExportError, FileAccessError
+from ..core.models import SelfEnumerationResult, SiteResult
+
+FormatName = Literal["csv", "json", "html", "pdf"]
+ResultDict = dict[str, Any]
-FormatName = Literal['csv', 'json', 'html', 'pdf']
-ResultDict = Dict[str, Any]
class ExportMethod(Protocol):
- def __call__(self, results: List[ResultDict], output_path: Path) -> None: ...
+ def __call__(self, results: list[ResultDict], output_path: Path) -> None: ...
+
class Exporter:
"""
Unified exporter for CSV, JSON, HTML, and PDF formats.
"""
- SUPPORTED_FORMATS: List[FormatName] = ['csv', 'json', 'html', 'pdf']
- def __init__(self, usernames: Optional[List[str]] = None, version: Optional[str] = None) -> None:
+ SUPPORTED_FORMATS: list[FormatName] = ["csv", "json", "html", "pdf"]
+
+ def __init__(
+ self, usernames: list[str] | None = None, version: str | None = None
+ ) -> None:
self.usernames = usernames or []
- self.version = version or 'unknown'
- self.export_methods: Dict[FormatName, ExportMethod] = {
- 'csv': self._export_csv,
- 'json': self._export_json,
- 'html': self._export_html,
- 'pdf': self._export_pdf,
+ self.version = version or "unknown"
+ self.export_methods: dict[FormatName, ExportMethod] = {
+ "csv": self._export_csv,
+ "json": self._export_json,
+ "html": self._export_html,
+ "pdf": self._export_pdf,
}
- def export(self,
- results: List[SiteResult | SelfEnumerationResult],
- formats: Dict[FormatName, Optional[str | Path]]) -> None:
+ def export(
+ self,
+ results: list[SiteResult | SelfEnumerationResult],
+ formats: dict[FormatName, str | Path | None],
+ ) -> None:
"""
Export results in the given formats.
"""
@@ -42,31 +50,32 @@ def export(self,
return
dict_results = [
- result.to_dict(exclude_response_text=True)
- for result in results
+ result.to_dict(exclude_response_text=True) for result in results
]
for format_name, path in formats.items():
if format_name not in self.SUPPORTED_FORMATS:
raise ExportError(f"Unsupported export format: {format_name}")
-
+
try:
out_path = self._resolve_path(format_name, path)
out_path.parent.mkdir(parents=True, exist_ok=True)
self.export_methods[format_name](dict_results, out_path)
except FileAccessError as e:
- raise ExportError(f"File access error during {format_name} export: {e}") from e
+ raise ExportError(
+ f"File access error during {format_name} export: {e}"
+ ) from e
except Exception as e:
raise ExportError(f"Failed to export {format_name}: {e}") from e
- def _export_csv(self, results: List[ResultDict], output_path: Path) -> None:
+ def _export_csv(self, results: list[ResultDict], output_path: Path) -> None:
if not results:
return
fieldnames = list(results[0].keys())
try:
- with output_path.open('w', newline='', encoding='utf-8') as f:
+ with output_path.open("w", newline="", encoding="utf-8") as f:
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(results)
@@ -77,9 +86,11 @@ def _export_csv(self, results: List[ResultDict], output_path: Path) -> None:
except Exception as e:
raise ExportError(f"CSV export error: {e}") from e
- def _export_json(self, results: List[ResultDict], output_path: Path) -> None:
+ def _export_json(self, results: list[ResultDict], output_path: Path) -> None:
try:
- output_path.write_text(json.dumps(results, ensure_ascii=False, indent=2), encoding='utf-8')
+ output_path.write_text(
+ json.dumps(results, ensure_ascii=False, indent=2), encoding="utf-8"
+ )
except PermissionError as e:
raise FileAccessError(f"Permission denied writing JSON file: {e}") from e
except OSError as e:
@@ -89,24 +100,30 @@ def _export_json(self, results: List[ResultDict], output_path: Path) -> None:
except Exception as e:
raise ExportError(f"JSON export error: {e}") from e
- def _generate_html(self, results: List[ResultDict]) -> str:
- grouped: Dict[str, List[ResultDict]] = {}
+ def _generate_html(self, results: list[ResultDict]) -> str:
+ grouped: dict[str, list[ResultDict]] = {}
for item in results:
- cat = item.get('category', 'uncategorized')
+ cat = item.get("category", "uncategorized")
grouped.setdefault(cat, []).append(item)
- default_fields = ['name', 'result_url', 'elapsed']
+ default_fields = ["name", "result_url", "elapsed"]
display_fields = [f for f in default_fields if any(f in r for r in results)]
try:
- with importlib.resources.files('naminter.cli.templates').joinpath('report.html').open('r', encoding='utf-8') as f:
+ with (
+ importlib.resources.files("naminter.cli.templates")
+ .joinpath("report.html")
+ .open("r", encoding="utf-8") as f
+ ):
template_source = f.read()
except FileNotFoundError as e:
- raise ConfigurationError(f'HTML template not found: {e}') from e
+ raise ConfigurationError(f"HTML template not found: {e}") from e
except PermissionError as e:
- raise FileAccessError(f'Permission denied reading HTML template: {e}') from e
+ raise FileAccessError(
+ f"Permission denied reading HTML template: {e}"
+ ) from e
except Exception as e:
- raise ConfigurationError(f'Could not load HTML template: {e}') from e
+ raise ConfigurationError(f"Could not load HTML template: {e}") from e
template = jinja2.Template(template_source, autoescape=True)
@@ -115,15 +132,15 @@ def _generate_html(self, results: List[ResultDict]) -> str:
display_fields=display_fields,
usernames=self.usernames,
version=self.version,
- current_time=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+ current_time=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
total_count=len(results),
- category_count=len(grouped)
+ category_count=len(grouped),
)
- def _export_html(self, results: List[ResultDict], output_path: Path) -> None:
+ def _export_html(self, results: list[ResultDict], output_path: Path) -> None:
try:
html = self._generate_html(results)
- output_path.write_text(html, encoding='utf-8')
+ output_path.write_text(html, encoding="utf-8")
except PermissionError as e:
raise FileAccessError(f"Permission denied writing HTML file: {e}") from e
except OSError as e:
@@ -131,9 +148,9 @@ def _export_html(self, results: List[ResultDict], output_path: Path) -> None:
except Exception as e:
raise ExportError(f"HTML export error: {e}") from e
- def _export_pdf(self, results: List[ResultDict], output_path: Path) -> None:
+ def _export_pdf(self, results: list[ResultDict], output_path: Path) -> None:
if not results:
- raise ExportError('No results to export to PDF')
+ raise ExportError("No results to export to PDF")
try:
html = self._generate_html(results)
@@ -145,10 +162,10 @@ def _export_pdf(self, results: List[ResultDict], output_path: Path) -> None:
except Exception as e:
raise ExportError(f"PDF export error: {e}") from e
- def _resolve_path(self, format_name: FormatName, custom: Optional[str | Path]) -> Path:
+ def _resolve_path(self, format_name: FormatName, custom: str | Path | None) -> Path:
if custom:
return Path(custom)
-
- timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
filename = f"results_{timestamp}.{format_name}"
return Path.cwd() / filename
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index c13136f..cd71eed 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -1,36 +1,41 @@
import asyncio
-import json
import logging
+import typing
import webbrowser
from pathlib import Path
-from typing import Any, Dict, List, Optional, Union
-import typing
-import aiofiles
+from typing import Any
+import aiofiles
import rich_click as click
-
from curl_cffi import BrowserTypeLiteral
+
+from .. import __version__
from ..cli.config import NaminterConfig
from ..cli.console import (
+ ResultFormatter,
console,
display_error,
- display_warning,
display_version,
- ResultFormatter,
+ display_warning,
)
+from ..cli.constants import RESPONSE_FILE_DATE_FORMAT, RESPONSE_FILE_EXTENSION
from ..cli.exporters import Exporter
from ..cli.progress import ProgressManager, ResultsTracker
-from ..cli.constants import RESPONSE_FILE_DATE_FORMAT, RESPONSE_FILE_EXTENSION
from ..cli.utils import sanitize_filename
-from ..core.models import ResultStatus, SiteResult, SelfEnumerationResult
+from ..core.constants import (
+ HTTP_ALLOW_REDIRECTS,
+ HTTP_REQUEST_TIMEOUT_SECONDS,
+ HTTP_SSL_VERIFY,
+ LOGGING_FORMAT,
+ MAX_CONCURRENT_TASKS,
+ WMN_SCHEMA_URL,
+)
+from ..core.exceptions import ConfigurationError, DataError, ExportError
from ..core.main import Naminter
+from ..core.models import ResultStatus, SelfEnumerationResult, SiteResult
from ..core.network import CurlCFFISession
-from ..core.constants import MAX_CONCURRENT_TASKS, HTTP_REQUEST_TIMEOUT_SECONDS, HTTP_ALLOW_REDIRECTS, HTTP_SSL_VERIFY, WMN_SCHEMA_URL, LOGGING_FORMAT
from ..core.utils import validate_numeric_values
-from ..core.exceptions import DataError, ConfigurationError, ExportError
-from .. import __description__, __version__
-
def _version_callback(ctx: click.Context, param: click.Option, value: bool) -> None:
"""Eager callback to display version and exit."""
@@ -42,17 +47,19 @@ def _version_callback(ctx: click.Context, param: click.Option, value: bool) -> N
class NaminterCLI:
"""Handles username enumeration operations."""
-
+
def __init__(self, config: NaminterConfig) -> None:
self.config: NaminterConfig = config
- self._formatter: ResultFormatter = ResultFormatter(show_details=config.show_details)
- self._response_dir: Optional[Path] = self._setup_response_dir()
+ self._formatter: ResultFormatter = ResultFormatter(
+ show_details=config.show_details
+ )
+ self._response_dir: Path | None = self._setup_response_dir()
- def _setup_response_dir(self) -> Optional[Path]:
+ def _setup_response_dir(self) -> Path | None:
"""Setup response directory if response saving is enabled."""
if not self.config.save_response:
return None
-
+
try:
dir_path = self.config.response_dir
if dir_path is None:
@@ -61,7 +68,9 @@ def _setup_response_dir(self) -> Optional[Path]:
dir_path.mkdir(parents=True, exist_ok=True)
return dir_path
except PermissionError as e:
- display_error(f"Permission denied creating/accessing response directory: {e}")
+ display_error(
+ f"Permission denied creating/accessing response directory: {e}"
+ )
return None
except OSError as e:
display_error(f"OS error creating/accessing response directory: {e}")
@@ -79,30 +88,38 @@ def _setup_logging(config: NaminterConfig) -> None:
log_path = Path(config.log_file)
log_path.parent.mkdir(parents=True, exist_ok=True)
- level_value = getattr(logging, str(config.log_level or "INFO").upper(), logging.INFO)
+ level_value = getattr(
+ logging, str(config.log_level or "INFO").upper(), logging.INFO
+ )
logger = logging.getLogger("naminter")
logger.setLevel(level_value)
logger.propagate = False
-
- has_file_handler = any(isinstance(handler, logging.FileHandler) for handler in logger.handlers)
+
+ has_file_handler = any(
+ isinstance(handler, logging.FileHandler) for handler in logger.handlers
+ )
if not has_file_handler:
- file_handler = logging.FileHandler(str(log_path), mode="a", encoding="utf-8")
+ file_handler = logging.FileHandler(
+ str(log_path), mode="a", encoding="utf-8"
+ )
formatter = logging.Formatter(LOGGING_FORMAT)
file_handler.setFormatter(formatter)
file_handler.setLevel(level_value)
logger.addHandler(file_handler)
-
+
async def run(self) -> None:
"""Main execution method with progress tracking."""
try:
- warnings = validate_numeric_values(self.config.max_tasks, self.config.timeout)
+ warnings = validate_numeric_values(
+ self.config.max_tasks, self.config.timeout
+ )
for message in warnings:
display_warning(message)
except ConfigurationError as e:
display_error(f"Configuration error: {e}")
return
-
+
http_client = CurlCFFISession(
proxies=self.config.proxy,
verify=self.config.verify_ssl,
@@ -136,7 +153,7 @@ async def run(self) -> None:
display_error(f"Export error: {e}")
return
- async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
+ async def _run_check(self, naminter: Naminter) -> list[SiteResult]:
"""Run the username enumeration functionality."""
summary = await naminter.get_wmn_summary(
site_names=self.config.sites,
@@ -145,28 +162,34 @@ async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
)
actual_site_count = int(summary.sites_count)
total_sites = actual_site_count * len(self.config.usernames)
-
+
tracker = ResultsTracker(total_sites)
- results: List[SiteResult] = []
-
- with ProgressManager(console, disabled=self.config.no_progressbar) as progress_mgr:
- progress_mgr.start(total_sites, "[bright_cyan]Enumerating usernames...[/bright_cyan]")
-
+ results: list[SiteResult] = []
+
+ with ProgressManager(
+ console, disabled=self.config.no_progressbar
+ ) as progress_mgr:
+ progress_mgr.start(
+ total_sites, "[bright_cyan]Enumerating usernames...[/bright_cyan]"
+ )
+
result_stream = await naminter.enumerate_usernames(
usernames=self.config.usernames,
site_names=self.config.sites,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
fuzzy_mode=self.config.fuzzy_mode,
- as_generator=True
- )
+ as_generator=True,
+ )
async for result in result_stream:
tracker.add_result(result)
if self._filter_result(result):
response_file_path = await self._process_result(result)
- formatted_output = self._formatter.format_result(result, response_file_path)
+ formatted_output = self._formatter.format_result(
+ result, response_file_path
+ )
console.print(formatted_output)
results.append(result)
@@ -174,7 +197,9 @@ async def _run_check(self, naminter: Naminter) -> List[SiteResult]:
return results
- async def _run_self_enumeration(self, naminter: Naminter) -> List[SelfEnumerationResult]:
+ async def _run_self_enumeration(
+ self, naminter: Naminter
+ ) -> list[SelfEnumerationResult]:
"""Run the self-enumeration functionality."""
summary = await naminter.get_wmn_summary(
site_names=self.config.sites,
@@ -184,45 +209,53 @@ async def _run_self_enumeration(self, naminter: Naminter) -> List[SelfEnumeratio
total_tests = int(summary.known_accounts_total)
tracker = ResultsTracker(total_tests)
- results: List[SelfEnumerationResult] = []
+ results: list[SelfEnumerationResult] = []
+
+ with ProgressManager(
+ console, disabled=self.config.no_progressbar
+ ) as progress_mgr:
+ progress_mgr.start(
+ total_tests, "[bright_cyan]Running self-enumeration...[/bright_cyan]"
+ )
- with ProgressManager(console, disabled=self.config.no_progressbar) as progress_mgr:
- progress_mgr.start(total_tests, "[bright_cyan]Running self-enumeration...[/bright_cyan]")
-
result_stream = await naminter.self_enumeration(
site_names=self.config.sites,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
fuzzy_mode=self.config.fuzzy_mode,
- as_generator=True
+ as_generator=True,
)
async for result in result_stream:
for site_result in result.results:
tracker.add_result(site_result)
- progress_mgr.update(advance=1, description=tracker.get_progress_text())
+ progress_mgr.update(
+ advance=1, description=tracker.get_progress_text()
+ )
if self._filter_result(result):
- response_files: List[Optional[Path]] = []
+ response_files: list[Path | None] = []
for site_result in result.results:
response_file_path = await self._process_result(site_result)
if response_file_path:
response_files.append(response_file_path)
else:
response_files.append(None)
- formatted_output = self._formatter.format_self_enumeration(result, response_files)
+ formatted_output = self._formatter.format_self_enumeration(
+ result, response_files
+ )
console.print(formatted_output)
results.append(result)
return results
-
- def _filter_result(self, result: Union[SiteResult, SelfEnumerationResult]) -> bool:
+
+ def _filter_result(self, result: SiteResult | SelfEnumerationResult) -> bool:
"""Determine if a result should be included in output based on filter settings."""
status = result.status
-
+
if self.config.filter_all:
return True
-
+
filter_map = {
self.config.filter_found: ResultStatus.FOUND,
self.config.filter_ambiguous: ResultStatus.AMBIGUOUS,
@@ -231,13 +264,13 @@ def _filter_result(self, result: Union[SiteResult, SelfEnumerationResult]) -> bo
self.config.filter_not_valid: ResultStatus.NOT_VALID,
self.config.filter_errors: ResultStatus.ERROR,
}
-
+
return any(
- filter_enabled and status == expected_status
+ filter_enabled and status == expected_status
for filter_enabled, expected_status in filter_map.items()
) or not any(filter_map.keys())
- async def _process_result(self, result: SiteResult) -> Optional[Path]:
+ async def _process_result(self, result: SiteResult) -> Path | None:
"""Process a single result: handle browser opening, response saving, and console output."""
response_file = None
@@ -288,117 +321,246 @@ async def _write_file(self, file_path: Path, content: str) -> None:
display_error(f"Failed to write to {file_path}: {e}")
-@click.group(invoke_without_command=True, no_args_is_help=True, context_settings=dict(help_option_names=['-h', '--help']))
-@click.option('--version', is_flag=True, is_eager=True, expose_value=False, callback=_version_callback, help='Show version information and exit')
-@click.option('--no-color', is_flag=True, help='Disable colored console output')
-@click.option('--no-progressbar', is_flag=True, help='Disable progress bar during execution')
-@click.option('--username', '-u', multiple=True, help='Username(s) to search for across social media platforms')
-@click.option('--site', '-s', multiple=True, help='Specific site name(s) to enumerate (e.g., "GitHub", "Twitter")')
-@click.option('--local-list', type=click.Path(exists=True, path_type=Path), multiple=True, help='Path(s) to local JSON file(s) containing WhatsMyName site data')
-@click.option('--remote-list', multiple=True, help='URL(s) to fetch remote WhatsMyName site data')
-@click.option('--local-schema', type=click.Path(exists=True, path_type=Path), help='Path to local WhatsMyName JSON schema file for validation')
-@click.option('--remote-schema', default=WMN_SCHEMA_URL, help='URL to fetch custom WhatsMyName JSON schema for validation')
-@click.option('--skip-validation', is_flag=True, help='Skip JSON schema validation of WhatsMyName data')
-@click.option('--self-enumeration', is_flag=True, help='Run self-enumeration mode to validate site detection accuracy')
-@click.option('--include-categories', multiple=True, help='Include only sites from specified categories (e.g., "social", "coding")')
-@click.option('--exclude-categories', multiple=True, help='Exclude sites from specified categories (e.g., "adult", "gaming")')
-@click.option('--proxy', help='Proxy server to use for requests (e.g., http://proxy:port, socks5://proxy:port)')
-@click.option('--timeout', type=int, default=HTTP_REQUEST_TIMEOUT_SECONDS, help='Maximum time in seconds to wait for each HTTP request')
-@click.option('--allow-redirects', is_flag=True, default=HTTP_ALLOW_REDIRECTS, help='Whether to follow HTTP redirects automatically')
-@click.option('--verify-ssl', is_flag=True, default=HTTP_SSL_VERIFY, help='Whether to verify SSL/TLS certificates for HTTPS requests')
-@click.option('--impersonate', type=click.Choice(["none", *typing.get_args(BrowserTypeLiteral)]), default="chrome", help='Browser to impersonate in HTTP requests (use "none" to disable)')
-@click.option('--ja3', help='JA3 fingerprint string for TLS fingerprinting')
-@click.option('--akamai', help='Akamai fingerprint string for Akamai bot detection bypass')
-@click.option('--extra-fp', help='Extra fingerprinting options as JSON string (e.g., \'{"tls_grease": true, "tls_cert_compression": "brotli"}\')')
-@click.option('--max-tasks', type=int, default=MAX_CONCURRENT_TASKS, help='Maximum number of concurrent tasks')
-@click.option('--fuzzy', 'fuzzy_mode', is_flag=True, help='Enable fuzzy validation mode')
-@click.option('--log-level', type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']), help='Set logging level')
-@click.option('--log-file', help='Path to log file for debug output')
-@click.option('--show-details', is_flag=True, help='Show detailed information in console output')
-@click.option('--browse', is_flag=True, help='Open found profiles in web browser')
-@click.option('--save-response', is_flag=True, help='Save HTTP response content for each result to files')
-@click.option('--response-path', help='Custom directory path for saving response files')
-@click.option('--open-response', is_flag=True, help='Open saved response files in web browser')
-@click.option('--csv', 'csv_export', is_flag=True, help='Export results to CSV file')
-@click.option('--csv-path', help='Custom path for CSV export')
-@click.option('--pdf', 'pdf_export', is_flag=True, help='Export results to PDF file')
-@click.option('--pdf-path', help='Custom path for PDF export')
-@click.option('--html', 'html_export', is_flag=True, help='Export results to HTML file')
-@click.option('--html-path', help='Custom path for HTML export')
-@click.option('--json', 'json_export', is_flag=True, help='Export results to JSON file')
-@click.option('--json-path', help='Custom path for JSON export')
-@click.option('--filter-all', is_flag=True, help='Include all results in console output and exports')
-@click.option('--filter-found', is_flag=True, help='Show only found results in console output and exports')
-@click.option('--filter-ambiguous', is_flag=True, help='Show only ambiguous results in console output and exports')
-@click.option('--filter-unknown', is_flag=True, help='Show only unknown results in console output and exports')
-@click.option('--filter-not-found', is_flag=True, help='Show only not found results in console output and exports')
-@click.option('--filter-not-valid', is_flag=True, help='Show only not valid results in console output and exports')
-@click.option('--filter-errors', is_flag=True, help='Show only error results in console output and exports')
+@click.group(
+ invoke_without_command=True,
+ no_args_is_help=True,
+ context_settings=dict(help_option_names=["-h", "--help"]),
+)
+@click.option(
+ "--version",
+ is_flag=True,
+ is_eager=True,
+ expose_value=False,
+ callback=_version_callback,
+ help="Show version information and exit",
+)
+@click.option("--no-color", is_flag=True, help="Disable colored console output")
+@click.option(
+ "--no-progressbar", is_flag=True, help="Disable progress bar during execution"
+)
+@click.option(
+ "--username",
+ "-u",
+ multiple=True,
+ help="Username(s) to search for across social media platforms",
+)
+@click.option(
+ "--site",
+ "-s",
+ multiple=True,
+ help='Specific site name(s) to enumerate (e.g., "GitHub", "Twitter")',
+)
+@click.option(
+ "--local-list",
+ type=click.Path(exists=True, path_type=Path),
+ multiple=True,
+ help="Path(s) to local JSON file(s) containing WhatsMyName site data",
+)
+@click.option(
+ "--remote-list", multiple=True, help="URL(s) to fetch remote WhatsMyName site data"
+)
+@click.option(
+ "--local-schema",
+ type=click.Path(exists=True, path_type=Path),
+ help="Path to local WhatsMyName JSON schema file for validation",
+)
+@click.option(
+ "--remote-schema",
+ default=WMN_SCHEMA_URL,
+ help="URL to fetch custom WhatsMyName JSON schema for validation",
+)
+@click.option(
+ "--skip-validation",
+ is_flag=True,
+ help="Skip JSON schema validation of WhatsMyName data",
+)
+@click.option(
+ "--self-enumeration",
+ is_flag=True,
+ help="Run self-enumeration mode to validate site detection accuracy",
+)
+@click.option(
+ "--include-categories",
+ multiple=True,
+ help='Include only sites from specified categories (e.g., "social", "coding")',
+)
+@click.option(
+ "--exclude-categories",
+ multiple=True,
+ help='Exclude sites from specified categories (e.g., "adult", "gaming")',
+)
+@click.option(
+ "--proxy",
+ help="Proxy server to use for requests (e.g., http://proxy:port, socks5://proxy:port)",
+)
+@click.option(
+ "--timeout",
+ type=int,
+ default=HTTP_REQUEST_TIMEOUT_SECONDS,
+ help="Maximum time in seconds to wait for each HTTP request",
+)
+@click.option(
+ "--allow-redirects",
+ is_flag=True,
+ default=HTTP_ALLOW_REDIRECTS,
+ help="Whether to follow HTTP redirects automatically",
+)
+@click.option(
+ "--verify-ssl",
+ is_flag=True,
+ default=HTTP_SSL_VERIFY,
+ help="Whether to verify SSL/TLS certificates for HTTPS requests",
+)
+@click.option(
+ "--impersonate",
+ type=click.Choice(["none", *typing.get_args(BrowserTypeLiteral)]),
+ default="chrome",
+ help='Browser to impersonate in HTTP requests (use "none" to disable)',
+)
+@click.option("--ja3", help="JA3 fingerprint string for TLS fingerprinting")
+@click.option(
+ "--akamai", help="Akamai fingerprint string for Akamai bot detection bypass"
+)
+@click.option(
+ "--extra-fp",
+ help='Extra fingerprinting options as JSON string (e.g., \'{"tls_grease": true, "tls_cert_compression": "brotli"}\')',
+)
+@click.option(
+ "--max-tasks",
+ type=int,
+ default=MAX_CONCURRENT_TASKS,
+ help="Maximum number of concurrent tasks",
+)
+@click.option(
+ "--fuzzy", "fuzzy_mode", is_flag=True, help="Enable fuzzy validation mode"
+)
+@click.option(
+ "--log-level",
+ type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]),
+ help="Set logging level",
+)
+@click.option("--log-file", help="Path to log file for debug output")
+@click.option(
+ "--show-details", is_flag=True, help="Show detailed information in console output"
+)
+@click.option("--browse", is_flag=True, help="Open found profiles in web browser")
+@click.option(
+ "--save-response",
+ is_flag=True,
+ help="Save HTTP response content for each result to files",
+)
+@click.option("--response-path", help="Custom directory path for saving response files")
+@click.option(
+ "--open-response", is_flag=True, help="Open saved response files in web browser"
+)
+@click.option("--csv", "csv_export", is_flag=True, help="Export results to CSV file")
+@click.option("--csv-path", help="Custom path for CSV export")
+@click.option("--pdf", "pdf_export", is_flag=True, help="Export results to PDF file")
+@click.option("--pdf-path", help="Custom path for PDF export")
+@click.option("--html", "html_export", is_flag=True, help="Export results to HTML file")
+@click.option("--html-path", help="Custom path for HTML export")
+@click.option("--json", "json_export", is_flag=True, help="Export results to JSON file")
+@click.option("--json-path", help="Custom path for JSON export")
+@click.option(
+ "--filter-all",
+ is_flag=True,
+ help="Include all results in console output and exports",
+)
+@click.option(
+ "--filter-found",
+ is_flag=True,
+ help="Show only found results in console output and exports",
+)
+@click.option(
+ "--filter-ambiguous",
+ is_flag=True,
+ help="Show only ambiguous results in console output and exports",
+)
+@click.option(
+ "--filter-unknown",
+ is_flag=True,
+ help="Show only unknown results in console output and exports",
+)
+@click.option(
+ "--filter-not-found",
+ is_flag=True,
+ help="Show only not found results in console output and exports",
+)
+@click.option(
+ "--filter-not-valid",
+ is_flag=True,
+ help="Show only not valid results in console output and exports",
+)
+@click.option(
+ "--filter-errors",
+ is_flag=True,
+ help="Show only error results in console output and exports",
+)
@click.pass_context
def main(ctx: click.Context, **kwargs: Any) -> None:
"""Asynchronous OSINT username enumeration tool that searches hundreds of websites using the WhatsMyName dataset."""
if ctx.invoked_subcommand is not None:
return
-
- if kwargs.get('no_color'):
+
+ if kwargs.get("no_color"):
console.no_color = True
try:
config = NaminterConfig(
- usernames=kwargs.get('username'),
- sites=kwargs.get('site'),
- local_list_paths=kwargs.get('local_list'),
- remote_list_urls=kwargs.get('remote_list'),
- local_schema_path=kwargs.get('local_schema'),
- remote_schema_url=kwargs.get('remote_schema'),
- skip_validation=kwargs.get('skip_validation'),
- include_categories=kwargs.get('include_categories'),
- exclude_categories=kwargs.get('exclude_categories'),
- max_tasks=kwargs.get('max_tasks'),
- timeout=kwargs.get('timeout'),
- proxy=kwargs.get('proxy'),
- allow_redirects=kwargs.get('allow_redirects'),
- verify_ssl=kwargs.get('verify_ssl'),
- impersonate=kwargs.get('impersonate'),
- ja3=kwargs.get('ja3'),
- akamai=kwargs.get('akamai'),
- extra_fp=kwargs.get('extra_fp'),
- fuzzy_mode=kwargs.get('fuzzy_mode'),
- self_enumeration=kwargs.get('self_enumeration'),
- log_level=kwargs.get('log_level'),
- log_file=kwargs.get('log_file'),
- show_details=kwargs.get('show_details'),
- browse=kwargs.get('browse'),
- save_response=kwargs.get('save_response'),
- response_path=kwargs.get('response_path'),
- open_response=kwargs.get('open_response'),
- csv_export=kwargs.get('csv_export'),
- csv_path=kwargs.get('csv_path'),
- pdf_export=kwargs.get('pdf_export'),
- pdf_path=kwargs.get('pdf_path'),
- html_export=kwargs.get('html_export'),
- html_path=kwargs.get('html_path'),
- json_export=kwargs.get('json_export'),
- json_path=kwargs.get('json_path'),
- filter_all=kwargs.get('filter_all'),
- filter_found=kwargs.get('filter_found'),
- filter_ambiguous=kwargs.get('filter_ambiguous'),
- filter_unknown=kwargs.get('filter_unknown'),
- filter_not_found=kwargs.get('filter_not_found'),
- filter_not_valid=kwargs.get('filter_not_valid'),
- filter_errors=kwargs.get('filter_errors'),
- no_progressbar=kwargs.get('no_progressbar'),
+ usernames=kwargs.get("username"),
+ sites=kwargs.get("site"),
+ local_list_paths=kwargs.get("local_list"),
+ remote_list_urls=kwargs.get("remote_list"),
+ local_schema_path=kwargs.get("local_schema"),
+ remote_schema_url=kwargs.get("remote_schema"),
+ skip_validation=kwargs.get("skip_validation"),
+ include_categories=kwargs.get("include_categories"),
+ exclude_categories=kwargs.get("exclude_categories"),
+ max_tasks=kwargs.get("max_tasks"),
+ timeout=kwargs.get("timeout"),
+ proxy=kwargs.get("proxy"),
+ allow_redirects=kwargs.get("allow_redirects"),
+ verify_ssl=kwargs.get("verify_ssl"),
+ impersonate=kwargs.get("impersonate"),
+ ja3=kwargs.get("ja3"),
+ akamai=kwargs.get("akamai"),
+ extra_fp=kwargs.get("extra_fp"),
+ fuzzy_mode=kwargs.get("fuzzy_mode"),
+ self_enumeration=kwargs.get("self_enumeration"),
+ log_level=kwargs.get("log_level"),
+ log_file=kwargs.get("log_file"),
+ show_details=kwargs.get("show_details"),
+ browse=kwargs.get("browse"),
+ save_response=kwargs.get("save_response"),
+ response_path=kwargs.get("response_path"),
+ open_response=kwargs.get("open_response"),
+ csv_export=kwargs.get("csv_export"),
+ csv_path=kwargs.get("csv_path"),
+ pdf_export=kwargs.get("pdf_export"),
+ pdf_path=kwargs.get("pdf_path"),
+ html_export=kwargs.get("html_export"),
+ html_path=kwargs.get("html_path"),
+ json_export=kwargs.get("json_export"),
+ json_path=kwargs.get("json_path"),
+ filter_all=kwargs.get("filter_all"),
+ filter_found=kwargs.get("filter_found"),
+ filter_ambiguous=kwargs.get("filter_ambiguous"),
+ filter_unknown=kwargs.get("filter_unknown"),
+ filter_not_found=kwargs.get("filter_not_found"),
+ filter_not_valid=kwargs.get("filter_not_valid"),
+ filter_errors=kwargs.get("filter_errors"),
+ no_progressbar=kwargs.get("no_progressbar"),
)
NaminterCLI._setup_logging(config)
-
+
naminter_cli = NaminterCLI(config)
asyncio.run(naminter_cli.run())
except KeyboardInterrupt:
display_warning("Operation interrupted")
ctx.exit(1)
- except asyncio.TimeoutError:
+ except TimeoutError:
display_error("Operation timed out")
ctx.exit(1)
except ConfigurationError as e:
@@ -421,4 +583,4 @@ def entry_point() -> None:
if __name__ == "__main__":
- entry_point()
\ No newline at end of file
+ entry_point()
diff --git a/naminter/cli/progress.py b/naminter/cli/progress.py
index 822acb3..0d72bce 100644
--- a/naminter/cli/progress.py
+++ b/naminter/cli/progress.py
@@ -1,30 +1,31 @@
import time
-from typing import Any, Dict, Optional, Union
+from typing import Any
from rich.console import Console
from rich.progress import (
+ BarColumn,
Progress,
SpinnerColumn,
- TextColumn,
- BarColumn,
+ TaskID,
TaskProgressColumn,
+ TextColumn,
TimeElapsedColumn,
TimeRemainingColumn,
- TaskID,
)
from ..cli.console import THEME
from ..core.models import ResultStatus, SiteResult
+
class ResultsTracker:
"""Tracks results for the username enumeration operations."""
-
+
def __init__(self, total_sites: int) -> None:
"""Initialize the results tracker."""
self.total_sites = total_sites
self.results_count = 0
self.start_time = time.time()
- self.status_counts: Dict[ResultStatus, int] = {status: 0 for status in ResultStatus}
+ self.status_counts: dict[ResultStatus, int] = dict.fromkeys(ResultStatus, 0)
def add_result(self, result: SiteResult) -> None:
"""Update counters with a new result."""
@@ -34,7 +35,7 @@ def add_result(self, result: SiteResult) -> None:
def get_progress_text(self) -> str:
"""Get formatted progress text with request speed and statistics."""
elapsed = time.time() - self.start_time
-
+
found = self.status_counts[ResultStatus.FOUND]
ambiguous = self.status_counts[ResultStatus.AMBIGUOUS]
unknown = self.status_counts[ResultStatus.UNKNOWN]
@@ -43,8 +44,7 @@ def get_progress_text(self) -> str:
errors = self.status_counts[ResultStatus.ERROR]
valid_count = self.results_count - errors - not_valid
- if valid_count < 0:
- valid_count = 0
+ valid_count = max(valid_count, 0)
rate = valid_count / elapsed if elapsed > 0 else 0.0
sections = [
@@ -52,7 +52,7 @@ def get_progress_text(self) -> str:
f"[{THEME['success']}]+ {found}[/]",
f"[{THEME['error']}]- {not_found}[/]",
]
-
+
if unknown > 0:
sections.append(f"[{THEME['warning']}]? {unknown}[/]")
if ambiguous > 0:
@@ -61,29 +61,31 @@ def get_progress_text(self) -> str:
sections.append(f"[{THEME['error']}]! {errors}[/]")
if not_valid > 0:
sections.append(f"[{THEME['warning']}]× {not_valid}[/]")
-
- sections.append(f"[{THEME['primary']}]{self.results_count}/{self.total_sites}[/]")
+
+ sections.append(
+ f"[{THEME['primary']}]{self.results_count}/{self.total_sites}[/]"
+ )
return " │ ".join(sections)
class ProgressManager:
"""Manages progress bar and tracking for CLI applications."""
-
+
def __init__(self, console: Console, disabled: bool = False) -> None:
"""Initialize the progress manager."""
self.console: Console = console
self.disabled: bool = disabled
- self.progress: Optional[Progress] = None
- self.task_id: Optional[TaskID] = None
-
+ self.progress: Progress | None = None
+ self.task_id: TaskID | None = None
+
def create_progress_bar(self) -> Progress:
"""Create a new progress bar."""
return Progress(
SpinnerColumn(),
TextColumn("[progress.description]{task.description}"),
BarColumn(
- complete_style=THEME['primary'],
- finished_style=THEME['success'],
+ complete_style=THEME["primary"],
+ finished_style=THEME["success"],
),
TaskProgressColumn(),
TimeElapsedColumn(),
@@ -91,33 +93,35 @@ def create_progress_bar(self) -> Progress:
TimeRemainingColumn(),
console=self.console,
)
-
+
def start(self, total: int, description: str) -> None:
"""Start the progress bar."""
if not self.disabled:
self.progress = self.create_progress_bar()
self.progress.start()
self.task_id = self.progress.add_task(description, total=total)
-
- def update(self, advance: int = 1, description: Optional[str] = None) -> None:
+
+ def update(self, advance: int = 1, description: str | None = None) -> None:
"""Update the progress bar."""
if self.progress and self.task_id is not None:
- update_kwargs: Dict[str, Any] = {"advance": advance}
+ update_kwargs: dict[str, Any] = {"advance": advance}
if description is not None:
update_kwargs["description"] = description
self.progress.update(self.task_id, **update_kwargs)
-
+
def stop(self) -> None:
"""Stop and close the progress bar."""
if self.progress:
self.progress.stop()
self.progress = None
self.task_id = None
-
+
def __enter__(self) -> "ProgressManager":
"""Enter context manager."""
return self
-
- def __exit__(self, exc_type: Optional[type], exc_val: Optional[BaseException], exc_tb: Optional[Any]) -> None:
+
+ def __exit__(
+ self, exc_type: type | None, exc_val: BaseException | None, exc_tb: Any | None
+ ) -> None:
"""Exit context manager and stop progress bar."""
self.stop()
diff --git a/naminter/cli/utils.py b/naminter/cli/utils.py
index 4da5184..cc54b88 100644
--- a/naminter/cli/utils.py
+++ b/naminter/cli/utils.py
@@ -2,9 +2,10 @@ def sanitize_filename(filename: str) -> str:
"""Sanitize filename for cross-platform compatibility."""
if not filename or not str(filename).strip():
return "unnamed"
-
+
invalid_chars = '<>:"|?*\\/\0'
- sanitized = ''.join('_' if c in invalid_chars or ord(c) < 32 else c for c in str(filename))
- sanitized = sanitized.strip(' .')[:200] if sanitized.strip(' .') else 'unnamed'
- return sanitized
-
\ No newline at end of file
+ sanitized = "".join(
+ "_" if c in invalid_chars or ord(c) < 32 else c for c in str(filename)
+ )
+ sanitized = sanitized.strip(" .")[:200] if sanitized.strip(" .") else "unnamed"
+ return sanitized
diff --git a/naminter/core/constants.py b/naminter/core/constants.py
index 933d965..1bca02c 100644
--- a/naminter/core/constants.py
+++ b/naminter/core/constants.py
@@ -1,8 +1,12 @@
from typing import Final
# Remote data source configuration
-WMN_REMOTE_URL: Final[str] = "https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data.json"
-WMN_SCHEMA_URL: Final[str] = "https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data-schema.json"
+WMN_REMOTE_URL: Final[str] = (
+ "https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data.json"
+)
+WMN_SCHEMA_URL: Final[str] = (
+ "https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data-schema.json"
+)
# HTTP request configuration
HTTP_REQUEST_TIMEOUT_SECONDS: Final[int] = 30
@@ -66,4 +70,3 @@
WMN_KEY_AUTHORS,
WMN_KEY_LICENSE,
)
-
diff --git a/naminter/core/exceptions.py b/naminter/core/exceptions.py
index 61e9149..15d1995 100644
--- a/naminter/core/exceptions.py
+++ b/naminter/core/exceptions.py
@@ -1,15 +1,12 @@
-from typing import Optional
-
-
class NaminterError(Exception):
"""Base exception class for Naminter errors.
-
+
Args:
message: Error message describing what went wrong.
cause: Optional underlying exception that caused this error.
"""
-
- def __init__(self, message: str, cause: Optional[Exception] = None) -> None:
+
+ def __init__(self, message: str, cause: Exception | None = None) -> None:
super().__init__(message)
self.message = message
self.cause = cause
@@ -17,93 +14,84 @@ def __init__(self, message: str, cause: Optional[Exception] = None) -> None:
class ConfigurationError(NaminterError):
"""Raised when there's an error in the configuration parameters.
-
+
This includes invalid configuration values, missing required settings,
or configuration file parsing errors.
"""
- pass
class NetworkError(NaminterError):
"""Raised when network-related errors occur.
-
+
This includes connection failures, DNS resolution errors,
and other network-level issues.
"""
- pass
class DataError(NaminterError):
"""Raised when there are issues with data processing or validation.
-
+
This includes malformed data, parsing errors, and data integrity issues.
"""
- pass
class SessionError(NetworkError):
"""Raised when HTTP session creation or management fails.
-
+
This includes session initialization errors, authentication failures,
and session state management issues.
"""
- pass
class SchemaError(DataError):
"""Raised when WMN schema validation fails.
-
+
This occurs when the WhatsMyName list format doesn't match
the expected schema structure, or when the schema itself is invalid.
"""
- pass
class TimeoutError(NetworkError):
"""Raised when network requests timeout.
-
+
This includes both connection timeouts and read timeouts
during HTTP requests.
"""
- pass
class FileAccessError(DataError):
"""Raised when file operations fail.
-
+
This includes reading/writing local lists, responses, exports,
and other file system operations.
"""
- pass
class ValidationError(DataError):
"""Raised when input validation fails.
-
+
This includes invalid usernames, malformed URLs,
and other input parameter validation errors.
"""
- pass
class ExportError(NaminterError):
"""Raised when export operations fail.
-
+
This includes file writing errors, format conversion errors,
and other export-related issues.
"""
- pass
__all__ = [
- "NaminterError",
"ConfigurationError",
- "NetworkError",
"DataError",
- "SessionError",
+ "ExportError",
+ "FileAccessError",
+ "NaminterError",
+ "NetworkError",
"SchemaError",
+ "SessionError",
"TimeoutError",
- "FileAccessError",
"ValidationError",
- "ExportError",
-]
\ No newline at end of file
+]
diff --git a/naminter/core/main.py b/naminter/core/main.py
index 8290b90..975e68f 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -1,38 +1,50 @@
import asyncio
import json
import logging
+from collections.abc import AsyncGenerator, Sequence
from pathlib import Path
-from typing import Any, AsyncGenerator, Dict, List, Optional, Union, Set, Sequence, Tuple
-from ..core.models import ResultStatus, SiteResult, SelfEnumerationResult, ValidationMode, Summary
-from ..core.exceptions import (
- DataError,
- ValidationError,
- SchemaError,
- FileAccessError,
- NetworkError,
- TimeoutError,
- SessionError,
-)
-from ..core.utils import (
- validate_usernames,
- deduplicate_strings,
- merge_lists,
+from typing import (
+ Any,
)
+
+import aiofiles
+import jsonschema
+
from ..core.constants import (
- MAX_CONCURRENT_TASKS,
ACCOUNT_PLACEHOLDER,
+ MAX_CONCURRENT_TASKS,
REQUIRED_KEYS_ENUMERATE,
REQUIRED_KEYS_SELF_ENUM,
- WMN_REMOTE_URL,
- WMN_KEY_SITES,
- WMN_KEY_CATEGORIES,
WMN_KEY_AUTHORS,
+ WMN_KEY_CATEGORIES,
WMN_KEY_LICENSE,
WMN_KEY_NAME,
+ WMN_KEY_SITES,
+ WMN_REMOTE_URL,
+)
+from ..core.exceptions import (
+ DataError,
+ FileAccessError,
+ NetworkError,
+ SchemaError,
+ SessionError,
+ TimeoutError,
+ ValidationError,
+)
+from ..core.models import (
+ ResultStatus,
+ SelfEnumerationResult,
+ SiteResult,
+ Summary,
+ ValidationMode,
)
from ..core.network import BaseSession
-import jsonschema
-import aiofiles
+from ..core.utils import (
+ deduplicate_strings,
+ merge_lists,
+ validate_usernames,
+)
+
class Naminter:
"""Main class for Naminter username enumeration."""
@@ -40,13 +52,13 @@ class Naminter:
def __init__(
self,
http_client: BaseSession,
- wmn_data: Optional[Dict[str, Any]] = None,
- wmn_schema: Optional[Dict[str, Any]] = None,
- local_list_paths: Optional[List[Path]] = None,
- remote_list_urls: Optional[List[str]] = None,
+ wmn_data: dict[str, Any] | None = None,
+ wmn_schema: dict[str, Any] | None = None,
+ local_list_paths: list[Path] | None = None,
+ remote_list_urls: list[str] | None = None,
skip_validation: bool = False,
- local_schema_path: Optional[Path] = None,
- remote_schema_url: Optional[str] = None,
+ local_schema_path: Path | None = None,
+ remote_schema_url: str | None = None,
max_tasks: int = MAX_CONCURRENT_TASKS,
) -> None:
"""Initialize Naminter with configuration parameters."""
@@ -63,8 +75,8 @@ def __init__(
self._local_schema_path = local_schema_path
self._remote_schema_url = remote_schema_url
- self._wmn_data: Optional[Dict[str, Any]] = wmn_data
- self._wmn_schema: Optional[Dict[str, Any]] = wmn_schema
+ self._wmn_data: dict[str, Any] | None = wmn_data
+ self._wmn_schema: dict[str, Any] | None = wmn_schema
self._semaphore = asyncio.Semaphore(self.max_tasks)
self._session_lock = asyncio.Lock()
self._http: BaseSession = http_client
@@ -79,7 +91,7 @@ async def _open_session(self) -> None:
self._logger.error("Failed to open HTTP session: %s", e)
raise DataError(f"HTTP session initialization failed: {e}") from e
- async def _fetch_json(self, url: str) -> Dict[str, Any]:
+ async def _fetch_json(self, url: str) -> dict[str, Any]:
"""Fetch and parse JSON from a URL."""
if not url.strip():
raise ValidationError(f"Invalid URL: {url}")
@@ -92,19 +104,19 @@ async def _fetch_json(self, url: str) -> Dict[str, Any]:
raise DataError(f"Session error while fetching from {url}: {e}") from e
except NetworkError as e:
raise DataError(f"Network error while fetching from {url}: {e}") from e
-
+
if response.status_code < 200 or response.status_code >= 300:
raise DataError(f"Failed to fetch from {url}: HTTP {response.status_code}")
-
+
try:
return response.json()
except (ValueError, json.JSONDecodeError) as e:
raise DataError(f"Failed to parse JSON from {url}: {e}") from e
- async def _read_json_file(self, path: Union[str, Path]) -> Dict[str, Any]:
+ async def _read_json_file(self, path: str | Path) -> dict[str, Any]:
"""Read JSON from a local file without blocking the event loop."""
try:
- async with aiofiles.open(path, mode="r", encoding="utf-8") as file:
+ async with aiofiles.open(path, encoding="utf-8") as file:
content = await file.read()
except FileNotFoundError as e:
raise FileAccessError(f"File not found: {path}") from e
@@ -112,34 +124,45 @@ async def _read_json_file(self, path: Union[str, Path]) -> Dict[str, Any]:
raise FileAccessError(f"Permission denied accessing file: {path}") from e
except OSError as e:
raise FileAccessError(f"Error reading file {path}: {e}") from e
-
+
try:
return json.loads(content)
except json.JSONDecodeError as e:
raise DataError(f"Invalid JSON in file {path}: {e}") from e
- async def _load_schema(self) -> Dict[str, Any]:
+ async def _load_schema(self) -> dict[str, Any]:
"""Load WMN schema from local or remote source."""
if self._skip_validation:
return {}
-
+
try:
if self._local_schema_path:
return await self._read_json_file(self._local_schema_path)
elif self._remote_schema_url:
return await self._fetch_json(self._remote_schema_url)
else:
- raise DataError("No schema source provided - either local_schema_path or remote_schema_url is required")
+ raise DataError(
+ "No schema source provided - either local_schema_path or remote_schema_url is required"
+ )
except (OSError, json.JSONDecodeError) as e:
- raise DataError(f"Failed to load required WMN schema from local file: {e}") from e
+ raise DataError(
+ f"Failed to load required WMN schema from local file: {e}"
+ ) from e
except NetworkError as e:
- raise DataError(f"Failed to load required WMN schema from {self._remote_schema_url}: {e}") from e
+ raise DataError(
+ f"Failed to load required WMN schema from {self._remote_schema_url}: {e}"
+ ) from e
- async def _load_dataset(self) -> Dict[str, Any]:
+ async def _load_dataset(self) -> dict[str, Any]:
"""Load WMN data from configured sources."""
- dataset: Dict[str, Any] = {WMN_KEY_SITES: [], WMN_KEY_CATEGORIES: [], WMN_KEY_AUTHORS: [], WMN_KEY_LICENSE: []}
-
- sources: List[Tuple[Union[str, Path], bool]] = []
+ dataset: dict[str, Any] = {
+ WMN_KEY_SITES: [],
+ WMN_KEY_CATEGORIES: [],
+ WMN_KEY_AUTHORS: [],
+ WMN_KEY_LICENSE: [],
+ }
+
+ sources: list[tuple[str | Path, bool]] = []
if self._remote_list_urls:
sources.extend([(url, True) for url in self._remote_list_urls])
if self._local_list_paths:
@@ -156,30 +179,38 @@ async def _load_dataset(self) -> Dict[str, Any]:
results = await asyncio.gather(*coroutines, return_exceptions=True)
- failures: List[str] = []
- for src, res in zip(sources, results):
+ failures: list[str] = []
+ for src, res in zip(sources, results, strict=False):
if isinstance(res, Exception):
source, is_remote = src
- failures.append(f"{source} ({'remote' if is_remote else 'local'}): {res}")
+ failures.append(
+ f"{source} ({'remote' if is_remote else 'local'}): {res}"
+ )
self._logger.warning("Failed to load WMN data from %s: %s", source, res)
else:
merge_lists(res, dataset)
if not dataset[WMN_KEY_SITES]:
- detail = "; ".join(failures) if failures else "no sources produced any sites"
+ detail = (
+ "; ".join(failures) if failures else "no sources produced any sites"
+ )
raise DataError(f"No sites loaded from any source; details: {detail}")
-
+
return dataset
- def _deduplicate_data(self, data: Dict[str, Any]) -> None:
+ def _deduplicate_data(self, data: dict[str, Any]) -> None:
"""Deduplicate and clean the WMN data in place."""
- unique_sites = {site[WMN_KEY_NAME]: site for site in data[WMN_KEY_SITES] if isinstance(site, dict) and site.get(WMN_KEY_NAME)}
+ unique_sites = {
+ site[WMN_KEY_NAME]: site
+ for site in data[WMN_KEY_SITES]
+ if isinstance(site, dict) and site.get(WMN_KEY_NAME)
+ }
data[WMN_KEY_SITES] = list(unique_sites.values())
data[WMN_KEY_CATEGORIES] = list(dict.fromkeys(data[WMN_KEY_CATEGORIES]))
data[WMN_KEY_AUTHORS] = list(dict.fromkeys(data[WMN_KEY_AUTHORS]))
data[WMN_KEY_LICENSE] = list(dict.fromkeys(data[WMN_KEY_LICENSE]))
- async def _load_wmn_lists(self) -> Tuple[Dict[str, Any], Dict[str, Any]]:
+ async def _load_wmn_lists(self) -> tuple[dict[str, Any], dict[str, Any]]:
"""Unified async loader for WMN data and schema.
Returns a mapping with keys: data (dataset dict) and schema (schema dict).
@@ -196,7 +227,7 @@ async def _load_wmn_lists(self) -> Tuple[Dict[str, Any], Dict[str, Any]]:
return (dataset, dataset_schema)
@staticmethod
- def _validate_data(data: Dict[str, Any], schema: Dict[str, Any]) -> None:
+ def _validate_data(data: dict[str, Any], schema: dict[str, Any]) -> None:
"""Validate WMN data against schema. Raises on failure."""
if not schema:
return
@@ -212,14 +243,17 @@ async def _ensure_dataset(self) -> None:
"""Load and validate the WMN dataset and schema if not already loaded."""
if self._wmn_data and self._wmn_schema:
return
-
+
try:
data, schema = await self._load_wmn_lists()
if not self._skip_validation:
self._validate_data(data, schema)
self._wmn_data = data
self._wmn_schema = schema
- self._logger.info("WMN dataset loaded (sites=%d)", len(self._wmn_data.get(WMN_KEY_SITES, [])))
+ self._logger.info(
+ "WMN dataset loaded (sites=%d)",
+ len(self._wmn_data.get(WMN_KEY_SITES, [])),
+ )
except SchemaError as e:
raise DataError(f"WMN validation failed: {e}") from e
except Exception as e:
@@ -241,20 +275,22 @@ async def __aenter__(self) -> "Naminter":
await self._open_session()
try:
await self._ensure_dataset()
- except DataError as e:
+ except DataError:
self._logger.error("Dataset load failed")
raise
return self
-
- async def __aexit__(self, exc_type: Optional[type], exc_val: Optional[BaseException], exc_tb: Optional[Any]) -> None:
+
+ async def __aexit__(
+ self, exc_type: type | None, exc_val: BaseException | None, exc_tb: Any | None
+ ) -> None:
"""Async context manager exit."""
await self._close_session()
async def get_wmn_summary(
self,
- site_names: Optional[List[str]] = None,
- include_categories: Optional[List[str]] = None,
- exclude_categories: Optional[List[str]] = None,
+ site_names: list[str] | None = None,
+ include_categories: list[str] | None = None,
+ exclude_categories: list[str] | None = None,
) -> Summary:
"""Get enriched WMN metadata information for diagnostics and UI.
@@ -262,11 +298,11 @@ async def get_wmn_summary(
"""
try:
await self._ensure_dataset()
- except DataError as e:
+ except DataError:
self._logger.error("Dataset load failed")
raise
try:
- sites: List[Dict[str, Any]] = self._filter_sites(
+ sites: list[dict[str, Any]] = self._filter_sites(
site_names,
include_categories=include_categories,
exclude_categories=exclude_categories,
@@ -275,12 +311,15 @@ async def get_wmn_summary(
self._logger.error("Site filtering failed: %s", e)
raise
try:
+ category_list: list[str] = [
+ site.get("cat") for site in sites if site.get("cat")
+ ]
+ site_name_list: list[str] = [
+ site.get("name") for site in sites if site.get("name")
+ ]
- category_list: List[str] = [site.get("cat") for site in sites if site.get("cat")]
- site_name_list: List[str] = [site.get("name") for site in sites if site.get("name")]
-
total_known_accounts: int = 0
-
+
for site in sites:
known_list = site.get("known")
if isinstance(known_list, list) and len(known_list) > 0:
@@ -296,8 +335,11 @@ async def get_wmn_summary(
known_accounts_total=total_known_accounts,
)
- self._logger.info("WMN summary computed (sites=%d, categories=%d)",
- wmn_summary.sites_count, wmn_summary.categories_count)
+ self._logger.info(
+ "WMN summary computed (sites=%d, categories=%d)",
+ wmn_summary.sites_count,
+ wmn_summary.categories_count,
+ )
return wmn_summary
except DataError:
raise
@@ -305,57 +347,63 @@ async def get_wmn_summary(
self._logger.exception("Failed to compute WMN summary")
raise DataError(f"Failed to retrieve metadata: {e}") from e
-
def _filter_sites(
self,
- site_names: Optional[List[str]],
- include_categories: Optional[List[str]] = None,
- exclude_categories: Optional[List[str]] = None,
- ) -> List[Dict[str, Any]]:
+ site_names: list[str] | None,
+ include_categories: list[str] | None = None,
+ exclude_categories: list[str] | None = None,
+ ) -> list[dict[str, Any]]:
"""Filter sites by names and categories for the current WMN dataset."""
- sites: List[Dict[str, Any]] = self._wmn_data.get("sites", [])
+ sites: list[dict[str, Any]] = self._wmn_data.get("sites", [])
if site_names:
- filtered_site_names: Set[str] = set(deduplicate_strings(site_names))
- available_names: Set[str] = {site.get("name") for site in sites}
+ filtered_site_names: set[str] = set(deduplicate_strings(site_names))
+ available_names: set[str] = {site.get("name") for site in sites}
missing_names = filtered_site_names - available_names
if missing_names:
raise DataError(f"Unknown site names: {sorted(missing_names)}")
else:
filtered_site_names = set()
- filtered_sites: List[Dict[str, Any]] = sites
+ filtered_sites: list[dict[str, Any]] = sites
if filtered_site_names:
filtered_sites = [
- site for site in filtered_sites if site.get("name") in filtered_site_names
+ site
+ for site in filtered_sites
+ if site.get("name") in filtered_site_names
]
if include_categories:
- include_set: Set[str] = set(deduplicate_strings(include_categories))
+ include_set: set[str] = set(deduplicate_strings(include_categories))
filtered_sites = [
site for site in filtered_sites if site.get("cat") in include_set
]
if exclude_categories:
- exclude_set: Set[str] = set(deduplicate_strings(exclude_categories))
+ exclude_set: set[str] = set(deduplicate_strings(exclude_categories))
filtered_sites = [
site for site in filtered_sites if site.get("cat") not in exclude_set
]
self._logger.debug(
"Filter result %d/%d (names=%s include=%s exclude=%s)",
- len(filtered_sites), len(sites),
- bool(site_names), bool(include_categories), bool(exclude_categories),
+ len(filtered_sites),
+ len(sites),
+ bool(site_names),
+ bool(include_categories),
+ bool(exclude_categories),
)
return filtered_sites
-
- def _get_missing_keys(self, site: Dict[str, Any], required_keys: Sequence[str]) -> List[str]:
+
+ def _get_missing_keys(
+ self, site: dict[str, Any], required_keys: Sequence[str]
+ ) -> list[str]:
"""Return a list of required keys missing from a site mapping."""
return [key for key in required_keys if key not in site]
-
+
async def enumerate_site(
self,
- site: Dict[str, Any],
+ site: dict[str, Any],
username: str,
fuzzy_mode: bool = False,
) -> SiteResult:
@@ -363,7 +411,7 @@ async def enumerate_site(
await self._open_session()
try:
await self._ensure_dataset()
- except DataError as e:
+ except DataError:
self._logger.error("Dataset load failed")
raise
@@ -374,7 +422,7 @@ async def enumerate_site(
category=site.get("cat", "unknown"),
username=username,
status=ResultStatus.ERROR,
- error=f"Site entry missing required keys: {missing_keys}"
+ error=f"Site entry missing required keys: {missing_keys}",
)
name = site["name"]
@@ -384,14 +432,25 @@ async def enumerate_site(
strip_bad_char = site.get("strip_bad_char", "")
clean_username = username.translate(str.maketrans("", "", strip_bad_char))
if not clean_username:
- return SiteResult(name, category, username, ResultStatus.ERROR,
- error=f"Username became empty after stripping")
+ return SiteResult(
+ name,
+ category,
+ username,
+ ResultStatus.ERROR,
+ error="Username became empty after stripping",
+ )
uri_check = uri_check_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
- uri_pretty = site.get("uri_pretty", uri_check_template).replace(ACCOUNT_PLACEHOLDER, clean_username)
+ uri_pretty = site.get("uri_pretty", uri_check_template).replace(
+ ACCOUNT_PLACEHOLDER, clean_username
+ )
- self._logger.debug("Enumerating site=%s user=%s mode=%s", name, username,
- "FUZZY" if fuzzy_mode else "STRICT")
+ self._logger.debug(
+ "Enumerating site=%s user=%s mode=%s",
+ name,
+ username,
+ "FUZZY" if fuzzy_mode else "STRICT",
+ )
headers = site.get("headers", {})
post_body = site.get("post_body")
@@ -404,37 +463,59 @@ async def enumerate_site(
try:
async with self._semaphore:
if post_body:
- response = await self._http.post(uri_check, headers=headers, data=post_body)
+ response = await self._http.post(
+ uri_check, headers=headers, data=post_body
+ )
else:
response = await self._http.get(uri_check, headers=headers)
elapsed = response.elapsed
- self._logger.debug("Request ok (status=%d, elapsed=%.2fs)", response.status_code, elapsed)
+ self._logger.debug(
+ "Request ok (status=%d, elapsed=%.2fs)",
+ response.status_code,
+ elapsed,
+ )
except asyncio.CancelledError:
self._logger.warning("Request cancelled")
raise
except TimeoutError as e:
self._logger.warning("Request timeout for %s: %s", name, e)
return SiteResult(
- name=name, category=category, username=username, result_url=uri_pretty,
- status=ResultStatus.ERROR, error=f"Request timeout: {e}"
+ name=name,
+ category=category,
+ username=username,
+ result_url=uri_pretty,
+ status=ResultStatus.ERROR,
+ error=f"Request timeout: {e}",
)
except SessionError as e:
self._logger.warning("Session error for %s: %s", name, e)
return SiteResult(
- name=name, category=category, username=username, result_url=uri_pretty,
- status=ResultStatus.ERROR, error=f"Session error: {e}"
+ name=name,
+ category=category,
+ username=username,
+ result_url=uri_pretty,
+ status=ResultStatus.ERROR,
+ error=f"Session error: {e}",
)
except NetworkError as e:
self._logger.warning("Network error for %s: %s", name, e)
return SiteResult(
- name=name, category=category, username=username, result_url=uri_pretty,
- status=ResultStatus.ERROR, error=f"Network error: {e}"
+ name=name,
+ category=category,
+ username=username,
+ result_url=uri_pretty,
+ status=ResultStatus.ERROR,
+ error=f"Network error: {e}",
)
except Exception as e:
self._logger.exception("Unexpected error during request for %s", name)
return SiteResult(
- name=name, category=category, username=username, result_url=uri_pretty,
- status=ResultStatus.ERROR, error=f"Unexpected error: {e}"
+ name=name,
+ category=category,
+ username=username,
+ result_url=uri_pretty,
+ status=ResultStatus.ERROR,
+ error=f"Unexpected error: {e}",
)
result_status = SiteResult.get_result_status(
@@ -447,7 +528,9 @@ async def enumerate_site(
fuzzy_mode=fuzzy_mode,
)
- self._logger.debug("Result=%s (HTTP %d)", result_status.name, response.status_code)
+ self._logger.debug(
+ "Result=%s (HTTP %d)", result_status.name, response.status_code
+ )
return SiteResult(
name=name,
@@ -462,18 +545,18 @@ async def enumerate_site(
async def enumerate_usernames(
self,
- usernames: List[str],
- site_names: Optional[List[str]] = None,
- include_categories: Optional[List[str]] = None,
- exclude_categories: Optional[List[str]] = None,
+ usernames: list[str],
+ site_names: list[str] | None = None,
+ include_categories: list[str] | None = None,
+ exclude_categories: list[str] | None = None,
fuzzy_mode: bool = False,
as_generator: bool = False,
- ) -> Union[List[SiteResult], AsyncGenerator[SiteResult, None]]:
+ ) -> list[SiteResult] | AsyncGenerator[SiteResult, None]:
"""Enumerate one or multiple usernames across all loaded sites."""
await self._open_session()
try:
await self._ensure_dataset()
- except DataError as e:
+ except DataError:
self._logger.exception("Dataset load failed")
raise
@@ -484,7 +567,7 @@ async def enumerate_usernames(
raise DataError("Invalid usernames") from e
else:
self._logger.info("Usernames validated (count=%d)", len(usernames))
-
+
try:
sites = self._filter_sites(
site_names,
@@ -497,7 +580,8 @@ async def enumerate_usernames(
coroutines = [
self.enumerate_site(site, username, fuzzy_mode)
- for site in sites for username in usernames
+ for site in sites
+ for username in usernames
]
async def iterate_results() -> AsyncGenerator[SiteResult, None]:
@@ -511,17 +595,17 @@ async def iterate_results() -> AsyncGenerator[SiteResult, None]:
async def self_enumeration(
self,
- site_names: Optional[List[str]] = None,
- include_categories: Optional[List[str]] = None,
- exclude_categories: Optional[List[str]] = None,
+ site_names: list[str] | None = None,
+ include_categories: list[str] | None = None,
+ exclude_categories: list[str] | None = None,
fuzzy_mode: bool = False,
- as_generator: bool = False
- ) -> Union[List[SelfEnumerationResult], AsyncGenerator[SelfEnumerationResult, None]]:
+ as_generator: bool = False,
+ ) -> list[SelfEnumerationResult] | AsyncGenerator[SelfEnumerationResult, None]:
"""Run self-enumeration using known accounts for each site."""
await self._open_session()
try:
await self._ensure_dataset()
- except DataError as e:
+ except DataError:
self._logger.exception("Dataset load failed")
raise
@@ -535,43 +619,51 @@ async def self_enumeration(
self._logger.error("Site filtering failed: %s", e)
raise
- self._logger.info("Starting self-enumeration (sites=%d, mode=%s)",
- len(sites), ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT)
+ self._logger.info(
+ "Starting self-enumeration (sites=%d, mode=%s)",
+ len(sites),
+ ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT,
+ )
- async def _enumerate_known(site: Dict[str, Any]) -> SelfEnumerationResult:
+ async def _enumerate_known(site: dict[str, Any]) -> SelfEnumerationResult:
"""Helper function to enumerate a site with all its known users."""
missing_keys = self._get_missing_keys(site, REQUIRED_KEYS_SELF_ENUM)
if missing_keys:
return SelfEnumerationResult(
name=site.get("name", "unknown"),
category=site.get("cat", "unknown"),
- error=f"Site data missing required keys: {missing_keys}"
+ error=f"Site data missing required keys: {missing_keys}",
)
-
+
name = site["name"]
category = site["cat"]
known = site["known"]
-
- self._logger.debug("Self-enumerating site=%s category=%s known_count=%d",
- name, category, len(known))
+
+ self._logger.debug(
+ "Self-enumerating site=%s category=%s known_count=%d",
+ name,
+ category,
+ len(known),
+ )
try:
- coroutines = [self.enumerate_site(site, username, fuzzy_mode) for username in known]
+ coroutines = [
+ self.enumerate_site(site, username, fuzzy_mode)
+ for username in known
+ ]
results = await asyncio.gather(*coroutines)
return SelfEnumerationResult(
- name=name,
- category=category,
- results=results
+ name=name, category=category, results=results
)
except Exception as e:
self._logger.exception("Self-enumeration failed for site=%s", name)
return SelfEnumerationResult(
name=name,
category=category,
- error=f"Unexpected error during self-enumeration: {e}"
+ error=f"Unexpected error during self-enumeration: {e}",
)
-
+
coroutines = [
_enumerate_known(site) for site in sites if isinstance(site, dict)
]
@@ -582,5 +674,5 @@ async def iterate_results() -> AsyncGenerator[SelfEnumerationResult, None]:
if as_generator:
return iterate_results()
-
+
return await asyncio.gather(*coroutines)
diff --git a/naminter/core/models.py b/naminter/core/models.py
index c2714eb..a2a85ed 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -1,15 +1,18 @@
-from dataclasses import dataclass, asdict, field
-from enum import StrEnum, auto
-from typing import Optional, Dict, Any, List, Set
-from datetime import datetime
import json
+from dataclasses import asdict, dataclass, field
+from datetime import datetime
+from enum import StrEnum, auto
+from typing import Any
+
class ValidationMode(StrEnum):
FUZZY = auto()
STRICT = auto()
+
class ResultStatus(StrEnum):
"""Status of username search results."""
+
FOUND = auto()
AMBIGUOUS = auto()
UNKNOWN = auto()
@@ -17,18 +20,20 @@ class ResultStatus(StrEnum):
NOT_VALID = auto()
ERROR = auto()
+
@dataclass(slots=True, frozen=True)
class SiteResult:
"""Result of testing a username on a site."""
+
name: str
category: str
username: str
status: ResultStatus
- result_url: Optional[str] = None
- response_code: Optional[int] = None
- response_text: Optional[str] = None
- elapsed: Optional[float] = None
- error: Optional[str] = None
+ result_url: str | None = None
+ response_code: int | None = None
+ response_text: str | None = None
+ elapsed: float | None = None
+ error: str | None = None
created_at: datetime = field(default_factory=datetime.now)
@classmethod
@@ -36,29 +41,33 @@ def get_result_status(
cls,
response_code: int,
response_text: str,
- e_code: Optional[int] = None,
- e_string: Optional[str] = None,
- m_code: Optional[int] = None,
- m_string: Optional[str] = None,
+ e_code: int | None = None,
+ e_string: str | None = None,
+ m_code: int | None = None,
+ m_string: str | None = None,
fuzzy_mode: bool = False,
) -> ResultStatus:
condition_found = False
condition_not_found = False
if fuzzy_mode:
- condition_found = (e_code is not None and response_code == e_code) or (e_string and e_string in response_text)
- condition_not_found = (m_code is not None and response_code == m_code) or (m_string and m_string in response_text)
+ condition_found = (e_code is not None and response_code == e_code) or (
+ e_string and e_string in response_text
+ )
+ condition_not_found = (m_code is not None and response_code == m_code) or (
+ m_string and m_string in response_text
+ )
else:
condition_found = (
- (e_code is None or response_code == e_code) and
- (e_string is None or e_string in response_text) and
- (e_code is not None or e_string is not None)
+ (e_code is None or response_code == e_code)
+ and (e_string is None or e_string in response_text)
+ and (e_code is not None or e_string is not None)
)
condition_not_found = (
- (m_code is None or response_code == m_code) and
- (m_string is None or m_string in response_text) and
- (m_code is not None or m_string is not None)
+ (m_code is None or response_code == m_code)
+ and (m_string is None or m_string in response_text)
+ and (m_code is not None or m_string is not None)
)
if condition_found and condition_not_found:
@@ -70,87 +79,100 @@ def get_result_status(
else:
return ResultStatus.UNKNOWN
- def to_dict(self, exclude_response_text: bool = False) -> Dict[str, Any]:
+ def to_dict(self, exclude_response_text: bool = False) -> dict[str, Any]:
"""Convert SiteResult to dict."""
result = asdict(self)
- result['status'] = self.status.value
- result['created_at'] = self.created_at.isoformat()
+ result["status"] = self.status.value
+ result["created_at"] = self.created_at.isoformat()
if exclude_response_text:
- result.pop('response_text', None)
+ result.pop("response_text", None)
return result
+
@dataclass(slots=True, frozen=True)
class SelfEnumerationResult:
"""Result of a self-enumeration for a username."""
+
name: str
category: str
- results: Optional[List[SiteResult]] = None
+ results: list[SiteResult] | None = None
status: ResultStatus = field(init=False)
- error: Optional[str] = None
+ error: str | None = None
created_at: datetime = field(default_factory=datetime.now)
def __post_init__(self) -> None:
"""Calculate result status from results."""
- object.__setattr__(self, 'status', self._get_result_status())
+ object.__setattr__(self, "status", self._get_result_status())
def _get_result_status(self) -> ResultStatus:
"""Determine result status from results."""
if self.error:
return ResultStatus.ERROR
-
+
if not self.results:
return ResultStatus.UNKNOWN
-
- statuses: Set[ResultStatus] = {result.status for result in self.results if result}
-
+
+ statuses: set[ResultStatus] = {
+ result.status for result in self.results if result
+ }
+
if not statuses:
return ResultStatus.UNKNOWN
-
+
if ResultStatus.ERROR in statuses:
return ResultStatus.ERROR
-
+
if len(statuses) > 1:
return ResultStatus.UNKNOWN
-
+
return next(iter(statuses))
-
- def to_dict(self, exclude_response_text: bool = False) -> Dict[str, Any]:
+
+ def to_dict(self, exclude_response_text: bool = False) -> dict[str, Any]:
"""Convert SelfEnumerationResult to dict."""
return {
- 'name': self.name,
- 'category': self.category,
- 'results': [result.to_dict(exclude_response_text=exclude_response_text) for result in self.results] if self.results else [],
- 'status': self.status.value,
- 'created_at': self.created_at.isoformat(),
- 'error': self.error,
+ "name": self.name,
+ "category": self.category,
+ "results": [
+ result.to_dict(exclude_response_text=exclude_response_text)
+ for result in self.results
+ ]
+ if self.results
+ else [],
+ "status": self.status.value,
+ "created_at": self.created_at.isoformat(),
+ "error": self.error,
}
+
@dataclass(slots=True, frozen=True)
class Summary:
"""Summary of the loaded WhatsMyName dataset and filters applied."""
- license: List[str]
- authors: List[str]
- site_names: List[str]
+
+ license: list[str]
+ authors: list[str]
+ site_names: list[str]
sites_count: int
- categories: List[str]
+ categories: list[str]
categories_count: int
known_accounts_total: int
- def to_dict(self) -> Dict[str, Any]:
+ def to_dict(self) -> dict[str, Any]:
"""Convert Summary to a plain dictionary for serialization/legacy callers."""
return {
- 'license': list(self.license),
- 'authors': list(self.authors),
- 'site_names': list(self.site_names),
- 'sites_count': int(self.sites_count),
- 'categories': list(self.categories),
- 'categories_count': int(self.categories_count),
- 'known_accounts_total': int(self.known_accounts_total),
+ "license": list(self.license),
+ "authors": list(self.authors),
+ "site_names": list(self.site_names),
+ "sites_count": int(self.sites_count),
+ "categories": list(self.categories),
+ "categories_count": int(self.categories_count),
+ "known_accounts_total": int(self.known_accounts_total),
}
+
@dataclass(slots=True, frozen=True)
class Response:
"""HTTP response abstraction used by session adapters."""
+
status_code: int
text: str
elapsed: float
@@ -161,4 +183,4 @@ def json(self) -> Any:
Raises:
ValueError: If the response text is not valid JSON.
"""
- return json.loads(self.text)
\ No newline at end of file
+ return json.loads(self.text)
diff --git a/naminter/core/network.py b/naminter/core/network.py
index df69576..ea26454 100644
--- a/naminter/core/network.py
+++ b/naminter/core/network.py
@@ -1,13 +1,14 @@
import asyncio
import logging
-import json
-from typing import Any, Dict, Optional, Union, Protocol, Mapping, runtime_checkable
+from collections.abc import Mapping
+from typing import Any, Protocol, runtime_checkable
-from curl_cffi.requests import AsyncSession
-from curl_cffi.requests.exceptions import Timeout as CurlTimeout, RequestException as CurlRequestException
from curl_cffi import BrowserTypeLiteral
+from curl_cffi.requests import AsyncSession
+from curl_cffi.requests.exceptions import RequestException as CurlRequestException
+from curl_cffi.requests.exceptions import Timeout as CurlTimeout
-from .exceptions import NetworkError, TimeoutError, SessionError
+from .exceptions import NetworkError, SessionError, TimeoutError
from .models import Response
@@ -23,12 +24,15 @@ async def close(self) -> None:
"""Close the underlying HTTP session."""
...
- async def get(self, url: str, headers: Optional[Mapping[str, str]] = None) -> Response:
+ async def get(self, url: str, headers: Mapping[str, str] | None = None) -> Response:
"""HTTP GET request (see class docstring for error contract)."""
...
async def post(
- self, url: str, headers: Optional[Mapping[str, str]] = None, data: Optional[Union[str, bytes]] = None
+ self,
+ url: str,
+ headers: Mapping[str, str] | None = None,
+ data: str | bytes | None = None,
) -> Response:
"""HTTP POST request (see class docstring for error contract)."""
...
@@ -37,8 +41,8 @@ async def request(
self,
method: str,
url: str,
- headers: Optional[Mapping[str, str]] = None,
- data: Optional[Union[str, bytes]] = None,
+ headers: Mapping[str, str] | None = None,
+ data: str | bytes | None = None,
) -> Response:
"""Generic HTTP request (see class docstring for error contract)."""
...
@@ -48,29 +52,29 @@ class CurlCFFISession:
def __init__(
self,
*,
- proxies: Optional[Union[str, Dict[str, str]]] = None,
+ proxies: str | dict[str, str] | None = None,
verify: bool = True,
timeout: int = 30,
allow_redirects: bool = True,
- impersonate: Optional[BrowserTypeLiteral] = None,
- ja3: Optional[str] = None,
- akamai: Optional[str] = None,
- extra_fp: Optional[Dict[str, Any]] = None,
+ impersonate: BrowserTypeLiteral | None = None,
+ ja3: str | None = None,
+ akamai: str | None = None,
+ extra_fp: dict[str, Any] | None = None,
) -> None:
self._logger = logging.getLogger(__name__)
- self._session: Optional[AsyncSession] = None
+ self._session: AsyncSession | None = None
if isinstance(proxies, str):
proxies = {"http": proxies, "https": proxies}
- self._proxies: Optional[Union[str, Dict[str, str]]] = proxies
+ self._proxies: str | dict[str, str] | None = proxies
self._verify: bool = verify
self._timeout: int = timeout
self._allow_redirects: bool = allow_redirects
- self._impersonate: Optional[BrowserTypeLiteral] = impersonate
- self._ja3: Optional[str] = ja3
- self._akamai: Optional[str] = akamai
- self._extra_fp: Optional[Dict[str, Any]] = extra_fp
+ self._impersonate: BrowserTypeLiteral | None = impersonate
+ self._ja3: str | None = ja3
+ self._akamai: str | None = akamai
+ self._extra_fp: dict[str, Any] | None = extra_fp
self._lock = asyncio.Lock()
@@ -91,7 +95,9 @@ async def open(self) -> None:
extra_fp=self._extra_fp,
)
except Exception as e:
- raise SessionError("Failed to open curl-cffi session", cause=e) from e
+ raise SessionError(
+ "Failed to open curl-cffi session", cause=e
+ ) from e
async def close(self) -> None:
if not self._session:
@@ -103,15 +109,19 @@ async def close(self) -> None:
finally:
self._session = None
- async def get(self, url: str, headers: Optional[Mapping[str, str]] = None) -> Response:
+ async def get(self, url: str, headers: Mapping[str, str] | None = None) -> Response:
await self.open()
if self._session is None:
raise SessionError("Session not initialized")
try:
- response = await self._session.get(url, headers=dict(headers) if headers else None)
- elapsed = response.elapsed
- return Response(status_code=response.status_code, text=response.text, elapsed=elapsed)
+ response = await self._session.get(
+ url, headers=dict(headers) if headers else None
+ )
+ elapsed = response.elapsed
+ return Response(
+ status_code=response.status_code, text=response.text, elapsed=elapsed
+ )
except CurlTimeout as e:
raise TimeoutError(f"GET timeout for {url}", cause=e) from e
except CurlRequestException as e:
@@ -120,16 +130,23 @@ async def get(self, url: str, headers: Optional[Mapping[str, str]] = None) -> Re
raise NetworkError(f"GET failed for {url}: {e}", cause=e) from e
async def post(
- self, url: str, headers: Optional[Mapping[str, str]] = None, data: Optional[Union[str, bytes]] = None
+ self,
+ url: str,
+ headers: Mapping[str, str] | None = None,
+ data: str | bytes | None = None,
) -> Response:
await self.open()
if self._session is None:
raise SessionError("Session not initialized")
try:
- response = await self._session.post(url, headers=dict(headers) if headers else None, data=data)
+ response = await self._session.post(
+ url, headers=dict(headers) if headers else None, data=data
+ )
elapsed = response.elapsed
- return Response(status_code=response.status_code, text=response.text, elapsed=elapsed)
+ return Response(
+ status_code=response.status_code, text=response.text, elapsed=elapsed
+ )
except CurlTimeout as e:
raise TimeoutError(f"POST timeout for {url}", cause=e) from e
except CurlRequestException as e:
@@ -141,18 +158,25 @@ async def request(
self,
method: str,
url: str,
- headers: Optional[Mapping[str, str]] = None,
- data: Optional[Union[str, bytes]] = None,
+ headers: Mapping[str, str] | None = None,
+ data: str | bytes | None = None,
) -> Response:
await self.open()
if self._session is None:
raise SessionError("Session not initialized")
-
+
try:
- response = await self._session.request(method=method, url=url, headers=dict(headers) if headers else None, data=data)
+ response = await self._session.request(
+ method=method,
+ url=url,
+ headers=dict(headers) if headers else None,
+ data=data,
+ )
elapsed = response.elapsed
- return Response(status_code=response.status_code, text=response.text, elapsed=elapsed)
+ return Response(
+ status_code=response.status_code, text=response.text, elapsed=elapsed
+ )
except CurlTimeout as e:
raise TimeoutError(f"{method} timeout for {url}", cause=e) from e
except CurlRequestException as e:
@@ -161,11 +185,8 @@ async def request(
raise NetworkError(f"{method} failed for {url}: {e}", cause=e) from e
-
__all__ = [
- "Response",
"BaseSession",
"CurlCFFISession",
+ "Response",
]
-
-
diff --git a/naminter/core/utils.py b/naminter/core/utils.py
index bcf3560..248f19f 100644
--- a/naminter/core/utils.py
+++ b/naminter/core/utils.py
@@ -1,42 +1,34 @@
import logging
-import json
-import asyncio
-from pathlib import Path
-from typing import Any, Dict, List, Optional, Union, Set, Tuple
-
-from .exceptions import (
- ConfigurationError,
- DataError,
- SchemaError,
- ValidationError,
-)
+from typing import Any
from .constants import (
- WMN_REMOTE_URL,
- MIN_TASKS,
+ EXTREME_CONCURRENCY_THRESHOLD,
+ HIGH_CONCURRENCY_MIN_TIMEOUT,
+ HIGH_CONCURRENCY_THRESHOLD,
+ LOW_TIMEOUT_WARNING_THRESHOLD,
MAX_TASKS_LIMIT,
- MIN_TIMEOUT,
MAX_TIMEOUT,
- HIGH_CONCURRENCY_THRESHOLD,
- HIGH_CONCURRENCY_MIN_TIMEOUT,
- VERY_HIGH_CONCURRENCY_THRESHOLD,
+ MIN_TASKS,
+ MIN_TIMEOUT,
VERY_HIGH_CONCURRENCY_MIN_TIMEOUT,
- EXTREME_CONCURRENCY_THRESHOLD,
- LOW_TIMEOUT_WARNING_THRESHOLD,
+ VERY_HIGH_CONCURRENCY_THRESHOLD,
WMN_LIST_FIELDS,
)
-from .network import BaseSession
+from .exceptions import (
+ ConfigurationError,
+ ValidationError,
+)
logger = logging.getLogger(__name__)
-def deduplicate_strings(values: Optional[List[str]]) -> List[str]:
+def deduplicate_strings(values: list[str] | None) -> list[str]:
"""Return a list of unique, non-empty strings preserving original order."""
if not values:
return []
- seen: Set[str] = set()
- unique_values: List[str] = []
+ seen: set[str] = set()
+ unique_values: list[str] = []
for item in values:
if isinstance(item, str):
@@ -47,22 +39,32 @@ def deduplicate_strings(values: Optional[List[str]]) -> List[str]:
return unique_values
-def validate_numeric_values(max_tasks: int, timeout: int) -> List[str]:
- """Validate numeric configuration values and return warnings.
- """
- warnings: List[str] = []
+
+def validate_numeric_values(max_tasks: int, timeout: int) -> list[str]:
+ """Validate numeric configuration values and return warnings."""
+ warnings: list[str] = []
if not (MIN_TASKS <= max_tasks <= MAX_TASKS_LIMIT):
- raise ConfigurationError(f"Invalid max_tasks: {max_tasks} must be between {MIN_TASKS} and {MAX_TASKS_LIMIT}")
-
+ raise ConfigurationError(
+ f"Invalid max_tasks: {max_tasks} must be between {MIN_TASKS} and {MAX_TASKS_LIMIT}"
+ )
+
if not (MIN_TIMEOUT <= timeout <= MAX_TIMEOUT):
- raise ConfigurationError(f"Invalid timeout: {timeout} must be between {MIN_TIMEOUT} and {MAX_TIMEOUT} seconds")
+ raise ConfigurationError(
+ f"Invalid timeout: {timeout} must be between {MIN_TIMEOUT} and {MAX_TIMEOUT} seconds"
+ )
- if max_tasks > HIGH_CONCURRENCY_THRESHOLD and timeout < HIGH_CONCURRENCY_MIN_TIMEOUT:
+ if (
+ max_tasks > HIGH_CONCURRENCY_THRESHOLD
+ and timeout < HIGH_CONCURRENCY_MIN_TIMEOUT
+ ):
warnings.append(
f"High concurrency ({max_tasks}) with low timeout ({timeout}s) may cause failures; consider increasing timeout or reducing max_tasks."
)
- elif max_tasks > VERY_HIGH_CONCURRENCY_THRESHOLD and timeout < VERY_HIGH_CONCURRENCY_MIN_TIMEOUT:
+ elif (
+ max_tasks > VERY_HIGH_CONCURRENCY_THRESHOLD
+ and timeout < VERY_HIGH_CONCURRENCY_MIN_TIMEOUT
+ ):
warnings.append(
f"Very high concurrency ({max_tasks}) with very low timeout ({timeout}s) may cause connection issues; recommend timeout >= {HIGH_CONCURRENCY_MIN_TIMEOUT}s for max_tasks > {VERY_HIGH_CONCURRENCY_THRESHOLD}."
)
@@ -79,7 +81,8 @@ def validate_numeric_values(max_tasks: int, timeout: int) -> List[str]:
return warnings
-def configure_proxy(proxy: Optional[Union[str, Dict[str, str]]]) -> Optional[Dict[str, str]]:
+
+def configure_proxy(proxy: str | dict[str, str] | None) -> dict[str, str] | None:
"""Validate and configure proxy settings."""
if proxy is None:
return None
@@ -87,35 +90,45 @@ def configure_proxy(proxy: Optional[Union[str, Dict[str, str]]]) -> Optional[Dic
if isinstance(proxy, str):
if not proxy.strip():
raise ConfigurationError("Invalid proxy: proxy string cannot be empty")
-
- if not (proxy.startswith('http://') or proxy.startswith('https://') or proxy.startswith('socks5://')):
- raise ConfigurationError("Invalid proxy: must be http://, https://, or socks5:// URL")
-
+
+ if not (
+ proxy.startswith("http://")
+ or proxy.startswith("https://")
+ or proxy.startswith("socks5://")
+ ):
+ raise ConfigurationError(
+ "Invalid proxy: must be http://, https://, or socks5:// URL"
+ )
+
logger.debug("Proxy configuration validated")
return {"http": proxy, "https": proxy}
-
+
elif isinstance(proxy, dict):
for protocol, proxy_url in proxy.items():
- if protocol not in ['http', 'https']:
+ if protocol not in ["http", "https"]:
raise ConfigurationError(f"Invalid proxy protocol: {protocol}")
-
+
if not isinstance(proxy_url, str) or not proxy_url.strip():
- raise ConfigurationError(f"Invalid proxy URL for {protocol}: must be non-empty string")
-
+ raise ConfigurationError(
+ f"Invalid proxy URL for {protocol}: must be non-empty string"
+ )
+
logger.debug("Proxy dictionary configuration validated")
return proxy
-def validate_usernames(usernames: List[str]) -> List[str]:
+
+def validate_usernames(usernames: list[str]) -> list[str]:
"""Validate and deduplicate usernames, preserving order."""
- unique_usernames: List[str] = deduplicate_strings(usernames)
+ unique_usernames: list[str] = deduplicate_strings(usernames)
if not unique_usernames:
raise ValidationError("No valid usernames provided")
return unique_usernames
-def merge_lists(data: Dict[str, Any], accumulator: Dict[str, Any]) -> None:
+
+def merge_lists(data: dict[str, Any], accumulator: dict[str, Any]) -> None:
"""Merge list fields from data into the accumulator dictionary."""
if isinstance(data, dict):
for key in WMN_LIST_FIELDS:
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..0893ce3
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1390 @@
+# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
+
+[[package]]
+name = "aiofiles"
+version = "24.1.0"
+description = "File support for asyncio."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"},
+ {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
+]
+
+[[package]]
+name = "attrs"
+version = "25.3.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"},
+ {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"},
+]
+
+[package.extras]
+benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]
+tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
+
+[[package]]
+name = "brotli"
+version = "1.1.0"
+description = "Python bindings for the Brotli compression library"
+optional = false
+python-versions = "*"
+groups = ["main"]
+markers = "platform_python_implementation == \"CPython\""
+files = [
+ {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"},
+ {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"},
+ {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"},
+ {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"},
+ {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"},
+ {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
+ {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
+ {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
+ {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
+ {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"},
+ {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"},
+ {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"},
+ {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"},
+ {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
+ {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
+ {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
+ {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
+ {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
+ {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
+ {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
+ {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
+ {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"},
+ {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"},
+ {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
+ {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
+ {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
+ {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
+ {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
+ {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
+ {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
+ {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
+ {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
+ {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
+ {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
+ {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
+ {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
+ {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
+ {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
+ {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
+ {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
+ {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
+ {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d4a848d1837973bf0f4b5e54e3bec977d99be36a7895c61abb659301b02c112"},
+ {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fdc3ff3bfccdc6b9cc7c342c03aa2400683f0cb891d46e94b64a197910dc4064"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5eeb539606f18a0b232d4ba45adccde4125592f3f636a6182b4a8a436548b914"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
+ {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
+ {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
+ {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
+ {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f733d788519c7e3e71f0855c96618720f5d3d60c3cb829d8bbb722dddce37985"},
+ {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:929811df5462e182b13920da56c6e0284af407d1de637d8e536c5cd00a7daf60"},
+ {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b63b949ff929fbc2d6d3ce0e924c9b93c9785d877a21a1b678877ffbbc4423a"},
+ {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d192f0f30804e55db0d0e0a35d83a9fead0e9a359a9ed0285dbacea60cc10a84"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f296c40e23065d0d6650c4aefe7470d2a25fffda489bcc3eb66083f3ac9f6643"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
+ {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
+ {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
+ {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
+ {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d20af184290887bdea3f0f78c4f737d126c74dc2f3ccadf07e54ceca3bf208"},
+ {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6172447e1b368dcbc458925e5ddaf9113477b0ed542df258d84fa28fc45ceea7"},
+ {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a743e5a28af5f70f9c080380a5f908d4d21d40e8f0e0c8901604d15cfa9ba751"},
+ {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0541e747cce78e24ea12d69176f6a7ddb690e62c425e01d31cc065e69ce55b48"},
+ {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cdbc1fc1bc0bff1cef838eafe581b55bfbffaed4ed0318b724d0b71d4d377619"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:890b5a14ce214389b2cc36ce82f3093f96f4cc730c1cffdbefff77a7c71f2a97"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
+ {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
+ {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
+ {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
+ {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7905193081db9bfa73b1219140b3d315831cbff0d8941f22da695832f0dd188f"},
+ {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a77def80806c421b4b0af06f45d65a136e7ac0bdca3c09d9e2ea4e515367c7e9"},
+ {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dadd1314583ec0bf2d1379f7008ad627cd6336625d6679cf2f8e67081b83acf"},
+ {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:901032ff242d479a0efa956d853d16875d42157f98951c0230f69e69f9c09bac"},
+ {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22fc2a8549ffe699bfba2256ab2ed0421a7b8fadff114a3d201794e45a9ff578"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae15b066e5ad21366600ebec29a7ccbc86812ed267e4b28e860b8ca16a2bc474"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
+ {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
+ {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
+ {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
+]
+
+[[package]]
+name = "brotlicffi"
+version = "1.1.0.0"
+description = "Python CFFI bindings to the Brotli library"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+markers = "platform_python_implementation != \"CPython\""
+files = [
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851"},
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b"},
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814"},
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820"},
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb"},
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613"},
+ {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2e4aeb0bd2540cb91b069dbdd54d458da8c4334ceaf2d25df2f4af576d6766ca"},
+ {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b7b0033b0d37bb33009fb2fef73310e432e76f688af76c156b3594389d81391"},
+ {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54a07bb2374a1eba8ebb52b6fafffa2afd3c4df85ddd38fcc0511f2bb387c2a8"},
+ {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7901a7dc4b88f1c1475de59ae9be59799db1007b7d059817948d8e4f12e24e35"},
+ {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce01c7316aebc7fce59da734286148b1d1b9455f89cf2c8a4dfce7d41db55c2d"},
+ {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:246f1d1a90279bb6069de3de8d75a8856e073b8ff0b09dcca18ccc14cec85979"},
+ {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc4bc5d82bc56ebd8b514fb8350cfac4627d6b0743382e46d033976a5f80fab6"},
+ {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c26ecb14386a44b118ce36e546ce307f4810bc9598a6e6cb4f7fca725ae7e6"},
+ {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca72968ae4eaf6470498d5c2887073f7efe3b1e7d7ec8be11a06a79cc810e990"},
+ {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:add0de5b9ad9e9aa293c3aa4e9deb2b61e99ad6c1634e01d01d98c03e6a354cc"},
+ {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b6068e0f3769992d6b622a1cd2e7835eae3cf8d9da123d7f51ca9c1e9c333e5"},
+ {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8557a8559509b61e65083f8782329188a250102372576093c88930c875a69838"},
+ {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a7ae37e5d79c5bdfb5b4b99f2715a6035e6c5bf538c3746abc8e26694f92f33"},
+ {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391151ec86bb1c683835980f4816272a87eaddc46bb91cbf44f62228b84d8cca"},
+ {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2f3711be9290f0453de8eed5275d93d286abe26b08ab4a35d7452caa1fef532f"},
+ {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a807d760763e398bbf2c6394ae9da5815901aa93ee0a37bca5efe78d4ee3171"},
+ {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa8ca0623b26c94fccc3a1fdd895be1743b838f3917300506d04aa3346fd2a14"},
+ {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3de0cf28a53a3238b252aca9fed1593e9d36c1d116748013339f0949bfc84112"},
+ {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6be5ec0e88a4925c91f3dea2bb0013b3a2accda6f77238f76a34a1ea532a1cb0"},
+ {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d9eb71bb1085d996244439154387266fd23d6ad37161f6f52f1cd41dd95a3808"},
+ {file = "brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13"},
+]
+
+[package.dependencies]
+cffi = ">=1.0.0"
+
+[[package]]
+name = "certifi"
+version = "2025.8.3"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"},
+ {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"},
+]
+
+[[package]]
+name = "cffi"
+version = "2.0.0"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"},
+ {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"},
+ {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"},
+ {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"},
+ {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"},
+ {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"},
+ {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"},
+ {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"},
+ {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"},
+ {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"},
+ {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"},
+ {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"},
+ {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"},
+ {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"},
+ {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"},
+ {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"},
+ {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"},
+ {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"},
+ {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"},
+ {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"},
+ {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"},
+ {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"},
+ {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"},
+ {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"},
+ {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"},
+ {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"},
+ {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"},
+ {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"},
+ {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"},
+ {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"},
+ {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"},
+ {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"},
+ {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"},
+ {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"},
+ {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"},
+ {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"},
+ {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"},
+ {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"},
+ {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"},
+ {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"},
+ {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"},
+ {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"},
+ {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"},
+ {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"},
+ {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"},
+ {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"},
+ {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"},
+ {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"},
+ {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"},
+ {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"},
+ {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"},
+ {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"},
+ {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"},
+ {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"},
+ {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"},
+ {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"},
+ {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"},
+ {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"},
+ {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"},
+ {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"},
+ {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"},
+ {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"},
+ {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"},
+ {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"},
+ {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"},
+ {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"},
+ {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"},
+ {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"},
+ {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"},
+ {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"},
+ {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"},
+ {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"},
+ {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"},
+ {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"},
+ {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"},
+ {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"},
+ {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"},
+ {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"},
+ {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"},
+ {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"},
+ {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"},
+ {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"},
+ {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"},
+ {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"},
+]
+
+[package.dependencies]
+pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
+
+[[package]]
+name = "click"
+version = "8.3.0"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+ {file = "click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc"},
+ {file = "click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["main", "dev"]
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+markers = {main = "platform_system == \"Windows\""}
+
+[[package]]
+name = "cssselect2"
+version = "0.8.0"
+description = "CSS selectors for Python ElementTree"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "cssselect2-0.8.0-py3-none-any.whl", hash = "sha256:46fc70ebc41ced7a32cd42d58b1884d72ade23d21e5a4eaaf022401c13f0e76e"},
+ {file = "cssselect2-0.8.0.tar.gz", hash = "sha256:7674ffb954a3b46162392aee2a3a0aedb2e14ecf99fcc28644900f4e6e3e9d3a"},
+]
+
+[package.dependencies]
+tinycss2 = "*"
+webencodings = "*"
+
+[package.extras]
+doc = ["furo", "sphinx"]
+test = ["pytest", "ruff"]
+
+[[package]]
+name = "curl-cffi"
+version = "0.13.0"
+description = "libcurl ffi bindings for Python, with impersonation support."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "curl_cffi-0.13.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:434cadbe8df2f08b2fc2c16dff2779fb40b984af99c06aa700af898e185bb9db"},
+ {file = "curl_cffi-0.13.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:59afa877a9ae09efa04646a7d068eeea48915a95d9add0a29854e7781679fcd7"},
+ {file = "curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d06ed389e45a7ca97b17c275dbedd3d6524560270e675c720e93a2018a766076"},
+ {file = "curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4e0de45ab3b7a835c72bd53640c2347415111b43421b5c7a1a0b18deae2e541"},
+ {file = "curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8eb4083371bbb94e9470d782de235fb5268bf43520de020c9e5e6be8f395443f"},
+ {file = "curl_cffi-0.13.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:28911b526e8cd4aa0e5e38401bfe6887e8093907272f1f67ca22e6beb2933a51"},
+ {file = "curl_cffi-0.13.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d433ffcb455ab01dd0d7bde47109083aa38b59863aa183d29c668ae4c96bf8e"},
+ {file = "curl_cffi-0.13.0-cp39-abi3-win_amd64.whl", hash = "sha256:66a6b75ce971de9af64f1b6812e275f60b88880577bac47ef1fa19694fa21cd3"},
+ {file = "curl_cffi-0.13.0-cp39-abi3-win_arm64.whl", hash = "sha256:d438a3b45244e874794bc4081dc1e356d2bb926dcc7021e5a8fef2e2105ef1d8"},
+ {file = "curl_cffi-0.13.0.tar.gz", hash = "sha256:62ecd90a382bd5023750e3606e0aa7cb1a3a8ba41c14270b8e5e149ebf72c5ca"},
+]
+
+[package.dependencies]
+certifi = ">=2024.2.2"
+cffi = ">=1.12.0"
+
+[package.extras]
+build = ["cibuildwheel", "wheel"]
+dev = ["charset_normalizer (>=3.3.2,<4.0)", "coverage (>=6.4.1,<7.0)", "cryptography (>=42.0.5,<43.0)", "httpx (==0.23.1)", "mypy (>=1.9.0,<2.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "ruff (>=0.3.5,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "typing_extensions", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"]
+extra = ["lxml_html_clean", "markdownify (>=1.1.0)", "readability-lxml (>=0.8.1)"]
+test = ["charset_normalizer (>=3.3.2,<4.0)", "cryptography (>=42.0.5,<43.0)", "fastapi (==0.110.0)", "httpx (==0.23.1)", "proxy.py (>=2.4.3,<3.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "python-multipart (>=0.0.9,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "typing_extensions", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"]
+
+[[package]]
+name = "fonttools"
+version = "4.60.0"
+description = "Tools to manipulate font files"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "fonttools-4.60.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:151282a235c36024168c21c02193e939e8b28c73d5fa0b36ae1072671d8fa134"},
+ {file = "fonttools-4.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3f32cc42d485d9b1546463b9a7a92bdbde8aef90bac3602503e04c2ddb27e164"},
+ {file = "fonttools-4.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:336b89d169c40379b8ccef418c877edbc28840b553099c9a739b0db2bcbb57c5"},
+ {file = "fonttools-4.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39a38d950b2b04cd6da729586e6b51d686b0c27d554a2154a6a35887f87c09b1"},
+ {file = "fonttools-4.60.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7067dd03e0296907a5c6184285807cbb7bc0bf61a584ffebbf97c2b638d8641a"},
+ {file = "fonttools-4.60.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:342753fe1a1bd2e6896e7a4e936a67c0f441d6897bd11477f718e772d6e63e88"},
+ {file = "fonttools-4.60.0-cp310-cp310-win32.whl", hash = "sha256:0746c2b2b32087da2ac5f81e14d319c44cb21127d419bc60869daed089790e3d"},
+ {file = "fonttools-4.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:b83b32e5e8918f8e0ccd79816fc2f914e30edc6969ab2df6baf4148e72dbcc11"},
+ {file = "fonttools-4.60.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a9106c202d68ff5f9b4a0094c4d7ad2eaa7e9280f06427b09643215e706eb016"},
+ {file = "fonttools-4.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9da3a4a3f2485b156bb429b4f8faa972480fc01f553f7c8c80d05d48f17eec89"},
+ {file = "fonttools-4.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f84de764c6057b2ffd4feb50ddef481d92e348f0c70f2c849b723118d352bf3"},
+ {file = "fonttools-4.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:800b3fa0d5c12ddff02179d45b035a23989a6c597a71c8035c010fff3b2ef1bb"},
+ {file = "fonttools-4.60.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd68f60b030277f292a582d31c374edfadc60bb33d51ec7b6cd4304531819ba"},
+ {file = "fonttools-4.60.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:53328e3ca9e5c8660ef6de07c35f8f312c189b757535e12141be7a8ec942de6e"},
+ {file = "fonttools-4.60.0-cp311-cp311-win32.whl", hash = "sha256:d493c175ddd0b88a5376e61163e3e6fde3be8b8987db9b092e0a84650709c9e7"},
+ {file = "fonttools-4.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cc2770c9dc49c2d0366e9683f4d03beb46c98042d7ccc8ddbadf3459ecb051a7"},
+ {file = "fonttools-4.60.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8c68928a438d60dfde90e2f09aa7f848ed201176ca6652341744ceec4215859f"},
+ {file = "fonttools-4.60.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b7133821249097cffabf0624eafd37f5a3358d5ce814febe9db688e3673e724e"},
+ {file = "fonttools-4.60.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3638905d3d77ac8791127ce181f7cb434f37e4204d8b2e31b8f1e154320b41f"},
+ {file = "fonttools-4.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7968a26ef010ae89aabbb2f8e9dec1e2709a2541bb8620790451ee8aeb4f6fbf"},
+ {file = "fonttools-4.60.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ef01ca7847c356b0fe026b7b92304bc31dc60a4218689ee0acc66652c1a36b2"},
+ {file = "fonttools-4.60.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f3482d7ed7867edfcf785f77c1dffc876c4b2ddac19539c075712ff2a0703cf5"},
+ {file = "fonttools-4.60.0-cp312-cp312-win32.whl", hash = "sha256:8c937c4fe8addff575a984c9519433391180bf52cf35895524a07b520f376067"},
+ {file = "fonttools-4.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:99b06d5d6f29f32e312adaed0367112f5ff2d300ea24363d377ec917daf9e8c5"},
+ {file = "fonttools-4.60.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:97100ba820936cdb5148b634e0884f0088699c7e2f1302ae7bba3747c7a19fb3"},
+ {file = "fonttools-4.60.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:03fccf84f377f83e99a5328a9ebe6b41e16fcf64a1450c352b6aa7e0deedbc01"},
+ {file = "fonttools-4.60.0-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a3ef06671f862cd7da78ab105fbf8dce9da3634a8f91b3a64ed5c29c0ac6a9a8"},
+ {file = "fonttools-4.60.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f2195faf96594c238462c420c7eff97d1aa51de595434f806ec3952df428616"},
+ {file = "fonttools-4.60.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3887008865fa4f56cff58a1878f1300ba81a4e34f76daf9b47234698493072ee"},
+ {file = "fonttools-4.60.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5567bd130378f21231d3856d8f0571dcdfcd77e47832978c26dabe572d456daa"},
+ {file = "fonttools-4.60.0-cp313-cp313-win32.whl", hash = "sha256:699d0b521ec0b188ac11f2c14ccf6a926367795818ddf2bd00a273e9a052dd20"},
+ {file = "fonttools-4.60.0-cp313-cp313-win_amd64.whl", hash = "sha256:24296163268e7c800009711ce5c0e9997be8882c0bd546696c82ef45966163a6"},
+ {file = "fonttools-4.60.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:b6fe3efdc956bdad95145cea906ad9ff345c17b706356dfc1098ce3230591343"},
+ {file = "fonttools-4.60.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:764b2aaab839762a3aa3207e5b3f0e0dfa41799e0b091edec5fcbccc584fdab5"},
+ {file = "fonttools-4.60.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b81c7c47d9e78106a4d70f1dbeb49150513171715e45e0d2661809f2b0e3f710"},
+ {file = "fonttools-4.60.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:799ff60ee66b300ebe1fe6632b1cc55a66400fe815cef7b034d076bce6b1d8fc"},
+ {file = "fonttools-4.60.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f9878abe155ddd1b433bab95d027a686898a6afba961f3c5ca14b27488f2d772"},
+ {file = "fonttools-4.60.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ded432b7133ea4602fdb4731a4a7443a8e9548edad28987b99590cf6da626254"},
+ {file = "fonttools-4.60.0-cp314-cp314-win32.whl", hash = "sha256:5d97cf3a9245316d5978628c05642b939809c4f55ca632ca40744cb9de6e8d4a"},
+ {file = "fonttools-4.60.0-cp314-cp314-win_amd64.whl", hash = "sha256:61b9ef46dd5e9dcb6f437eb0cc5ed83d5049e1bf9348e31974ffee1235db0f8f"},
+ {file = "fonttools-4.60.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:bba7e3470cf353e1484a36dfb4108f431c2859e3f6097fe10118eeae92166773"},
+ {file = "fonttools-4.60.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5ac6439a38c27b3287063176b3303b34982024b01e2e95bba8ac1e45f6d41c1"},
+ {file = "fonttools-4.60.0-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4acd21e9f125a1257da59edf7a6e9bd4abd76282770715c613f1fe482409e9f9"},
+ {file = "fonttools-4.60.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4a6fc53039ea047e35dc62b958af9cd397eedbc3fa42406d2910ae091b9ae37"},
+ {file = "fonttools-4.60.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ef34f44eadf133e94e82c775a33ee3091dd37ee0161c5f5ea224b46e3ce0fb8e"},
+ {file = "fonttools-4.60.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d112cae3e7ad1bb5d7f7a60365fcf6c181374648e064a8c07617b240e7c828ee"},
+ {file = "fonttools-4.60.0-cp314-cp314t-win32.whl", hash = "sha256:0f7b2c251dc338973e892a1e153016114e7a75f6aac7a49b84d5d1a4c0608d08"},
+ {file = "fonttools-4.60.0-cp314-cp314t-win_amd64.whl", hash = "sha256:c8a72771106bc7434098db35abecd84d608857f6e116d3ef00366b213c502ce9"},
+ {file = "fonttools-4.60.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:79a18fff39ce2044dfc88050a033eb16e48ee0024bd0ea831950aad342b9eae9"},
+ {file = "fonttools-4.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:97fe4f9483a6cecaa3976f29cd896501f47840474188b6e505ba73e4fa25006a"},
+ {file = "fonttools-4.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fa66f07f5f4a019c36dcac86d112e016ee7f579a3100154051031a422cea8903"},
+ {file = "fonttools-4.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47e82dcf6ace13a1fd36a0b4d6966c559653f459a80784b0746f4b342e335a5d"},
+ {file = "fonttools-4.60.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d25e9af0c2e1eb70a204072cc29ec01b2efc4d072f4ebca9334145a4a8cbfca"},
+ {file = "fonttools-4.60.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3e445e9db6ce9ccda22b1dc29d619825cf91bf1b955e25974a3c47f67a7983c3"},
+ {file = "fonttools-4.60.0-cp39-cp39-win32.whl", hash = "sha256:dfd7b71a196c6929f21a7f30fa64a5d62f1acf5d857dd40ad6864452ebe615de"},
+ {file = "fonttools-4.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:1eab07d561e18b971e20510631c048cf496ffa1adf3574550dbcac38e6425832"},
+ {file = "fonttools-4.60.0-py3-none-any.whl", hash = "sha256:496d26e4d14dcccdd6ada2e937e4d174d3138e3d73f5c9b6ec6eb2fd1dab4f66"},
+ {file = "fonttools-4.60.0.tar.gz", hash = "sha256:8f5927f049091a0ca74d35cce7f78e8f7775c83a6901a8fbe899babcc297146a"},
+]
+
+[package.dependencies]
+brotli = {version = ">=1.0.1", optional = true, markers = "platform_python_implementation == \"CPython\" and extra == \"woff\""}
+brotlicffi = {version = ">=0.8.0", optional = true, markers = "platform_python_implementation != \"CPython\" and extra == \"woff\""}
+zopfli = {version = ">=0.1.4", optional = true, markers = "extra == \"woff\""}
+
+[package.extras]
+all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"]
+graphite = ["lz4 (>=1.7.4.2)"]
+interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""]
+lxml = ["lxml (>=4.0)"]
+pathops = ["skia-pathops (>=0.5.0)"]
+plot = ["matplotlib"]
+repacker = ["uharfbuzz (>=0.23.0)"]
+symfont = ["sympy"]
+type1 = ["xattr ; sys_platform == \"darwin\""]
+unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""]
+woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"},
+ {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "jsonschema"
+version = "4.25.1"
+description = "An implementation of JSON Schema validation for Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63"},
+ {file = "jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85"},
+]
+
+[package.dependencies]
+attrs = ">=22.2.0"
+jsonschema-specifications = ">=2023.03.6"
+referencing = ">=0.28.4"
+rpds-py = ">=0.7.1"
+
+[package.extras]
+format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
+format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"]
+
+[[package]]
+name = "jsonschema-specifications"
+version = "2025.9.1"
+description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe"},
+ {file = "jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d"},
+]
+
+[package.dependencies]
+referencing = ">=0.31.0"
+
+[[package]]
+name = "markdown-it-py"
+version = "4.0.0"
+description = "Python port of markdown-it. Markdown parsing, done right!"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+ {file = "markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147"},
+ {file = "markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3"},
+]
+
+[package.dependencies]
+mdurl = ">=0.1,<1.0"
+
+[package.extras]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "markdown-it-pyrs", "mistletoe (>=1.0,<2.0)", "mistune (>=3.0,<4.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
+plugins = ["mdit-py-plugins (>=0.5.0)"]
+profiling = ["gprof2dot"]
+rtd = ["ipykernel", "jupyter_sphinx", "mdit-py-plugins (>=0.5.0)", "myst-parser", "pyyaml", "sphinx", "sphinx-book-theme (>=1.0,<2.0)", "sphinx-copybutton", "sphinx-design"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions", "requests"]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.3"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"},
+ {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"},
+ {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"},
+ {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"},
+ {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"},
+ {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"},
+ {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"},
+ {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"},
+ {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"},
+ {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"},
+ {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"},
+ {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"},
+ {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"},
+ {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"},
+ {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"},
+ {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"},
+ {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"},
+ {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"},
+ {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"},
+ {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"},
+ {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"},
+ {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"},
+ {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"},
+ {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"},
+ {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"},
+ {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"},
+ {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"},
+ {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"},
+ {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"},
+ {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"},
+ {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"},
+ {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"},
+ {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"},
+ {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"},
+ {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"},
+ {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"},
+ {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"},
+ {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"},
+ {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"},
+ {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"},
+ {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"},
+ {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"},
+ {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"},
+ {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"},
+ {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"},
+ {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"},
+ {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"},
+ {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"},
+ {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"},
+ {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"},
+ {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"},
+ {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"},
+ {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"},
+ {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"},
+ {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"},
+ {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"},
+ {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"},
+ {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"},
+ {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"},
+ {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"},
+ {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"},
+ {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"},
+ {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"},
+ {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"},
+ {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"},
+ {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"},
+ {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"},
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+description = "Markdown URL utilities"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "mslex"
+version = "1.3.0"
+description = "shlex for windows"
+optional = false
+python-versions = ">=3.5"
+groups = ["dev"]
+markers = "sys_platform == \"win32\""
+files = [
+ {file = "mslex-1.3.0-py3-none-any.whl", hash = "sha256:c7074b347201b3466fc077c5692fbce9b5f62a63a51f537a53fbbd02eff2eea4"},
+ {file = "mslex-1.3.0.tar.gz", hash = "sha256:641c887d1d3db610eee2af37a8e5abda3f70b3006cdfd2d0d29dc0d1ae28a85d"},
+]
+
+[[package]]
+name = "pillow"
+version = "11.3.0"
+description = "Python Imaging Library (Fork)"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"},
+ {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"},
+ {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"},
+ {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"},
+ {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"},
+ {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"},
+ {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"},
+ {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"},
+ {file = "pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"},
+ {file = "pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"},
+ {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"},
+ {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"},
+ {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"},
+ {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"},
+ {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"},
+ {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"},
+ {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"},
+ {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"},
+ {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"},
+ {file = "pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"},
+ {file = "pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"},
+ {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"},
+ {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"},
+ {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"},
+ {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"},
+ {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"},
+ {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"},
+ {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"},
+ {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"},
+ {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d"},
+ {file = "pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"},
+ {file = "pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"},
+ {file = "pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"},
+ {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd"},
+ {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8"},
+ {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"},
+ {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"},
+ {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"},
+ {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"},
+ {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"},
+ {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"},
+ {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"},
+ {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"},
+ {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b"},
+ {file = "pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3"},
+ {file = "pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51"},
+ {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"},
+ {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"},
+ {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"},
+ {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"},
+ {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"},
+ {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"},
+ {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"},
+ {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"},
+ {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c"},
+ {file = "pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788"},
+ {file = "pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31"},
+ {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"},
+ {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"},
+ {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"},
+ {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"},
+ {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"},
+ {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"},
+ {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"},
+ {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"},
+ {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a"},
+ {file = "pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214"},
+ {file = "pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635"},
+ {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"},
+ {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"},
+ {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"},
+ {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"},
+ {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"},
+ {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"},
+ {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"},
+ {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"},
+ {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b"},
+ {file = "pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12"},
+ {file = "pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db"},
+ {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"},
+ {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"},
+ {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"},
+ {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"},
+ {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"},
+ {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"},
+ {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"},
+ {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"},
+ {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"},
+ {file = "pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"},
+ {file = "pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"},
+ {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"},
+ {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"},
+ {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"},
+ {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"},
+ {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"},
+ {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"},
+ {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"},
+ {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"},
+ {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"},
+ {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"},
+ {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"},
+ {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"},
+ {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"},
+ {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"},
+ {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"},
+ {file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"},
+]
+
+[package.extras]
+docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"]
+fpx = ["olefile"]
+mic = ["olefile"]
+test-arrow = ["pyarrow"]
+tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"]
+typing = ["typing-extensions ; python_version < \"3.10\""]
+xmp = ["defusedxml"]
+
+[[package]]
+name = "psutil"
+version = "6.1.1"
+description = "Cross-platform lib for process and system monitoring in Python."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+groups = ["dev"]
+files = [
+ {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"},
+ {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"},
+ {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8df0178ba8a9e5bc84fed9cfa61d54601b371fbec5c8eebad27575f1e105c0d4"},
+ {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:1924e659d6c19c647e763e78670a05dbb7feaf44a0e9c94bf9e14dfc6ba50468"},
+ {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:018aeae2af92d943fdf1da6b58665124897cfc94faa2ca92098838f83e1b1bca"},
+ {file = "psutil-6.1.1-cp27-none-win32.whl", hash = "sha256:6d4281f5bbca041e2292be3380ec56a9413b790579b8e593b1784499d0005dac"},
+ {file = "psutil-6.1.1-cp27-none-win_amd64.whl", hash = "sha256:c777eb75bb33c47377c9af68f30e9f11bc78e0f07fbf907be4a5d70b2fe5f030"},
+ {file = "psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8"},
+ {file = "psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377"},
+ {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003"},
+ {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160"},
+ {file = "psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3"},
+ {file = "psutil-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:384636b1a64b47814437d1173be1427a7c83681b17a450bfc309a1953e329603"},
+ {file = "psutil-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8be07491f6ebe1a693f17d4f11e69d0dc1811fa082736500f649f79df7735303"},
+ {file = "psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53"},
+ {file = "psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649"},
+ {file = "psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5"},
+]
+
+[package.extras]
+dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"]
+test = ["pytest", "pytest-xdist", "setuptools"]
+
+[[package]]
+name = "pycparser"
+version = "2.23"
+description = "C parser in Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "implementation_name != \"PyPy\""
+files = [
+ {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"},
+ {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"},
+]
+
+[[package]]
+name = "pydyf"
+version = "0.11.0"
+description = "A low-level PDF generator."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "pydyf-0.11.0-py3-none-any.whl", hash = "sha256:0aaf9e2ebbe786ec7a78ec3fbffa4cdcecde53fd6f563221d53c6bc1328848a3"},
+ {file = "pydyf-0.11.0.tar.gz", hash = "sha256:394dddf619cca9d0c55715e3c55ea121a9bf9cbc780cdc1201a2427917b86b64"},
+]
+
+[package.extras]
+doc = ["sphinx", "sphinx_rtd_theme"]
+test = ["pillow", "pytest", "ruff"]
+
+[[package]]
+name = "pygments"
+version = "2.19.2"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
+ {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
+]
+
+[package.extras]
+windows-terminal = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "pyphen"
+version = "0.17.2"
+description = "Pure Python module to hyphenate text"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "pyphen-0.17.2-py3-none-any.whl", hash = "sha256:3a07fb017cb2341e1d9ff31b8634efb1ae4dc4b130468c7c39dd3d32e7c3affd"},
+ {file = "pyphen-0.17.2.tar.gz", hash = "sha256:f60647a9c9b30ec6c59910097af82bc5dd2d36576b918e44148d8b07ef3b4aa3"},
+]
+
+[package.extras]
+doc = ["sphinx", "sphinx_rtd_theme"]
+test = ["pytest", "ruff"]
+
+[[package]]
+name = "referencing"
+version = "0.36.2"
+description = "JSON Referencing + Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"},
+ {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"},
+]
+
+[package.dependencies]
+attrs = ">=22.2.0"
+rpds-py = ">=0.7.0"
+typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""}
+
+[[package]]
+name = "rich"
+version = "14.1.0"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+optional = false
+python-versions = ">=3.8.0"
+groups = ["main"]
+files = [
+ {file = "rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f"},
+ {file = "rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8"},
+]
+
+[package.dependencies]
+markdown-it-py = ">=2.2.0"
+pygments = ">=2.13.0,<3.0.0"
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
+
+[[package]]
+name = "rich-click"
+version = "1.9.1"
+description = "Format click help output nicely with rich"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "rich_click-1.9.1-py3-none-any.whl", hash = "sha256:ea6114a9e081b7d68cc07b315070398f806f01bb0e0c49da56f129e672877817"},
+ {file = "rich_click-1.9.1.tar.gz", hash = "sha256:4f2620589d7287f86265432e6a909de4f281de909fe68d8c835fbba49265d268"},
+]
+
+[package.dependencies]
+click = ">=8"
+rich = ">=12"
+
+[package.extras]
+dev = ["inline-snapshot (>=0.24)", "jsonschema (>=4)", "mypy (>=1.14.1)", "nodeenv (>=1.9.1)", "packaging (>=25)", "pre-commit (>=3.5)", "pytest (>=8.3.5)", "pytest-cov (>=5)", "rich-codex (>=1.2.11)", "ruff (>=0.12.4)", "typer (>=0.15)", "types-setuptools (>=75.8.0.20250110)"]
+docs = ["markdown-include (>=0.8.1)", "mike (>=2.1.3)", "mkdocs-github-admonitions-plugin (>=0.1.1)", "mkdocs-glightbox (>=0.4)", "mkdocs-include-markdown-plugin (>=7.1.7) ; python_version >= \"3.9\"", "mkdocs-material-extensions (>=1.3.1)", "mkdocs-material[imaging] (>=9.5.18,<9.6.0)", "mkdocs-redirects (>=1.2.2)", "mkdocs-rss-plugin (>=1.15)", "mkdocs[docs] (>=1.6.1)", "mkdocstrings[python] (>=0.26.1)", "rich-codex (>=1.2.11)", "typer (>=0.15)"]
+
+[[package]]
+name = "rpds-py"
+version = "0.27.1"
+description = "Python bindings to Rust's persistent data structures (rpds)"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef"},
+ {file = "rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be"},
+ {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61"},
+ {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb"},
+ {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657"},
+ {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013"},
+ {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a"},
+ {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1"},
+ {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10"},
+ {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808"},
+ {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8"},
+ {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9"},
+ {file = "rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4"},
+ {file = "rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1"},
+ {file = "rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881"},
+ {file = "rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5"},
+ {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e"},
+ {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c"},
+ {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195"},
+ {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52"},
+ {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed"},
+ {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a"},
+ {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde"},
+ {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21"},
+ {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9"},
+ {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948"},
+ {file = "rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39"},
+ {file = "rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15"},
+ {file = "rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746"},
+ {file = "rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90"},
+ {file = "rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5"},
+ {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e"},
+ {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881"},
+ {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec"},
+ {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb"},
+ {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5"},
+ {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a"},
+ {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444"},
+ {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a"},
+ {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1"},
+ {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998"},
+ {file = "rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39"},
+ {file = "rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594"},
+ {file = "rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502"},
+ {file = "rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b"},
+ {file = "rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf"},
+ {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83"},
+ {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf"},
+ {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2"},
+ {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0"},
+ {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418"},
+ {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d"},
+ {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274"},
+ {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd"},
+ {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2"},
+ {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002"},
+ {file = "rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3"},
+ {file = "rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83"},
+ {file = "rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688"},
+ {file = "rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797"},
+ {file = "rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334"},
+ {file = "rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33"},
+ {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a"},
+ {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b"},
+ {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7"},
+ {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136"},
+ {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff"},
+ {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9"},
+ {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60"},
+ {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e"},
+ {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212"},
+ {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675"},
+ {file = "rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3"},
+ {file = "rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456"},
+ {file = "rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a"},
+ {file = "rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772"},
+ {file = "rpds_py-0.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c918c65ec2e42c2a78d19f18c553d77319119bf43aa9e2edf7fb78d624355527"},
+ {file = "rpds_py-0.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1fea2b1a922c47c51fd07d656324531adc787e415c8b116530a1d29c0516c62d"},
+ {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbf94c58e8e0cd6b6f38d8de67acae41b3a515c26169366ab58bdca4a6883bb8"},
+ {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2a8fed130ce946d5c585eddc7c8eeef0051f58ac80a8ee43bd17835c144c2cc"},
+ {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:037a2361db72ee98d829bc2c5b7cc55598ae0a5e0ec1823a56ea99374cfd73c1"},
+ {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5281ed1cc1d49882f9997981c88df1a22e140ab41df19071222f7e5fc4e72125"},
+ {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd50659a069c15eef8aa3d64bbef0d69fd27bb4a50c9ab4f17f83a16cbf8905"},
+ {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_31_riscv64.whl", hash = "sha256:c4b676c4ae3921649a15d28ed10025548e9b561ded473aa413af749503c6737e"},
+ {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:079bc583a26db831a985c5257797b2b5d3affb0386e7ff886256762f82113b5e"},
+ {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4e44099bd522cba71a2c6b97f68e19f40e7d85399de899d66cdb67b32d7cb786"},
+ {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e202e6d4188e53c6661af813b46c37ca2c45e497fc558bacc1a7630ec2695aec"},
+ {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f41f814b8eaa48768d1bb551591f6ba45f87ac76899453e8ccd41dba1289b04b"},
+ {file = "rpds_py-0.27.1-cp39-cp39-win32.whl", hash = "sha256:9e71f5a087ead99563c11fdaceee83ee982fd39cf67601f4fd66cb386336ee52"},
+ {file = "rpds_py-0.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:71108900c9c3c8590697244b9519017a400d9ba26a36c48381b3f64743a44aab"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b"},
+ {file = "rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6"},
+ {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aa8933159edc50be265ed22b401125c9eebff3171f570258854dbce3ecd55475"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a50431bf02583e21bf273c71b89d710e7a710ad5e39c725b14e685610555926f"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78af06ddc7fe5cc0e967085a9115accee665fb912c22a3f54bad70cc65b05fe6"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70d0738ef8fee13c003b100c2fbd667ec4f133468109b3472d249231108283a3"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2f6fd8a1cea5bbe599b6e78a6e5ee08db434fc8ffea51ff201c8765679698b3"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8177002868d1426305bb5de1e138161c2ec9eb2d939be38291d7c431c4712df8"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:008b839781d6c9bf3b6a8984d1d8e56f0ec46dc56df61fd669c49b58ae800400"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:a55b9132bb1ade6c734ddd2759c8dc132aa63687d259e725221f106b83a0e485"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a46fdec0083a26415f11d5f236b79fa1291c32aaa4a17684d82f7017a1f818b1"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8a63b640a7845f2bdd232eb0d0a4a2dd939bcdd6c57e6bb134526487f3160ec5"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7e32721e5d4922deaaf963469d795d5bde6093207c52fec719bd22e5d1bedbc4"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2c426b99a068601b5f4623573df7a7c3d72e87533a2dd2253353a03e7502566c"},
+ {file = "rpds_py-0.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4fc9b7fe29478824361ead6e14e4f5aed570d477e06088826537e202d25fe859"},
+ {file = "rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8"},
+]
+
+[[package]]
+name = "ruff"
+version = "0.13.2"
+description = "An extremely fast Python linter and code formatter, written in Rust."
+optional = false
+python-versions = ">=3.7"
+groups = ["dev"]
+files = [
+ {file = "ruff-0.13.2-py3-none-linux_armv6l.whl", hash = "sha256:3796345842b55f033a78285e4f1641078f902020d8450cade03aad01bffd81c3"},
+ {file = "ruff-0.13.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ff7e4dda12e683e9709ac89e2dd436abf31a4d8a8fc3d89656231ed808e231d2"},
+ {file = "ruff-0.13.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c75e9d2a2fafd1fdd895d0e7e24b44355984affdde1c412a6f6d3f6e16b22d46"},
+ {file = "ruff-0.13.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cceac74e7bbc53ed7d15d1042ffe7b6577bf294611ad90393bf9b2a0f0ec7cb6"},
+ {file = "ruff-0.13.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae3f469b5465ba6d9721383ae9d49310c19b452a161b57507764d7ef15f4b07"},
+ {file = "ruff-0.13.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f8f9e3cd6714358238cd6626b9d43026ed19c0c018376ac1ef3c3a04ffb42d8"},
+ {file = "ruff-0.13.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c6ed79584a8f6cbe2e5d7dbacf7cc1ee29cbdb5df1172e77fbdadc8bb85a1f89"},
+ {file = "ruff-0.13.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aed130b2fde049cea2019f55deb939103123cdd191105f97a0599a3e753d61b0"},
+ {file = "ruff-0.13.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1887c230c2c9d65ed1b4e4cfe4d255577ea28b718ae226c348ae68df958191aa"},
+ {file = "ruff-0.13.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bcb10276b69b3cfea3a102ca119ffe5c6ba3901e20e60cf9efb53fa417633c3"},
+ {file = "ruff-0.13.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:afa721017aa55a555b2ff7944816587f1cb813c2c0a882d158f59b832da1660d"},
+ {file = "ruff-0.13.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dbc875cf3720c64b3990fef8939334e74cb0ca65b8dbc61d1f439201a38101b"},
+ {file = "ruff-0.13.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939a1b2a960e9742e9a347e5bbc9b3c3d2c716f86c6ae273d9cbd64f193f22"},
+ {file = "ruff-0.13.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:50e2d52acb8de3804fc5f6e2fa3ae9bdc6812410a9e46837e673ad1f90a18736"},
+ {file = "ruff-0.13.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3196bc13ab2110c176b9a4ae5ff7ab676faaa1964b330a1383ba20e1e19645f2"},
+ {file = "ruff-0.13.2-py3-none-win32.whl", hash = "sha256:7c2a0b7c1e87795fec3404a485096bcd790216c7c146a922d121d8b9c8f1aaac"},
+ {file = "ruff-0.13.2-py3-none-win_amd64.whl", hash = "sha256:17d95fb32218357c89355f6f6f9a804133e404fc1f65694372e02a557edf8585"},
+ {file = "ruff-0.13.2-py3-none-win_arm64.whl", hash = "sha256:da711b14c530412c827219312b7d7fbb4877fb31150083add7e8c5336549cea7"},
+ {file = "ruff-0.13.2.tar.gz", hash = "sha256:cb12fffd32fb16d32cef4ed16d8c7cdc27ed7c944eaa98d99d01ab7ab0b710ff"},
+]
+
+[[package]]
+name = "taskipy"
+version = "1.14.1"
+description = "tasks runner for python projects"
+optional = false
+python-versions = "<4.0,>=3.6"
+groups = ["dev"]
+files = [
+ {file = "taskipy-1.14.1-py3-none-any.whl", hash = "sha256:6e361520f29a0fd2159848e953599f9c75b1d0b047461e4965069caeb94908f1"},
+ {file = "taskipy-1.14.1.tar.gz", hash = "sha256:410fbcf89692dfd4b9f39c2b49e1750b0a7b81affd0e2d7ea8c35f9d6a4774ed"},
+]
+
+[package.dependencies]
+colorama = ">=0.4.4,<0.5.0"
+mslex = {version = ">=1.1.0,<2.0.0", markers = "sys_platform == \"win32\""}
+psutil = ">=5.7.2,<7"
+tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version >= \"3.7\" and python_version < \"4.0\""}
+
+[[package]]
+name = "tinycss2"
+version = "1.4.0"
+description = "A tiny CSS parser"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"},
+ {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"},
+]
+
+[package.dependencies]
+webencodings = ">=0.4"
+
+[package.extras]
+doc = ["sphinx", "sphinx_rtd_theme"]
+test = ["pytest", "ruff"]
+
+[[package]]
+name = "tinyhtml5"
+version = "2.0.0"
+description = "HTML parser based on the WHATWG HTML specification"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "tinyhtml5-2.0.0-py3-none-any.whl", hash = "sha256:13683277c5b176d070f82d099d977194b7a1e26815b016114f581a74bbfbf47e"},
+ {file = "tinyhtml5-2.0.0.tar.gz", hash = "sha256:086f998833da24c300c414d9fe81d9b368fd04cb9d2596a008421cbc705fcfcc"},
+]
+
+[package.dependencies]
+webencodings = ">=0.5.1"
+
+[package.extras]
+doc = ["sphinx", "sphinx_rtd_theme"]
+test = ["pytest", "ruff"]
+
+[[package]]
+name = "tomli"
+version = "2.2.1"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
+ {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
+ {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"},
+ {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"},
+ {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"},
+ {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"},
+ {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"},
+ {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"},
+ {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"},
+ {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"},
+ {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"},
+ {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"},
+ {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"},
+ {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"},
+ {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"},
+ {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"},
+ {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"},
+ {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"},
+ {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"},
+ {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"},
+ {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"},
+ {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"},
+ {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"},
+ {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"},
+ {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"},
+ {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"},
+ {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"},
+ {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"},
+ {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"},
+ {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"},
+ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"},
+ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+description = "Backported and Experimental Type Hints for Python 3.9+"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version < \"3.13\""
+files = [
+ {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"},
+ {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"},
+]
+
+[[package]]
+name = "weasyprint"
+version = "66.0"
+description = "The Awesome Document Factory"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "weasyprint-66.0-py3-none-any.whl", hash = "sha256:82b0783b726fcd318e2c977dcdddca76515b30044bc7a830cc4fbe717582a6d0"},
+ {file = "weasyprint-66.0.tar.gz", hash = "sha256:da71dc87dc129ac9cffdc65e5477e90365ab9dbae45c744014ec1d06303dde40"},
+]
+
+[package.dependencies]
+cffi = ">=0.6"
+cssselect2 = ">=0.8.0"
+fonttools = {version = ">=4.0.0", extras = ["woff"]}
+Pillow = ">=9.1.0"
+pydyf = ">=0.11.0"
+Pyphen = ">=0.9.1"
+tinycss2 = ">=1.4.0"
+tinyhtml5 = ">=2.0.0b1"
+
+[package.extras]
+doc = ["furo", "sphinx"]
+test = ["pytest", "ruff"]
+
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+description = "Character encoding aliases for legacy web content"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
+ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
+]
+
+[[package]]
+name = "zopfli"
+version = "0.2.3.post1"
+description = "Zopfli module for python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "zopfli-0.2.3.post1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0137dd64a493ba6a4be37405cfd6febe650a98cc1e9dca8f6b8c63b1db11b41"},
+ {file = "zopfli-0.2.3.post1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aa588b21044f8a74e423d8c8a4c7fc9988501878aacced793467010039c50734"},
+ {file = "zopfli-0.2.3.post1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9f4a7ec2770e6af05f5a02733fd3900f30a9cd58e5d6d3727e14c5bcd6e7d587"},
+ {file = "zopfli-0.2.3.post1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f7d69c1a7168ad0e9cb864e8663acb232986a0c9c9cb9801f56bf6214f53a54d"},
+ {file = "zopfli-0.2.3.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2d2bc8129707e34c51f9352c4636ca313b52350bbb7e04637c46c1818a2a70"},
+ {file = "zopfli-0.2.3.post1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:39e576f93576c5c223b41d9c780bbb91fd6db4babf3223d2a4fe7bf568e2b5a8"},
+ {file = "zopfli-0.2.3.post1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cbe6df25807227519debd1a57ab236f5f6bad441500e85b13903e51f93a43214"},
+ {file = "zopfli-0.2.3.post1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7cce242b5df12b2b172489daf19c32e5577dd2fac659eb4b17f6a6efb446fd5c"},
+ {file = "zopfli-0.2.3.post1-cp310-cp310-win32.whl", hash = "sha256:f815fcc2b2a457977724bad97fb4854022980f51ce7b136925e336b530545ae1"},
+ {file = "zopfli-0.2.3.post1-cp310-cp310-win_amd64.whl", hash = "sha256:0cc20b02a9531559945324c38302fd4ba763311632d0ec8a1a0aa9c10ea363e6"},
+ {file = "zopfli-0.2.3.post1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:518f1f4ed35dd69ce06b552f84e6d081f07c552b4c661c5312d950a0b764a58a"},
+ {file = "zopfli-0.2.3.post1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:615a8ac9dda265e9cc38b2a76c3142e4a9f30fea4a79c85f670850783bc6feb4"},
+ {file = "zopfli-0.2.3.post1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a82fc2dbebe6eb908b9c665e71496f8525c1bc4d2e3a7a7722ef2b128b6227c8"},
+ {file = "zopfli-0.2.3.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37d011e92f7b9622742c905fdbed9920a1d0361df84142807ea2a528419dea7f"},
+ {file = "zopfli-0.2.3.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e63d558847166543c2c9789e6f985400a520b7eacc4b99181668b2c3aeadd352"},
+ {file = "zopfli-0.2.3.post1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60db20f06c3d4c5934b16cfa62a2cc5c3f0686bffe0071ed7804d3c31ab1a04e"},
+ {file = "zopfli-0.2.3.post1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:716cdbfc57bfd3d3e31a58e6246e8190e6849b7dbb7c4ce39ef8bbf0edb8f6d5"},
+ {file = "zopfli-0.2.3.post1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3a89277ed5f8c0fb2d0b46d669aa0633123aa7381f1f6118c12f15e0fb48f8ca"},
+ {file = "zopfli-0.2.3.post1-cp311-cp311-win32.whl", hash = "sha256:75a26a2307b10745a83b660c404416e984ee6fca515ec7f0765f69af3ce08072"},
+ {file = "zopfli-0.2.3.post1-cp311-cp311-win_amd64.whl", hash = "sha256:81c341d9bb87a6dbbb0d45d6e272aca80c7c97b4b210f9b6e233bf8b87242f29"},
+ {file = "zopfli-0.2.3.post1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3f0197b6aa6eb3086ae9e66d6dd86c4d502b6c68b0ec490496348ae8c05ecaef"},
+ {file = "zopfli-0.2.3.post1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5fcfc0dc2761e4fcc15ad5d273b4d58c2e8e059d3214a7390d4d3c8e2aee644e"},
+ {file = "zopfli-0.2.3.post1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cac2b37ab21c2b36a10b685b1893ebd6b0f83ae26004838ac817680881576567"},
+ {file = "zopfli-0.2.3.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d5ab297d660b75c159190ce6d73035502310e40fd35170aed7d1a1aea7ddd65"},
+ {file = "zopfli-0.2.3.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ba214f4f45bec195ee8559651154d3ac2932470b9d91c5715fc29c013349f8c"},
+ {file = "zopfli-0.2.3.post1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c1e0ed5d84ffa2d677cc9582fc01e61dab2e7ef8b8996e055f0a76167b1b94df"},
+ {file = "zopfli-0.2.3.post1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bfa1eb759e07d8b7aa7a310a2bc535e127ee70addf90dc8d4b946b593c3e51a8"},
+ {file = "zopfli-0.2.3.post1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cd2c002f160502608dcc822ed2441a0f4509c52e86fcfd1a09e937278ed1ca14"},
+ {file = "zopfli-0.2.3.post1-cp312-cp312-win32.whl", hash = "sha256:7be5cc6732eb7b4df17305d8a7b293223f934a31783a874a01164703bc1be6cd"},
+ {file = "zopfli-0.2.3.post1-cp312-cp312-win_amd64.whl", hash = "sha256:4e50ffac74842c1c1018b9b73875a0d0a877c066ab06bf7cccbaa84af97e754f"},
+ {file = "zopfli-0.2.3.post1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecb7572df5372abce8073df078207d9d1749f20b8b136089916a4a0868d56051"},
+ {file = "zopfli-0.2.3.post1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1cf720896d2ce998bc8e051d4b4ce0d8bec007aab6243102e8e1d22a0b2fb3f"},
+ {file = "zopfli-0.2.3.post1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aad740b4d4fcbaaae4887823925166ffd062db3b248b3f432198fc287381d1a"},
+ {file = "zopfli-0.2.3.post1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6617fb10f9e4393b331941861d73afb119cd847e88e4974bdbe8068ceef3f73f"},
+ {file = "zopfli-0.2.3.post1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a53b18797cdef27e019db595d66c4b077325afe2fd62145953275f53d84ce40c"},
+ {file = "zopfli-0.2.3.post1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b78008a69300d929ca2efeffec951b64a312e9a811e265ea4a907ab546d79fa6"},
+ {file = "zopfli-0.2.3.post1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa5f90d6298bda02a95bc8dc8c3c19004d5a4e44bda00b67ca7431d857b4b54"},
+ {file = "zopfli-0.2.3.post1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2768c877f76c8a0e7519b1c86c93757f3c01492ddde55751e9988afb7eff64e1"},
+ {file = "zopfli-0.2.3.post1-cp313-cp313-win32.whl", hash = "sha256:71390dbd3fbf6ebea9a5d85ffed8c26ee1453ee09248e9b88486e30e0397b775"},
+ {file = "zopfli-0.2.3.post1-cp313-cp313-win_amd64.whl", hash = "sha256:a86eb88e06bd87e1fff31dac878965c26b0c26db59ddcf78bb0379a954b120de"},
+ {file = "zopfli-0.2.3.post1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3827170de28faf144992d3d4dcf8f3998fe3c8a6a6f4a08f1d42c2ec6119d2bb"},
+ {file = "zopfli-0.2.3.post1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b0ec13f352ea5ae0fc91f98a48540512eed0767d0ec4f7f3cb92d92797983d18"},
+ {file = "zopfli-0.2.3.post1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f272186e03ad55e7af09ab78055535c201b1a0bcc2944edb1768298d9c483a4"},
+ {file = "zopfli-0.2.3.post1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:29ea74e72ffa6e291b8c6f2504ce6c146b4fe990c724c1450eb8e4c27fd31431"},
+ {file = "zopfli-0.2.3.post1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eb45a34f23da4f8bc712b6376ca5396914b0b7c09adbb001dad964eb7f3132f8"},
+ {file = "zopfli-0.2.3.post1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6482db9876c68faac2d20a96b566ffbf65ddaadd97b222e4e73641f4f8722fc4"},
+ {file = "zopfli-0.2.3.post1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:95a260cafd56b8fffa679918937401c80bb38e1681c448b988022e4c3610965d"},
+ {file = "zopfli-0.2.3.post1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:676919fba7311125244eb0c4393679ac5fe856e5864a15d122bd815205369fa0"},
+ {file = "zopfli-0.2.3.post1-cp38-cp38-win32.whl", hash = "sha256:b9026a21b6d41eb0e2e63f5bc1242c3fcc43ecb770963cda99a4307863dac12e"},
+ {file = "zopfli-0.2.3.post1-cp38-cp38-win_amd64.whl", hash = "sha256:3c163911f8bad94b3e1db0a572e7c28ba681a0c91d0002ea1e4fa9264c21ef17"},
+ {file = "zopfli-0.2.3.post1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b05296e8bc88c92e2b21e0a9bae4740c1551ee613c1d93a51fd28a7a0b2b6fbb"},
+ {file = "zopfli-0.2.3.post1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12000a6accdd4bf0a3fa6eaa1b1c7a7bc80af0a2edf3f89d770d3dcce1d0e22"},
+ {file = "zopfli-0.2.3.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a241a68581d34d67b40c425cce3d1fd211c092f99d9250947824ccba9f491949"},
+ {file = "zopfli-0.2.3.post1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3657e416ffb8f31d9d3424af12122bb251befae109f2e271d87d825c92fc5b7b"},
+ {file = "zopfli-0.2.3.post1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4915a41375bdee4db749ecd07d985a0486eb688a6619f713b7bf6fbfd145e960"},
+ {file = "zopfli-0.2.3.post1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bbe429fc50686bb2a2608a30843e36fbaa123462a5284f136c7d9e0145220bfd"},
+ {file = "zopfli-0.2.3.post1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2345e713260a350bea0b01a816a469ea356bc2d63d009a0d777691ecbbcf7493"},
+ {file = "zopfli-0.2.3.post1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:fc39f5c27f962ec8660d8d20c24762431131b5d8c672b44b0a54cf2b5bcde9b9"},
+ {file = "zopfli-0.2.3.post1-cp39-cp39-win32.whl", hash = "sha256:9a6aec38a989bad7ddd1ef53f1265699e49e294d08231b5313d61293f3cd6237"},
+ {file = "zopfli-0.2.3.post1-cp39-cp39-win_amd64.whl", hash = "sha256:b3df42f52502438ee973042cc551877d24619fa1cd38ef7b7e9ac74200daca8b"},
+ {file = "zopfli-0.2.3.post1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4c1226a7e2c7105ac31503a9bb97454743f55d88164d6d46bc138051b77f609b"},
+ {file = "zopfli-0.2.3.post1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48dba9251060289101343110ab47c0756f66f809bb4d1ddbb6d5c7e7752115c5"},
+ {file = "zopfli-0.2.3.post1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89899641d4de97dbad8e0cde690040d078b6aea04066dacaab98e0b5a23573f2"},
+ {file = "zopfli-0.2.3.post1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3654bfc927bc478b1c3f3ff5056ed7b20a1a37fa108ca503256d0a699c03bbb1"},
+ {file = "zopfli-0.2.3.post1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c4278d1873ce6e803e5d4f8d702fd3026bd67fca744aa98881324d1157ddf748"},
+ {file = "zopfli-0.2.3.post1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:1d8cc06605519e82b16df090e17cb3990d1158861b2872c3117f1168777b81e4"},
+ {file = "zopfli-0.2.3.post1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1f990634fd5c5c8ced8edddd8bd45fab565123b4194d6841e01811292650acae"},
+ {file = "zopfli-0.2.3.post1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91a2327a4d7e77471fa4fbb26991c6de4a738c6fc6a33e09bb25f56a870a4b7b"},
+ {file = "zopfli-0.2.3.post1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbe5bcf10d01aab3513550f284c09fef32f342b36f56bfae2120a9c4d12c130"},
+ {file = "zopfli-0.2.3.post1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:34a99592f3d9eb6f737616b5bd74b48a589fdb3cb59a01a50d636ea81d6af272"},
+ {file = "zopfli-0.2.3.post1.tar.gz", hash = "sha256:96484dc0f48be1c5d7ae9f38ed1ce41e3675fd506b27c11a6607f14b49101e99"},
+]
+
+[package.extras]
+test = ["pytest"]
+
+[metadata]
+lock-version = "2.1"
+python-versions = "^3.11"
+content-hash = "10d66250510675424f4becad4e83d9785e2d3b0282dc30eb6d8c2c17bdd2df13"
From 0f848ba985acc1ea177216d5e68a6a8a889419ca Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Tue, 30 Sep 2025 00:32:56 +0300
Subject: [PATCH 16/19] refactor(cli,core): convert to absolute imports and
improve code formatting
---
README.md | 2 +-
naminter/__init__.py | 5 +-
naminter/cli/config.py | 19 +++--
naminter/cli/console.py | 13 +++-
naminter/cli/exporters.py | 83 +++++++++++++--------
naminter/cli/main.py | 54 ++++++++------
naminter/cli/progress.py | 6 +-
naminter/core/exceptions.py | 46 ++++++------
naminter/core/main.py | 141 +++++++++++++++++++++---------------
naminter/core/network.py | 43 +++++++----
naminter/core/utils.py | 51 +++++++++----
pyproject.toml | 2 +-
12 files changed, 287 insertions(+), 178 deletions(-)
diff --git a/README.md b/README.md
index 53ee33e..cbb51d9 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@
[](https://pypi.org/project/naminter/)
[](https://pypi.org/project/naminter/)
-Naminter is an asynchronous OSINT username enumeration tool and Python package. Leveraging the comprehensive [WhatsMyName](https://github.com/WebBreacher/WhatsMyName) list, Naminter enumerates usernames across hundreds of websites. With advanced features like browser impersonation, asynchronous enumeration, and customizable filtering, it can be used both as a command-line tool and as a library in your Python projects.
+Naminter is a Python package and command-line interface (CLI) tool for asynchronous OSINT username enumeration using the WhatsMyName dataset. Leveraging the comprehensive [WhatsMyName](https://github.com/WebBreacher/WhatsMyName) list, Naminter enumerates usernames across hundreds of websites. With advanced features like browser impersonation, asynchronous enumeration, and customizable filtering, it can be used both as a command-line tool and as a library in your Python projects.
diff --git a/naminter/__init__.py b/naminter/__init__.py
index 790747e..944ca07 100644
--- a/naminter/__init__.py
+++ b/naminter/__init__.py
@@ -4,7 +4,10 @@
__name__ = "naminter"
__author__ = "3xp0rt"
__author_email__ = "contact@3xp0rt.com"
-__description__ = "Asynchronous OSINT username enumeration tool that searches hundreds of websites using the WhatsMyName dataset"
+__description__ = (
+ "A Python package and command-line interface (CLI) tool for asynchronous "
+ "OSINT username enumeration using the WhatsMyName dataset"
+)
__license__ = "MIT"
__email__ = "contact@3xp0rt.com"
__url__ = "https://github.com/3xp0rt/Naminter"
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index c3941b6..867017e 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -5,21 +5,23 @@
from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
-from ..cli.console import display_warning
-from ..core.constants import (
+from naminter.cli.console import display_warning
+from naminter.core.constants import (
HTTP_REQUEST_TIMEOUT_SECONDS,
MAX_CONCURRENT_TASKS,
WMN_REMOTE_URL,
WMN_SCHEMA_URL,
)
-from ..core.exceptions import ConfigurationError
+from naminter.core.exceptions import ConfigurationError
@dataclass
class NaminterConfig:
"""Configuration for Naminter CLI tool.
- Holds all configuration parameters for username enumeration operations, including network settings, export options, filtering, and validation parameters.
+ Holds all configuration parameters for username enumeration operations,
+ including network settings, export options, filtering, and validation
+ parameters.
"""
# Required parameters
@@ -96,7 +98,8 @@ def __post_init__(self) -> None:
if not self.local_list_paths and not self.remote_list_urls:
self.remote_list_urls = [WMN_REMOTE_URL]
except Exception as e:
- raise ConfigurationError(f"Configuration validation failed: {e}") from e
+ msg = f"Configuration validation failed: {e}"
+ raise ConfigurationError(msg) from e
filter_fields = [
self.filter_all,
@@ -116,9 +119,11 @@ def __post_init__(self) -> None:
try:
self.extra_fp = json.loads(self.extra_fp)
except json.JSONDecodeError as e:
- raise ConfigurationError(f"Invalid JSON in extra_fp: {e}") from e
+ msg = f"Invalid JSON in extra_fp: {e}"
+ raise ConfigurationError(msg) from e
except TypeError as e:
- raise ConfigurationError(f"Invalid data type in extra_fp: {e}") from e
+ msg = f"Invalid data type in extra_fp: {e}"
+ raise ConfigurationError(msg) from e
@property
def response_dir(self) -> Path | None:
diff --git a/naminter/cli/console.py b/naminter/cli/console.py
index 7585779..3ff95c3 100644
--- a/naminter/cli/console.py
+++ b/naminter/cli/console.py
@@ -8,8 +8,15 @@
from rich.text import Text
from rich.tree import Tree
-from .. import __author__, __description__, __email__, __license__, __url__, __version__
-from ..core.models import ResultStatus, SelfEnumerationResult, SiteResult
+from naminter import (
+ __author__,
+ __description__,
+ __email__,
+ __license__,
+ __url__,
+ __version__,
+)
+from naminter.core.models import ResultStatus, SelfEnumerationResult, SiteResult
console: Console = Console()
@@ -125,8 +132,8 @@ def format_self_enumeration(
return tree
+ @staticmethod
def _add_debug_info(
- self,
node: Tree,
response_code: int | None = None,
elapsed: float | None = None,
diff --git a/naminter/cli/exporters.py b/naminter/cli/exporters.py
index 85243a0..661129d 100644
--- a/naminter/cli/exporters.py
+++ b/naminter/cli/exporters.py
@@ -1,15 +1,15 @@
import csv
import importlib.resources
import json
-from datetime import datetime
+from datetime import UTC, datetime
from pathlib import Path
-from typing import Any, Literal, Protocol
+from typing import Any, ClassVar, Literal, Protocol
import jinja2
from weasyprint import HTML
-from ..core.exceptions import ConfigurationError, ExportError, FileAccessError
-from ..core.models import SelfEnumerationResult, SiteResult
+from naminter.core.exceptions import ConfigurationError, ExportError, FileAccessError
+from naminter.core.models import SelfEnumerationResult, SiteResult
FormatName = Literal["csv", "json", "html", "pdf"]
ResultDict = dict[str, Any]
@@ -24,7 +24,7 @@ class Exporter:
Unified exporter for CSV, JSON, HTML, and PDF formats.
"""
- SUPPORTED_FORMATS: list[FormatName] = ["csv", "json", "html", "pdf"]
+ SUPPORTED_FORMATS: ClassVar[list[FormatName]] = ["csv", "json", "html", "pdf"]
def __init__(
self, usernames: list[str] | None = None, version: str | None = None
@@ -55,20 +55,24 @@ def export(
for format_name, path in formats.items():
if format_name not in self.SUPPORTED_FORMATS:
- raise ExportError(f"Unsupported export format: {format_name}")
+ msg = f"Unsupported export format: {format_name}"
+ raise ExportError(msg)
try:
out_path = self._resolve_path(format_name, path)
out_path.parent.mkdir(parents=True, exist_ok=True)
self.export_methods[format_name](dict_results, out_path)
except FileAccessError as e:
+ msg = f"File access error during {format_name} export: {e}"
raise ExportError(
- f"File access error during {format_name} export: {e}"
+ msg
) from e
except Exception as e:
- raise ExportError(f"Failed to export {format_name}: {e}") from e
+ msg = f"Failed to export {format_name}: {e}"
+ raise ExportError(msg) from e
- def _export_csv(self, results: list[ResultDict], output_path: Path) -> None:
+ @staticmethod
+ def _export_csv(results: list[ResultDict], output_path: Path) -> None:
if not results:
return
@@ -80,25 +84,33 @@ def _export_csv(self, results: list[ResultDict], output_path: Path) -> None:
writer.writeheader()
writer.writerows(results)
except PermissionError as e:
- raise FileAccessError(f"Permission denied writing CSV file: {e}") from e
+ msg = f"Permission denied writing CSV file: {e}"
+ raise FileAccessError(msg) from e
except OSError as e:
- raise FileAccessError(f"OS error writing CSV file: {e}") from e
+ msg = f"OS error writing CSV file: {e}"
+ raise FileAccessError(msg) from e
except Exception as e:
- raise ExportError(f"CSV export error: {e}") from e
+ msg = f"CSV export error: {e}"
+ raise ExportError(msg) from e
- def _export_json(self, results: list[ResultDict], output_path: Path) -> None:
+ @staticmethod
+ def _export_json(results: list[ResultDict], output_path: Path) -> None:
try:
output_path.write_text(
json.dumps(results, ensure_ascii=False, indent=2), encoding="utf-8"
)
except PermissionError as e:
- raise FileAccessError(f"Permission denied writing JSON file: {e}") from e
+ msg = f"Permission denied writing JSON file: {e}"
+ raise FileAccessError(msg) from e
except OSError as e:
- raise FileAccessError(f"OS error writing JSON file: {e}") from e
+ msg = f"OS error writing JSON file: {e}"
+ raise FileAccessError(msg) from e
except (TypeError, ValueError) as e:
- raise ExportError(f"JSON serialization error: {e}") from e
+ msg = f"JSON serialization error: {e}"
+ raise ExportError(msg) from e
except Exception as e:
- raise ExportError(f"JSON export error: {e}") from e
+ msg = f"JSON export error: {e}"
+ raise ExportError(msg) from e
def _generate_html(self, results: list[ResultDict]) -> str:
grouped: dict[str, list[ResultDict]] = {}
@@ -117,13 +129,16 @@ def _generate_html(self, results: list[ResultDict]) -> str:
):
template_source = f.read()
except FileNotFoundError as e:
- raise ConfigurationError(f"HTML template not found: {e}") from e
+ msg = f"HTML template not found: {e}"
+ raise ConfigurationError(msg) from e
except PermissionError as e:
+ msg = f"Permission denied reading HTML template: {e}"
raise FileAccessError(
- f"Permission denied reading HTML template: {e}"
+ msg
) from e
except Exception as e:
- raise ConfigurationError(f"Could not load HTML template: {e}") from e
+ msg = f"Could not load HTML template: {e}"
+ raise ConfigurationError(msg) from e
template = jinja2.Template(template_source, autoescape=True)
@@ -132,7 +147,7 @@ def _generate_html(self, results: list[ResultDict]) -> str:
display_fields=display_fields,
usernames=self.usernames,
version=self.version,
- current_time=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+ current_time=datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S %Z"),
total_count=len(results),
category_count=len(grouped),
)
@@ -142,30 +157,38 @@ def _export_html(self, results: list[ResultDict], output_path: Path) -> None:
html = self._generate_html(results)
output_path.write_text(html, encoding="utf-8")
except PermissionError as e:
- raise FileAccessError(f"Permission denied writing HTML file: {e}") from e
+ msg = f"Permission denied writing HTML file: {e}"
+ raise FileAccessError(msg) from e
except OSError as e:
- raise FileAccessError(f"OS error writing HTML file: {e}") from e
+ msg = f"OS error writing HTML file: {e}"
+ raise FileAccessError(msg) from e
except Exception as e:
- raise ExportError(f"HTML export error: {e}") from e
+ msg = f"HTML export error: {e}"
+ raise ExportError(msg) from e
def _export_pdf(self, results: list[ResultDict], output_path: Path) -> None:
if not results:
- raise ExportError("No results to export to PDF")
+ msg = "No results to export to PDF"
+ raise ExportError(msg)
try:
html = self._generate_html(results)
HTML(string=html).write_pdf(str(output_path))
except PermissionError as e:
- raise FileAccessError(f"Permission denied writing PDF file: {e}") from e
+ msg = f"Permission denied writing PDF file: {e}"
+ raise FileAccessError(msg) from e
except OSError as e:
- raise FileAccessError(f"OS error writing PDF file: {e}") from e
+ msg = f"OS error writing PDF file: {e}"
+ raise FileAccessError(msg) from e
except Exception as e:
- raise ExportError(f"PDF export error: {e}") from e
+ msg = f"PDF export error: {e}"
+ raise ExportError(msg) from e
- def _resolve_path(self, format_name: FormatName, custom: str | Path | None) -> Path:
+ @staticmethod
+ def _resolve_path(format_name: FormatName, custom: str | Path | None) -> Path:
if custom:
return Path(custom)
- timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+ timestamp = datetime.now(UTC).strftime("%Y%m%d_%H%M%S")
filename = f"results_{timestamp}.{format_name}"
return Path.cwd() / filename
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index cd71eed..57eb5bb 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -9,20 +9,20 @@
import rich_click as click
from curl_cffi import BrowserTypeLiteral
-from .. import __version__
-from ..cli.config import NaminterConfig
-from ..cli.console import (
+from naminter import __version__
+from naminter.cli.config import NaminterConfig
+from naminter.cli.console import (
ResultFormatter,
console,
display_error,
display_version,
display_warning,
)
-from ..cli.constants import RESPONSE_FILE_DATE_FORMAT, RESPONSE_FILE_EXTENSION
-from ..cli.exporters import Exporter
-from ..cli.progress import ProgressManager, ResultsTracker
-from ..cli.utils import sanitize_filename
-from ..core.constants import (
+from naminter.cli.constants import RESPONSE_FILE_DATE_FORMAT, RESPONSE_FILE_EXTENSION
+from naminter.cli.exporters import Exporter
+from naminter.cli.progress import ProgressManager, ResultsTracker
+from naminter.cli.utils import sanitize_filename
+from naminter.core.constants import (
HTTP_ALLOW_REDIRECTS,
HTTP_REQUEST_TIMEOUT_SECONDS,
HTTP_SSL_VERIFY,
@@ -30,14 +30,14 @@
MAX_CONCURRENT_TASKS,
WMN_SCHEMA_URL,
)
-from ..core.exceptions import ConfigurationError, DataError, ExportError
-from ..core.main import Naminter
-from ..core.models import ResultStatus, SelfEnumerationResult, SiteResult
-from ..core.network import CurlCFFISession
-from ..core.utils import validate_numeric_values
+from naminter.core.exceptions import ConfigurationError, DataError, ExportError
+from naminter.core.main import Naminter
+from naminter.core.models import ResultStatus, SelfEnumerationResult, SiteResult
+from naminter.core.network import CurlCFFISession
+from naminter.core.utils import validate_numeric_values
-def _version_callback(ctx: click.Context, param: click.Option, value: bool) -> None:
+def _version_callback(ctx: click.Context, _param: click.Option, value: bool) -> None:
"""Eager callback to display version and exit."""
if not value or ctx.resilient_parsing:
return
@@ -250,7 +250,7 @@ async def _run_self_enumeration(
return results
def _filter_result(self, result: SiteResult | SelfEnumerationResult) -> bool:
- """Determine if a result should be included in output based on filter settings."""
+ """Determine if a result should be included based on filter settings."""
status = result.status
if self.config.filter_all:
@@ -271,7 +271,7 @@ def _filter_result(self, result: SiteResult | SelfEnumerationResult) -> bool:
) or not any(filter_map.keys())
async def _process_result(self, result: SiteResult) -> Path | None:
- """Process a single result: handle browser opening, response saving, and console output."""
+ """Handle browser opening, response saving, and console output for a result."""
response_file = None
if result.result_url and self.config.browse:
@@ -284,7 +284,11 @@ async def _process_result(self, result: SiteResult) -> Path | None:
status_str = result.status.value
created_at_str = result.created_at.strftime(RESPONSE_FILE_DATE_FORMAT)
- base_filename = f"{status_str}_{result.response_code}_{safe_site_name}_{safe_username}_{created_at_str}{RESPONSE_FILE_EXTENSION}"
+ base_filename = (
+ f"{status_str}_{result.response_code}_"
+ f"{safe_site_name}_{safe_username}_{created_at_str}"
+ f"{RESPONSE_FILE_EXTENSION}"
+ )
response_file = self._response_dir / base_filename
await self._write_file(response_file, result.response_text)
@@ -301,14 +305,16 @@ async def _process_result(self, result: SiteResult) -> Path | None:
return response_file
- async def _open_browser(self, url: str) -> None:
+ @staticmethod
+ async def _open_browser(url: str) -> None:
"""Open a URL in the browser with error handling."""
try:
await asyncio.to_thread(webbrowser.open, url)
except Exception as e:
display_error(f"Error opening browser for {url}: {e}")
- async def _write_file(self, file_path: Path, content: str) -> None:
+ @staticmethod
+ async def _write_file(file_path: Path, content: str) -> None:
"""Write content to a file with error handling."""
try:
async with aiofiles.open(file_path, "w", encoding="utf-8") as file:
@@ -324,7 +330,7 @@ async def _write_file(self, file_path: Path, content: str) -> None:
@click.group(
invoke_without_command=True,
no_args_is_help=True,
- context_settings=dict(help_option_names=["-h", "--help"]),
+ context_settings={"help_option_names": ["-h", "--help"]},
)
@click.option(
"--version",
@@ -423,7 +429,11 @@ async def _write_file(self, file_path: Path, content: str) -> None:
)
@click.option(
"--extra-fp",
- help='Extra fingerprinting options as JSON string (e.g., \'{"tls_grease": true, "tls_cert_compression": "brotli"}\')',
+ help=(
+ "Extra fingerprinting options as JSON string (e.g., '"
+ '{"tls_grease": true, "tls_cert_compression": "brotli"}'
+ ")"
+ ),
)
@click.option(
"--max-tasks",
@@ -498,7 +508,7 @@ async def _write_file(self, file_path: Path, content: str) -> None:
)
@click.pass_context
def main(ctx: click.Context, **kwargs: Any) -> None:
- """Asynchronous OSINT username enumeration tool that searches hundreds of websites using the WhatsMyName dataset."""
+ """A Python package and command-line interface (CLI) tool for asynchronous OSINT username enumeration using the WhatsMyName dataset."""
if ctx.invoked_subcommand is not None:
return
diff --git a/naminter/cli/progress.py b/naminter/cli/progress.py
index 0d72bce..d28ac15 100644
--- a/naminter/cli/progress.py
+++ b/naminter/cli/progress.py
@@ -13,8 +13,8 @@
TimeRemainingColumn,
)
-from ..cli.console import THEME
-from ..core.models import ResultStatus, SiteResult
+from naminter.cli.console import THEME
+from naminter.core.models import ResultStatus, SiteResult
class ResultsTracker:
@@ -60,7 +60,7 @@ def get_progress_text(self) -> str:
if errors > 0:
sections.append(f"[{THEME['error']}]! {errors}[/]")
if not_valid > 0:
- sections.append(f"[{THEME['warning']}]× {not_valid}[/]")
+ sections.append(f"[{THEME['warning']}]x {not_valid}[/]")
sections.append(
f"[{THEME['primary']}]{self.results_count}/{self.total_sites}[/]"
diff --git a/naminter/core/exceptions.py b/naminter/core/exceptions.py
index 15d1995..8423521 100644
--- a/naminter/core/exceptions.py
+++ b/naminter/core/exceptions.py
@@ -20,7 +20,7 @@ class ConfigurationError(NaminterError):
"""
-class NetworkError(NaminterError):
+class HttpError(NaminterError):
"""Raised when network-related errors occur.
This includes connection failures, DNS resolution errors,
@@ -28,18 +28,26 @@ class NetworkError(NaminterError):
"""
-class DataError(NaminterError):
- """Raised when there are issues with data processing or validation.
+class HttpSessionError(HttpError):
+ """Raised when HTTP session creation or management fails.
- This includes malformed data, parsing errors, and data integrity issues.
+ This includes session initialization errors, authentication failures,
+ and session state management issues.
"""
-class SessionError(NetworkError):
- """Raised when HTTP session creation or management fails.
+class HttpTimeoutError(HttpError):
+ """Raised when network requests timeout.
- This includes session initialization errors, authentication failures,
- and session state management issues.
+ This includes both connection timeouts and read timeouts
+ during HTTP requests.
+ """
+
+
+class DataError(NaminterError):
+ """Raised when there are issues with data processing or validation.
+
+ This includes malformed data, parsing errors, and data integrity issues.
"""
@@ -51,11 +59,11 @@ class SchemaError(DataError):
"""
-class TimeoutError(NetworkError):
- """Raised when network requests timeout.
+class ValidationError(DataError):
+ """Raised when input validation fails.
- This includes both connection timeouts and read timeouts
- during HTTP requests.
+ This includes invalid usernames, malformed URLs,
+ and other input parameter validation errors.
"""
@@ -67,14 +75,6 @@ class FileAccessError(DataError):
"""
-class ValidationError(DataError):
- """Raised when input validation fails.
-
- This includes invalid usernames, malformed URLs,
- and other input parameter validation errors.
- """
-
-
class ExportError(NaminterError):
"""Raised when export operations fail.
@@ -88,10 +88,10 @@ class ExportError(NaminterError):
"DataError",
"ExportError",
"FileAccessError",
+ "HttpError",
+ "HttpSessionError",
+ "HttpTimeoutError",
"NaminterError",
- "NetworkError",
"SchemaError",
- "SessionError",
- "TimeoutError",
"ValidationError",
]
diff --git a/naminter/core/main.py b/naminter/core/main.py
index 975e68f..e2145e9 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -3,14 +3,12 @@
import logging
from collections.abc import AsyncGenerator, Sequence
from pathlib import Path
-from typing import (
- Any,
-)
+from typing import Any
import aiofiles
import jsonschema
-from ..core.constants import (
+from naminter.core.constants import (
ACCOUNT_PLACEHOLDER,
MAX_CONCURRENT_TASKS,
REQUIRED_KEYS_ENUMERATE,
@@ -22,28 +20,24 @@
WMN_KEY_SITES,
WMN_REMOTE_URL,
)
-from ..core.exceptions import (
+from naminter.core.exceptions import (
DataError,
FileAccessError,
- NetworkError,
+ HttpError,
+ HttpSessionError,
+ HttpTimeoutError,
SchemaError,
- SessionError,
- TimeoutError,
ValidationError,
)
-from ..core.models import (
+from naminter.core.models import (
ResultStatus,
SelfEnumerationResult,
SiteResult,
Summary,
ValidationMode,
)
-from ..core.network import BaseSession
-from ..core.utils import (
- deduplicate_strings,
- merge_lists,
- validate_usernames,
-)
+from naminter.core.network import BaseSession
+from naminter.core.utils import deduplicate_strings, merge_lists, validate_usernames
class Naminter:
@@ -87,48 +81,60 @@ async def _open_session(self) -> None:
try:
await self._http.open()
self._logger.info("HTTP client opened")
- except SessionError as e:
+ except HttpSessionError as e:
self._logger.error("Failed to open HTTP session: %s", e)
- raise DataError(f"HTTP session initialization failed: {e}") from e
+ msg = f"HTTP session initialization failed: {e}"
+ raise DataError(msg) from e
async def _fetch_json(self, url: str) -> dict[str, Any]:
"""Fetch and parse JSON from a URL."""
if not url.strip():
- raise ValidationError(f"Invalid URL: {url}")
+ msg = f"Invalid URL: {url}"
+ raise ValidationError(msg)
try:
response = await self._http.get(url)
- except TimeoutError as e:
- raise DataError(f"Timeout while fetching from {url}: {e}") from e
- except SessionError as e:
- raise DataError(f"Session error while fetching from {url}: {e}") from e
- except NetworkError as e:
- raise DataError(f"Network error while fetching from {url}: {e}") from e
+ except HttpTimeoutError as e:
+ msg = f"Timeout while fetching from {url}: {e}"
+ raise DataError(msg) from e
+ except HttpSessionError as e:
+ msg = f"Session error while fetching from {url}: {e}"
+ raise DataError(msg) from e
+ except HttpError as e:
+ msg = f"Network error while fetching from {url}: {e}"
+ raise DataError(msg) from e
if response.status_code < 200 or response.status_code >= 300:
- raise DataError(f"Failed to fetch from {url}: HTTP {response.status_code}")
+ msg = f"Failed to fetch from {url}: HTTP {response.status_code}"
+ raise DataError(msg)
try:
return response.json()
except (ValueError, json.JSONDecodeError) as e:
- raise DataError(f"Failed to parse JSON from {url}: {e}") from e
+ msg = f"Failed to parse JSON from {url}: {e}"
+ raise DataError(msg) from e
- async def _read_json_file(self, path: str | Path) -> dict[str, Any]:
+ @staticmethod
+ async def _read_json_file(path: str | Path) -> dict[str, Any]:
"""Read JSON from a local file without blocking the event loop."""
try:
async with aiofiles.open(path, encoding="utf-8") as file:
content = await file.read()
except FileNotFoundError as e:
- raise FileAccessError(f"File not found: {path}") from e
+ msg = f"File not found: {path}"
+ raise FileAccessError(msg) from e
except PermissionError as e:
- raise FileAccessError(f"Permission denied accessing file: {path}") from e
+ msg = f"Permission denied accessing file: {path}"
+ raise FileAccessError(msg) from e
except OSError as e:
- raise FileAccessError(f"Error reading file {path}: {e}") from e
+ msg = f"Error reading file {path}: {e}"
+ raise FileAccessError(msg) from e
try:
return json.loads(content)
except json.JSONDecodeError as e:
- raise DataError(f"Invalid JSON in file {path}: {e}") from e
+ msg = f"Invalid JSON in file {path}: {e}"
+ raise DataError(msg) from e
async def _load_schema(self) -> dict[str, Any]:
"""Load WMN schema from local or remote source."""
@@ -141,17 +147,22 @@ async def _load_schema(self) -> dict[str, Any]:
elif self._remote_schema_url:
return await self._fetch_json(self._remote_schema_url)
else:
- raise DataError(
- "No schema source provided - either local_schema_path or remote_schema_url is required"
+ msg = (
+ "No schema source provided - either local_schema_path or "
+ "remote_schema_url is required"
)
+ raise DataError(msg)
except (OSError, json.JSONDecodeError) as e:
+ msg = f"Failed to load required WMN schema from local file: {e}"
raise DataError(
- f"Failed to load required WMN schema from local file: {e}"
- ) from e
- except NetworkError as e:
- raise DataError(
- f"Failed to load required WMN schema from {self._remote_schema_url}: {e}"
+ msg
) from e
+ except HttpError as e:
+ msg = (
+ f"Failed to load required WMN schema from {self._remote_schema_url}: "
+ f"{e}"
+ )
+ raise DataError(msg) from e
async def _load_dataset(self) -> dict[str, Any]:
"""Load WMN data from configured sources."""
@@ -194,11 +205,13 @@ async def _load_dataset(self) -> dict[str, Any]:
detail = (
"; ".join(failures) if failures else "no sources produced any sites"
)
- raise DataError(f"No sites loaded from any source; details: {detail}")
+ msg = f"No sites loaded from any source; details: {detail}"
+ raise DataError(msg)
return dataset
- def _deduplicate_data(self, data: dict[str, Any]) -> None:
+ @staticmethod
+ def _deduplicate_data(data: dict[str, Any]) -> None:
"""Deduplicate and clean the WMN data in place."""
unique_sites = {
site[WMN_KEY_NAME]: site
@@ -235,9 +248,11 @@ def _validate_data(data: dict[str, Any], schema: dict[str, Any]) -> None:
jsonschema.Draft7Validator.check_schema(schema)
jsonschema.Draft7Validator(schema).validate(data)
except jsonschema.ValidationError as e:
- raise SchemaError(f"WMN data does not match schema: {e.message}") from e
+ msg = f"WMN data does not match schema: {e.message}"
+ raise SchemaError(msg) from e
except jsonschema.SchemaError as e:
- raise SchemaError(f"Invalid WMN schema: {e.message}") from e
+ msg = f"Invalid WMN schema: {e.message}"
+ raise SchemaError(msg) from e
async def _ensure_dataset(self) -> None:
"""Load and validate the WMN dataset and schema if not already loaded."""
@@ -255,9 +270,11 @@ async def _ensure_dataset(self) -> None:
len(self._wmn_data.get(WMN_KEY_SITES, [])),
)
except SchemaError as e:
- raise DataError(f"WMN validation failed: {e}") from e
+ msg = f"WMN validation failed: {e}"
+ raise DataError(msg) from e
except Exception as e:
- raise DataError(f"WMN load failed: {e}") from e
+ msg = f"WMN load failed: {e}"
+ raise DataError(msg) from e
async def _close_session(self) -> None:
"""Close the HTTP session if open."""
@@ -345,7 +362,8 @@ async def get_wmn_summary(
raise
except Exception as e:
self._logger.exception("Failed to compute WMN summary")
- raise DataError(f"Failed to retrieve metadata: {e}") from e
+ msg = f"Failed to retrieve metadata: {e}"
+ raise DataError(msg) from e
def _filter_sites(
self,
@@ -360,7 +378,8 @@ def _filter_sites(
available_names: set[str] = {site.get("name") for site in sites}
missing_names = filtered_site_names - available_names
if missing_names:
- raise DataError(f"Unknown site names: {sorted(missing_names)}")
+ msg = f"Unknown site names: {sorted(missing_names)}"
+ raise DataError(msg)
else:
filtered_site_names = set()
@@ -395,8 +414,9 @@ def _filter_sites(
)
return filtered_sites
+ @staticmethod
def _get_missing_keys(
- self, site: dict[str, Any], required_keys: Sequence[str]
+ site: dict[str, Any], required_keys: Sequence[str]
) -> list[str]:
"""Return a list of required keys missing from a site mapping."""
return [key for key in required_keys if key not in site]
@@ -417,13 +437,14 @@ async def enumerate_site(
missing_keys = self._get_missing_keys(site, REQUIRED_KEYS_ENUMERATE)
if missing_keys:
- return SiteResult(
+ error_result = SiteResult(
name=site.get("name", "unknown"),
category=site.get("cat", "unknown"),
username=username,
status=ResultStatus.ERROR,
error=f"Site entry missing required keys: {missing_keys}",
)
+ return error_result
name = site["name"]
category = site["cat"]
@@ -432,13 +453,14 @@ async def enumerate_site(
strip_bad_char = site.get("strip_bad_char", "")
clean_username = username.translate(str.maketrans("", "", strip_bad_char))
if not clean_username:
- return SiteResult(
+ error_result = SiteResult(
name,
category,
username,
ResultStatus.ERROR,
error="Username became empty after stripping",
)
+ return error_result
uri_check = uri_check_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
uri_pretty = site.get("uri_pretty", uri_check_template).replace(
@@ -460,6 +482,7 @@ async def enumerate_site(
else:
self._logger.debug("GET %s", uri_check)
+ error_result: SiteResult | None = None
try:
async with self._semaphore:
if post_body:
@@ -477,9 +500,9 @@ async def enumerate_site(
except asyncio.CancelledError:
self._logger.warning("Request cancelled")
raise
- except TimeoutError as e:
+ except HttpTimeoutError as e:
self._logger.warning("Request timeout for %s: %s", name, e)
- return SiteResult(
+ error_result = SiteResult(
name=name,
category=category,
username=username,
@@ -487,9 +510,9 @@ async def enumerate_site(
status=ResultStatus.ERROR,
error=f"Request timeout: {e}",
)
- except SessionError as e:
+ except HttpSessionError as e:
self._logger.warning("Session error for %s: %s", name, e)
- return SiteResult(
+ error_result = SiteResult(
name=name,
category=category,
username=username,
@@ -497,9 +520,9 @@ async def enumerate_site(
status=ResultStatus.ERROR,
error=f"Session error: {e}",
)
- except NetworkError as e:
+ except HttpError as e:
self._logger.warning("Network error for %s: %s", name, e)
- return SiteResult(
+ error_result = SiteResult(
name=name,
category=category,
username=username,
@@ -509,7 +532,7 @@ async def enumerate_site(
)
except Exception as e:
self._logger.exception("Unexpected error during request for %s", name)
- return SiteResult(
+ error_result = SiteResult(
name=name,
category=category,
username=username,
@@ -518,6 +541,9 @@ async def enumerate_site(
error=f"Unexpected error: {e}",
)
+ if error_result is not None:
+ return error_result
+
result_status = SiteResult.get_result_status(
response_code=response.status_code,
response_text=response.text,
@@ -564,7 +590,8 @@ async def enumerate_usernames(
usernames = validate_usernames(usernames)
except ValidationError as e:
self._logger.error("Invalid usernames: %s", e)
- raise DataError("Invalid usernames") from e
+ msg = "Invalid usernames"
+ raise DataError(msg) from e
else:
self._logger.info("Usernames validated (count=%d)", len(usernames))
diff --git a/naminter/core/network.py b/naminter/core/network.py
index ea26454..c6f8047 100644
--- a/naminter/core/network.py
+++ b/naminter/core/network.py
@@ -8,7 +8,7 @@
from curl_cffi.requests.exceptions import RequestException as CurlRequestException
from curl_cffi.requests.exceptions import Timeout as CurlTimeout
-from .exceptions import NetworkError, SessionError, TimeoutError
+from .exceptions import HttpError, HttpSessionError, HttpTimeoutError
from .models import Response
@@ -95,8 +95,9 @@ async def open(self) -> None:
extra_fp=self._extra_fp,
)
except Exception as e:
- raise SessionError(
- "Failed to open curl-cffi session", cause=e
+ msg = "Failed to open curl-cffi session"
+ raise HttpSessionError(
+ msg, cause=e
) from e
async def close(self) -> None:
@@ -112,7 +113,8 @@ async def close(self) -> None:
async def get(self, url: str, headers: Mapping[str, str] | None = None) -> Response:
await self.open()
if self._session is None:
- raise SessionError("Session not initialized")
+ msg = "Session not initialized"
+ raise HttpSessionError(msg)
try:
response = await self._session.get(
@@ -123,11 +125,14 @@ async def get(self, url: str, headers: Mapping[str, str] | None = None) -> Respo
status_code=response.status_code, text=response.text, elapsed=elapsed
)
except CurlTimeout as e:
- raise TimeoutError(f"GET timeout for {url}", cause=e) from e
+ msg = f"GET timeout for {url}"
+ raise HttpTimeoutError(msg, cause=e) from e
except CurlRequestException as e:
- raise NetworkError(f"GET failed for {url}: {e}", cause=e) from e
+ msg = f"GET failed for {url}: {e}"
+ raise HttpError(msg, cause=e) from e
except Exception as e:
- raise NetworkError(f"GET failed for {url}: {e}", cause=e) from e
+ msg = f"GET failed for {url}: {e}"
+ raise HttpError(msg, cause=e) from e
async def post(
self,
@@ -137,7 +142,8 @@ async def post(
) -> Response:
await self.open()
if self._session is None:
- raise SessionError("Session not initialized")
+ msg = "Session not initialized"
+ raise HttpSessionError(msg)
try:
response = await self._session.post(
@@ -148,11 +154,14 @@ async def post(
status_code=response.status_code, text=response.text, elapsed=elapsed
)
except CurlTimeout as e:
- raise TimeoutError(f"POST timeout for {url}", cause=e) from e
+ msg = f"POST timeout for {url}"
+ raise HttpTimeoutError(msg, cause=e) from e
except CurlRequestException as e:
- raise NetworkError(f"POST failed for {url}: {e}", cause=e) from e
+ msg = f"POST failed for {url}: {e}"
+ raise HttpError(msg, cause=e) from e
except Exception as e:
- raise NetworkError(f"POST failed for {url}: {e}", cause=e) from e
+ msg = f"POST failed for {url}: {e}"
+ raise HttpError(msg, cause=e) from e
async def request(
self,
@@ -163,7 +172,8 @@ async def request(
) -> Response:
await self.open()
if self._session is None:
- raise SessionError("Session not initialized")
+ msg = "Session not initialized"
+ raise HttpSessionError(msg)
try:
response = await self._session.request(
@@ -178,11 +188,14 @@ async def request(
status_code=response.status_code, text=response.text, elapsed=elapsed
)
except CurlTimeout as e:
- raise TimeoutError(f"{method} timeout for {url}", cause=e) from e
+ msg = f"{method} timeout for {url}"
+ raise HttpTimeoutError(msg, cause=e) from e
except CurlRequestException as e:
- raise NetworkError(f"{method} failed for {url}: {e}", cause=e) from e
+ msg = f"{method} failed for {url}: {e}"
+ raise HttpError(msg, cause=e) from e
except Exception as e:
- raise NetworkError(f"{method} failed for {url}: {e}", cause=e) from e
+ msg = f"{method} failed for {url}: {e}"
+ raise HttpError(msg, cause=e) from e
__all__ = [
diff --git a/naminter/core/utils.py b/naminter/core/utils.py
index 248f19f..53739f2 100644
--- a/naminter/core/utils.py
+++ b/naminter/core/utils.py
@@ -45,13 +45,21 @@ def validate_numeric_values(max_tasks: int, timeout: int) -> list[str]:
warnings: list[str] = []
if not (MIN_TASKS <= max_tasks <= MAX_TASKS_LIMIT):
+ msg = (
+ "Invalid max_tasks: "
+ f"{max_tasks} must be between {MIN_TASKS} and {MAX_TASKS_LIMIT}"
+ )
raise ConfigurationError(
- f"Invalid max_tasks: {max_tasks} must be between {MIN_TASKS} and {MAX_TASKS_LIMIT}"
+ msg
)
if not (MIN_TIMEOUT <= timeout <= MAX_TIMEOUT):
+ msg = (
+ "Invalid timeout: "
+ f"{timeout} must be between {MIN_TIMEOUT} and {MAX_TIMEOUT} seconds"
+ )
raise ConfigurationError(
- f"Invalid timeout: {timeout} must be between {MIN_TIMEOUT} and {MAX_TIMEOUT} seconds"
+ msg
)
if (
@@ -59,24 +67,34 @@ def validate_numeric_values(max_tasks: int, timeout: int) -> list[str]:
and timeout < HIGH_CONCURRENCY_MIN_TIMEOUT
):
warnings.append(
- f"High concurrency ({max_tasks}) with low timeout ({timeout}s) may cause failures; consider increasing timeout or reducing max_tasks."
+ "High concurrency ("
+ f"{max_tasks}) with low timeout ({timeout}s) may cause failures; "
+ "consider increasing timeout or reducing max_tasks."
)
elif (
max_tasks > VERY_HIGH_CONCURRENCY_THRESHOLD
and timeout < VERY_HIGH_CONCURRENCY_MIN_TIMEOUT
):
warnings.append(
- f"Very high concurrency ({max_tasks}) with very low timeout ({timeout}s) may cause connection issues; recommend timeout >= {HIGH_CONCURRENCY_MIN_TIMEOUT}s for max_tasks > {VERY_HIGH_CONCURRENCY_THRESHOLD}."
+ "Very high concurrency ("
+ f"{max_tasks}) with very low timeout ({timeout}s) may cause connection "
+ "issues; recommend timeout >= "
+ f"{HIGH_CONCURRENCY_MIN_TIMEOUT}s for max_tasks > "
+ f"{VERY_HIGH_CONCURRENCY_THRESHOLD}."
)
if max_tasks > EXTREME_CONCURRENCY_THRESHOLD:
warnings.append(
- f"Extremely high concurrency ({max_tasks}) may overwhelm servers or cause rate limiting; lowering value is recommended."
+ "Extremely high concurrency ("
+ f"{max_tasks}) may overwhelm servers or cause rate limiting; "
+ "lowering value is recommended."
)
if timeout < LOW_TIMEOUT_WARNING_THRESHOLD:
warnings.append(
- f"Very low timeout ({timeout}s) may cause legitimate requests to fail; increase timeout for better accuracy."
+ "Very low timeout ("
+ f"{timeout}s) may cause legitimate requests to fail; increase "
+ "timeout for better accuracy."
)
return warnings
@@ -89,15 +107,15 @@ def configure_proxy(proxy: str | dict[str, str] | None) -> dict[str, str] | None
if isinstance(proxy, str):
if not proxy.strip():
- raise ConfigurationError("Invalid proxy: proxy string cannot be empty")
+ msg = "Invalid proxy: proxy string cannot be empty"
+ raise ConfigurationError(msg)
if not (
- proxy.startswith("http://")
- or proxy.startswith("https://")
- or proxy.startswith("socks5://")
+ proxy.startswith(("http://", "https://", "socks5://"))
):
+ msg = "Invalid proxy: must be http://, https://, or socks5:// URL"
raise ConfigurationError(
- "Invalid proxy: must be http://, https://, or socks5:// URL"
+ msg
)
logger.debug("Proxy configuration validated")
@@ -105,12 +123,14 @@ def configure_proxy(proxy: str | dict[str, str] | None) -> dict[str, str] | None
elif isinstance(proxy, dict):
for protocol, proxy_url in proxy.items():
- if protocol not in ["http", "https"]:
- raise ConfigurationError(f"Invalid proxy protocol: {protocol}")
+ if protocol not in {"http", "https"}:
+ msg = f"Invalid proxy protocol: {protocol}"
+ raise ConfigurationError(msg)
if not isinstance(proxy_url, str) or not proxy_url.strip():
+ msg = f"Invalid proxy URL for {protocol}: must be non-empty string"
raise ConfigurationError(
- f"Invalid proxy URL for {protocol}: must be non-empty string"
+ msg
)
logger.debug("Proxy dictionary configuration validated")
@@ -123,7 +143,8 @@ def validate_usernames(usernames: list[str]) -> list[str]:
unique_usernames: list[str] = deduplicate_strings(usernames)
if not unique_usernames:
- raise ValidationError("No valid usernames provided")
+ msg = "No valid usernames provided"
+ raise ValidationError(msg)
return unique_usernames
diff --git a/pyproject.toml b/pyproject.toml
index c269c6b..e57b0ec 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
[tool.poetry]
name = "naminter"
version = "1.0.7"
-description = "Asynchronous OSINT username enumeration tool that searches hundreds of websites using the WhatsMyName dataset"
+description = "A Python package and command-line interface (CLI) tool for asynchronous OSINT username enumeration using the WhatsMyName dataset"
authors = ["3xp0rt "]
license = "MIT"
readme = "README.md"
From dab3a98a4dbb131b1335d4a261017fdc2b3646f4 Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Tue, 18 Nov 2025 00:45:05 +0200
Subject: [PATCH 17/19] refactor(build,cli,core): migrate from poetry to uv and
refactor codebase
---
.github/workflows/docker-publish.yml | 18 +-
.github/workflows/python-publish.yml | 16 +-
Dockerfile | 4 +-
README.md | 62 +-
naminter/cli/config.py | 103 +-
naminter/cli/console.py | 130 ++-
naminter/cli/constants.py | 16 +
naminter/cli/exceptions.py | 38 +
naminter/cli/exporters.py | 128 ++-
naminter/cli/main.py | 487 +++++----
naminter/cli/progress.py | 30 +-
naminter/cli/utils.py | 140 ++-
naminter/core/__init__.py | 1 +
naminter/core/constants.py | 90 +-
naminter/core/exceptions.py | 66 +-
naminter/core/main.py | 1129 +++++++++++----------
naminter/core/models.py | 271 +++--
naminter/core/network.py | 120 +--
naminter/core/utils.py | 232 ++---
poetry.lock | 1390 --------------------------
pyproject.toml | 57 +-
uv.lock | 847 ++++++++++++++++
22 files changed, 2580 insertions(+), 2795 deletions(-)
create mode 100644 naminter/cli/exceptions.py
delete mode 100644 poetry.lock
create mode 100644 uv.lock
diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml
index ed11bee..e96608d 100644
--- a/.github/workflows/docker-publish.yml
+++ b/.github/workflows/docker-publish.yml
@@ -8,11 +8,6 @@ on:
- main
- master
workflow_dispatch:
- push:
- branches:
- - main
- tags:
- - 'v*.*.*'
env:
REGISTRY: ghcr.io
@@ -29,7 +24,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@@ -58,14 +53,10 @@ jobs:
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
- # set latest tag for default branch
- type=ref,event=branch
- type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
- type=raw,value=latest,enable={{is_default_branch}}
- type=sha,prefix=sha-
+ type=raw,value=latest,enable=${{ github.event_name == 'release' }}
labels: |
org.opencontainers.image.title=Naminter
org.opencontainers.image.description=Asynchronous OSINT username enumeration tool that searches hundreds of websites using the WhatsMyName dataset
@@ -73,11 +64,10 @@ jobs:
org.opencontainers.image.licenses=MIT
org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }}
org.opencontainers.image.revision=${{ github.sha }}
- org.opencontainers.image.created=${{ steps.meta.outputs.created }}
- name: Build and push Docker image
id: build
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
@@ -92,7 +82,7 @@ jobs:
- name: Generate artifact attestation
if: github.event_name == 'release'
- uses: actions/attest-build-provenance@v1
+ uses: actions/attest-build-provenance@v3
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
subject-digest: ${{ steps.build.outputs.digest }}
diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml
index b493f9c..1749772 100644
--- a/.github/workflows/python-publish.yml
+++ b/.github/workflows/python-publish.yml
@@ -1,11 +1,3 @@
-# This workflow will upload a Python Package to PyPI when a release is created
-# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
-
-# This workflow uses actions that are not certified by GitHub.
-# They are provided by a third-party and are governed by
-# separate terms of service, privacy policy, and support
-# documentation.
-
name: Upload Python Package
on:
@@ -20,9 +12,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
- - uses: actions/setup-python@v5
+ - uses: actions/setup-python@v6
with:
python-version: "3.x"
@@ -33,7 +25,7 @@ jobs:
python -m build
- name: Upload distributions
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v5
with:
name: release-dists
path: dist/
@@ -59,7 +51,7 @@ jobs:
steps:
- name: Retrieve release distributions
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v6
with:
name: release-dists
path: dist/
diff --git a/Dockerfile b/Dockerfile
index 6e4bad7..49423a1 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -26,13 +26,11 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*
COPY pyproject.toml ./
+COPY . .
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
pip install --no-cache-dir -e .
-# Copy application code
-COPY . .
-
RUN useradd --create-home --shell /bin/bash naminter && \
chown -R naminter:naminter /app
USER naminter
diff --git a/README.md b/README.md
index cbb51d9..a318fe0 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# 🔍 Naminter
+# Naminter
[](https://www.python.org/downloads/)
[](LICENSE)
@@ -33,11 +33,11 @@ Naminter is a Python package and command-line interface (CLI) tool for asynchron
- **Browser Impersonation:** Simulate Chrome, Firefox, Safari, Edge for accurate detection
- **Real-Time Console UI:** Live progress bar, colored output, and instant feedback
- **Concurrent & Fast:** High-speed, concurrent checks with adjustable task limits
-- **Fuzzy Matching:** Optional fuzzy mode for broader username discovery
+- **Validation Modes:** Strict (ALL) or permissive (ANY) matching for detection criteria
- **Category Filters:** Include or exclude sites by category
- **Custom Site Lists:** Use your own or remote WhatsMyName-format lists and schemas
- **Proxy & Network Options:** Full proxy support, SSL verification, and redirect control
-- **Self-Enumeration Mode:** Validate detection methods for reliability
+- **Site Validation Mode:** Validate detection methods for reliability
- **Export Results:** Output to CSV, JSON, HTML, and PDF
- **Response Handling:** Save/open HTTP responses for analysis
- **Flexible Filtering:** Filter results by found, not found, errors, or unknown
@@ -127,8 +127,14 @@ naminter --username alice_bob \
--html \
--filter-all
-# Self-enumeration with detailed output
-naminter --self-enumeration \
+# Export with custom paths using merged flags
+naminter --username alice_bob \
+ --csv results.csv \
+ --json results.json \
+ --html report.html
+
+# Site validation with detailed output
+naminter --validate-sites \
--show-details \
--log-level DEBUG \
--log-file debug.log
@@ -188,7 +194,7 @@ async def main():
results = await naminter.enumerate_usernames(["example_username"])
for result in results:
if result.status.value == "found":
- print(f"✅ {result.username} found on {result.name}: {result.result_url}")
+ print(f"✅ {result.username} found on {result.name}: {result.url}")
elif result.status.value == "not_found":
print(f"❌ {result.username} not found on {result.name}")
elif result.status.value == "error":
@@ -213,7 +219,7 @@ async def main():
results = await naminter.enumerate_usernames(["example_username"], as_generator=True)
async for result in results:
if result.status.value == "found":
- print(f"✅ {result.username} found on {result.name}: {result.result_url}")
+ print(f"✅ {result.username} found on {result.name}: {result.url}")
elif result.status.value == "not_found":
print(f"❌ {result.username} not found on {result.name}")
@@ -225,6 +231,7 @@ asyncio.run(main())
```python
import asyncio
from naminter import Naminter
+from naminter.core.models import WMNMode
async def main():
wmn_data, wmn_schema = await load_wmn_data()
@@ -240,12 +247,12 @@ async def main():
proxy="http://proxy:8080"
) as naminter:
usernames = ["user1", "user2", "user3"]
- results = await naminter.enumerate_usernames(usernames, fuzzy_mode=True)
+ results = await naminter.enumerate_usernames(usernames, mode=WMNMode.ANY)
for result in results:
if result.status.value == "found":
print(f"✅ Found: {result.username} on {result.name}")
- print(f" URL: {result.result_url}")
+ print(f" URL: {result.url}")
print(f" Response time: {result.elapsed:.2f}s")
else:
print(f"❌ Not found: {result.username} on {result.name}")
@@ -253,7 +260,7 @@ async def main():
asyncio.run(main())
```
-#### Self-Enumeration and Validation
+#### Site Validation
```python
import asyncio
@@ -263,10 +270,10 @@ async def main():
wmn_data, wmn_schema = await load_wmn_data()
async with Naminter(wmn_data, wmn_schema) as naminter:
- # Perform self-enumeration to validate site configurations
- self_enumeration_results = await naminter.self_enumeration()
+ # Validate site detection methods using known usernames
+ validation_results = await naminter.validate_sites()
- for site_result in self_enumeration_results:
+ for site_result in validation_results:
if site_result.error:
print(f"❌ {site_result.name}: {site_result.error}")
else:
@@ -312,16 +319,16 @@ asyncio.run(main())
### Input Lists
| Option | Description |
|-----------------------------|------------------------------------------------------------|
-| `--local-list` | Path(s) to local file(s) containing list of sites to enumerate |
-| `--remote-list` | URL(s) to fetch remote list(s) of sites to enumerate |
+| `--local-list` | Path to a local file containing the list of sites to enumerate |
+| `--remote-list` | URL to fetch a remote list of sites to enumerate |
| `--skip-validation` | Skip WhatsMyName schema validation for lists |
| `--local-schema` | Path to local WhatsMyName schema file |
| `--remote-schema` | URL to fetch custom WhatsMyName schema |
-### Self-Enumeration
+### Site Validation
| Option | Description |
|-----------------------------|------------------------------------------------------------|
-| `--self-enumeration` | Perform self-enumeration of the application |
+| `--validate-sites` | Validate site detection methods by checking known usernames |
### Category Filters
| Option | Description |
@@ -342,7 +349,7 @@ asyncio.run(main())
| Option | Description |
|-----------------------------|------------------------------------------------------------|
| `--max-tasks` | Maximum number of concurrent tasks (default: 50) |
-| `--fuzzy` | Enable fuzzy validation mode |
+| `--mode` | Validation mode: `all` for strict matching (all detection criteria must match) or `any` for permissive matching (at least one detection criterion must match) |
| `--log-level` | Set logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL) |
| `--log-file` | Path to log file for debug output |
| `--show-details` | Show detailed information in console output |
@@ -351,21 +358,16 @@ asyncio.run(main())
### Response Handling
| Option | Description |
|-----------------------------|------------------------------------------------------------|
-| `--save-response` | Save HTTP response body for each result to files |
-| `--response-path` | Custom directory path for saving response files |
+| `--save-response [DIR]` | Save HTTP response body; optionally specify directory |
| `--open-response` | Open saved response file in browser |
### Export Options
-| Option | Description |
-|-----------------------------|------------------------------------------------------------|
-| `--csv` | Export results to CSV file |
-| `--csv-path` | Custom path for CSV export |
-| `--pdf` | Export results to PDF file |
-| `--pdf-path` | Custom path for PDF export |
-| `--html` | Export results to HTML file |
-| `--html-path` | Custom path for HTML export |
-| `--json` | Export results to JSON file |
-| `--json-path` | Custom path for JSON export |
+| Option | Description |
+|---------------|-------------|
+| `--csv [PATH]` | Export results to CSV; optionally specify output path |
+| `--pdf [PATH]` | Export results to PDF; optionally specify output path |
+| `--html [PATH]` | Export results to HTML; optionally specify output path |
+| `--json [PATH]` | Export results to JSON; optionally specify output path |
### Result Filters
| Option | Description |
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index 867017e..f6b379e 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -6,13 +6,16 @@
from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
from naminter.cli.console import display_warning
+from naminter.cli.exceptions import ConfigurationError
from naminter.core.constants import (
+ BROWSER_IMPERSONATE_AGENT,
+ BROWSER_IMPERSONATE_NONE,
HTTP_REQUEST_TIMEOUT_SECONDS,
MAX_CONCURRENT_TASKS,
WMN_REMOTE_URL,
WMN_SCHEMA_URL,
)
-from naminter.core.exceptions import ConfigurationError
+from naminter.core.models import WMNMode
@dataclass
@@ -30,15 +33,15 @@ class NaminterConfig:
logger: object | None = None
# List and schema sources
- local_list_paths: list[Path | str] | None = None
- remote_list_urls: list[str] | None = None
+ local_list_path: Path | str | None = None
+ remote_list_url: str | None = None
local_schema_path: Path | str | None = None
remote_schema_url: str | None = WMN_SCHEMA_URL
# Validation and filtering
skip_validation: bool = False
- include_categories: list[str] = field(default_factory=list)
- exclude_categories: list[str] = field(default_factory=list)
+ include_categories: list[str] = field(default_factory=lambda: []) # noqa: PIE807
+ exclude_categories: list[str] = field(default_factory=lambda: []) # noqa: PIE807
filter_all: bool = False
filter_found: bool = False
filter_ambiguous: bool = False
@@ -53,13 +56,13 @@ class NaminterConfig:
proxy: str | None = None
allow_redirects: bool = False
verify_ssl: bool = False
- impersonate: BrowserTypeLiteral | None = "chrome"
+ impersonate: BrowserTypeLiteral | str | None = BROWSER_IMPERSONATE_AGENT
ja3: str | None = None
akamai: str | None = None
extra_fp: ExtraFingerprints | dict[str, Any] | str | None = None
browse: bool = False
- fuzzy_mode: bool = False
- self_enumeration: bool = False
+ mode: WMNMode = WMNMode.ALL
+ validate_sites: bool = False
no_progressbar: bool = False
# Logging
@@ -84,22 +87,25 @@ class NaminterConfig:
def __post_init__(self) -> None:
"""Validate and normalize configuration after initialization."""
- if self.self_enumeration and self.usernames:
+ if self.validate_sites and self.usernames:
display_warning(
- "Self-enumeration mode enabled: provided usernames will be ignored, "
+ "Site validation mode enabled: provided usernames will be ignored, "
"using known usernames from site configurations instead."
)
- try:
- if self.local_list_paths:
- self.local_list_paths = [str(p) for p in self.local_list_paths]
- if self.remote_list_urls:
- self.remote_list_urls = list(self.remote_list_urls)
- if not self.local_list_paths and not self.remote_list_urls:
- self.remote_list_urls = [WMN_REMOTE_URL]
- except Exception as e:
- msg = f"Configuration validation failed: {e}"
- raise ConfigurationError(msg) from e
+ if self.local_list_path and self.remote_list_url:
+ msg = "Both local and remote list sources provided; only one is allowed"
+ raise ConfigurationError(msg)
+
+ if not self.local_list_path and not self.remote_list_url:
+ self.remote_list_url = WMN_REMOTE_URL
+
+ if self.local_schema_path and self.remote_schema_url:
+ msg = "Both local and remote schema sources provided; only one is allowed"
+ raise ConfigurationError(msg)
+
+ if not self.local_schema_path and not self.remote_schema_url:
+ self.remote_schema_url = WMN_SCHEMA_URL
filter_fields = [
self.filter_all,
@@ -112,10 +118,13 @@ def __post_init__(self) -> None:
if not any(filter_fields):
self.filter_found = True
- if isinstance(self.impersonate, str) and self.impersonate.lower() == "none":
+ if (
+ isinstance(self.impersonate, str)
+ and self.impersonate.lower() == BROWSER_IMPERSONATE_NONE
+ ):
self.impersonate = None
- if self.extra_fp is not None and isinstance(self.extra_fp, str):
+ if isinstance(self.extra_fp, str):
try:
self.extra_fp = json.loads(self.extra_fp)
except json.JSONDecodeError as e:
@@ -151,53 +160,3 @@ def export_formats(self) -> dict[str, str | None]:
for format_name, is_enabled, path in export_configs
if is_enabled
}
-
- def to_dict(self) -> dict[str, Any]:
- """Convert configuration to a dictionary."""
- return {
- "usernames": self.usernames,
- "sites": self.sites,
- "local_list_paths": self.local_list_paths,
- "remote_list_urls": self.remote_list_urls,
- "local_schema_path": self.local_schema_path,
- "remote_schema_url": self.remote_schema_url,
- "skip_validation": self.skip_validation,
- "include_categories": self.include_categories,
- "exclude_categories": self.exclude_categories,
- "max_tasks": self.max_tasks,
- "timeout": self.timeout,
- "proxy": self.proxy,
- "allow_redirects": self.allow_redirects,
- "verify_ssl": self.verify_ssl,
- "impersonate": self.impersonate,
- "ja3": self.ja3,
- "akamai": self.akamai,
- "extra_fp": self.extra_fp.to_dict()
- if isinstance(self.extra_fp, ExtraFingerprints)
- else self.extra_fp,
- "browse": self.browse,
- "fuzzy_mode": self.fuzzy_mode,
- "self_enumeration": self.self_enumeration,
- "log_level": self.log_level,
- "log_file": self.log_file,
- "show_details": self.show_details,
- "save_response": self.save_response,
- "response_path": self.response_path,
- "open_response": self.open_response,
- "csv_export": self.csv_export,
- "csv_path": self.csv_path,
- "pdf_export": self.pdf_export,
- "pdf_path": self.pdf_path,
- "html_export": self.html_export,
- "html_path": self.html_path,
- "json_export": self.json_export,
- "json_path": self.json_path,
- "filter_all": self.filter_all,
- "filter_found": self.filter_found,
- "filter_ambiguous": self.filter_ambiguous,
- "filter_unknown": self.filter_unknown,
- "filter_not_found": self.filter_not_found,
- "filter_not_valid": self.filter_not_valid,
- "filter_errors": self.filter_errors,
- "no_progressbar": self.no_progressbar,
- }
diff --git a/naminter/cli/console.py b/naminter/cli/console.py
index 3ff95c3..545ae45 100644
--- a/naminter/cli/console.py
+++ b/naminter/cli/console.py
@@ -1,4 +1,5 @@
from pathlib import Path
+from typing import Any
from rich import box
from rich.console import Console
@@ -16,7 +17,7 @@
__url__,
__version__,
)
-from naminter.core.models import ResultStatus, SelfEnumerationResult, SiteResult
+from naminter.core.models import WMNResult, WMNStatus, WMNValidationResult
console: Console = Console()
@@ -29,22 +30,22 @@
"muted": "bright_black",
}
-_STATUS_SYMBOLS: dict[ResultStatus, str] = {
- ResultStatus.FOUND: "+",
- ResultStatus.AMBIGUOUS: "*",
- ResultStatus.UNKNOWN: "?",
- ResultStatus.NOT_FOUND: "-",
- ResultStatus.NOT_VALID: "X",
- ResultStatus.ERROR: "!",
+_STATUS_SYMBOLS: dict[WMNStatus, str] = {
+ WMNStatus.FOUND: "+",
+ WMNStatus.AMBIGUOUS: "*",
+ WMNStatus.UNKNOWN: "?",
+ WMNStatus.NOT_FOUND: "-",
+ WMNStatus.NOT_VALID: "X",
+ WMNStatus.ERROR: "!",
}
-_STATUS_STYLES: dict[ResultStatus, Style] = {
- ResultStatus.FOUND: Style(color=THEME["success"], bold=True),
- ResultStatus.AMBIGUOUS: Style(color=THEME["warning"], bold=True),
- ResultStatus.UNKNOWN: Style(color=THEME["warning"]),
- ResultStatus.NOT_FOUND: Style(color=THEME["error"]),
- ResultStatus.NOT_VALID: Style(color=THEME["error"]),
- ResultStatus.ERROR: Style(color=THEME["error"], bold=True),
+_STATUS_STYLES: dict[WMNStatus, Style] = {
+ WMNStatus.FOUND: Style(color=THEME["success"], bold=True),
+ WMNStatus.AMBIGUOUS: Style(color=THEME["warning"], bold=True),
+ WMNStatus.UNKNOWN: Style(color=THEME["warning"]),
+ WMNStatus.NOT_FOUND: Style(color=THEME["error"]),
+ WMNStatus.NOT_VALID: Style(color=THEME["error"]),
+ WMNStatus.ERROR: Style(color=THEME["error"], bold=True),
}
@@ -56,7 +57,7 @@ def __init__(self, show_details: bool = False) -> None:
self.show_details = show_details
def format_result(
- self, site_result: SiteResult, response_file_path: Path | None = None
+ self, site_result: WMNResult, response_file_path: Path | None = None
) -> Tree:
"""Format a single result as a tree-style output."""
@@ -68,7 +69,7 @@ def format_result(
root_label.append(" [", style=THEME["muted"])
root_label.append(site_result.name or "Unknown", style=THEME["info"])
root_label.append("] ", style=THEME["muted"])
- root_label.append(site_result.result_url or "No URL", style=THEME["primary"])
+ root_label.append(site_result.url or "No URL", style=THEME["primary"])
tree = Tree(root_label, guide_style=THEME["muted"])
@@ -83,52 +84,50 @@ def format_result(
return tree
- def format_self_enumeration(
+ def format_validation(
self,
- self_enumeration_result: SelfEnumerationResult,
+ validation_result: WMNValidationResult,
response_files: list[Path | None] | None = None,
) -> Tree:
- """Format self-enumeration results into a tree structure."""
+ """Format validation results into a tree structure."""
root_label = Text()
root_label.append(
- _STATUS_SYMBOLS.get(self_enumeration_result.status, "?"),
- style=_STATUS_STYLES.get(self_enumeration_result.status, Style()),
+ _STATUS_SYMBOLS.get(validation_result.status, "?"),
+ style=_STATUS_STYLES.get(validation_result.status, Style()),
)
root_label.append(" [", style=THEME["muted"])
- root_label.append(self_enumeration_result.name, style=THEME["info"])
+ root_label.append(validation_result.name, style=THEME["info"])
root_label.append("]", style=THEME["muted"])
tree = Tree(root_label, guide_style=THEME["muted"], expanded=True)
- for i, test in enumerate(self_enumeration_result.results):
- if test is None:
- continue
-
- url_text = Text()
- url_text.append(
- _STATUS_SYMBOLS.get(test.status, "?"),
- style=_STATUS_STYLES.get(test.status, Style()),
- )
- url_text.append(" ", style=THEME["muted"])
- url_text.append(f"{test.username}: ", style=THEME["info"])
- url_text.append(test.result_url or "No URL", style=THEME["primary"])
-
- test_node = tree.add(url_text)
-
- if self.show_details:
- response_file = (
- response_files[i]
- if response_files and i < len(response_files)
- else None
- )
- self._add_debug_info(
- test_node,
- test.response_code,
- test.elapsed,
- test.error,
- response_file,
+ if validation_result.results:
+ for i, result in enumerate(validation_result.results):
+ url_text = Text()
+ url_text.append(
+ _STATUS_SYMBOLS.get(result.status, "?"),
+ style=_STATUS_STYLES.get(result.status, Style()),
)
+ url_text.append(" ", style=THEME["muted"])
+ url_text.append(f"{result.username}: ", style=THEME["info"])
+ url_text.append(result.url or "No URL", style=THEME["primary"])
+
+ result_node = tree.add(url_text)
+
+ if self.show_details:
+ response_file = (
+ response_files[i]
+ if response_files and i < len(response_files)
+ else None
+ )
+ self._add_debug_info(
+ result_node,
+ result.response_code,
+ result.elapsed,
+ result.error,
+ response_file,
+ )
return tree
@@ -212,3 +211,34 @@ def display_success(message: str) -> None:
"""Display a success message."""
_display_message(message, THEME["success"], "+", "SUCCESS")
+
+
+def display_validation_errors(errors: list[Any]) -> None:
+ """Display validation errors in a formatted table."""
+ if not errors:
+ return
+
+ table = Table(
+ title="[bold bright_red]Validation Errors[/bold bright_red]",
+ border_style=THEME["error"],
+ box=box.ROUNDED,
+ show_lines=True,
+ )
+
+ table.add_column("Path", style=THEME["info"], no_wrap=False)
+ table.add_column("Message", style=THEME["warning"])
+ table.add_column("Data Preview", style=THEME["muted"], overflow="fold")
+
+ for error in errors:
+ path = getattr(error, "path", "N/A") or "N/A"
+ message = getattr(error, "message", "Unknown error")
+ data = getattr(error, "data", None)
+
+ data_preview = (
+ data[:200] + "..." if data and len(data) > 200 else (data or "N/A")
+ )
+
+ table.add_row(path, message, data_preview)
+
+ console.print(table)
+ console.file.flush()
diff --git a/naminter/cli/constants.py b/naminter/cli/constants.py
index 27274e1..a05a86c 100644
--- a/naminter/cli/constants.py
+++ b/naminter/cli/constants.py
@@ -1,3 +1,19 @@
# Constants for file operations
RESPONSE_FILE_DATE_FORMAT = "%Y%m%d_%H%M%S"
RESPONSE_FILE_EXTENSION = ".html"
+
+# Default network timeout (overrides core default for CLI)
+DEFAULT_NETWORK_TIMEOUT: int = 30
+
+# Progress tracking
+PROGRESS_ADVANCE_INCREMENT: int = 1
+
+# Exit codes
+EXIT_CODE_ERROR: int = 1
+EXIT_CODE_SUCCESS: int = 0
+
+# Filename constraints
+MAX_FILENAME_LENGTH: int = 200
+
+# Supported export formats
+SUPPORTED_FORMATS: list[str] = ["csv", "json", "html", "pdf"]
diff --git a/naminter/cli/exceptions.py b/naminter/cli/exceptions.py
new file mode 100644
index 0000000..e0dcd44
--- /dev/null
+++ b/naminter/cli/exceptions.py
@@ -0,0 +1,38 @@
+from naminter.core.exceptions import NaminterError
+
+
+# Configuration errors
+class ConfigurationError(NaminterError):
+ """Raised when there's an error in the configuration parameters.
+
+ This includes invalid configuration values, missing required settings,
+ configuration file parsing errors, or invalid URLs.
+ """
+
+
+# File/IO errors
+class FileIOError(NaminterError):
+ """Raised when file operations fail.
+
+ This includes reading/writing local lists, responses, exports,
+ and other file system operations.
+ """
+
+
+# Browser errors
+class BrowserError(NaminterError):
+ """Raised when browser operations fail in the CLI layer."""
+
+
+# Export errors
+class ExportError(NaminterError):
+ """Raised when export operations fail in the CLI layer."""
+
+
+__all__ = [
+ "BrowserError",
+ "ConfigurationError",
+ "ExportError",
+ "FileIOError",
+ "NaminterError",
+]
diff --git a/naminter/cli/exporters.py b/naminter/cli/exporters.py
index 661129d..a22fc96 100644
--- a/naminter/cli/exporters.py
+++ b/naminter/cli/exporters.py
@@ -3,19 +3,30 @@
import json
from datetime import UTC, datetime
from pathlib import Path
-from typing import Any, ClassVar, Literal, Protocol
+from typing import Any, Literal, Protocol
import jinja2
from weasyprint import HTML
-from naminter.core.exceptions import ConfigurationError, ExportError, FileAccessError
-from naminter.core.models import SelfEnumerationResult, SiteResult
+from naminter import __version__
+from naminter.cli.constants import SUPPORTED_FORMATS
+from naminter.core.constants import (
+ DEFAULT_JSON_ENCODING,
+ DEFAULT_JSON_ENSURE_ASCII,
+ DEFAULT_JSON_INDENT,
+ EMPTY_STRING,
+)
+from naminter.core.models import WMNResult, WMNValidationResult
+
+from .exceptions import ConfigurationError, ExportError, FileIOError
FormatName = Literal["csv", "json", "html", "pdf"]
ResultDict = dict[str, Any]
class ExportMethod(Protocol):
+ """Protocol for export method callables."""
+
def __call__(self, results: list[ResultDict], output_path: Path) -> None: ...
@@ -24,13 +35,8 @@ class Exporter:
Unified exporter for CSV, JSON, HTML, and PDF formats.
"""
- SUPPORTED_FORMATS: ClassVar[list[FormatName]] = ["csv", "json", "html", "pdf"]
-
- def __init__(
- self, usernames: list[str] | None = None, version: str | None = None
- ) -> None:
+ def __init__(self, usernames: list[str] | None = None) -> None:
self.usernames = usernames or []
- self.version = version or "unknown"
self.export_methods: dict[FormatName, ExportMethod] = {
"csv": self._export_csv,
"json": self._export_json,
@@ -40,21 +46,22 @@ def __init__(
def export(
self,
- results: list[SiteResult | SelfEnumerationResult],
+ results: list[WMNResult | WMNValidationResult],
formats: dict[FormatName, str | Path | None],
) -> None:
"""
Export results in the given formats.
"""
if not results:
- return
+ msg = "No results to export"
+ raise ExportError(msg)
dict_results = [
result.to_dict(exclude_response_text=True) for result in results
]
for format_name, path in formats.items():
- if format_name not in self.SUPPORTED_FORMATS:
+ if format_name not in SUPPORTED_FORMATS:
msg = f"Unsupported export format: {format_name}"
raise ExportError(msg)
@@ -62,64 +69,72 @@ def export(
out_path = self._resolve_path(format_name, path)
out_path.parent.mkdir(parents=True, exist_ok=True)
self.export_methods[format_name](dict_results, out_path)
- except FileAccessError as e:
+ except FileIOError as e:
msg = f"File access error during {format_name} export: {e}"
- raise ExportError(
- msg
- ) from e
+ raise ExportError(msg) from e
except Exception as e:
- msg = f"Failed to export {format_name}: {e}"
+ msg = f"Unexpected error exporting {format_name}: {e}"
raise ExportError(msg) from e
@staticmethod
def _export_csv(results: list[ResultDict], output_path: Path) -> None:
- if not results:
- return
-
- fieldnames = list(results[0].keys())
+ """Export results to CSV format."""
+ fieldnames: list[str] = []
+ seen: set[str] = set()
+ for result in results:
+ for key in result:
+ if key not in seen:
+ fieldnames.append(key)
+ seen.add(key)
try:
- with output_path.open("w", newline="", encoding="utf-8") as f:
+ with output_path.open("w", newline=EMPTY_STRING, encoding="utf-8") as f:
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(results)
except PermissionError as e:
msg = f"Permission denied writing CSV file: {e}"
- raise FileAccessError(msg) from e
+ raise FileIOError(msg) from e
except OSError as e:
msg = f"OS error writing CSV file: {e}"
- raise FileAccessError(msg) from e
+ raise FileIOError(msg) from e
except Exception as e:
- msg = f"CSV export error: {e}"
+ msg = f"Unexpected error during CSV export: {e}"
raise ExportError(msg) from e
@staticmethod
def _export_json(results: list[ResultDict], output_path: Path) -> None:
+ """Export results to JSON format."""
try:
output_path.write_text(
- json.dumps(results, ensure_ascii=False, indent=2), encoding="utf-8"
+ json.dumps(
+ results,
+ ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
+ indent=DEFAULT_JSON_INDENT,
+ ),
+ encoding=DEFAULT_JSON_ENCODING,
)
except PermissionError as e:
msg = f"Permission denied writing JSON file: {e}"
- raise FileAccessError(msg) from e
+ raise FileIOError(msg) from e
except OSError as e:
msg = f"OS error writing JSON file: {e}"
- raise FileAccessError(msg) from e
+ raise FileIOError(msg) from e
except (TypeError, ValueError) as e:
msg = f"JSON serialization error: {e}"
raise ExportError(msg) from e
except Exception as e:
- msg = f"JSON export error: {e}"
+ msg = f"Unexpected error during JSON export: {e}"
raise ExportError(msg) from e
def _generate_html(self, results: list[ResultDict]) -> str:
+ """Generate HTML report from results."""
grouped: dict[str, list[ResultDict]] = {}
for item in results:
cat = item.get("category", "uncategorized")
grouped.setdefault(cat, []).append(item)
- default_fields = ["name", "result_url", "elapsed"]
- display_fields = [f for f in default_fields if any(f in r for r in results)]
+ display_fields = ["name", "url", "elapsed"]
try:
with (
@@ -133,40 +148,46 @@ def _generate_html(self, results: list[ResultDict]) -> str:
raise ConfigurationError(msg) from e
except PermissionError as e:
msg = f"Permission denied reading HTML template: {e}"
- raise FileAccessError(
- msg
- ) from e
+ raise FileIOError(msg) from e
+ except OSError as e:
+ msg = f"OS error reading HTML template: {e}"
+ raise FileIOError(msg) from e
except Exception as e:
- msg = f"Could not load HTML template: {e}"
+ msg = f"Unexpected error loading HTML template: {e}"
raise ConfigurationError(msg) from e
- template = jinja2.Template(template_source, autoescape=True)
-
- return template.render(
- grouped_results=grouped,
- display_fields=display_fields,
- usernames=self.usernames,
- version=self.version,
- current_time=datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S %Z"),
- total_count=len(results),
- category_count=len(grouped),
- )
+ try:
+ template = jinja2.Template(template_source, autoescape=True)
+ return template.render(
+ grouped_results=grouped,
+ display_fields=display_fields,
+ usernames=self.usernames,
+ version=__version__,
+ current_time=datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S %Z"),
+ total_count=len(results),
+ category_count=len(grouped),
+ )
+ except jinja2.TemplateError as e:
+ msg = f"Template rendering error: {e}"
+ raise ExportError(msg) from e
def _export_html(self, results: list[ResultDict], output_path: Path) -> None:
+ """Export results to HTML format."""
try:
html = self._generate_html(results)
output_path.write_text(html, encoding="utf-8")
except PermissionError as e:
msg = f"Permission denied writing HTML file: {e}"
- raise FileAccessError(msg) from e
+ raise FileIOError(msg) from e
except OSError as e:
msg = f"OS error writing HTML file: {e}"
- raise FileAccessError(msg) from e
+ raise FileIOError(msg) from e
except Exception as e:
- msg = f"HTML export error: {e}"
+ msg = f"Unexpected error during HTML export: {e}"
raise ExportError(msg) from e
def _export_pdf(self, results: list[ResultDict], output_path: Path) -> None:
+ """Export results to PDF format."""
if not results:
msg = "No results to export to PDF"
raise ExportError(msg)
@@ -176,12 +197,15 @@ def _export_pdf(self, results: list[ResultDict], output_path: Path) -> None:
HTML(string=html).write_pdf(str(output_path))
except PermissionError as e:
msg = f"Permission denied writing PDF file: {e}"
- raise FileAccessError(msg) from e
+ raise FileIOError(msg) from e
except OSError as e:
msg = f"OS error writing PDF file: {e}"
- raise FileAccessError(msg) from e
+ raise FileIOError(msg) from e
+ except (ValueError, TypeError) as e:
+ msg = f"PDF generation error: {e}"
+ raise ExportError(msg) from e
except Exception as e:
- msg = f"PDF export error: {e}"
+ msg = f"Unexpected error during PDF export: {e}"
raise ExportError(msg) from e
@staticmethod
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index 57eb5bb..c0654c9 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -1,28 +1,38 @@
import asyncio
import logging
import typing
-import webbrowser
from pathlib import Path
-from typing import Any
+from typing import Any, Final
-import aiofiles
import rich_click as click
from curl_cffi import BrowserTypeLiteral
-from naminter import __version__
from naminter.cli.config import NaminterConfig
from naminter.cli.console import (
ResultFormatter,
console,
display_error,
+ display_validation_errors,
display_version,
display_warning,
)
-from naminter.cli.constants import RESPONSE_FILE_DATE_FORMAT, RESPONSE_FILE_EXTENSION
+from naminter.cli.constants import (
+ EXIT_CODE_ERROR,
+ PROGRESS_ADVANCE_INCREMENT,
+)
from naminter.cli.exporters import Exporter
from naminter.cli.progress import ProgressManager, ResultsTracker
-from naminter.cli.utils import sanitize_filename
+from naminter.cli.utils import (
+ fetch_json,
+ generate_response_filename,
+ open_browser,
+ read_json,
+ write_file,
+)
from naminter.core.constants import (
+ BROWSER_IMPERSONATE_AGENT,
+ BROWSER_IMPERSONATE_NONE,
+ DEFAULT_FILE_ENCODING,
HTTP_ALLOW_REDIRECTS,
HTTP_REQUEST_TIMEOUT_SECONDS,
HTTP_SSL_VERIFY,
@@ -30,14 +40,15 @@
MAX_CONCURRENT_TASKS,
WMN_SCHEMA_URL,
)
-from naminter.core.exceptions import ConfigurationError, DataError, ExportError
+from naminter.core.exceptions import HttpError, WMNDataError, WMNValidationError
from naminter.core.main import Naminter
-from naminter.core.models import ResultStatus, SelfEnumerationResult, SiteResult
+from naminter.core.models import WMNMode, WMNResult, WMNStatus, WMNValidationResult
from naminter.core.network import CurlCFFISession
-from naminter.core.utils import validate_numeric_values
+from .exceptions import BrowserError, ConfigurationError, ExportError, FileIOError
-def _version_callback(ctx: click.Context, _param: click.Option, value: bool) -> None:
+
+def _version_callback(ctx: click.Context, _param: click.Parameter, value: bool) -> None:
"""Eager callback to display version and exit."""
if not value or ctx.resilient_parsing:
return
@@ -54,6 +65,14 @@ def __init__(self, config: NaminterConfig) -> None:
show_details=config.show_details
)
self._response_dir: Path | None = self._setup_response_dir()
+ self._status_filters: Final[dict[WMNStatus, bool]] = {
+ WMNStatus.FOUND: config.filter_found,
+ WMNStatus.AMBIGUOUS: config.filter_ambiguous,
+ WMNStatus.UNKNOWN: config.filter_unknown,
+ WMNStatus.NOT_FOUND: config.filter_not_found,
+ WMNStatus.NOT_VALID: config.filter_not_valid,
+ WMNStatus.ERROR: config.filter_errors,
+ }
def _setup_response_dir(self) -> Path | None:
"""Setup response directory if response saving is enabled."""
@@ -62,11 +81,10 @@ def _setup_response_dir(self) -> Path | None:
try:
dir_path = self.config.response_dir
- if dir_path is None:
- return None
-
- dir_path.mkdir(parents=True, exist_ok=True)
- return dir_path
+ if dir_path is not None:
+ dir_path.mkdir(parents=True, exist_ok=True)
+ return dir_path
+ return None
except PermissionError as e:
display_error(
f"Permission denied creating/accessing response directory: {e}"
@@ -80,13 +98,17 @@ def _setup_response_dir(self) -> Path | None:
return None
@staticmethod
- def _setup_logging(config: NaminterConfig) -> None:
+ def setup_logging(config: NaminterConfig) -> None:
"""Configure project logging."""
if not config.log_file:
return
log_path = Path(config.log_file)
- log_path.parent.mkdir(parents=True, exist_ok=True)
+ try:
+ log_path.parent.mkdir(parents=True, exist_ok=True)
+ except (PermissionError, OSError) as e:
+ msg = f"Failed to create log directory {log_path.parent}: {e}"
+ raise FileIOError(msg) from e
level_value = getattr(
logging, str(config.log_level or "INFO").upper(), logging.INFO
@@ -100,26 +122,22 @@ def _setup_logging(config: NaminterConfig) -> None:
isinstance(handler, logging.FileHandler) for handler in logger.handlers
)
if not has_file_handler:
- file_handler = logging.FileHandler(
- str(log_path), mode="a", encoding="utf-8"
- )
- formatter = logging.Formatter(LOGGING_FORMAT)
- file_handler.setFormatter(formatter)
- file_handler.setLevel(level_value)
- logger.addHandler(file_handler)
+ try:
+ file_handler = logging.FileHandler(
+ str(log_path),
+ mode="a",
+ encoding=DEFAULT_FILE_ENCODING,
+ )
+ formatter = logging.Formatter(LOGGING_FORMAT)
+ file_handler.setFormatter(formatter)
+ file_handler.setLevel(level_value)
+ logger.addHandler(file_handler)
+ except (PermissionError, OSError) as e:
+ msg = f"Failed to create log file {log_path}: {e}"
+ raise FileIOError(msg) from e
async def run(self) -> None:
"""Main execution method with progress tracking."""
- try:
- warnings = validate_numeric_values(
- self.config.max_tasks, self.config.timeout
- )
- for message in warnings:
- display_warning(message)
- except ConfigurationError as e:
- display_error(f"Configuration error: {e}")
- return
-
http_client = CurlCFFISession(
proxies=self.config.proxy,
verify=self.config.verify_ssl,
@@ -131,40 +149,48 @@ async def run(self) -> None:
extra_fp=self.config.extra_fp,
)
+ wmn_data: dict[str, Any] | None = None
+ if self.config.local_list_path:
+ wmn_data = await read_json(self.config.local_list_path)
+ elif self.config.remote_list_url:
+ wmn_data = await fetch_json(http_client, self.config.remote_list_url)
+
+ wmn_schema: dict[str, Any] | None = None
+ if not self.config.skip_validation:
+ if self.config.local_schema_path:
+ wmn_schema = await read_json(self.config.local_schema_path)
+ elif self.config.remote_schema_url:
+ wmn_schema = await fetch_json(
+ http_client, self.config.remote_schema_url
+ )
+
async with Naminter(
http_client=http_client,
+ wmn_data=wmn_data,
+ wmn_schema=wmn_schema,
max_tasks=self.config.max_tasks,
- local_list_paths=self.config.local_list_paths,
- remote_list_urls=self.config.remote_list_urls,
- skip_validation=self.config.skip_validation,
- local_schema_path=self.config.local_schema_path,
- remote_schema_url=self.config.remote_schema_url,
) as naminter:
- if self.config.self_enumeration:
- results = await self._run_self_enumeration(naminter)
+ if self.config.validate_sites:
+ results = await self._run_validation(naminter)
else:
results = await self._run_check(naminter)
if self.config.export_formats and results:
- try:
- export_manager = Exporter(self.config.usernames or [], __version__)
- export_manager.export(results, self.config.export_formats)
- except ExportError as e:
- display_error(f"Export error: {e}")
- return
-
- async def _run_check(self, naminter: Naminter) -> list[SiteResult]:
+ exporter = Exporter(self.config.usernames or [])
+ exporter.export(results, self.config.export_formats)
+
+ async def _run_check(self, naminter: Naminter) -> list[WMNResult]:
"""Run the username enumeration functionality."""
summary = await naminter.get_wmn_summary(
site_names=self.config.sites,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
)
- actual_site_count = int(summary.sites_count)
+ actual_site_count = summary.sites_count
total_sites = actual_site_count * len(self.config.usernames)
tracker = ResultsTracker(total_sites)
- results: list[SiteResult] = []
+ results: list[WMNResult] = []
with ProgressManager(
console, disabled=self.config.no_progressbar
@@ -178,7 +204,7 @@ async def _run_check(self, naminter: Naminter) -> list[SiteResult]:
site_names=self.config.sites,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
- fuzzy_mode=self.config.fuzzy_mode,
+ mode=self.config.mode,
as_generator=True,
)
@@ -186,43 +212,48 @@ async def _run_check(self, naminter: Naminter) -> list[SiteResult]:
tracker.add_result(result)
if self._filter_result(result):
- response_file_path = await self._process_result(result)
- formatted_output = self._formatter.format_result(
- result, response_file_path
- )
- console.print(formatted_output)
- results.append(result)
-
- progress_mgr.update(advance=1, description=tracker.get_progress_text())
+ try:
+ file_path = await self._process_result(result)
+ formatted_output = self._formatter.format_result(
+ result, file_path
+ )
+ console.print(formatted_output)
+ results.append(result)
+ except Exception as e:
+ display_error(f"Error processing result for {result.name}: {e}")
+
+ progress_mgr.update(
+ advance=PROGRESS_ADVANCE_INCREMENT,
+ description=tracker.get_progress_text(),
+ )
return results
- async def _run_self_enumeration(
- self, naminter: Naminter
- ) -> list[SelfEnumerationResult]:
- """Run the self-enumeration functionality."""
+ async def _run_validation(self, naminter: Naminter) -> list[WMNValidationResult]:
+ """Run the site validation functionality."""
summary = await naminter.get_wmn_summary(
site_names=self.config.sites,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
)
- total_tests = int(summary.known_accounts_total)
+ total_tests = summary.known_count
tracker = ResultsTracker(total_tests)
- results: list[SelfEnumerationResult] = []
+ results: list[WMNValidationResult] = []
with ProgressManager(
console, disabled=self.config.no_progressbar
) as progress_mgr:
progress_mgr.start(
- total_tests, "[bright_cyan]Running self-enumeration...[/bright_cyan]"
+ total_tests,
+ "[bright_cyan]Validating sites...[/bright_cyan]",
)
- result_stream = await naminter.self_enumeration(
+ result_stream = await naminter.validate_sites(
site_names=self.config.sites,
include_categories=self.config.include_categories,
exclude_categories=self.config.exclude_categories,
- fuzzy_mode=self.config.fuzzy_mode,
+ mode=self.config.mode,
as_generator=True,
)
@@ -230,101 +261,69 @@ async def _run_self_enumeration(
for site_result in result.results:
tracker.add_result(site_result)
progress_mgr.update(
- advance=1, description=tracker.get_progress_text()
+ advance=PROGRESS_ADVANCE_INCREMENT,
+ description=tracker.get_progress_text(),
)
if self._filter_result(result):
- response_files: list[Path | None] = []
- for site_result in result.results:
- response_file_path = await self._process_result(site_result)
- if response_file_path:
- response_files.append(response_file_path)
- else:
- response_files.append(None)
- formatted_output = self._formatter.format_self_enumeration(
- result, response_files
- )
- console.print(formatted_output)
- results.append(result)
+ try:
+ response_files: list[Path | None] = []
+ for site_result in result.results:
+ response_file_path = await self._process_result(site_result)
+ if response_file_path:
+ response_files.append(response_file_path)
+ else:
+ response_files.append(None)
+ formatted_output = self._formatter.format_validation(
+ result, response_files
+ )
+ console.print(formatted_output)
+ results.append(result)
+ except Exception as e:
+ display_error(
+ f"Error processing validation result for {result.name}: {e}"
+ )
return results
- def _filter_result(self, result: SiteResult | SelfEnumerationResult) -> bool:
+ def _filter_result(self, result: WMNResult | WMNValidationResult) -> bool:
"""Determine if a result should be included based on filter settings."""
- status = result.status
-
if self.config.filter_all:
return True
- filter_map = {
- self.config.filter_found: ResultStatus.FOUND,
- self.config.filter_ambiguous: ResultStatus.AMBIGUOUS,
- self.config.filter_unknown: ResultStatus.UNKNOWN,
- self.config.filter_not_found: ResultStatus.NOT_FOUND,
- self.config.filter_not_valid: ResultStatus.NOT_VALID,
- self.config.filter_errors: ResultStatus.ERROR,
- }
-
- return any(
- filter_enabled and status == expected_status
- for filter_enabled, expected_status in filter_map.items()
- ) or not any(filter_map.keys())
+ return self._status_filters.get(result.status, False)
- async def _process_result(self, result: SiteResult) -> Path | None:
+ async def _process_result(self, result: WMNResult) -> Path | None:
"""Handle browser opening, response saving, and console output for a result."""
- response_file = None
-
- if result.result_url and self.config.browse:
- await self._open_browser(result.result_url)
-
- if self.config.save_response and result.response_text and self._response_dir:
+ if result.url and self.config.browse:
try:
- safe_site_name = sanitize_filename(result.name)
- safe_username = sanitize_filename(result.username)
- status_str = result.status.value
- created_at_str = result.created_at.strftime(RESPONSE_FILE_DATE_FORMAT)
-
- base_filename = (
- f"{status_str}_{result.response_code}_"
- f"{safe_site_name}_{safe_username}_{created_at_str}"
- f"{RESPONSE_FILE_EXTENSION}"
- )
- response_file = self._response_dir / base_filename
+ await open_browser(result.url)
+ except BrowserError as e:
+ display_error(str(e))
- await self._write_file(response_file, result.response_text)
+ if not self.config.save_response:
+ return None
- if self.config.open_response:
- file_uri = response_file.resolve().as_uri()
- await self._open_browser(file_uri)
- except PermissionError as e:
- display_error(f"Permission denied saving response to file: {e}")
- except OSError as e:
- display_error(f"OS error saving response to file: {e}")
- except Exception as e:
- display_error(f"Failed to save response to file: {e}")
+ if not result.response_text or not self._response_dir:
+ return None
- return response_file
+ filename = generate_response_filename(result)
+ file_path = self._response_dir / filename
- @staticmethod
- async def _open_browser(url: str) -> None:
- """Open a URL in the browser with error handling."""
try:
- await asyncio.to_thread(webbrowser.open, url)
- except Exception as e:
- display_error(f"Error opening browser for {url}: {e}")
+ await write_file(file_path, result.response_text)
+ except FileIOError as e:
+ display_error(str(e))
+ return None
- @staticmethod
- async def _write_file(file_path: Path, content: str) -> None:
- """Write content to a file with error handling."""
- try:
- async with aiofiles.open(file_path, "w", encoding="utf-8") as file:
- await file.write(content)
- except PermissionError as e:
- display_error(f"Permission denied writing to {file_path}: {e}")
- except OSError as e:
- display_error(f"OS error writing to {file_path}: {e}")
- except Exception as e:
- display_error(f"Failed to write to {file_path}: {e}")
+ if self.config.open_response:
+ file_uri = file_path.resolve().as_uri()
+ try:
+ await open_browser(file_uri)
+ except BrowserError as e:
+ display_error(str(e))
+
+ return file_path
@click.group(
@@ -354,17 +353,14 @@ async def _write_file(file_path: Path, content: str) -> None:
"--site",
"-s",
multiple=True,
- help='Specific site name(s) to enumerate (e.g., "GitHub", "Twitter")',
+ help='Specific site name(s) to enumerate (e.g., "GitHub", "X")',
)
@click.option(
"--local-list",
type=click.Path(exists=True, path_type=Path),
- multiple=True,
- help="Path(s) to local JSON file(s) containing WhatsMyName site data",
-)
-@click.option(
- "--remote-list", multiple=True, help="URL(s) to fetch remote WhatsMyName site data"
+ help="Path to a local JSON file containing WhatsMyName site data",
)
+@click.option("--remote-list", help="URL to fetch remote WhatsMyName site data")
@click.option(
"--local-schema",
type=click.Path(exists=True, path_type=Path),
@@ -373,7 +369,10 @@ async def _write_file(file_path: Path, content: str) -> None:
@click.option(
"--remote-schema",
default=WMN_SCHEMA_URL,
- help="URL to fetch custom WhatsMyName JSON schema for validation",
+ help=(
+ "URL to fetch WhatsMyName JSON schema for validation "
+ "(ignored with --skip-validation)"
+ ),
)
@click.option(
"--skip-validation",
@@ -381,9 +380,9 @@ async def _write_file(file_path: Path, content: str) -> None:
help="Skip JSON schema validation of WhatsMyName data",
)
@click.option(
- "--self-enumeration",
+ "--validate-sites",
is_flag=True,
- help="Run self-enumeration mode to validate site detection accuracy",
+ help="Validate site detection methods by checking known usernames",
)
@click.option(
"--include-categories",
@@ -419,8 +418,8 @@ async def _write_file(file_path: Path, content: str) -> None:
)
@click.option(
"--impersonate",
- type=click.Choice(["none", *typing.get_args(BrowserTypeLiteral)]),
- default="chrome",
+ type=click.Choice([BROWSER_IMPERSONATE_NONE, *typing.get_args(BrowserTypeLiteral)]),
+ default=BROWSER_IMPERSONATE_AGENT,
help='Browser to impersonate in HTTP requests (use "none" to disable)',
)
@click.option("--ja3", help="JA3 fingerprint string for TLS fingerprinting")
@@ -442,7 +441,10 @@ async def _write_file(file_path: Path, content: str) -> None:
help="Maximum number of concurrent tasks",
)
@click.option(
- "--fuzzy", "fuzzy_mode", is_flag=True, help="Enable fuzzy validation mode"
+ "--mode",
+ type=click.Choice([WMNMode.ANY.value, WMNMode.ALL.value]),
+ default=WMNMode.ALL.value,
+ help="Validation mode: 'all' for strict (AND), 'any' for fuzzy (OR)",
)
@click.option(
"--log-level",
@@ -456,21 +458,47 @@ async def _write_file(file_path: Path, content: str) -> None:
@click.option("--browse", is_flag=True, help="Open found profiles in web browser")
@click.option(
"--save-response",
- is_flag=True,
- help="Save HTTP response content for each result to files",
+ "save_response_opt",
+ type=str,
+ flag_value="__AUTO__",
+ default=None,
+ help="Save HTTP responses; optionally specify directory path",
)
-@click.option("--response-path", help="Custom directory path for saving response files")
@click.option(
"--open-response", is_flag=True, help="Open saved response files in web browser"
)
-@click.option("--csv", "csv_export", is_flag=True, help="Export results to CSV file")
-@click.option("--csv-path", help="Custom path for CSV export")
-@click.option("--pdf", "pdf_export", is_flag=True, help="Export results to PDF file")
-@click.option("--pdf-path", help="Custom path for PDF export")
-@click.option("--html", "html_export", is_flag=True, help="Export results to HTML file")
-@click.option("--html-path", help="Custom path for HTML export")
-@click.option("--json", "json_export", is_flag=True, help="Export results to JSON file")
-@click.option("--json-path", help="Custom path for JSON export")
+@click.option(
+ "--csv",
+ "csv_opt",
+ type=str,
+ flag_value="__AUTO__",
+ default=None,
+ help="Export results to CSV; optionally specify a custom path",
+)
+@click.option(
+ "--pdf",
+ "pdf_opt",
+ type=str,
+ flag_value="__AUTO__",
+ default=None,
+ help="Export results to PDF; optionally specify a custom path",
+)
+@click.option(
+ "--html",
+ "html_opt",
+ type=str,
+ flag_value="__AUTO__",
+ default=None,
+ help="Export results to HTML; optionally specify a custom path",
+)
+@click.option(
+ "--json",
+ "json_opt",
+ type=str,
+ flag_value="__AUTO__",
+ default=None,
+ help="Export results to JSON; optionally specify a custom path",
+)
@click.option(
"--filter-all",
is_flag=True,
@@ -508,7 +536,10 @@ async def _write_file(file_path: Path, content: str) -> None:
)
@click.pass_context
def main(ctx: click.Context, **kwargs: Any) -> None:
- """A Python package and command-line interface (CLI) tool for asynchronous OSINT username enumeration using the WhatsMyName dataset."""
+ """A Python package and command-line interface (CLI) tool
+ for asynchronous OSINT username enumeration using the
+ WhatsMyName dataset.
+ """
if ctx.invoked_subcommand is not None:
return
@@ -517,11 +548,46 @@ def main(ctx: click.Context, **kwargs: Any) -> None:
console.no_color = True
try:
+ csv_export = kwargs.get("csv_opt") is not None
+ csv_path = (
+ None
+ if kwargs.get("csv_opt") in {None, "__AUTO__"}
+ else kwargs.get("csv_opt")
+ )
+
+ pdf_export = kwargs.get("pdf_opt") is not None
+ pdf_path = (
+ None
+ if kwargs.get("pdf_opt") in {None, "__AUTO__"}
+ else kwargs.get("pdf_opt")
+ )
+
+ html_export = kwargs.get("html_opt") is not None
+ html_path = (
+ None
+ if kwargs.get("html_opt") in {None, "__AUTO__"}
+ else kwargs.get("html_opt")
+ )
+
+ json_export = kwargs.get("json_opt") is not None
+ json_path = (
+ None
+ if kwargs.get("json_opt") in {None, "__AUTO__"}
+ else kwargs.get("json_opt")
+ )
+
+ save_response = kwargs.get("save_response_opt") is not None
+ response_path = (
+ None
+ if kwargs.get("save_response_opt") in {None, "__AUTO__"}
+ else kwargs.get("save_response_opt")
+ )
+
config = NaminterConfig(
- usernames=kwargs.get("username"),
+ usernames=list(kwargs.get("username") or []),
sites=kwargs.get("site"),
- local_list_paths=kwargs.get("local_list"),
- remote_list_urls=kwargs.get("remote_list"),
+ local_list_path=kwargs.get("local_list"),
+ remote_list_url=kwargs.get("remote_list"),
local_schema_path=kwargs.get("local_schema"),
remote_schema_url=kwargs.get("remote_schema"),
skip_validation=kwargs.get("skip_validation"),
@@ -530,61 +596,66 @@ def main(ctx: click.Context, **kwargs: Any) -> None:
max_tasks=kwargs.get("max_tasks"),
timeout=kwargs.get("timeout"),
proxy=kwargs.get("proxy"),
- allow_redirects=kwargs.get("allow_redirects"),
- verify_ssl=kwargs.get("verify_ssl"),
+ allow_redirects=bool(kwargs.get("allow_redirects")),
+ verify_ssl=bool(kwargs.get("verify_ssl")),
impersonate=kwargs.get("impersonate"),
ja3=kwargs.get("ja3"),
akamai=kwargs.get("akamai"),
extra_fp=kwargs.get("extra_fp"),
- fuzzy_mode=kwargs.get("fuzzy_mode"),
- self_enumeration=kwargs.get("self_enumeration"),
+ mode=WMNMode(kwargs.get("mode", WMNMode.ALL.value)),
+ validate_sites=bool(kwargs.get("validate_sites")),
log_level=kwargs.get("log_level"),
log_file=kwargs.get("log_file"),
- show_details=kwargs.get("show_details"),
- browse=kwargs.get("browse"),
- save_response=kwargs.get("save_response"),
- response_path=kwargs.get("response_path"),
- open_response=kwargs.get("open_response"),
- csv_export=kwargs.get("csv_export"),
- csv_path=kwargs.get("csv_path"),
- pdf_export=kwargs.get("pdf_export"),
- pdf_path=kwargs.get("pdf_path"),
- html_export=kwargs.get("html_export"),
- html_path=kwargs.get("html_path"),
- json_export=kwargs.get("json_export"),
- json_path=kwargs.get("json_path"),
- filter_all=kwargs.get("filter_all"),
- filter_found=kwargs.get("filter_found"),
- filter_ambiguous=kwargs.get("filter_ambiguous"),
- filter_unknown=kwargs.get("filter_unknown"),
- filter_not_found=kwargs.get("filter_not_found"),
- filter_not_valid=kwargs.get("filter_not_valid"),
- filter_errors=kwargs.get("filter_errors"),
- no_progressbar=kwargs.get("no_progressbar"),
+ show_details=bool(kwargs.get("show_details")),
+ browse=bool(kwargs.get("browse")),
+ save_response=save_response,
+ response_path=response_path,
+ open_response=bool(kwargs.get("open_response")),
+ csv_export=csv_export,
+ csv_path=csv_path,
+ pdf_export=pdf_export,
+ pdf_path=pdf_path,
+ html_export=html_export,
+ html_path=html_path,
+ json_export=json_export,
+ json_path=json_path,
+ filter_all=bool(kwargs.get("filter_all")),
+ filter_found=bool(kwargs.get("filter_found")),
+ filter_ambiguous=bool(kwargs.get("filter_ambiguous")),
+ filter_unknown=bool(kwargs.get("filter_unknown")),
+ filter_not_found=bool(kwargs.get("filter_not_found")),
+ filter_not_valid=bool(kwargs.get("filter_not_valid")),
+ filter_errors=bool(kwargs.get("filter_errors")),
+ no_progressbar=bool(kwargs.get("no_progressbar")),
)
- NaminterCLI._setup_logging(config)
+ NaminterCLI.setup_logging(config)
naminter_cli = NaminterCLI(config)
asyncio.run(naminter_cli.run())
except KeyboardInterrupt:
display_warning("Operation interrupted")
- ctx.exit(1)
- except TimeoutError:
- display_error("Operation timed out")
- ctx.exit(1)
+ ctx.exit(EXIT_CODE_ERROR)
except ConfigurationError as e:
display_error(f"Configuration error: {e}")
- ctx.exit(1)
- except DataError as e:
+ ctx.exit(EXIT_CODE_ERROR)
+ except HttpError as e:
+ display_error(f"Network error: {e}")
+ ctx.exit(EXIT_CODE_ERROR)
+ except WMNDataError as e:
display_error(f"Data error: {e}")
- ctx.exit(1)
+ if isinstance(e, WMNValidationError) and e.errors:
+ display_validation_errors(e.errors)
+ ctx.exit(EXIT_CODE_ERROR)
+ except FileIOError as e:
+ display_error(f"File I/O error: {e}")
+ ctx.exit(EXIT_CODE_ERROR)
except ExportError as e:
display_error(f"Export error: {e}")
- ctx.exit(1)
+ ctx.exit(EXIT_CODE_ERROR)
except Exception as e:
- display_error(f"Fatal error: {e}")
- ctx.exit(1)
+ display_error(f"Unexpected error: {e}")
+ ctx.exit(EXIT_CODE_ERROR)
def entry_point() -> None:
diff --git a/naminter/cli/progress.py b/naminter/cli/progress.py
index d28ac15..135df90 100644
--- a/naminter/cli/progress.py
+++ b/naminter/cli/progress.py
@@ -14,7 +14,8 @@
)
from naminter.cli.console import THEME
-from naminter.core.models import ResultStatus, SiteResult
+from naminter.cli.constants import PROGRESS_ADVANCE_INCREMENT
+from naminter.core.models import WMNResult, WMNStatus
class ResultsTracker:
@@ -22,26 +23,26 @@ class ResultsTracker:
def __init__(self, total_sites: int) -> None:
"""Initialize the results tracker."""
- self.total_sites = total_sites
+ self.total_sites = max(total_sites, 0)
self.results_count = 0
self.start_time = time.time()
- self.status_counts: dict[ResultStatus, int] = dict.fromkeys(ResultStatus, 0)
+ self.status_counts: dict[WMNStatus, int] = dict.fromkeys(WMNStatus, 0)
- def add_result(self, result: SiteResult) -> None:
+ def add_result(self, result: WMNResult) -> None:
"""Update counters with a new result."""
self.results_count += 1
self.status_counts[result.status] += 1
def get_progress_text(self) -> str:
"""Get formatted progress text with request speed and statistics."""
- elapsed = time.time() - self.start_time
+ elapsed = time.time() - self.start_time if self.start_time else 0.0
- found = self.status_counts[ResultStatus.FOUND]
- ambiguous = self.status_counts[ResultStatus.AMBIGUOUS]
- unknown = self.status_counts[ResultStatus.UNKNOWN]
- not_found = self.status_counts[ResultStatus.NOT_FOUND]
- not_valid = self.status_counts[ResultStatus.NOT_VALID]
- errors = self.status_counts[ResultStatus.ERROR]
+ found = self.status_counts[WMNStatus.FOUND]
+ ambiguous = self.status_counts[WMNStatus.AMBIGUOUS]
+ unknown = self.status_counts[WMNStatus.UNKNOWN]
+ not_found = self.status_counts[WMNStatus.NOT_FOUND]
+ not_valid = self.status_counts[WMNStatus.NOT_VALID]
+ errors = self.status_counts[WMNStatus.ERROR]
valid_count = self.results_count - errors - not_valid
valid_count = max(valid_count, 0)
@@ -62,8 +63,9 @@ def get_progress_text(self) -> str:
if not_valid > 0:
sections.append(f"[{THEME['warning']}]x {not_valid}[/]")
+ total = max(self.total_sites, self.results_count)
sections.append(
- f"[{THEME['primary']}]{self.results_count}/{self.total_sites}[/]"
+ f"[{THEME['primary']}]{self.results_count}/{total}[/]"
)
return " │ ".join(sections)
@@ -101,7 +103,9 @@ def start(self, total: int, description: str) -> None:
self.progress.start()
self.task_id = self.progress.add_task(description, total=total)
- def update(self, advance: int = 1, description: str | None = None) -> None:
+ def update(
+ self, advance: int = PROGRESS_ADVANCE_INCREMENT, description: str | None = None
+ ) -> None:
"""Update the progress bar."""
if self.progress and self.task_id is not None:
update_kwargs: dict[str, Any] = {"advance": advance}
diff --git a/naminter/cli/utils.py b/naminter/cli/utils.py
index cc54b88..3e18979 100644
--- a/naminter/cli/utils.py
+++ b/naminter/cli/utils.py
@@ -1,11 +1,147 @@
+import asyncio
+import json
+import webbrowser
+from pathlib import Path
+from typing import Any
+
+import aiofiles
+
+from naminter.cli.constants import (
+ MAX_FILENAME_LENGTH,
+ RESPONSE_FILE_DATE_FORMAT,
+ RESPONSE_FILE_EXTENSION,
+)
+from naminter.cli.exceptions import BrowserError, ConfigurationError, FileIOError
+from naminter.core.constants import (
+ DEFAULT_FILE_ENCODING,
+ EMPTY_STRING,
+)
+from naminter.core.exceptions import (
+ HttpError,
+ HttpSessionError,
+ HttpTimeoutError,
+ WMNDataError,
+)
+from naminter.core.models import WMNResult
+from naminter.core.network import BaseSession
+
+
def sanitize_filename(filename: str) -> str:
"""Sanitize filename for cross-platform compatibility."""
if not filename or not str(filename).strip():
return "unnamed"
invalid_chars = '<>:"|?*\\/\0'
- sanitized = "".join(
+ sanitized = EMPTY_STRING.join(
"_" if c in invalid_chars or ord(c) < 32 else c for c in str(filename)
)
- sanitized = sanitized.strip(" .")[:200] if sanitized.strip(" .") else "unnamed"
+ sanitized = (
+ sanitized.strip(" .")[:MAX_FILENAME_LENGTH]
+ if sanitized.strip(" .")
+ else "unnamed"
+ )
return sanitized
+
+
+async def fetch_json(http_client: BaseSession, url: str) -> dict[str, Any]:
+ """Fetch and parse JSON from a URL."""
+ if not url or not url.strip():
+ msg = f"Invalid URL: {url}"
+ raise ConfigurationError(msg)
+
+ try:
+ response = await http_client.get(url)
+ except (HttpError, HttpTimeoutError, HttpSessionError):
+ raise
+ except Exception as e:
+ msg = f"Network error while fetching from {url}: {e}"
+ raise HttpError(msg, cause=e) from e
+
+ if response.status_code != 200:
+ msg = f"Failed to fetch from {url}: HTTP {response.status_code}"
+ raise HttpError(msg)
+
+ try:
+ return response.json()
+ except (ValueError, json.JSONDecodeError) as e:
+ msg = f"Failed to parse JSON from {url}: {e}"
+ raise WMNDataError(msg, cause=e) from e
+ except Exception as e:
+ msg = f"Unexpected error parsing response from {url}: {e}"
+ raise WMNDataError(msg, cause=e) from e
+
+
+async def read_json(path: str | Path) -> dict[str, Any]:
+ """Read JSON from a local file without blocking the event loop."""
+ try:
+ async with aiofiles.open(path, encoding=DEFAULT_FILE_ENCODING) as file:
+ content = await file.read()
+ except FileNotFoundError as e:
+ msg = f"File not found: {path}"
+ raise FileIOError(msg) from e
+ except PermissionError as e:
+ msg = f"Permission denied accessing file: {path}"
+ raise FileIOError(msg) from e
+ except UnicodeDecodeError as e:
+ msg = f"Encoding error reading file {path}: {e}"
+ raise FileIOError(msg) from e
+ except OSError as e:
+ msg = f"Error reading file {path}: {e}"
+ raise FileIOError(msg) from e
+
+ try:
+ return json.loads(content)
+ except json.JSONDecodeError as e:
+ msg = f"Invalid JSON in file {path}: {e}"
+ raise WMNDataError(msg, cause=e) from e
+
+
+async def open_browser(url: str) -> None:
+ """Open a URL in the browser with error handling."""
+ if not url or not url.strip():
+ msg = "Invalid URL provided to browser"
+ raise BrowserError(msg)
+
+ try:
+ await asyncio.to_thread(webbrowser.open, url)
+ except webbrowser.Error as e:
+ msg = f"Browser error opening {url}: {e}"
+ raise BrowserError(msg) from e
+ except OSError as e:
+ msg = f"OS error opening browser for {url}: {e}"
+ raise BrowserError(msg) from e
+ except Exception as e:
+ msg = f"Unexpected error opening browser for {url}: {e}"
+ raise BrowserError(msg) from e
+
+
+async def write_file(file_path: Path, content: str) -> None:
+ """Write content to a file with error handling."""
+ try:
+ async with aiofiles.open(
+ file_path, mode="w", encoding=DEFAULT_FILE_ENCODING
+ ) as file:
+ await file.write(content)
+ except PermissionError as e:
+ msg = f"Permission denied writing to {file_path}: {e}"
+ raise FileIOError(msg) from e
+ except OSError as e:
+ msg = f"OS error writing to {file_path}: {e}"
+ raise FileIOError(msg) from e
+ except Exception as e:
+ msg = f"Unexpected error writing to {file_path}: {e}"
+ raise FileIOError(msg) from e
+
+
+def generate_response_filename(result: WMNResult) -> str:
+ """Generate a sanitized filename for saving response data."""
+ safe_site_name = sanitize_filename(result.name)
+ safe_username = sanitize_filename(result.username)
+ status_str = result.status.value
+ created_at_str = result.created_at.strftime(RESPONSE_FILE_DATE_FORMAT)
+
+ return (
+ f"{status_str}_{result.response_code}_"
+ f"{safe_site_name}_{safe_username}_{created_at_str}"
+ f"{RESPONSE_FILE_EXTENSION}"
+ )
diff --git a/naminter/core/__init__.py b/naminter/core/__init__.py
index e69de29..8b13789 100644
--- a/naminter/core/__init__.py
+++ b/naminter/core/__init__.py
@@ -0,0 +1 @@
+
diff --git a/naminter/core/constants.py b/naminter/core/constants.py
index 1bca02c..9a7c025 100644
--- a/naminter/core/constants.py
+++ b/naminter/core/constants.py
@@ -1,6 +1,6 @@
from typing import Final
-# Remote data source configuration
+# Remote Data Source Configuration
WMN_REMOTE_URL: Final[str] = (
"https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data.json"
)
@@ -8,65 +8,77 @@
"https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data-schema.json"
)
-# HTTP request configuration
+# HTTP Configuration
HTTP_REQUEST_TIMEOUT_SECONDS: Final[int] = 30
HTTP_SSL_VERIFY: Final[bool] = False
HTTP_ALLOW_REDIRECTS: Final[bool] = False
-# Browser impersonation settings
+# Browser Impersonation Settings
BROWSER_IMPERSONATE_AGENT: Final[str] = "chrome"
+BROWSER_IMPERSONATE_NONE: Final[str] = "none"
-# Concurrency settings
+# Concurrency Settings
MAX_CONCURRENT_TASKS: Final[int] = 50
-# Validation ranges and thresholds
-MIN_TASKS: Final[int] = 1
-MAX_TASKS_LIMIT: Final[int] = 1000
-MIN_TIMEOUT: Final[int] = 0
-MAX_TIMEOUT: Final[int] = 300
-
-# Performance warning thresholds
-HIGH_CONCURRENCY_THRESHOLD: Final[int] = 100
-HIGH_CONCURRENCY_MIN_TIMEOUT: Final[int] = 10
-VERY_HIGH_CONCURRENCY_THRESHOLD: Final[int] = 50
-VERY_HIGH_CONCURRENCY_MIN_TIMEOUT: Final[int] = 5
-EXTREME_CONCURRENCY_THRESHOLD: Final[int] = 500
-LOW_TIMEOUT_WARNING_THRESHOLD: Final[int] = 3
-
-# Logging format - includes logger name to distinguish between core and cli
+# Logging Configuration
LOGGING_FORMAT: Final[str] = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
-# Placeholder for account name substitution in uri_check or post_body
+# Account Name Substitution
ACCOUNT_PLACEHOLDER: Final[str] = "{account}"
-# Required key sets for validations
-REQUIRED_KEYS_ENUMERATE: Final[tuple[str, ...]] = (
- "name",
- "uri_check",
- "e_code",
- "e_string",
- "m_string",
- "m_code",
- "cat",
-)
-
-REQUIRED_KEYS_SELF_ENUM: Final[tuple[str, ...]] = (
- "name",
- "cat",
- "known",
-)
-
-# WMN dataset keys
+# WMN Dataset Structure Keys
WMN_KEY_SITES: Final[str] = "sites"
WMN_KEY_CATEGORIES: Final[str] = "categories"
WMN_KEY_AUTHORS: Final[str] = "authors"
WMN_KEY_LICENSE: Final[str] = "license"
WMN_KEY_NAME: Final[str] = "name"
-# Collection of list fields present in WMN payloads
WMN_LIST_FIELDS: Final[tuple[str, ...]] = (
WMN_KEY_SITES,
WMN_KEY_CATEGORIES,
WMN_KEY_AUTHORS,
WMN_KEY_LICENSE,
)
+
+# Site Object Structure Keys
+SITE_KEY_NAME: Final[str] = "name"
+SITE_KEY_CATEGORY: Final[str] = "cat"
+SITE_KEY_URI_CHECK: Final[str] = "uri_check"
+SITE_KEY_URI_PRETTY: Final[str] = "uri_pretty"
+SITE_KEY_HEADERS: Final[str] = "headers"
+SITE_KEY_POST_BODY: Final[str] = "post_body"
+SITE_KEY_STRIP_BAD_CHAR: Final[str] = "strip_bad_char"
+SITE_KEY_E_CODE: Final[str] = "e_code"
+SITE_KEY_E_STRING: Final[str] = "e_string"
+SITE_KEY_M_STRING: Final[str] = "m_string"
+SITE_KEY_M_CODE: Final[str] = "m_code"
+SITE_KEY_KNOWN: Final[str] = "known"
+
+# Validation Requirements
+REQUIRED_KEYS_ENUMERATE: Final[tuple[str, ...]] = (
+ SITE_KEY_NAME,
+ SITE_KEY_URI_CHECK,
+ SITE_KEY_E_CODE,
+ SITE_KEY_E_STRING,
+ SITE_KEY_M_STRING,
+ SITE_KEY_M_CODE,
+ SITE_KEY_CATEGORY,
+)
+
+REQUIRED_KEYS_SELF_ENUM: Final[tuple[str, ...]] = (
+ SITE_KEY_NAME,
+ SITE_KEY_CATEGORY,
+ SITE_KEY_KNOWN,
+)
+
+# JSON Configuration
+DEFAULT_JSON_INDENT: Final[int] = 2
+DEFAULT_JSON_ENCODING: Final[str] = "utf-8"
+DEFAULT_JSON_ENSURE_ASCII: Final[bool] = False
+
+# File Operations
+DEFAULT_FILE_ENCODING: Final[str] = "utf-8"
+
+# Default Values and String Processing
+DEFAULT_UNKNOWN_VALUE: Final[str] = "unknown"
+EMPTY_STRING: Final[str] = ""
diff --git a/naminter/core/exceptions.py b/naminter/core/exceptions.py
index 8423521..7e4915c 100644
--- a/naminter/core/exceptions.py
+++ b/naminter/core/exceptions.py
@@ -1,3 +1,7 @@
+from typing import Any
+
+
+# Base exception
class NaminterError(Exception):
"""Base exception class for Naminter errors.
@@ -12,14 +16,7 @@ def __init__(self, message: str, cause: Exception | None = None) -> None:
self.cause = cause
-class ConfigurationError(NaminterError):
- """Raised when there's an error in the configuration parameters.
-
- This includes invalid configuration values, missing required settings,
- or configuration file parsing errors.
- """
-
-
+# Network/HTTP errors
class HttpError(NaminterError):
"""Raised when network-related errors occur.
@@ -44,54 +41,41 @@ class HttpTimeoutError(HttpError):
"""
-class DataError(NaminterError):
- """Raised when there are issues with data processing or validation.
+# Data processing errors
+class WMNDataError(NaminterError):
+ """Raised when there are issues with WMN data processing or validation.
This includes malformed data, parsing errors, and data integrity issues.
"""
-class SchemaError(DataError):
- """Raised when WMN schema validation fails.
-
- This occurs when the WhatsMyName list format doesn't match
- the expected schema structure, or when the schema itself is invalid.
- """
+class WMNSchemaError(WMNDataError):
+ """Raised when the WMN JSON Schema itself is invalid or cannot be used."""
-class ValidationError(DataError):
- """Raised when input validation fails.
+class WMNValidationError(WMNDataError):
+ """Raised when WMN dataset does not conform to the provided JSON Schema.
- This includes invalid usernames, malformed URLs,
- and other input parameter validation errors.
+ Attributes:
+ errors: Structured list of validation errors to display/inspect.
"""
-
-class FileAccessError(DataError):
- """Raised when file operations fail.
-
- This includes reading/writing local lists, responses, exports,
- and other file system operations.
- """
-
-
-class ExportError(NaminterError):
- """Raised when export operations fail.
-
- This includes file writing errors, format conversion errors,
- and other export-related issues.
- """
+ def __init__(
+ self,
+ message: str,
+ errors: list[Any] | None = None,
+ cause: Exception | None = None,
+ ) -> None:
+ super().__init__(message, cause)
+ self.errors: list[Any] = errors or []
__all__ = [
- "ConfigurationError",
- "DataError",
- "ExportError",
- "FileAccessError",
"HttpError",
"HttpSessionError",
"HttpTimeoutError",
"NaminterError",
- "SchemaError",
- "ValidationError",
+ "WMNDataError",
+ "WMNSchemaError",
+ "WMNValidationError",
]
diff --git a/naminter/core/main.py b/naminter/core/main.py
index e2145e9..491ffe4 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -1,43 +1,55 @@
import asyncio
-import json
import logging
-from collections.abc import AsyncGenerator, Sequence
-from pathlib import Path
-from typing import Any
-
-import aiofiles
-import jsonschema
+from collections.abc import AsyncGenerator, Awaitable, Callable
+from functools import wraps
+from typing import Any, Literal, TypeVar, overload
from naminter.core.constants import (
ACCOUNT_PLACEHOLDER,
+ DEFAULT_UNKNOWN_VALUE,
+ EMPTY_STRING,
MAX_CONCURRENT_TASKS,
REQUIRED_KEYS_ENUMERATE,
REQUIRED_KEYS_SELF_ENUM,
+ SITE_KEY_CATEGORY,
+ SITE_KEY_E_CODE,
+ SITE_KEY_E_STRING,
+ SITE_KEY_HEADERS,
+ SITE_KEY_KNOWN,
+ SITE_KEY_M_CODE,
+ SITE_KEY_M_STRING,
+ SITE_KEY_NAME,
+ SITE_KEY_POST_BODY,
+ SITE_KEY_STRIP_BAD_CHAR,
+ SITE_KEY_URI_CHECK,
+ SITE_KEY_URI_PRETTY,
WMN_KEY_AUTHORS,
- WMN_KEY_CATEGORIES,
WMN_KEY_LICENSE,
- WMN_KEY_NAME,
WMN_KEY_SITES,
- WMN_REMOTE_URL,
)
from naminter.core.exceptions import (
- DataError,
- FileAccessError,
HttpError,
HttpSessionError,
HttpTimeoutError,
- SchemaError,
- ValidationError,
+ WMNDataError,
+ WMNSchemaError,
+ WMNValidationError,
)
from naminter.core.models import (
- ResultStatus,
- SelfEnumerationResult,
- SiteResult,
- Summary,
- ValidationMode,
+ WMNDataset,
+ WMNMode,
+ WMNResult,
+ WMNResponse,
+ WMNSummary,
+ WMNValidationResult,
)
from naminter.core.network import BaseSession
-from naminter.core.utils import deduplicate_strings, merge_lists, validate_usernames
+from naminter.core.utils import (
+ get_missing_keys,
+ validate_dataset,
+)
+
+T = TypeVar("T")
class Naminter:
@@ -46,255 +58,109 @@ class Naminter:
def __init__(
self,
http_client: BaseSession,
- wmn_data: dict[str, Any] | None = None,
+ wmn_data: WMNDataset | None = None,
wmn_schema: dict[str, Any] | None = None,
- local_list_paths: list[Path] | None = None,
- remote_list_urls: list[str] | None = None,
- skip_validation: bool = False,
- local_schema_path: Path | None = None,
- remote_schema_url: str | None = None,
max_tasks: int = MAX_CONCURRENT_TASKS,
) -> None:
"""Initialize Naminter with configuration parameters."""
self._logger = logging.getLogger(__name__)
self._logger.addHandler(logging.NullHandler())
- self._logger.debug("Initializing Naminter (max_tasks=%d)", max_tasks)
-
- self.max_tasks = max_tasks
-
- self._local_list_paths = local_list_paths
- self._remote_list_urls = remote_list_urls
- self._skip_validation = skip_validation
- self._local_schema_path = local_schema_path
- self._remote_schema_url = remote_schema_url
-
- self._wmn_data: dict[str, Any] | None = wmn_data
+ self._wmn_data: WMNDataset | None = wmn_data
self._wmn_schema: dict[str, Any] | None = wmn_schema
- self._semaphore = asyncio.Semaphore(self.max_tasks)
- self._session_lock = asyncio.Lock()
+ self._semaphore = asyncio.Semaphore(max_tasks)
self._http: BaseSession = http_client
+ self._session_open: bool = False
+ self._session_lock = asyncio.Lock()
+ self._dataset_ready: bool = False
+
async def _open_session(self) -> None:
- """Open the HTTP session (idempotent, safe under concurrency)."""
+ """Open the HTTP session."""
+ if self._session_open:
+ return
+
async with self._session_lock:
+ if self._session_open:
+ return
try:
await self._http.open()
- self._logger.info("HTTP client opened")
+ self._session_open = True
+ self._logger.info("HTTP session opened")
except HttpSessionError as e:
self._logger.error("Failed to open HTTP session: %s", e)
msg = f"HTTP session initialization failed: {e}"
- raise DataError(msg) from e
-
- async def _fetch_json(self, url: str) -> dict[str, Any]:
- """Fetch and parse JSON from a URL."""
- if not url.strip():
- msg = f"Invalid URL: {url}"
- raise ValidationError(msg)
-
- try:
- response = await self._http.get(url)
- except HttpTimeoutError as e:
- msg = f"Timeout while fetching from {url}: {e}"
- raise DataError(msg) from e
- except HttpSessionError as e:
- msg = f"Session error while fetching from {url}: {e}"
- raise DataError(msg) from e
- except HttpError as e:
- msg = f"Network error while fetching from {url}: {e}"
- raise DataError(msg) from e
-
- if response.status_code < 200 or response.status_code >= 300:
- msg = f"Failed to fetch from {url}: HTTP {response.status_code}"
- raise DataError(msg)
-
- try:
- return response.json()
- except (ValueError, json.JSONDecodeError) as e:
- msg = f"Failed to parse JSON from {url}: {e}"
- raise DataError(msg) from e
-
- @staticmethod
- async def _read_json_file(path: str | Path) -> dict[str, Any]:
- """Read JSON from a local file without blocking the event loop."""
- try:
- async with aiofiles.open(path, encoding="utf-8") as file:
- content = await file.read()
- except FileNotFoundError as e:
- msg = f"File not found: {path}"
- raise FileAccessError(msg) from e
- except PermissionError as e:
- msg = f"Permission denied accessing file: {path}"
- raise FileAccessError(msg) from e
- except OSError as e:
- msg = f"Error reading file {path}: {e}"
- raise FileAccessError(msg) from e
-
- try:
- return json.loads(content)
- except json.JSONDecodeError as e:
- msg = f"Invalid JSON in file {path}: {e}"
- raise DataError(msg) from e
+ raise WMNDataError(msg) from e
- async def _load_schema(self) -> dict[str, Any]:
- """Load WMN schema from local or remote source."""
- if self._skip_validation:
- return {}
-
- try:
- if self._local_schema_path:
- return await self._read_json_file(self._local_schema_path)
- elif self._remote_schema_url:
- return await self._fetch_json(self._remote_schema_url)
- else:
- msg = (
- "No schema source provided - either local_schema_path or "
- "remote_schema_url is required"
- )
- raise DataError(msg)
- except (OSError, json.JSONDecodeError) as e:
- msg = f"Failed to load required WMN schema from local file: {e}"
- raise DataError(
- msg
- ) from e
- except HttpError as e:
- msg = (
- f"Failed to load required WMN schema from {self._remote_schema_url}: "
- f"{e}"
- )
- raise DataError(msg) from e
-
- async def _load_dataset(self) -> dict[str, Any]:
- """Load WMN data from configured sources."""
- dataset: dict[str, Any] = {
- WMN_KEY_SITES: [],
- WMN_KEY_CATEGORIES: [],
- WMN_KEY_AUTHORS: [],
- WMN_KEY_LICENSE: [],
- }
-
- sources: list[tuple[str | Path, bool]] = []
- if self._remote_list_urls:
- sources.extend([(url, True) for url in self._remote_list_urls])
- if self._local_list_paths:
- sources.extend([(path, False) for path in self._local_list_paths])
- if not sources:
- sources = [(WMN_REMOTE_URL, True)]
-
- coroutines = []
- for source, is_remote in sources:
- if is_remote:
- coroutines.append(self._fetch_json(str(source)))
- else:
- coroutines.append(self._read_json_file(source))
-
- results = await asyncio.gather(*coroutines, return_exceptions=True)
-
- failures: list[str] = []
- for src, res in zip(sources, results, strict=False):
- if isinstance(res, Exception):
- source, is_remote = src
- failures.append(
- f"{source} ({'remote' if is_remote else 'local'}): {res}"
+ async def _close_session(self) -> None:
+ """Close the HTTP session if open."""
+ async with self._session_lock:
+ if not self._session_open:
+ return
+ try:
+ await self._http.close()
+ except asyncio.CancelledError:
+ self._logger.debug("HTTP client close cancelled")
+ raise
+ except Exception as e:
+ self._logger.exception(
+ "Unexpected error during HTTP client close: %s", e
)
- self._logger.warning("Failed to load WMN data from %s: %s", source, res)
- else:
- merge_lists(res, dataset)
+ finally:
+ self._session_open = False
- if not dataset[WMN_KEY_SITES]:
- detail = (
- "; ".join(failures) if failures else "no sources produced any sites"
- )
- msg = f"No sites loaded from any source; details: {detail}"
- raise DataError(msg)
+ async def _ensure_ready(self) -> None:
+ """Ensure HTTP session is open and dataset is loaded."""
+ if not self._session_open:
+ await self._open_session()
- return dataset
+ if self._dataset_ready:
+ return
- @staticmethod
- def _deduplicate_data(data: dict[str, Any]) -> None:
- """Deduplicate and clean the WMN data in place."""
- unique_sites = {
- site[WMN_KEY_NAME]: site
- for site in data[WMN_KEY_SITES]
- if isinstance(site, dict) and site.get(WMN_KEY_NAME)
- }
- data[WMN_KEY_SITES] = list(unique_sites.values())
- data[WMN_KEY_CATEGORIES] = list(dict.fromkeys(data[WMN_KEY_CATEGORIES]))
- data[WMN_KEY_AUTHORS] = list(dict.fromkeys(data[WMN_KEY_AUTHORS]))
- data[WMN_KEY_LICENSE] = list(dict.fromkeys(data[WMN_KEY_LICENSE]))
-
- async def _load_wmn_lists(self) -> tuple[dict[str, Any], dict[str, Any]]:
- """Unified async loader for WMN data and schema.
-
- Returns a mapping with keys: data (dataset dict) and schema (schema dict).
- """
- if self._wmn_data and self._wmn_schema:
- return (self._wmn_data, self._wmn_schema)
+ if not self._wmn_data:
+ msg = "WMN data must be provided to Naminter constructor"
+ raise WMNDataError(msg)
- dataset, dataset_schema = await asyncio.gather(
- self._load_dataset(),
- self._load_schema(),
- )
- self._deduplicate_data(dataset)
+ async with self._session_lock:
+ if self._dataset_ready:
+ return
- return (dataset, dataset_schema)
+ try:
+ if self._wmn_schema:
+ errors = validate_dataset(self._wmn_data, self._wmn_schema)
+ if errors:
+ msg = "WMN dataset validation failed"
+ raise WMNValidationError(msg, errors=errors)
+
+ self._dataset_ready = True
+ self._logger.info(
+ "Dataset loaded: %d sites",
+ len(self._wmn_data.get(WMN_KEY_SITES, [])),
+ )
+ except WMNSchemaError as e:
+ msg = f"WMN schema error: {e}"
+ raise WMNDataError(msg) from e
+ except WMNValidationError:
+ raise
+ except Exception as e:
+ msg = f"Unexpected error loading WMN data: {e}"
+ raise WMNDataError(msg) from e
@staticmethod
- def _validate_data(data: dict[str, Any], schema: dict[str, Any]) -> None:
- """Validate WMN data against schema. Raises on failure."""
- if not schema:
- return
- try:
- jsonschema.Draft7Validator.check_schema(schema)
- jsonschema.Draft7Validator(schema).validate(data)
- except jsonschema.ValidationError as e:
- msg = f"WMN data does not match schema: {e.message}"
- raise SchemaError(msg) from e
- except jsonschema.SchemaError as e:
- msg = f"Invalid WMN schema: {e.message}"
- raise SchemaError(msg) from e
-
- async def _ensure_dataset(self) -> None:
- """Load and validate the WMN dataset and schema if not already loaded."""
- if self._wmn_data and self._wmn_schema:
- return
+ def _ensure_initialized(
+ method: Callable[..., Any],
+ ) -> Callable[..., Any]:
+ """Decorator to ensure the instance is ready before calling a method."""
- try:
- data, schema = await self._load_wmn_lists()
- if not self._skip_validation:
- self._validate_data(data, schema)
- self._wmn_data = data
- self._wmn_schema = schema
- self._logger.info(
- "WMN dataset loaded (sites=%d)",
- len(self._wmn_data.get(WMN_KEY_SITES, [])),
- )
- except SchemaError as e:
- msg = f"WMN validation failed: {e}"
- raise DataError(msg) from e
- except Exception as e:
- msg = f"WMN load failed: {e}"
- raise DataError(msg) from e
+ @wraps(method)
+ async def wrapper(self: "Naminter", *args: Any, **kwargs: Any) -> Any:
+ await self._ensure_ready()
+ return await method(self, *args, **kwargs)
- async def _close_session(self) -> None:
- """Close the HTTP session if open."""
- async with self._session_lock:
- try:
- await self._http.close()
- self._logger.info("HTTP client closed")
- except asyncio.CancelledError:
- self._logger.warning("HTTP client close cancelled")
- raise
- except Exception as error:
- self._logger.warning("Error during HTTP client close: %s", error)
+ return wrapper
async def __aenter__(self) -> "Naminter":
- await self._open_session()
- try:
- await self._ensure_dataset()
- except DataError:
- self._logger.error("Dataset load failed")
- raise
+ await self._ensure_ready()
return self
async def __aexit__(
@@ -303,68 +169,6 @@ async def __aexit__(
"""Async context manager exit."""
await self._close_session()
- async def get_wmn_summary(
- self,
- site_names: list[str] | None = None,
- include_categories: list[str] | None = None,
- exclude_categories: list[str] | None = None,
- ) -> Summary:
- """Get enriched WMN metadata information for diagnostics and UI.
-
- Filters can be applied to compute statistics on a subset of sites.
- """
- try:
- await self._ensure_dataset()
- except DataError:
- self._logger.error("Dataset load failed")
- raise
- try:
- sites: list[dict[str, Any]] = self._filter_sites(
- site_names,
- include_categories=include_categories,
- exclude_categories=exclude_categories,
- )
- except DataError as e:
- self._logger.error("Site filtering failed: %s", e)
- raise
- try:
- category_list: list[str] = [
- site.get("cat") for site in sites if site.get("cat")
- ]
- site_name_list: list[str] = [
- site.get("name") for site in sites if site.get("name")
- ]
-
- total_known_accounts: int = 0
-
- for site in sites:
- known_list = site.get("known")
- if isinstance(known_list, list) and len(known_list) > 0:
- total_known_accounts += len(known_list)
-
- wmn_summary = Summary(
- license=list(dict.fromkeys(self._wmn_data.get("license", []))),
- authors=list(dict.fromkeys(self._wmn_data.get("authors", []))),
- site_names=list(dict.fromkeys(site_name_list)),
- sites_count=len(sites),
- categories=list(dict.fromkeys(category_list)),
- categories_count=len(set(category_list)),
- known_accounts_total=total_known_accounts,
- )
-
- self._logger.info(
- "WMN summary computed (sites=%d, categories=%d)",
- wmn_summary.sites_count,
- wmn_summary.categories_count,
- )
- return wmn_summary
- except DataError:
- raise
- except Exception as e:
- self._logger.exception("Failed to compute WMN summary")
- msg = f"Failed to retrieve metadata: {e}"
- raise DataError(msg) from e
-
def _filter_sites(
self,
site_names: list[str] | None,
@@ -372,117 +176,249 @@ def _filter_sites(
exclude_categories: list[str] | None = None,
) -> list[dict[str, Any]]:
"""Filter sites by names and categories for the current WMN dataset."""
- sites: list[dict[str, Any]] = self._wmn_data.get("sites", [])
+ assert self._wmn_data is not None
+ sites: list[dict[str, Any]] = self._wmn_data.get(WMN_KEY_SITES, [])
+
+ if not any((site_names, include_categories, exclude_categories)):
+ return sites
+
+ filtered_names: frozenset[str] = frozenset()
if site_names:
- filtered_site_names: set[str] = set(deduplicate_strings(site_names))
- available_names: set[str] = {site.get("name") for site in sites}
- missing_names = filtered_site_names - available_names
+ filtered_names = frozenset(site_names)
+ available_names: frozenset[str] = frozenset({
+ name for site in sites if (name := site.get(SITE_KEY_NAME)) is not None
+ })
+ missing_names: frozenset[str] = filtered_names - available_names
if missing_names:
msg = f"Unknown site names: {sorted(missing_names)}"
- raise DataError(msg)
- else:
- filtered_site_names = set()
-
- filtered_sites: list[dict[str, Any]] = sites
-
- if filtered_site_names:
- filtered_sites = [
- site
- for site in filtered_sites
- if site.get("name") in filtered_site_names
- ]
+ raise WMNDataError(msg)
- if include_categories:
- include_set: set[str] = set(deduplicate_strings(include_categories))
- filtered_sites = [
- site for site in filtered_sites if site.get("cat") in include_set
- ]
+ include_set: frozenset[str] = (
+ frozenset(include_categories) if include_categories else frozenset()
+ )
+ exclude_set: frozenset[str] = (
+ frozenset(exclude_categories) if exclude_categories else frozenset()
+ )
- if exclude_categories:
- exclude_set: set[str] = set(deduplicate_strings(exclude_categories))
- filtered_sites = [
- site for site in filtered_sites if site.get("cat") not in exclude_set
- ]
+ filtered_sites = [
+ site
+ for site in sites
+ if (not filtered_names or site.get(SITE_KEY_NAME) in filtered_names)
+ and (not include_set or site.get(SITE_KEY_CATEGORY) in include_set)
+ and (not exclude_set or site.get(SITE_KEY_CATEGORY) not in exclude_set)
+ ]
self._logger.debug(
- "Filter result %d/%d (names=%s include=%s exclude=%s)",
+ "Sites filtered: %d of %d sites remaining",
len(filtered_sites),
len(sites),
- bool(site_names),
- bool(include_categories),
- bool(exclude_categories),
)
return filtered_sites
- @staticmethod
- def _get_missing_keys(
- site: dict[str, Any], required_keys: Sequence[str]
- ) -> list[str]:
- """Return a list of required keys missing from a site mapping."""
- return [key for key in required_keys if key not in site]
+ @_ensure_initialized
+ async def get_wmn_summary(
+ self,
+ site_names: list[str] | None = None,
+ include_categories: list[str] | None = None,
+ exclude_categories: list[str] | None = None,
+ ) -> WMNSummary:
+ """Get enriched WMN metadata information for diagnostics and UI.
+
+ Retrieves comprehensive summary information about the loaded WhatsMyName
+ dataset, including site counts, categories, authors, and license information.
+ Filters can be applied to compute statistics on a subset of sites.
+
+ Args:
+ site_names: Optional list of specific site names to include in the summary.
+ If None, all sites are included (subject to category filters).
+ include_categories: Optional list of categories to include. Only sites
+ in these categories will be counted. If None, all categories are
+ included (subject to exclude_categories).
+ exclude_categories: Optional list of categories to exclude. Sites in these
+ categories will not be counted.
+
+ Returns:
+ WMNSummary: Summary object containing license, authors, site names, counts,
+            categories, and known accounts count.
+
+ Raises:
+ WMNDataError: If site_names contains unknown site names.
+
+ Example:
+ ```python
+            async with Naminter(http_client, wmn_data, wmn_schema) as naminter:
+ # Get summary of all sites
+ summary = await naminter.get_wmn_summary()
+ print(f"Total sites: {summary.sites_count}")
+
+ # Get summary for specific categories
+ summary = await naminter.get_wmn_summary(
+ include_categories=["social", "coding"]
+ )
+ print(f"Social/coding sites: {summary.sites_count}")
+ ```
+ """
+ assert self._wmn_data is not None
+ sites = self._filter_sites(
+ site_names,
+ include_categories=include_categories,
+ exclude_categories=exclude_categories,
+ )
+
+ category_list = [
+ site.get(SITE_KEY_CATEGORY) for site in sites if site.get(SITE_KEY_CATEGORY)
+ ]
+ site_name_list = [
+ site.get(SITE_KEY_NAME) for site in sites if site.get(SITE_KEY_NAME)
+ ]
+ known_count = sum(
+ len(site.get(SITE_KEY_KNOWN, []))
+ for site in sites
+ if isinstance(site.get(SITE_KEY_KNOWN), list)
+ )
+ wmn_summary = WMNSummary(
+ license=tuple(self._wmn_data.get(WMN_KEY_LICENSE, [])),
+ authors=tuple(self._wmn_data.get(WMN_KEY_AUTHORS, [])),
+ site_names=tuple(str(name) for name in site_name_list),
+ sites_count=len(sites),
+ categories=tuple(str(cat) for cat in category_list),
+ categories_count=len(set(category_list)),
+ known_count=known_count,
+ )
+
+ self._logger.debug(
+ "WMN summary computed (sites=%d, categories=%d)",
+ wmn_summary.sites_count,
+ wmn_summary.categories_count,
+ )
+ return wmn_summary
+
+ @_ensure_initialized
async def enumerate_site(
self,
site: dict[str, Any],
username: str,
- fuzzy_mode: bool = False,
- ) -> SiteResult:
- """Enumerate a single site for the given username."""
- await self._open_session()
- try:
- await self._ensure_dataset()
- except DataError:
- self._logger.error("Dataset load failed")
- raise
-
- missing_keys = self._get_missing_keys(site, REQUIRED_KEYS_ENUMERATE)
+ mode: WMNMode = WMNMode.ALL,
+ ) -> WMNResult:
+ """Enumerate a single site for the given username.
+
+ Performs a single username lookup for a single site definition
+ from the loaded WhatsMyName (WMN) dataset. It builds the URL and optional
+ POST body using the site's configuration, sends an HTTP request, and then
+ evaluates the response using the site's detection rules to determine
+ whether the username is present on that site.
+
+ Args:
+ site:
+ A single site configuration dictionary from the WMN dataset. This dict
+ must contain, at minimum, the following keys:
+ - "name": site name
+ - "cat": site category
+ - "uri_check": URL template with "{account}" placeholder
+ - "e_code": expected HTTP status for a "found" account
+ - "e_string": expected string in body for a "found" account
+ - "m_code": expected HTTP status for a "missing" account
+ - "m_string": expected string in body for a "missing" account
+ Optional keys include:
+ - "headers": dict of HTTP headers to send with the request.
+ - "post_body": POST body template containing "{account}".
+ - "strip_bad_char": characters to strip from the username
+ before substitution in the URL/body.
+ - "uri_pretty": an optional "pretty" URL template for reporting.
+ username:
+ The raw username to test on this site. It is used to build the
+ request URL and optional POST body. If the site defines
+ "strip_bad_char", those characters are removed from the
+ username before substitution.
+ mode:
+ Detection mode that controls how the "expected" (E) and "missing" (M)
+ criteria are interpreted when classifying the HTTP response:
+ - WMNMode.ALL: All configured conditions for a state must match
+ (strict AND logic).
+ - WMNMode.ANY: Any matching condition is sufficient
+ (looser OR logic).
+
+ Returns:
+ WMNResult:
+ A single WMNResult instance that encapsulates:
+ - name: site name (from "name"),
+ - category: site category (from "cat"),
+ - username: the username that was tested,
+ - url: the final URL used for reporting (may be "uri_pretty"),
+ - status: high-level classification, e.g. FOUND, NOT_FOUND,
+ AMBIGUOUS, UNKNOWN, ERROR, or NOT_VALID,
+ - response_code / response_text / elapsed (if the HTTP request
+ completed successfully),
+ - error message (if an error occurred).
+
+ Raises:
+ asyncio.CancelledError:
+ Propagated if the caller cancels the task while the HTTP request
+ is in progress.
+ WMNDataError:
+ Not raised directly from this method, but may be raised earlier
+ when initializing the Naminter instance or when validating the
+ underlying dataset.
+
+ Example:
+ ```python
+ site = {
+ "name": "GitHub",
+ "uri_check": "https://github.com/{account}",
+ "e_code": 200,
+ "e_string": "GitHub Profile",
+ "m_code": 404,
+ "m_string": "Not Found",
+ "cat": "coding",
+ }
+
+ async with Naminter(http_client, wmn_data, wmn_schema) as naminter:
+ result = await naminter.enumerate_site(site, "torvalds")
+ print(result.name, result.username, result.status, result.url)
+ ```
+ """
+ missing_keys = get_missing_keys(site, REQUIRED_KEYS_ENUMERATE)
if missing_keys:
- error_result = SiteResult(
- name=site.get("name", "unknown"),
- category=site.get("cat", "unknown"),
+ site_name = site.get(SITE_KEY_NAME, DEFAULT_UNKNOWN_VALUE)
+ self._logger.warning(
+ "Site '%s' is missing required keys: %s",
+ site_name,
+ missing_keys,
+ )
+ return WMNResult.from_error(
+ name=site_name,
+ category=site.get(SITE_KEY_CATEGORY, DEFAULT_UNKNOWN_VALUE),
username=username,
- status=ResultStatus.ERROR,
- error=f"Site entry missing required keys: {missing_keys}",
+ message=f"Site entry missing required keys: {missing_keys}",
)
- return error_result
-
- name = site["name"]
- category = site["cat"]
-
- uri_check_template = site["uri_check"]
- strip_bad_char = site.get("strip_bad_char", "")
- clean_username = username.translate(str.maketrans("", "", strip_bad_char))
- if not clean_username:
- error_result = SiteResult(
- name,
- category,
- username,
- ResultStatus.ERROR,
- error="Username became empty after stripping",
+
+ name = site[SITE_KEY_NAME]
+ category = site[SITE_KEY_CATEGORY]
+ strip_bad_char = site.get(SITE_KEY_STRIP_BAD_CHAR, EMPTY_STRING)
+ if strip_bad_char:
+ clean_username = username.translate(
+ str.maketrans(dict.fromkeys(strip_bad_char))
)
- return error_result
+ else:
+ clean_username = username
+ uri_check_template = site[SITE_KEY_URI_CHECK]
uri_check = uri_check_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
- uri_pretty = site.get("uri_pretty", uri_check_template).replace(
+ uri_pretty = site.get(SITE_KEY_URI_PRETTY, uri_check_template).replace(
ACCOUNT_PLACEHOLDER, clean_username
)
- self._logger.debug(
- "Enumerating site=%s user=%s mode=%s",
- name,
- username,
- "FUZZY" if fuzzy_mode else "STRICT",
- )
-
- headers = site.get("headers", {})
- post_body = site.get("post_body")
+ headers = site.get(SITE_KEY_HEADERS, {})
+ post_body = site.get(SITE_KEY_POST_BODY)
if post_body:
post_body = post_body.replace(ACCOUNT_PLACEHOLDER, clean_username)
- self._logger.debug("POST %s (body_present=%s)", uri_check, True)
+ self._logger.debug("Checking %s with POST request", uri_check)
else:
- self._logger.debug("GET %s", uri_check)
+ self._logger.debug("Checking %s with GET request", uri_check)
- error_result: SiteResult | None = None
+ result: WMNResult | None = None
+ response: WMNResponse | None = None
try:
async with self._semaphore:
if post_body:
@@ -491,215 +427,370 @@ async def enumerate_site(
)
else:
response = await self._http.get(uri_check, headers=headers)
- elapsed = response.elapsed
+
self._logger.debug(
- "Request ok (status=%d, elapsed=%.2fs)",
+ "Response from %s: status=%d, elapsed=%.2fs",
+ name,
response.status_code,
- elapsed,
+ response.elapsed,
)
except asyncio.CancelledError:
- self._logger.warning("Request cancelled")
+ self._logger.debug("Request cancelled")
raise
except HttpTimeoutError as e:
- self._logger.warning("Request timeout for %s: %s", name, e)
- error_result = SiteResult(
+ self._logger.warning("Request to '%s' timed out: %s", name, e)
+ result = WMNResult.from_error(
name=name,
category=category,
username=username,
- result_url=uri_pretty,
- status=ResultStatus.ERROR,
- error=f"Request timeout: {e}",
+ url=uri_pretty,
+ message=f"Request timeout: {e}",
)
except HttpSessionError as e:
- self._logger.warning("Session error for %s: %s", name, e)
- error_result = SiteResult(
+ self._logger.warning("Session error for '%s': %s", name, e)
+ result = WMNResult.from_error(
name=name,
category=category,
username=username,
- result_url=uri_pretty,
- status=ResultStatus.ERROR,
- error=f"Session error: {e}",
+ url=uri_pretty,
+ message=f"Session error: {e}",
)
except HttpError as e:
- self._logger.warning("Network error for %s: %s", name, e)
- error_result = SiteResult(
+ self._logger.warning("Network error for '%s': %s", name, e)
+ result = WMNResult.from_error(
name=name,
category=category,
username=username,
- result_url=uri_pretty,
- status=ResultStatus.ERROR,
- error=f"Network error: {e}",
+ url=uri_pretty,
+ message=f"Network error: {e}",
)
except Exception as e:
- self._logger.exception("Unexpected error during request for %s", name)
- error_result = SiteResult(
+ self._logger.exception("Unexpected error during request for '%s'", name)
+ result = WMNResult.from_error(
name=name,
category=category,
username=username,
- result_url=uri_pretty,
- status=ResultStatus.ERROR,
- error=f"Unexpected error: {e}",
+ url=uri_pretty,
+ message=f"Unexpected error: {e}",
)
- if error_result is not None:
- return error_result
+ if result is not None:
+ return result
- result_status = SiteResult.get_result_status(
+ result = WMNResult.from_response(
+ name=name,
+ category=category,
+ username=username,
+ url=uri_pretty,
response_code=response.status_code,
response_text=response.text,
- e_code=site["e_code"],
- e_string=site["e_string"],
- m_code=site["m_code"],
- m_string=site["m_string"],
- fuzzy_mode=fuzzy_mode,
+ elapsed=response.elapsed,
+ mode=mode,
+ e_code=site[SITE_KEY_E_CODE],
+ e_string=site[SITE_KEY_E_STRING],
+ m_code=site[SITE_KEY_M_CODE],
+ m_string=site[SITE_KEY_M_STRING],
)
self._logger.debug(
- "Result=%s (HTTP %d)", result_status.name, response.status_code
+ "Check result for '%s': %s (HTTP %d)",
+ name,
+ result.status.name,
+ response.status_code,
)
- return SiteResult(
- name=name,
- category=category,
- username=username,
- result_url=uri_pretty,
- status=result_status,
- response_code=response.status_code,
- elapsed=elapsed,
- response_text=response.text,
- )
+ return result
+
+ @staticmethod
+ async def _execute_tasks(
+ coroutines: list[Awaitable[T]],
+ as_generator: bool,
+ ) -> list[T] | AsyncGenerator[T, None]:
+ """Execute tasks and return results as list or generator."""
+ if as_generator:
+ async def _generator() -> AsyncGenerator[T, None]:
+ tasks = [asyncio.create_task(coroutine) for coroutine in coroutines]
+ try:
+ for task in asyncio.as_completed(tasks):
+ yield await task
+ finally:
+ for task in tasks:
+ if not task.done():
+ task.cancel()
+ if tasks:
+ await asyncio.gather(*tasks, return_exceptions=True)
+
+ return _generator()
+ return list(await asyncio.gather(*coroutines))
+
+ @overload
async def enumerate_usernames(
self,
usernames: list[str],
site_names: list[str] | None = None,
include_categories: list[str] | None = None,
exclude_categories: list[str] | None = None,
- fuzzy_mode: bool = False,
- as_generator: bool = False,
- ) -> list[SiteResult] | AsyncGenerator[SiteResult, None]:
- """Enumerate one or multiple usernames across all loaded sites."""
- await self._open_session()
- try:
- await self._ensure_dataset()
- except DataError:
- self._logger.exception("Dataset load failed")
- raise
+ mode: WMNMode = WMNMode.ALL,
+ as_generator: Literal[True] = ...,
+ ) -> AsyncGenerator[WMNResult, None]: ...
- try:
- usernames = validate_usernames(usernames)
- except ValidationError as e:
- self._logger.error("Invalid usernames: %s", e)
- msg = "Invalid usernames"
- raise DataError(msg) from e
- else:
- self._logger.info("Usernames validated (count=%d)", len(usernames))
+ @overload
+ async def enumerate_usernames(
+ self,
+ usernames: list[str],
+ site_names: list[str] | None = None,
+ include_categories: list[str] | None = None,
+ exclude_categories: list[str] | None = None,
+ mode: WMNMode = WMNMode.ALL,
+ as_generator: Literal[False] = ...,
+ ) -> list[WMNResult]: ...
- try:
- sites = self._filter_sites(
- site_names,
- include_categories=include_categories,
- exclude_categories=exclude_categories,
- )
- except DataError as e:
- self._logger.error("Site filtering failed: %s", e)
- raise
+ @_ensure_initialized
+ async def enumerate_usernames(
+ self,
+ usernames: list[str],
+ site_names: list[str] | None = None,
+ include_categories: list[str] | None = None,
+ exclude_categories: list[str] | None = None,
+ mode: WMNMode = WMNMode.ALL,
+ as_generator: bool = False,
+ ) -> list[WMNResult] | AsyncGenerator[WMNResult, None]:
+ """Enumerate one or multiple usernames across one or multiple sites.
+
+ This is the high-level method for running bulk username checks. It takes:
+ - one list of usernames, and
+ - a selection of sites (by name and/or category filters),
+ then runs enumerate_site for every (site, username) pair.
+
+ The method can operate in two modes:
+ - "batch" mode (as_generator=False): returns a list of all WMNResult objects
+ once all checks are complete.
+ - "streaming" mode (as_generator=True): returns an async generator that yields
+ WMNResult objects one by one as they finish, without waiting for all tasks.
+
+ Args:
+ usernames:
+ A non-empty list of usernames to enumerate across sites.
+ Each username is tested independently on every selected site.
+ site_names:
+ Optional list of site names to restrict enumeration to a subset of
+ sites. If None, all sites from the WMN dataset are considered
+ (subject to category filters). If provided, every name must correspond
+ to a known site; otherwise a WMNDataError is raised.
+ include_categories:
+ Optional list of site categories (values of the "cat" field) to
+ include. When provided, only sites whose category is in this list
+ are considered. This filter is applied in addition to site_names.
+ exclude_categories:
+ Optional list of site categories (values of the "cat" field) to
+ exclude. When provided, any site whose category is in this list is
+ skipped. This filter is also applied in addition to site_names and
+ include_categories.
+ mode:
+ Detection mode forwarded to enumerate_site for each check:
+ - WMNMode.ALL: strict evaluation (all "found" indicators must match).
+ - WMNMode.ANY: relaxed evaluation (any "found" indicator can match).
+ as_generator:
+ Controls the shape of the returned value:
+ - If False (default), all checks are scheduled, awaited, and a full
+ list[WMNResult] is returned when everything is done.
+ - If True, an AsyncGenerator[WMNResult, None] is returned instead.
+ The caller can then `async for` over individual WMNResult objects
+ as they become available.
+
+ Returns:
+ Union[list[WMNResult], AsyncGenerator[WMNResult, None]]:
+ - If as_generator is False:
+ A flat list of WMNResult objects, one per (site, username) pair.
+                The list order matches submission order (asyncio.gather preserves it).
+ - If as_generator is True:
+ An async generator that yields WMNResult objects one at a time
+ as tasks complete. This allows streaming processing of results.
+
+ Raises:
+ WMNDataError:
+ If any requested site name in site_names does not exist in the
+ loaded WMN dataset. This validation is performed during site filtering
+ before any network requests are made.
+ WMNDataError / WMNValidationError:
+                May be raised earlier when preparing the dataset (via _ensure_initialized),
+ before enumeration starts.
+ """
+ sites = self._filter_sites(
+ site_names,
+ include_categories=include_categories,
+ exclude_categories=exclude_categories,
+ )
+
+ self._logger.info(
+ "Starting enumeration for %d username(s) on %d site(s)",
+ len(usernames),
+ len(sites),
+ )
coroutines = [
- self.enumerate_site(site, username, fuzzy_mode)
+ self.enumerate_site(site, username, mode)
for site in sites
for username in usernames
]
- async def iterate_results() -> AsyncGenerator[SiteResult, None]:
- for completed_task in asyncio.as_completed(coroutines):
- yield await completed_task
+ return await self._execute_tasks(coroutines, as_generator)
- if as_generator:
- return iterate_results()
+ @overload
+ async def validate_sites(
+ self,
+ site_names: list[str] | None = None,
+ include_categories: list[str] | None = None,
+ exclude_categories: list[str] | None = None,
+ mode: WMNMode = WMNMode.ALL,
+ as_generator: Literal[True] = ...,
+ ) -> AsyncGenerator[WMNValidationResult, None]: ...
- return await asyncio.gather(*coroutines)
+ @overload
+ async def validate_sites(
+ self,
+ site_names: list[str] | None = None,
+ include_categories: list[str] | None = None,
+ exclude_categories: list[str] | None = None,
+ mode: WMNMode = WMNMode.ALL,
+ as_generator: Literal[False] = ...,
+ ) -> list[WMNValidationResult]: ...
- async def self_enumeration(
+ @_ensure_initialized
+ async def validate_sites(
self,
site_names: list[str] | None = None,
include_categories: list[str] | None = None,
exclude_categories: list[str] | None = None,
- fuzzy_mode: bool = False,
+ mode: WMNMode = WMNMode.ALL,
as_generator: bool = False,
- ) -> list[SelfEnumerationResult] | AsyncGenerator[SelfEnumerationResult, None]:
- """Run self-enumeration using known accounts for each site."""
- await self._open_session()
- try:
- await self._ensure_dataset()
- except DataError:
- self._logger.exception("Dataset load failed")
- raise
-
- try:
- sites = self._filter_sites(
- site_names,
- include_categories=include_categories,
- exclude_categories=exclude_categories,
- )
- except DataError as e:
- self._logger.error("Site filtering failed: %s", e)
- raise
+ ) -> list[WMNValidationResult] | AsyncGenerator[WMNValidationResult, None]:
+ """Validate site detection rules using known usernames from the dataset.
+
+ This method is intended for maintainers and for automated health checks of
+ the WMN dataset. Instead of testing arbitrary usernames, it:
+ - Selects a subset of sites (optionally filtered by site_names and
+ categories).
+ - For each selected site, reads its list of "known good" usernames
+ from the "known" field.
+ - For each (site, known_username) pair, calls enumerate_site.
+ - Aggregates all WMNResult objects into a single WMNValidationResult per site.
+
+ This allows you to confirm that:
+ - The configured detection rules ("e_code", "e_string", "m_code", "m_string")
+ still correctly identify accounts, and
+ - The site entries themselves are structurally valid and complete.
+
+ Args:
+ site_names:
+ Optional list of site names to validate. If None, all sites from the
+ dataset are considered (subject to category filters). If provided,
+ all names must exist in the dataset; unknown names lead to a
+ WMNDataError raised during site filtering.
+ include_categories:
+ Optional list of categories (values of the "cat" field) to include
+ during validation. Only sites whose category is in this list are
+ validated. This is combined with site_names if both are provided.
+ exclude_categories:
+ Optional list of categories (values of the "cat" field) to exclude
+ from validation. Any site whose category is in this list is skipped.
+ This exclusion is applied after site_names and include_categories.
+ mode:
+ Detection mode passed down to enumerate_site for each known username:
+ - WMNMode.ALL: strict evaluation (recommended for validation).
+ - WMNMode.ANY: relaxed evaluation (useful for exploratory checks).
+ as_generator:
+ Controls the return type:
+ - If False (default), returns a list[WMNValidationResult] after all
+ sites have been validated.
+ - If True, returns an AsyncGenerator[WMNValidationResult, None] that
+ yields one WMNValidationResult per site as soon as that site's
+ validation has finished.
+
+ Returns:
+ Union[list[WMNValidationResult], AsyncGenerator[WMNValidationResult, None]]:
+ - If as_generator is False:
+ A list where each item is a WMNValidationResult describing one
+ site and the WMNResult objects for all of its known usernames.
+ - If as_generator is True:
+ An async generator that yields WMNValidationResult objects for
+ each validated site in completion order.
+
+ Each WMNValidationResult includes:
+ - name: site name,
+ - category: site category (the value of the "cat" field),
+ - results: list[WMNResult] for each known username (may be empty),
+ - status: aggregate status derived from underlying WMNResult values
+ (e.g. ERROR if any check failed),
+ - error: textual description if validation could not be performed
+ for that site (e.g. missing required keys or unexpected error).
+
+ Raises:
+ WMNDataError:
+ If any of the requested site_names does not exist in the dataset.
+ WMNDataError / WMNValidationError:
+            May be raised earlier from _ensure_initialized if the dataset or schema
+ is invalid.
+
+ Site-level error handling:
+ - If a site is missing required keys needed for self-validation
+ (as defined by REQUIRED_KEYS_SELF_ENUM in code), a WMNValidationResult
+ is returned with `error` populated and `results` left empty.
+ - If an unexpected exception occurs when validating a site, it is caught
+ and converted into a WMNValidationResult with `error` set accordingly.
+ """
+ sites = self._filter_sites(
+ site_names,
+ include_categories=include_categories,
+ exclude_categories=exclude_categories,
+ )
self._logger.info(
- "Starting self-enumeration (sites=%d, mode=%s)",
+ "Starting validation for %d site(s) (mode=%s)",
len(sites),
- ValidationMode.FUZZY if fuzzy_mode else ValidationMode.STRICT,
+ mode,
)
- async def _enumerate_known(site: dict[str, Any]) -> SelfEnumerationResult:
- """Helper function to enumerate a site with all its known users."""
- missing_keys = self._get_missing_keys(site, REQUIRED_KEYS_SELF_ENUM)
+ async def _enumerate_known(site: dict[str, Any]) -> WMNValidationResult:
+ """Helper function to validate a site with all its known users."""
+ site_name = site.get(SITE_KEY_NAME, DEFAULT_UNKNOWN_VALUE)
+ site_category = site.get(SITE_KEY_CATEGORY, DEFAULT_UNKNOWN_VALUE)
+
+ missing_keys = get_missing_keys(site, REQUIRED_KEYS_SELF_ENUM)
if missing_keys:
- return SelfEnumerationResult(
- name=site.get("name", "unknown"),
- category=site.get("cat", "unknown"),
+ self._logger.warning(
+ "Site '%s' is missing required keys for validation: %s",
+ site_name,
+ missing_keys,
+ )
+ return WMNValidationResult(
+ name=site_name,
+ category=site_category,
error=f"Site data missing required keys: {missing_keys}",
)
- name = site["name"]
- category = site["cat"]
- known = site["known"]
-
+ known = site[SITE_KEY_KNOWN]
self._logger.debug(
- "Self-enumerating site=%s category=%s known_count=%d",
- name,
- category,
+ "Validating '%s' with %d known user(s)",
+ site_name,
len(known),
)
try:
- coroutines = [
- self.enumerate_site(site, username, fuzzy_mode)
- for username in known
- ]
- results = await asyncio.gather(*coroutines)
-
- return SelfEnumerationResult(
- name=name, category=category, results=results
+ results = await asyncio.gather(
+ *(self.enumerate_site(site, username, mode) for username in known)
+ )
+ return WMNValidationResult(
+ name=site_name, category=site_category, results=results
)
except Exception as e:
- self._logger.exception("Self-enumeration failed for site=%s", name)
- return SelfEnumerationResult(
- name=name,
- category=category,
- error=f"Unexpected error during self-enumeration: {e}",
+ self._logger.exception("Validation failed for site='%s'", site_name)
+ return WMNValidationResult(
+ name=site_name,
+ category=site_category,
+ error=f"Unexpected error during site validation: {e}",
)
- coroutines = [
- _enumerate_known(site) for site in sites if isinstance(site, dict)
- ]
-
- async def iterate_results() -> AsyncGenerator[SelfEnumerationResult, None]:
- for completed_task in asyncio.as_completed(coroutines):
- yield await completed_task
-
- if as_generator:
- return iterate_results()
-
- return await asyncio.gather(*coroutines)
+ coroutines = [_enumerate_known(site) for site in sites]
+ return await self._execute_tasks(coroutines, as_generator)
diff --git a/naminter/core/models.py b/naminter/core/models.py
index a2a85ed..b114af5 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -1,16 +1,26 @@
+from __future__ import annotations
+
import json
-from dataclasses import asdict, dataclass, field
-from datetime import datetime
+from dataclasses import dataclass, field
+from datetime import UTC, datetime
from enum import StrEnum, auto
-from typing import Any
+from typing import TYPE_CHECKING, Any, TypedDict
+
+if TYPE_CHECKING:
+ from collections.abc import Sequence
+
+class WMNMode(StrEnum):
+ """Enumeration mode for username enumeration.
-class ValidationMode(StrEnum):
- FUZZY = auto()
- STRICT = auto()
+ ALL uses AND logic, ANY uses OR logic.
+ """
+ ALL = auto()
+ ANY = auto()
-class ResultStatus(StrEnum):
+
+class WMNStatus(StrEnum):
"""Status of username search results."""
FOUND = auto()
@@ -21,41 +31,96 @@ class ResultStatus(StrEnum):
ERROR = auto()
-@dataclass(slots=True, frozen=True)
-class SiteResult:
+class WMNDataset(TypedDict):
+ """Type definition for WMN dataset structure."""
+
+ sites: list[dict[str, Any]]
+ categories: list[str]
+ authors: list[str]
+ license: str | list[str]
+
+
+@dataclass(slots=True, frozen=True, kw_only=True)
+class WMNSummary:
+ """Summary of the loaded WhatsMyName dataset and filters applied."""
+
+ license: tuple[str, ...]
+ authors: tuple[str, ...]
+ site_names: tuple[str, ...]
+ sites_count: int
+ categories: tuple[str, ...]
+ categories_count: int
+ known_count: int
+
+ def to_dict(self) -> dict[str, Any]:
+ return {
+ "license": list(self.license),
+ "authors": list(self.authors),
+ "site_names": list(self.site_names),
+ "sites_count": self.sites_count,
+ "categories": list(self.categories),
+ "categories_count": self.categories_count,
+ "known_count": self.known_count,
+ }
+
+
+@dataclass(slots=True, frozen=True, kw_only=True)
+class WMNResult:
"""Result of testing a username on a site."""
name: str
category: str
username: str
- status: ResultStatus
- result_url: str | None = None
+ status: WMNStatus
+ url: str | None = None
response_code: int | None = None
response_text: str | None = None
elapsed: float | None = None
error: str | None = None
- created_at: datetime = field(default_factory=datetime.now)
+ created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
+
+ @classmethod
+ def from_error(
+ cls,
+ *,
+ name: str,
+ category: str,
+ username: str,
+ message: str,
+ url: str | None = None,
+ ) -> WMNResult:
+ return cls(
+ name=name,
+ category=category,
+ username=username,
+ url=url,
+ status=WMNStatus.ERROR,
+ error=message,
+ )
@classmethod
- def get_result_status(
+ def from_response(
cls,
+ *,
+ name: str,
+ category: str,
+ username: str,
+ url: str | None,
response_code: int,
response_text: str,
- e_code: int | None = None,
- e_string: str | None = None,
- m_code: int | None = None,
- m_string: str | None = None,
- fuzzy_mode: bool = False,
- ) -> ResultStatus:
- condition_found = False
- condition_not_found = False
-
- if fuzzy_mode:
+ elapsed: float | None,
+ mode: WMNMode,
+ e_code: int | None,
+ e_string: str | None,
+ m_code: int | None,
+ m_string: str | None,
+ ) -> WMNResult:
+ if mode == WMNMode.ANY:
condition_found = (e_code is not None and response_code == e_code) or (
- e_string and e_string in response_text
+ e_string is not None and e_string in response_text
)
condition_not_found = (m_code is not None and response_code == m_code) or (
- m_string and m_string in response_text
+ m_string is not None and m_string in response_text
)
else:
condition_found = (
@@ -63,7 +128,6 @@ def get_result_status(
and (e_string is None or e_string in response_text)
and (e_code is not None or e_string is not None)
)
-
condition_not_found = (
(m_code is None or response_code == m_code)
and (m_string is None or m_string in response_text)
@@ -71,106 +135,94 @@ def get_result_status(
)
if condition_found and condition_not_found:
- return ResultStatus.AMBIGUOUS
+ status = WMNStatus.AMBIGUOUS
elif condition_found:
- return ResultStatus.FOUND
+ status = WMNStatus.FOUND
elif condition_not_found:
- return ResultStatus.NOT_FOUND
+ status = WMNStatus.NOT_FOUND
else:
- return ResultStatus.UNKNOWN
-
- def to_dict(self, exclude_response_text: bool = False) -> dict[str, Any]:
- """Convert SiteResult to dict."""
- result = asdict(self)
- result["status"] = self.status.value
- result["created_at"] = self.created_at.isoformat()
- if exclude_response_text:
- result.pop("response_text", None)
- return result
+ status = WMNStatus.UNKNOWN
+
+ return cls(
+ name=name,
+ category=category,
+ username=username,
+ url=url,
+ status=status,
+ response_code=response_code,
+ elapsed=elapsed,
+ response_text=response_text,
+ )
+
+ def to_dict(
+ self, *, exclude_response_text: bool = False, include_none: bool = False
+ ) -> dict[str, Any]:
+ result_dict: dict[str, Any] = {
+ "name": self.name,
+ "category": self.category,
+ "username": self.username,
+ "status": self.status.value,
+ "url": self.url,
+ "response_code": self.response_code,
+ "elapsed": self.elapsed,
+ "error": self.error,
+ "created_at": self.created_at.isoformat(),
+ }
+ if not exclude_response_text:
+ result_dict["response_text"] = self.response_text
+ if not include_none:
+ result_dict = {
+ key: value for key, value in result_dict.items() if value is not None
+ }
+ return result_dict
-@dataclass(slots=True, frozen=True)
-class SelfEnumerationResult:
- """Result of a self-enumeration for a username."""
+@dataclass(slots=True, frozen=True, kw_only=True)
+class WMNValidationResult:
+ """Result of validation testing for a site's detection methods."""
name: str
category: str
- results: list[SiteResult] | None = None
- status: ResultStatus = field(init=False)
+ results: Sequence[WMNResult] | None = None
error: str | None = None
- created_at: datetime = field(default_factory=datetime.now)
+ created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
+ status: WMNStatus = field(init=False)
def __post_init__(self) -> None:
- """Calculate result status from results."""
object.__setattr__(self, "status", self._get_result_status())
- def _get_result_status(self) -> ResultStatus:
- """Determine result status from results."""
+ def _get_result_status(self) -> WMNStatus:
+ status = WMNStatus.UNKNOWN
if self.error:
- return ResultStatus.ERROR
-
- if not self.results:
- return ResultStatus.UNKNOWN
-
- statuses: set[ResultStatus] = {
- result.status for result in self.results if result
- }
-
- if not statuses:
- return ResultStatus.UNKNOWN
-
- if ResultStatus.ERROR in statuses:
- return ResultStatus.ERROR
-
- if len(statuses) > 1:
- return ResultStatus.UNKNOWN
-
- return next(iter(statuses))
-
- def to_dict(self, exclude_response_text: bool = False) -> dict[str, Any]:
- """Convert SelfEnumerationResult to dict."""
+ status = WMNStatus.ERROR
+ elif not self.results:
+ status = WMNStatus.UNKNOWN
+ else:
+ statuses = {result.status for result in self.results}
+ if WMNStatus.ERROR in statuses:
+ status = WMNStatus.ERROR
+ elif WMNStatus.FOUND in statuses and WMNStatus.NOT_FOUND in statuses:
+ status = WMNStatus.AMBIGUOUS
+ elif len(statuses) == 1:
+ status = next(iter(statuses))
+ return status
+
+ def to_dict(self, *, exclude_response_text: bool = False) -> dict[str, Any]:
return {
"name": self.name,
"category": self.category,
"results": [
result.to_dict(exclude_response_text=exclude_response_text)
- for result in self.results
- ]
- if self.results
- else [],
+ for result in (self.results or [])
+ ],
+ "error": self.error,
"status": self.status.value,
"created_at": self.created_at.isoformat(),
- "error": self.error,
- }
-
-
-@dataclass(slots=True, frozen=True)
-class Summary:
- """Summary of the loaded WhatsMyName dataset and filters applied."""
-
- license: list[str]
- authors: list[str]
- site_names: list[str]
- sites_count: int
- categories: list[str]
- categories_count: int
- known_accounts_total: int
-
- def to_dict(self) -> dict[str, Any]:
- """Convert Summary to a plain dictionary for serialization/legacy callers."""
- return {
- "license": list(self.license),
- "authors": list(self.authors),
- "site_names": list(self.site_names),
- "sites_count": int(self.sites_count),
- "categories": list(self.categories),
- "categories_count": int(self.categories_count),
- "known_accounts_total": int(self.known_accounts_total),
}
-@dataclass(slots=True, frozen=True)
-class Response:
+@dataclass(slots=True, frozen=True, kw_only=True)
+class WMNResponse:
"""HTTP response abstraction used by session adapters."""
status_code: int
@@ -178,9 +230,14 @@ class Response:
elapsed: float
def json(self) -> Any:
- """Parse the response body as JSON and return the resulting object.
-
- Raises:
- ValueError: If the response text is not valid JSON.
- """
+ """Parse the response body as JSON and return the resulting object."""
return json.loads(self.text)
+
+
+@dataclass(frozen=True, slots=True, kw_only=True)
+class WMNValidationModel:
+    """Structured representation of a JSON Schema validation error."""
+
+ path: str
+ data: str | None
+ message: str
diff --git a/naminter/core/network.py b/naminter/core/network.py
index c6f8047..f903d9b 100644
--- a/naminter/core/network.py
+++ b/naminter/core/network.py
@@ -1,15 +1,20 @@
import asyncio
import logging
from collections.abc import Mapping
-from typing import Any, Protocol, runtime_checkable
+from typing import TYPE_CHECKING, Any, Protocol, cast, runtime_checkable
-from curl_cffi import BrowserTypeLiteral
-from curl_cffi.requests import AsyncSession
+from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
+from curl_cffi.requests import AsyncSession, ProxySpec
from curl_cffi.requests.exceptions import RequestException as CurlRequestException
from curl_cffi.requests.exceptions import Timeout as CurlTimeout
+if TYPE_CHECKING:
+ from curl_cffi.requests.session import HttpMethod
+
+
+from .constants import HTTP_REQUEST_TIMEOUT_SECONDS
from .exceptions import HttpError, HttpSessionError, HttpTimeoutError
-from .models import Response
+from .models import WMNResponse
@runtime_checkable
@@ -24,7 +29,9 @@ async def close(self) -> None:
"""Close the underlying HTTP session."""
...
- async def get(self, url: str, headers: Mapping[str, str] | None = None) -> Response:
+ async def get(
+ self, url: str, headers: Mapping[str, str] | None = None
+ ) -> WMNResponse:
"""HTTP GET request (see class docstring for error contract)."""
...
@@ -33,7 +40,7 @@ async def post(
url: str,
headers: Mapping[str, str] | None = None,
data: str | bytes | None = None,
- ) -> Response:
+ ) -> WMNResponse:
"""HTTP POST request (see class docstring for error contract)."""
...
@@ -43,7 +50,7 @@ async def request(
url: str,
headers: Mapping[str, str] | None = None,
data: str | bytes | None = None,
- ) -> Response:
+ ) -> WMNResponse:
"""Generic HTTP request (see class docstring for error contract)."""
...
@@ -54,12 +61,12 @@ def __init__(
*,
proxies: str | dict[str, str] | None = None,
verify: bool = True,
- timeout: int = 30,
+ timeout: int = HTTP_REQUEST_TIMEOUT_SECONDS,
allow_redirects: bool = True,
impersonate: BrowserTypeLiteral | None = None,
ja3: str | None = None,
akamai: str | None = None,
- extra_fp: dict[str, Any] | None = None,
+ extra_fp: ExtraFingerprints | dict[str, Any] | None = None,
) -> None:
self._logger = logging.getLogger(__name__)
self._session: AsyncSession | None = None
@@ -67,38 +74,41 @@ def __init__(
if isinstance(proxies, str):
proxies = {"http": proxies, "https": proxies}
- self._proxies: str | dict[str, str] | None = proxies
+ self._proxies: dict[str, str] | None = proxies
self._verify: bool = verify
self._timeout: int = timeout
self._allow_redirects: bool = allow_redirects
self._impersonate: BrowserTypeLiteral | None = impersonate
self._ja3: str | None = ja3
self._akamai: str | None = akamai
- self._extra_fp: dict[str, Any] | None = extra_fp
+ self._extra_fp: ExtraFingerprints | dict[str, Any] | None = extra_fp
self._lock = asyncio.Lock()
async def open(self) -> None:
if self._session is not None:
return
+
async with self._lock:
if self._session is None:
try:
+ proxies_spec: ProxySpec | None = cast(
+ "ProxySpec | None", self._proxies
+ )
+ extra_fp_spec: Any = self._extra_fp
self._session = AsyncSession(
- proxies=self._proxies,
+ proxies=proxies_spec,
verify=self._verify,
timeout=self._timeout,
allow_redirects=self._allow_redirects,
impersonate=self._impersonate,
ja3=self._ja3,
akamai=self._akamai,
- extra_fp=self._extra_fp,
+ extra_fp=extra_fp_spec,
)
except Exception as e:
- msg = "Failed to open curl-cffi session"
- raise HttpSessionError(
- msg, cause=e
- ) from e
+ msg = "Unexpected error opening HTTP session"
+ raise HttpSessionError(msg, cause=e) from e
async def close(self) -> None:
if not self._session:
@@ -106,62 +116,22 @@ async def close(self) -> None:
try:
await self._session.close()
except Exception as e:
- self._logger.warning("Error closing curl-cffi session: %s", e)
+ self._logger.warning("Unexpected error closing HTTP session: %s", e)
finally:
self._session = None
- async def get(self, url: str, headers: Mapping[str, str] | None = None) -> Response:
- await self.open()
- if self._session is None:
- msg = "Session not initialized"
- raise HttpSessionError(msg)
-
- try:
- response = await self._session.get(
- url, headers=dict(headers) if headers else None
- )
- elapsed = response.elapsed
- return Response(
- status_code=response.status_code, text=response.text, elapsed=elapsed
- )
- except CurlTimeout as e:
- msg = f"GET timeout for {url}"
- raise HttpTimeoutError(msg, cause=e) from e
- except CurlRequestException as e:
- msg = f"GET failed for {url}: {e}"
- raise HttpError(msg, cause=e) from e
- except Exception as e:
- msg = f"GET failed for {url}: {e}"
- raise HttpError(msg, cause=e) from e
+ async def get(
+ self, url: str, headers: Mapping[str, str] | None = None
+ ) -> WMNResponse:
+ return await self.request("GET", url, headers=headers)
async def post(
self,
url: str,
headers: Mapping[str, str] | None = None,
data: str | bytes | None = None,
- ) -> Response:
- await self.open()
- if self._session is None:
- msg = "Session not initialized"
- raise HttpSessionError(msg)
-
- try:
- response = await self._session.post(
- url, headers=dict(headers) if headers else None, data=data
- )
- elapsed = response.elapsed
- return Response(
- status_code=response.status_code, text=response.text, elapsed=elapsed
- )
- except CurlTimeout as e:
- msg = f"POST timeout for {url}"
- raise HttpTimeoutError(msg, cause=e) from e
- except CurlRequestException as e:
- msg = f"POST failed for {url}: {e}"
- raise HttpError(msg, cause=e) from e
- except Exception as e:
- msg = f"POST failed for {url}: {e}"
- raise HttpError(msg, cause=e) from e
+ ) -> WMNResponse:
+ return await self.request("POST", url, headers=headers, data=data)
async def request(
self,
@@ -169,23 +139,23 @@ async def request(
url: str,
headers: Mapping[str, str] | None = None,
data: str | bytes | None = None,
- ) -> Response:
+ ) -> WMNResponse:
await self.open()
- if self._session is None:
- msg = "Session not initialized"
- raise HttpSessionError(msg)
+
+ assert self._session is not None
try:
- response = await self._session.request(
- method=method,
+ response = await self._session.request( # type: ignore[reportUnknownMemberType]
+ method=cast("HttpMethod", method.upper()),
url=url,
headers=dict(headers) if headers else None,
data=data,
)
- elapsed = response.elapsed
- return Response(
- status_code=response.status_code, text=response.text, elapsed=elapsed
+ return WMNResponse(
+ status_code=response.status_code,
+ text=response.text,
+ elapsed=response.elapsed,
)
except CurlTimeout as e:
msg = f"{method} timeout for {url}"
@@ -194,12 +164,12 @@ async def request(
msg = f"{method} failed for {url}: {e}"
raise HttpError(msg, cause=e) from e
except Exception as e:
- msg = f"{method} failed for {url}: {e}"
+ msg = f"Unexpected error during {method} request to {url}: {e}"
raise HttpError(msg, cause=e) from e
__all__ = [
"BaseSession",
"CurlCFFISession",
- "Response",
+ "WMNResponse",
]
diff --git a/naminter/core/utils.py b/naminter/core/utils.py
index 53739f2..08a3ba7 100644
--- a/naminter/core/utils.py
+++ b/naminter/core/utils.py
@@ -1,157 +1,115 @@
+import json
import logging
+from collections.abc import Sequence
from typing import Any
+from jsonschema import Draft7Validator
+from jsonschema.exceptions import SchemaError as JsonSchemaError
+
from .constants import (
- EXTREME_CONCURRENCY_THRESHOLD,
- HIGH_CONCURRENCY_MIN_TIMEOUT,
- HIGH_CONCURRENCY_THRESHOLD,
- LOW_TIMEOUT_WARNING_THRESHOLD,
- MAX_TASKS_LIMIT,
- MAX_TIMEOUT,
- MIN_TASKS,
- MIN_TIMEOUT,
- VERY_HIGH_CONCURRENCY_MIN_TIMEOUT,
- VERY_HIGH_CONCURRENCY_THRESHOLD,
- WMN_LIST_FIELDS,
-)
-from .exceptions import (
- ConfigurationError,
- ValidationError,
+ DEFAULT_JSON_ENSURE_ASCII,
+ DEFAULT_JSON_INDENT,
+ SITE_KEY_NAME,
+ WMN_KEY_SITES,
)
+from .exceptions import WMNSchemaError
+from .models import WMNDataset, WMNValidationModel
logger = logging.getLogger(__name__)
-def deduplicate_strings(values: list[str] | None) -> list[str]:
- """Return a list of unique, non-empty strings preserving original order."""
- if not values:
- return []
-
- seen: set[str] = set()
- unique_values: list[str] = []
-
- for item in values:
- if isinstance(item, str):
- normalized = item.strip()
- if normalized and normalized not in seen:
- seen.add(normalized)
- unique_values.append(normalized)
-
- return unique_values
-
-
-def validate_numeric_values(max_tasks: int, timeout: int) -> list[str]:
- """Validate numeric configuration values and return warnings."""
- warnings: list[str] = []
-
- if not (MIN_TASKS <= max_tasks <= MAX_TASKS_LIMIT):
- msg = (
- "Invalid max_tasks: "
- f"{max_tasks} must be between {MIN_TASKS} and {MAX_TASKS_LIMIT}"
- )
- raise ConfigurationError(
- msg
- )
-
- if not (MIN_TIMEOUT <= timeout <= MAX_TIMEOUT):
- msg = (
- "Invalid timeout: "
- f"{timeout} must be between {MIN_TIMEOUT} and {MAX_TIMEOUT} seconds"
- )
- raise ConfigurationError(
- msg
- )
-
- if (
- max_tasks > HIGH_CONCURRENCY_THRESHOLD
- and timeout < HIGH_CONCURRENCY_MIN_TIMEOUT
- ):
- warnings.append(
- "High concurrency ("
- f"{max_tasks}) with low timeout ({timeout}s) may cause failures; "
- "consider increasing timeout or reducing max_tasks."
- )
- elif (
- max_tasks > VERY_HIGH_CONCURRENCY_THRESHOLD
- and timeout < VERY_HIGH_CONCURRENCY_MIN_TIMEOUT
- ):
- warnings.append(
- "Very high concurrency ("
- f"{max_tasks}) with very low timeout ({timeout}s) may cause connection "
- "issues; recommend timeout >= "
- f"{HIGH_CONCURRENCY_MIN_TIMEOUT}s for max_tasks > "
- f"{VERY_HIGH_CONCURRENCY_THRESHOLD}."
- )
-
- if max_tasks > EXTREME_CONCURRENCY_THRESHOLD:
- warnings.append(
- "Extremely high concurrency ("
- f"{max_tasks}) may overwhelm servers or cause rate limiting; "
- "lowering value is recommended."
- )
-
- if timeout < LOW_TIMEOUT_WARNING_THRESHOLD:
- warnings.append(
- "Very low timeout ("
- f"{timeout}s) may cause legitimate requests to fail; increase "
- "timeout for better accuracy."
- )
-
- return warnings
-
+def validate_dataset(
+ data: WMNDataset, schema: dict[str, Any]
+) -> list[WMNValidationModel]:
+ """Validate WMN dataset against JSON Schema and return list of errors.
-def configure_proxy(proxy: str | dict[str, str] | None) -> dict[str, str] | None:
- """Validate and configure proxy settings."""
- if proxy is None:
- return None
+ Raises WMNSchemaError if the provided schema is invalid.
- if isinstance(proxy, str):
- if not proxy.strip():
- msg = "Invalid proxy: proxy string cannot be empty"
- raise ConfigurationError(msg)
+ Args:
+ data: WMN dataset to validate
+ schema: JSON Schema to validate against
+ """
+ if not schema:
+ return []
- if not (
- proxy.startswith(("http://", "https://", "socks5://"))
- ):
- msg = "Invalid proxy: must be http://, https://, or socks5:// URL"
- raise ConfigurationError(
- msg
+ try:
+ validator = Draft7Validator(schema)
+ except JsonSchemaError as error:
+ msg = f"Invalid JSON schema: {error}"
+ raise WMNSchemaError(msg) from error
+
+ errors: list[WMNValidationModel] = []
+ for error in validator.iter_errors(data): # type: ignore[reportUnknownMemberType]
+ message_text = error.message
+ path_string = error.json_path
+ data_preview: str | None = None
+
+ try:
+ if error.absolute_path:
+ current_data = data
+ for segment in error.absolute_path:
+ current_data = current_data[segment]
+ if current_data is not None:
+ data_preview = json.dumps(
+ current_data,
+ ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
+ indent=DEFAULT_JSON_INDENT,
+ )
+ except Exception:
+ data_preview = None
+
+ errors.append(
+ WMNValidationModel(
+ path=path_string,
+ data=data_preview,
+ message=message_text,
)
+ )
- logger.debug("Proxy configuration validated")
- return {"http": proxy, "https": proxy}
-
- elif isinstance(proxy, dict):
- for protocol, proxy_url in proxy.items():
- if protocol not in {"http", "https"}:
- msg = f"Invalid proxy protocol: {protocol}"
- raise ConfigurationError(msg)
-
- if not isinstance(proxy_url, str) or not proxy_url.strip():
- msg = f"Invalid proxy URL for {protocol}: must be non-empty string"
- raise ConfigurationError(
- msg
+ sites_data = data.get(WMN_KEY_SITES, [])
+
+ name_indices: dict[str, list[int]] = {}
+ for index, site in enumerate(sites_data):
+ site_name = site.get(SITE_KEY_NAME)
+ if site_name:
+ name_indices.setdefault(site_name, []).append(index)
+
+ for site_name, indices in name_indices.items():
+ if len(indices) > 1:
+ for index in indices:
+ path_string = f"$.{WMN_KEY_SITES}[{index}]"
+ try:
+ site_data = sites_data[index]
+ data_preview = json.dumps(
+ site_data,
+ ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
+ indent=DEFAULT_JSON_INDENT,
+ )
+ except Exception:
+ data_preview = None
+
+ errors.append(
+ WMNValidationModel(
+ path=path_string,
+ data=data_preview,
+ message=(
+ f"Duplicate site name found: '{site_name}' "
+ f"(appears {len(indices)} times)"
+ ),
+ )
)
- logger.debug("Proxy dictionary configuration validated")
- return proxy
-
-
-def validate_usernames(usernames: list[str]) -> list[str]:
- """Validate and deduplicate usernames, preserving order."""
-
- unique_usernames: list[str] = deduplicate_strings(usernames)
+ return errors
- if not unique_usernames:
- msg = "No valid usernames provided"
- raise ValidationError(msg)
- return unique_usernames
+def get_missing_keys(data: dict[str, Any], keys: Sequence[str]) -> list[str]:
+ """Return a list of required keys missing from a dictionary.
+ Args:
+ data: Dictionary to check for missing keys
+ keys: Sequence of keys that should be present
-def merge_lists(data: dict[str, Any], accumulator: dict[str, Any]) -> None:
- """Merge list fields from data into the accumulator dictionary."""
- if isinstance(data, dict):
- for key in WMN_LIST_FIELDS:
- if key in data and isinstance(data[key], list):
- accumulator[key].extend(data[key])
+ Returns:
+ List of keys that are missing from the dictionary
+ """
+ return [key for key in keys if key not in data]
diff --git a/poetry.lock b/poetry.lock
deleted file mode 100644
index 0893ce3..0000000
--- a/poetry.lock
+++ /dev/null
@@ -1,1390 +0,0 @@
-# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
-
-[[package]]
-name = "aiofiles"
-version = "24.1.0"
-description = "File support for asyncio."
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
- {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"},
- {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
-]
-
-[[package]]
-name = "attrs"
-version = "25.3.0"
-description = "Classes Without Boilerplate"
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
- {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"},
- {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"},
-]
-
-[package.extras]
-benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
-cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
-dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
-docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]
-tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
-tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
-
-[[package]]
-name = "brotli"
-version = "1.1.0"
-description = "Python bindings for the Brotli compression library"
-optional = false
-python-versions = "*"
-groups = ["main"]
-markers = "platform_python_implementation == \"CPython\""
-files = [
- {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"},
- {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"},
- {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"},
- {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"},
- {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"},
- {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
- {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
- {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
- {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
- {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"},
- {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"},
- {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"},
- {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"},
- {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
- {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
- {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
- {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
- {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
- {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
- {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
- {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
- {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"},
- {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"},
- {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
- {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
- {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
- {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
- {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
- {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
- {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
- {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
- {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
- {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
- {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d4a848d1837973bf0f4b5e54e3bec977d99be36a7895c61abb659301b02c112"},
- {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fdc3ff3bfccdc6b9cc7c342c03aa2400683f0cb891d46e94b64a197910dc4064"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5eeb539606f18a0b232d4ba45adccde4125592f3f636a6182b4a8a436548b914"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
- {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
- {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
- {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
- {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f733d788519c7e3e71f0855c96618720f5d3d60c3cb829d8bbb722dddce37985"},
- {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:929811df5462e182b13920da56c6e0284af407d1de637d8e536c5cd00a7daf60"},
- {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b63b949ff929fbc2d6d3ce0e924c9b93c9785d877a21a1b678877ffbbc4423a"},
- {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d192f0f30804e55db0d0e0a35d83a9fead0e9a359a9ed0285dbacea60cc10a84"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f296c40e23065d0d6650c4aefe7470d2a25fffda489bcc3eb66083f3ac9f6643"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
- {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
- {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
- {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
- {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d20af184290887bdea3f0f78c4f737d126c74dc2f3ccadf07e54ceca3bf208"},
- {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6172447e1b368dcbc458925e5ddaf9113477b0ed542df258d84fa28fc45ceea7"},
- {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a743e5a28af5f70f9c080380a5f908d4d21d40e8f0e0c8901604d15cfa9ba751"},
- {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0541e747cce78e24ea12d69176f6a7ddb690e62c425e01d31cc065e69ce55b48"},
- {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cdbc1fc1bc0bff1cef838eafe581b55bfbffaed4ed0318b724d0b71d4d377619"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:890b5a14ce214389b2cc36ce82f3093f96f4cc730c1cffdbefff77a7c71f2a97"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
- {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
- {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
- {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
- {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7905193081db9bfa73b1219140b3d315831cbff0d8941f22da695832f0dd188f"},
- {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a77def80806c421b4b0af06f45d65a136e7ac0bdca3c09d9e2ea4e515367c7e9"},
- {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dadd1314583ec0bf2d1379f7008ad627cd6336625d6679cf2f8e67081b83acf"},
- {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:901032ff242d479a0efa956d853d16875d42157f98951c0230f69e69f9c09bac"},
- {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22fc2a8549ffe699bfba2256ab2ed0421a7b8fadff114a3d201794e45a9ff578"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae15b066e5ad21366600ebec29a7ccbc86812ed267e4b28e860b8ca16a2bc474"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
- {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
- {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
- {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
-]
-
-[[package]]
-name = "brotlicffi"
-version = "1.1.0.0"
-description = "Python CFFI bindings to the Brotli library"
-optional = false
-python-versions = ">=3.7"
-groups = ["main"]
-markers = "platform_python_implementation != \"CPython\""
-files = [
- {file = "brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851"},
- {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b"},
- {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814"},
- {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820"},
- {file = "brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb"},
- {file = "brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613"},
- {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2e4aeb0bd2540cb91b069dbdd54d458da8c4334ceaf2d25df2f4af576d6766ca"},
- {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b7b0033b0d37bb33009fb2fef73310e432e76f688af76c156b3594389d81391"},
- {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54a07bb2374a1eba8ebb52b6fafffa2afd3c4df85ddd38fcc0511f2bb387c2a8"},
- {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7901a7dc4b88f1c1475de59ae9be59799db1007b7d059817948d8e4f12e24e35"},
- {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce01c7316aebc7fce59da734286148b1d1b9455f89cf2c8a4dfce7d41db55c2d"},
- {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:246f1d1a90279bb6069de3de8d75a8856e073b8ff0b09dcca18ccc14cec85979"},
- {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc4bc5d82bc56ebd8b514fb8350cfac4627d6b0743382e46d033976a5f80fab6"},
- {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c26ecb14386a44b118ce36e546ce307f4810bc9598a6e6cb4f7fca725ae7e6"},
- {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca72968ae4eaf6470498d5c2887073f7efe3b1e7d7ec8be11a06a79cc810e990"},
- {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:add0de5b9ad9e9aa293c3aa4e9deb2b61e99ad6c1634e01d01d98c03e6a354cc"},
- {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b6068e0f3769992d6b622a1cd2e7835eae3cf8d9da123d7f51ca9c1e9c333e5"},
- {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8557a8559509b61e65083f8782329188a250102372576093c88930c875a69838"},
- {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a7ae37e5d79c5bdfb5b4b99f2715a6035e6c5bf538c3746abc8e26694f92f33"},
- {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391151ec86bb1c683835980f4816272a87eaddc46bb91cbf44f62228b84d8cca"},
- {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2f3711be9290f0453de8eed5275d93d286abe26b08ab4a35d7452caa1fef532f"},
- {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a807d760763e398bbf2c6394ae9da5815901aa93ee0a37bca5efe78d4ee3171"},
- {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa8ca0623b26c94fccc3a1fdd895be1743b838f3917300506d04aa3346fd2a14"},
- {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3de0cf28a53a3238b252aca9fed1593e9d36c1d116748013339f0949bfc84112"},
- {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6be5ec0e88a4925c91f3dea2bb0013b3a2accda6f77238f76a34a1ea532a1cb0"},
- {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d9eb71bb1085d996244439154387266fd23d6ad37161f6f52f1cd41dd95a3808"},
- {file = "brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13"},
-]
-
-[package.dependencies]
-cffi = ">=1.0.0"
-
-[[package]]
-name = "certifi"
-version = "2025.8.3"
-description = "Python package for providing Mozilla's CA Bundle."
-optional = false
-python-versions = ">=3.7"
-groups = ["main"]
-files = [
- {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"},
- {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"},
-]
-
-[[package]]
-name = "cffi"
-version = "2.0.0"
-description = "Foreign Function Interface for Python calling C code."
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"},
- {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"},
- {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"},
- {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"},
- {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"},
- {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"},
- {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"},
- {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"},
- {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"},
- {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"},
- {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"},
- {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"},
- {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"},
- {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"},
- {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"},
- {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"},
- {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"},
- {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"},
- {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"},
- {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"},
- {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"},
- {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"},
- {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"},
- {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"},
- {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"},
- {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"},
- {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"},
- {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"},
- {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"},
- {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"},
- {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"},
- {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"},
- {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"},
- {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"},
- {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"},
- {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"},
- {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"},
- {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"},
- {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"},
- {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"},
- {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"},
- {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"},
- {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"},
- {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"},
- {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"},
- {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"},
- {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"},
- {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"},
- {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"},
- {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"},
- {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"},
- {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"},
- {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"},
- {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"},
- {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"},
- {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"},
- {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"},
- {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"},
- {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"},
- {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"},
- {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"},
- {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"},
- {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"},
- {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"},
- {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"},
- {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"},
- {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"},
- {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"},
- {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"},
- {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"},
- {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"},
- {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"},
- {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"},
- {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"},
- {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"},
- {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"},
- {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"},
- {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"},
- {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"},
- {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"},
- {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"},
- {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"},
- {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"},
- {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"},
-]
-
-[package.dependencies]
-pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
-
-[[package]]
-name = "click"
-version = "8.3.0"
-description = "Composable command line interface toolkit"
-optional = false
-python-versions = ">=3.10"
-groups = ["main"]
-files = [
- {file = "click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc"},
- {file = "click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-description = "Cross-platform colored terminal text."
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-groups = ["main", "dev"]
-files = [
- {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
- {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
-]
-markers = {main = "platform_system == \"Windows\""}
-
-[[package]]
-name = "cssselect2"
-version = "0.8.0"
-description = "CSS selectors for Python ElementTree"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "cssselect2-0.8.0-py3-none-any.whl", hash = "sha256:46fc70ebc41ced7a32cd42d58b1884d72ade23d21e5a4eaaf022401c13f0e76e"},
- {file = "cssselect2-0.8.0.tar.gz", hash = "sha256:7674ffb954a3b46162392aee2a3a0aedb2e14ecf99fcc28644900f4e6e3e9d3a"},
-]
-
-[package.dependencies]
-tinycss2 = "*"
-webencodings = "*"
-
-[package.extras]
-doc = ["furo", "sphinx"]
-test = ["pytest", "ruff"]
-
-[[package]]
-name = "curl-cffi"
-version = "0.13.0"
-description = "libcurl ffi bindings for Python, with impersonation support."
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "curl_cffi-0.13.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:434cadbe8df2f08b2fc2c16dff2779fb40b984af99c06aa700af898e185bb9db"},
- {file = "curl_cffi-0.13.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:59afa877a9ae09efa04646a7d068eeea48915a95d9add0a29854e7781679fcd7"},
- {file = "curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d06ed389e45a7ca97b17c275dbedd3d6524560270e675c720e93a2018a766076"},
- {file = "curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4e0de45ab3b7a835c72bd53640c2347415111b43421b5c7a1a0b18deae2e541"},
- {file = "curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8eb4083371bbb94e9470d782de235fb5268bf43520de020c9e5e6be8f395443f"},
- {file = "curl_cffi-0.13.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:28911b526e8cd4aa0e5e38401bfe6887e8093907272f1f67ca22e6beb2933a51"},
- {file = "curl_cffi-0.13.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d433ffcb455ab01dd0d7bde47109083aa38b59863aa183d29c668ae4c96bf8e"},
- {file = "curl_cffi-0.13.0-cp39-abi3-win_amd64.whl", hash = "sha256:66a6b75ce971de9af64f1b6812e275f60b88880577bac47ef1fa19694fa21cd3"},
- {file = "curl_cffi-0.13.0-cp39-abi3-win_arm64.whl", hash = "sha256:d438a3b45244e874794bc4081dc1e356d2bb926dcc7021e5a8fef2e2105ef1d8"},
- {file = "curl_cffi-0.13.0.tar.gz", hash = "sha256:62ecd90a382bd5023750e3606e0aa7cb1a3a8ba41c14270b8e5e149ebf72c5ca"},
-]
-
-[package.dependencies]
-certifi = ">=2024.2.2"
-cffi = ">=1.12.0"
-
-[package.extras]
-build = ["cibuildwheel", "wheel"]
-dev = ["charset_normalizer (>=3.3.2,<4.0)", "coverage (>=6.4.1,<7.0)", "cryptography (>=42.0.5,<43.0)", "httpx (==0.23.1)", "mypy (>=1.9.0,<2.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "ruff (>=0.3.5,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "typing_extensions", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"]
-extra = ["lxml_html_clean", "markdownify (>=1.1.0)", "readability-lxml (>=0.8.1)"]
-test = ["charset_normalizer (>=3.3.2,<4.0)", "cryptography (>=42.0.5,<43.0)", "fastapi (==0.110.0)", "httpx (==0.23.1)", "proxy.py (>=2.4.3,<3.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "python-multipart (>=0.0.9,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "typing_extensions", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"]
-
-[[package]]
-name = "fonttools"
-version = "4.60.0"
-description = "Tools to manipulate font files"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "fonttools-4.60.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:151282a235c36024168c21c02193e939e8b28c73d5fa0b36ae1072671d8fa134"},
- {file = "fonttools-4.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3f32cc42d485d9b1546463b9a7a92bdbde8aef90bac3602503e04c2ddb27e164"},
- {file = "fonttools-4.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:336b89d169c40379b8ccef418c877edbc28840b553099c9a739b0db2bcbb57c5"},
- {file = "fonttools-4.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39a38d950b2b04cd6da729586e6b51d686b0c27d554a2154a6a35887f87c09b1"},
- {file = "fonttools-4.60.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7067dd03e0296907a5c6184285807cbb7bc0bf61a584ffebbf97c2b638d8641a"},
- {file = "fonttools-4.60.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:342753fe1a1bd2e6896e7a4e936a67c0f441d6897bd11477f718e772d6e63e88"},
- {file = "fonttools-4.60.0-cp310-cp310-win32.whl", hash = "sha256:0746c2b2b32087da2ac5f81e14d319c44cb21127d419bc60869daed089790e3d"},
- {file = "fonttools-4.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:b83b32e5e8918f8e0ccd79816fc2f914e30edc6969ab2df6baf4148e72dbcc11"},
- {file = "fonttools-4.60.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a9106c202d68ff5f9b4a0094c4d7ad2eaa7e9280f06427b09643215e706eb016"},
- {file = "fonttools-4.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9da3a4a3f2485b156bb429b4f8faa972480fc01f553f7c8c80d05d48f17eec89"},
- {file = "fonttools-4.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f84de764c6057b2ffd4feb50ddef481d92e348f0c70f2c849b723118d352bf3"},
- {file = "fonttools-4.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:800b3fa0d5c12ddff02179d45b035a23989a6c597a71c8035c010fff3b2ef1bb"},
- {file = "fonttools-4.60.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd68f60b030277f292a582d31c374edfadc60bb33d51ec7b6cd4304531819ba"},
- {file = "fonttools-4.60.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:53328e3ca9e5c8660ef6de07c35f8f312c189b757535e12141be7a8ec942de6e"},
- {file = "fonttools-4.60.0-cp311-cp311-win32.whl", hash = "sha256:d493c175ddd0b88a5376e61163e3e6fde3be8b8987db9b092e0a84650709c9e7"},
- {file = "fonttools-4.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cc2770c9dc49c2d0366e9683f4d03beb46c98042d7ccc8ddbadf3459ecb051a7"},
- {file = "fonttools-4.60.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8c68928a438d60dfde90e2f09aa7f848ed201176ca6652341744ceec4215859f"},
- {file = "fonttools-4.60.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b7133821249097cffabf0624eafd37f5a3358d5ce814febe9db688e3673e724e"},
- {file = "fonttools-4.60.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3638905d3d77ac8791127ce181f7cb434f37e4204d8b2e31b8f1e154320b41f"},
- {file = "fonttools-4.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7968a26ef010ae89aabbb2f8e9dec1e2709a2541bb8620790451ee8aeb4f6fbf"},
- {file = "fonttools-4.60.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ef01ca7847c356b0fe026b7b92304bc31dc60a4218689ee0acc66652c1a36b2"},
- {file = "fonttools-4.60.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f3482d7ed7867edfcf785f77c1dffc876c4b2ddac19539c075712ff2a0703cf5"},
- {file = "fonttools-4.60.0-cp312-cp312-win32.whl", hash = "sha256:8c937c4fe8addff575a984c9519433391180bf52cf35895524a07b520f376067"},
- {file = "fonttools-4.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:99b06d5d6f29f32e312adaed0367112f5ff2d300ea24363d377ec917daf9e8c5"},
- {file = "fonttools-4.60.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:97100ba820936cdb5148b634e0884f0088699c7e2f1302ae7bba3747c7a19fb3"},
- {file = "fonttools-4.60.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:03fccf84f377f83e99a5328a9ebe6b41e16fcf64a1450c352b6aa7e0deedbc01"},
- {file = "fonttools-4.60.0-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a3ef06671f862cd7da78ab105fbf8dce9da3634a8f91b3a64ed5c29c0ac6a9a8"},
- {file = "fonttools-4.60.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f2195faf96594c238462c420c7eff97d1aa51de595434f806ec3952df428616"},
- {file = "fonttools-4.60.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3887008865fa4f56cff58a1878f1300ba81a4e34f76daf9b47234698493072ee"},
- {file = "fonttools-4.60.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5567bd130378f21231d3856d8f0571dcdfcd77e47832978c26dabe572d456daa"},
- {file = "fonttools-4.60.0-cp313-cp313-win32.whl", hash = "sha256:699d0b521ec0b188ac11f2c14ccf6a926367795818ddf2bd00a273e9a052dd20"},
- {file = "fonttools-4.60.0-cp313-cp313-win_amd64.whl", hash = "sha256:24296163268e7c800009711ce5c0e9997be8882c0bd546696c82ef45966163a6"},
- {file = "fonttools-4.60.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:b6fe3efdc956bdad95145cea906ad9ff345c17b706356dfc1098ce3230591343"},
- {file = "fonttools-4.60.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:764b2aaab839762a3aa3207e5b3f0e0dfa41799e0b091edec5fcbccc584fdab5"},
- {file = "fonttools-4.60.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b81c7c47d9e78106a4d70f1dbeb49150513171715e45e0d2661809f2b0e3f710"},
- {file = "fonttools-4.60.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:799ff60ee66b300ebe1fe6632b1cc55a66400fe815cef7b034d076bce6b1d8fc"},
- {file = "fonttools-4.60.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f9878abe155ddd1b433bab95d027a686898a6afba961f3c5ca14b27488f2d772"},
- {file = "fonttools-4.60.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ded432b7133ea4602fdb4731a4a7443a8e9548edad28987b99590cf6da626254"},
- {file = "fonttools-4.60.0-cp314-cp314-win32.whl", hash = "sha256:5d97cf3a9245316d5978628c05642b939809c4f55ca632ca40744cb9de6e8d4a"},
- {file = "fonttools-4.60.0-cp314-cp314-win_amd64.whl", hash = "sha256:61b9ef46dd5e9dcb6f437eb0cc5ed83d5049e1bf9348e31974ffee1235db0f8f"},
- {file = "fonttools-4.60.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:bba7e3470cf353e1484a36dfb4108f431c2859e3f6097fe10118eeae92166773"},
- {file = "fonttools-4.60.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5ac6439a38c27b3287063176b3303b34982024b01e2e95bba8ac1e45f6d41c1"},
- {file = "fonttools-4.60.0-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4acd21e9f125a1257da59edf7a6e9bd4abd76282770715c613f1fe482409e9f9"},
- {file = "fonttools-4.60.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4a6fc53039ea047e35dc62b958af9cd397eedbc3fa42406d2910ae091b9ae37"},
- {file = "fonttools-4.60.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ef34f44eadf133e94e82c775a33ee3091dd37ee0161c5f5ea224b46e3ce0fb8e"},
- {file = "fonttools-4.60.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d112cae3e7ad1bb5d7f7a60365fcf6c181374648e064a8c07617b240e7c828ee"},
- {file = "fonttools-4.60.0-cp314-cp314t-win32.whl", hash = "sha256:0f7b2c251dc338973e892a1e153016114e7a75f6aac7a49b84d5d1a4c0608d08"},
- {file = "fonttools-4.60.0-cp314-cp314t-win_amd64.whl", hash = "sha256:c8a72771106bc7434098db35abecd84d608857f6e116d3ef00366b213c502ce9"},
- {file = "fonttools-4.60.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:79a18fff39ce2044dfc88050a033eb16e48ee0024bd0ea831950aad342b9eae9"},
- {file = "fonttools-4.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:97fe4f9483a6cecaa3976f29cd896501f47840474188b6e505ba73e4fa25006a"},
- {file = "fonttools-4.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fa66f07f5f4a019c36dcac86d112e016ee7f579a3100154051031a422cea8903"},
- {file = "fonttools-4.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47e82dcf6ace13a1fd36a0b4d6966c559653f459a80784b0746f4b342e335a5d"},
- {file = "fonttools-4.60.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d25e9af0c2e1eb70a204072cc29ec01b2efc4d072f4ebca9334145a4a8cbfca"},
- {file = "fonttools-4.60.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3e445e9db6ce9ccda22b1dc29d619825cf91bf1b955e25974a3c47f67a7983c3"},
- {file = "fonttools-4.60.0-cp39-cp39-win32.whl", hash = "sha256:dfd7b71a196c6929f21a7f30fa64a5d62f1acf5d857dd40ad6864452ebe615de"},
- {file = "fonttools-4.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:1eab07d561e18b971e20510631c048cf496ffa1adf3574550dbcac38e6425832"},
- {file = "fonttools-4.60.0-py3-none-any.whl", hash = "sha256:496d26e4d14dcccdd6ada2e937e4d174d3138e3d73f5c9b6ec6eb2fd1dab4f66"},
- {file = "fonttools-4.60.0.tar.gz", hash = "sha256:8f5927f049091a0ca74d35cce7f78e8f7775c83a6901a8fbe899babcc297146a"},
-]
-
-[package.dependencies]
-brotli = {version = ">=1.0.1", optional = true, markers = "platform_python_implementation == \"CPython\" and extra == \"woff\""}
-brotlicffi = {version = ">=0.8.0", optional = true, markers = "platform_python_implementation != \"CPython\" and extra == \"woff\""}
-zopfli = {version = ">=0.1.4", optional = true, markers = "extra == \"woff\""}
-
-[package.extras]
-all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"]
-graphite = ["lz4 (>=1.7.4.2)"]
-interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""]
-lxml = ["lxml (>=4.0)"]
-pathops = ["skia-pathops (>=0.5.0)"]
-plot = ["matplotlib"]
-repacker = ["uharfbuzz (>=0.23.0)"]
-symfont = ["sympy"]
-type1 = ["xattr ; sys_platform == \"darwin\""]
-unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""]
-woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"]
-
-[[package]]
-name = "jinja2"
-version = "3.1.6"
-description = "A very fast and expressive template engine."
-optional = false
-python-versions = ">=3.7"
-groups = ["main"]
-files = [
- {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"},
- {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"},
-]
-
-[package.dependencies]
-MarkupSafe = ">=2.0"
-
-[package.extras]
-i18n = ["Babel (>=2.7)"]
-
-[[package]]
-name = "jsonschema"
-version = "4.25.1"
-description = "An implementation of JSON Schema validation for Python"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63"},
- {file = "jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85"},
-]
-
-[package.dependencies]
-attrs = ">=22.2.0"
-jsonschema-specifications = ">=2023.03.6"
-referencing = ">=0.28.4"
-rpds-py = ">=0.7.1"
-
-[package.extras]
-format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
-format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"]
-
-[[package]]
-name = "jsonschema-specifications"
-version = "2025.9.1"
-description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe"},
- {file = "jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d"},
-]
-
-[package.dependencies]
-referencing = ">=0.31.0"
-
-[[package]]
-name = "markdown-it-py"
-version = "4.0.0"
-description = "Python port of markdown-it. Markdown parsing, done right!"
-optional = false
-python-versions = ">=3.10"
-groups = ["main"]
-files = [
- {file = "markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147"},
- {file = "markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3"},
-]
-
-[package.dependencies]
-mdurl = ">=0.1,<1.0"
-
-[package.extras]
-benchmarking = ["psutil", "pytest", "pytest-benchmark"]
-compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "markdown-it-pyrs", "mistletoe (>=1.0,<2.0)", "mistune (>=3.0,<4.0)", "panflute (>=2.3,<3.0)"]
-linkify = ["linkify-it-py (>=1,<3)"]
-plugins = ["mdit-py-plugins (>=0.5.0)"]
-profiling = ["gprof2dot"]
-rtd = ["ipykernel", "jupyter_sphinx", "mdit-py-plugins (>=0.5.0)", "myst-parser", "pyyaml", "sphinx", "sphinx-book-theme (>=1.0,<2.0)", "sphinx-copybutton", "sphinx-design"]
-testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions", "requests"]
-
-[[package]]
-name = "markupsafe"
-version = "3.0.3"
-description = "Safely add untrusted strings to HTML/XML markup."
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"},
- {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"},
- {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"},
- {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"},
- {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"},
- {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"},
- {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"},
- {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"},
- {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"},
- {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"},
- {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"},
- {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"},
- {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"},
- {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"},
- {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"},
- {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"},
- {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"},
- {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"},
- {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"},
- {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"},
- {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"},
- {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"},
- {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"},
- {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"},
- {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"},
- {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"},
- {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"},
- {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"},
- {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"},
- {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"},
- {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"},
- {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"},
- {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"},
- {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"},
- {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"},
- {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"},
- {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"},
- {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"},
- {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"},
- {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"},
- {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"},
- {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"},
- {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"},
- {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"},
- {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"},
- {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"},
- {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"},
- {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"},
- {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"},
- {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"},
- {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"},
- {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"},
- {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"},
- {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"},
- {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"},
- {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"},
- {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"},
- {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"},
- {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"},
- {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"},
- {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"},
- {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"},
- {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"},
- {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"},
- {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"},
- {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"},
- {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"},
- {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"},
- {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"},
- {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"},
- {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"},
- {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"},
- {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"},
- {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"},
- {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"},
- {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"},
- {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"},
- {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"},
- {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"},
- {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"},
- {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"},
- {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"},
- {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"},
- {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"},
- {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"},
- {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"},
- {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"},
- {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"},
- {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"},
-]
-
-[[package]]
-name = "mdurl"
-version = "0.1.2"
-description = "Markdown URL utilities"
-optional = false
-python-versions = ">=3.7"
-groups = ["main"]
-files = [
- {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
- {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
-]
-
-[[package]]
-name = "mslex"
-version = "1.3.0"
-description = "shlex for windows"
-optional = false
-python-versions = ">=3.5"
-groups = ["dev"]
-markers = "sys_platform == \"win32\""
-files = [
- {file = "mslex-1.3.0-py3-none-any.whl", hash = "sha256:c7074b347201b3466fc077c5692fbce9b5f62a63a51f537a53fbbd02eff2eea4"},
- {file = "mslex-1.3.0.tar.gz", hash = "sha256:641c887d1d3db610eee2af37a8e5abda3f70b3006cdfd2d0d29dc0d1ae28a85d"},
-]
-
-[[package]]
-name = "pillow"
-version = "11.3.0"
-description = "Python Imaging Library (Fork)"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"},
- {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"},
- {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"},
- {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"},
- {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"},
- {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"},
- {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"},
- {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"},
- {file = "pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"},
- {file = "pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"},
- {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"},
- {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"},
- {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"},
- {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"},
- {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"},
- {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"},
- {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"},
- {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"},
- {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"},
- {file = "pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"},
- {file = "pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"},
- {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"},
- {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"},
- {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"},
- {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"},
- {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"},
- {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"},
- {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"},
- {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"},
- {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d"},
- {file = "pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"},
- {file = "pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"},
- {file = "pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"},
- {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd"},
- {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8"},
- {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"},
- {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"},
- {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"},
- {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"},
- {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"},
- {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"},
- {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"},
- {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"},
- {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b"},
- {file = "pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3"},
- {file = "pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51"},
- {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"},
- {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"},
- {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"},
- {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"},
- {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"},
- {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"},
- {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"},
- {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"},
- {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c"},
- {file = "pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788"},
- {file = "pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31"},
- {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"},
- {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"},
- {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"},
- {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"},
- {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"},
- {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"},
- {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"},
- {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"},
- {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a"},
- {file = "pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214"},
- {file = "pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635"},
- {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"},
- {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"},
- {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"},
- {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"},
- {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"},
- {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"},
- {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"},
- {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"},
- {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b"},
- {file = "pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12"},
- {file = "pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db"},
- {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"},
- {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"},
- {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"},
- {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"},
- {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"},
- {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"},
- {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"},
- {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"},
- {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"},
- {file = "pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"},
- {file = "pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"},
- {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"},
- {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"},
- {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"},
- {file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"},
-]
-
-[package.extras]
-docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"]
-fpx = ["olefile"]
-mic = ["olefile"]
-test-arrow = ["pyarrow"]
-tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"]
-typing = ["typing-extensions ; python_version < \"3.10\""]
-xmp = ["defusedxml"]
-
-[[package]]
-name = "psutil"
-version = "6.1.1"
-description = "Cross-platform lib for process and system monitoring in Python."
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
-groups = ["dev"]
-files = [
- {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"},
- {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"},
- {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8df0178ba8a9e5bc84fed9cfa61d54601b371fbec5c8eebad27575f1e105c0d4"},
- {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:1924e659d6c19c647e763e78670a05dbb7feaf44a0e9c94bf9e14dfc6ba50468"},
- {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:018aeae2af92d943fdf1da6b58665124897cfc94faa2ca92098838f83e1b1bca"},
- {file = "psutil-6.1.1-cp27-none-win32.whl", hash = "sha256:6d4281f5bbca041e2292be3380ec56a9413b790579b8e593b1784499d0005dac"},
- {file = "psutil-6.1.1-cp27-none-win_amd64.whl", hash = "sha256:c777eb75bb33c47377c9af68f30e9f11bc78e0f07fbf907be4a5d70b2fe5f030"},
- {file = "psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8"},
- {file = "psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377"},
- {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003"},
- {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160"},
- {file = "psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3"},
- {file = "psutil-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:384636b1a64b47814437d1173be1427a7c83681b17a450bfc309a1953e329603"},
- {file = "psutil-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8be07491f6ebe1a693f17d4f11e69d0dc1811fa082736500f649f79df7735303"},
- {file = "psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53"},
- {file = "psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649"},
- {file = "psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5"},
-]
-
-[package.extras]
-dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"]
-test = ["pytest", "pytest-xdist", "setuptools"]
-
-[[package]]
-name = "pycparser"
-version = "2.23"
-description = "C parser in Python"
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-markers = "implementation_name != \"PyPy\""
-files = [
- {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"},
- {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"},
-]
-
-[[package]]
-name = "pydyf"
-version = "0.11.0"
-description = "A low-level PDF generator."
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
- {file = "pydyf-0.11.0-py3-none-any.whl", hash = "sha256:0aaf9e2ebbe786ec7a78ec3fbffa4cdcecde53fd6f563221d53c6bc1328848a3"},
- {file = "pydyf-0.11.0.tar.gz", hash = "sha256:394dddf619cca9d0c55715e3c55ea121a9bf9cbc780cdc1201a2427917b86b64"},
-]
-
-[package.extras]
-doc = ["sphinx", "sphinx_rtd_theme"]
-test = ["pillow", "pytest", "ruff"]
-
-[[package]]
-name = "pygments"
-version = "2.19.2"
-description = "Pygments is a syntax highlighting package written in Python."
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
- {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
- {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
-]
-
-[package.extras]
-windows-terminal = ["colorama (>=0.4.6)"]
-
-[[package]]
-name = "pyphen"
-version = "0.17.2"
-description = "Pure Python module to hyphenate text"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "pyphen-0.17.2-py3-none-any.whl", hash = "sha256:3a07fb017cb2341e1d9ff31b8634efb1ae4dc4b130468c7c39dd3d32e7c3affd"},
- {file = "pyphen-0.17.2.tar.gz", hash = "sha256:f60647a9c9b30ec6c59910097af82bc5dd2d36576b918e44148d8b07ef3b4aa3"},
-]
-
-[package.extras]
-doc = ["sphinx", "sphinx_rtd_theme"]
-test = ["pytest", "ruff"]
-
-[[package]]
-name = "referencing"
-version = "0.36.2"
-description = "JSON Referencing + Python"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"},
- {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"},
-]
-
-[package.dependencies]
-attrs = ">=22.2.0"
-rpds-py = ">=0.7.0"
-typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""}
-
-[[package]]
-name = "rich"
-version = "14.1.0"
-description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
-optional = false
-python-versions = ">=3.8.0"
-groups = ["main"]
-files = [
- {file = "rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f"},
- {file = "rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8"},
-]
-
-[package.dependencies]
-markdown-it-py = ">=2.2.0"
-pygments = ">=2.13.0,<3.0.0"
-
-[package.extras]
-jupyter = ["ipywidgets (>=7.5.1,<9)"]
-
-[[package]]
-name = "rich-click"
-version = "1.9.1"
-description = "Format click help output nicely with rich"
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
- {file = "rich_click-1.9.1-py3-none-any.whl", hash = "sha256:ea6114a9e081b7d68cc07b315070398f806f01bb0e0c49da56f129e672877817"},
- {file = "rich_click-1.9.1.tar.gz", hash = "sha256:4f2620589d7287f86265432e6a909de4f281de909fe68d8c835fbba49265d268"},
-]
-
-[package.dependencies]
-click = ">=8"
-rich = ">=12"
-
-[package.extras]
-dev = ["inline-snapshot (>=0.24)", "jsonschema (>=4)", "mypy (>=1.14.1)", "nodeenv (>=1.9.1)", "packaging (>=25)", "pre-commit (>=3.5)", "pytest (>=8.3.5)", "pytest-cov (>=5)", "rich-codex (>=1.2.11)", "ruff (>=0.12.4)", "typer (>=0.15)", "types-setuptools (>=75.8.0.20250110)"]
-docs = ["markdown-include (>=0.8.1)", "mike (>=2.1.3)", "mkdocs-github-admonitions-plugin (>=0.1.1)", "mkdocs-glightbox (>=0.4)", "mkdocs-include-markdown-plugin (>=7.1.7) ; python_version >= \"3.9\"", "mkdocs-material-extensions (>=1.3.1)", "mkdocs-material[imaging] (>=9.5.18,<9.6.0)", "mkdocs-redirects (>=1.2.2)", "mkdocs-rss-plugin (>=1.15)", "mkdocs[docs] (>=1.6.1)", "mkdocstrings[python] (>=0.26.1)", "rich-codex (>=1.2.11)", "typer (>=0.15)"]
-
-[[package]]
-name = "rpds-py"
-version = "0.27.1"
-description = "Python bindings to Rust's persistent data structures (rpds)"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef"},
- {file = "rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be"},
- {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61"},
- {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb"},
- {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657"},
- {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013"},
- {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a"},
- {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1"},
- {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10"},
- {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808"},
- {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8"},
- {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9"},
- {file = "rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4"},
- {file = "rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1"},
- {file = "rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881"},
- {file = "rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5"},
- {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e"},
- {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c"},
- {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195"},
- {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52"},
- {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed"},
- {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a"},
- {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde"},
- {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21"},
- {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9"},
- {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948"},
- {file = "rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39"},
- {file = "rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15"},
- {file = "rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746"},
- {file = "rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90"},
- {file = "rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5"},
- {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e"},
- {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881"},
- {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec"},
- {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb"},
- {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5"},
- {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a"},
- {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444"},
- {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a"},
- {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1"},
- {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998"},
- {file = "rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39"},
- {file = "rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594"},
- {file = "rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502"},
- {file = "rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b"},
- {file = "rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf"},
- {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83"},
- {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf"},
- {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2"},
- {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0"},
- {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418"},
- {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d"},
- {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274"},
- {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd"},
- {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2"},
- {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002"},
- {file = "rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3"},
- {file = "rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83"},
- {file = "rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d"},
- {file = "rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228"},
- {file = "rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92"},
- {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2"},
- {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723"},
- {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802"},
- {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f"},
- {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2"},
- {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21"},
- {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef"},
- {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081"},
- {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd"},
- {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7"},
- {file = "rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688"},
- {file = "rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797"},
- {file = "rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334"},
- {file = "rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33"},
- {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a"},
- {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b"},
- {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7"},
- {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136"},
- {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff"},
- {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9"},
- {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60"},
- {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e"},
- {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212"},
- {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675"},
- {file = "rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3"},
- {file = "rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456"},
- {file = "rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3"},
- {file = "rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2"},
- {file = "rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4"},
- {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e"},
- {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817"},
- {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec"},
- {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a"},
- {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8"},
- {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48"},
- {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb"},
- {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734"},
- {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb"},
- {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0"},
- {file = "rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a"},
- {file = "rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772"},
- {file = "rpds_py-0.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c918c65ec2e42c2a78d19f18c553d77319119bf43aa9e2edf7fb78d624355527"},
- {file = "rpds_py-0.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1fea2b1a922c47c51fd07d656324531adc787e415c8b116530a1d29c0516c62d"},
- {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbf94c58e8e0cd6b6f38d8de67acae41b3a515c26169366ab58bdca4a6883bb8"},
- {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2a8fed130ce946d5c585eddc7c8eeef0051f58ac80a8ee43bd17835c144c2cc"},
- {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:037a2361db72ee98d829bc2c5b7cc55598ae0a5e0ec1823a56ea99374cfd73c1"},
- {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5281ed1cc1d49882f9997981c88df1a22e140ab41df19071222f7e5fc4e72125"},
- {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd50659a069c15eef8aa3d64bbef0d69fd27bb4a50c9ab4f17f83a16cbf8905"},
- {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_31_riscv64.whl", hash = "sha256:c4b676c4ae3921649a15d28ed10025548e9b561ded473aa413af749503c6737e"},
- {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:079bc583a26db831a985c5257797b2b5d3affb0386e7ff886256762f82113b5e"},
- {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4e44099bd522cba71a2c6b97f68e19f40e7d85399de899d66cdb67b32d7cb786"},
- {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e202e6d4188e53c6661af813b46c37ca2c45e497fc558bacc1a7630ec2695aec"},
- {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f41f814b8eaa48768d1bb551591f6ba45f87ac76899453e8ccd41dba1289b04b"},
- {file = "rpds_py-0.27.1-cp39-cp39-win32.whl", hash = "sha256:9e71f5a087ead99563c11fdaceee83ee982fd39cf67601f4fd66cb386336ee52"},
- {file = "rpds_py-0.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:71108900c9c3c8590697244b9519017a400d9ba26a36c48381b3f64743a44aab"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b"},
- {file = "rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6"},
- {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aa8933159edc50be265ed22b401125c9eebff3171f570258854dbce3ecd55475"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a50431bf02583e21bf273c71b89d710e7a710ad5e39c725b14e685610555926f"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78af06ddc7fe5cc0e967085a9115accee665fb912c22a3f54bad70cc65b05fe6"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70d0738ef8fee13c003b100c2fbd667ec4f133468109b3472d249231108283a3"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2f6fd8a1cea5bbe599b6e78a6e5ee08db434fc8ffea51ff201c8765679698b3"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8177002868d1426305bb5de1e138161c2ec9eb2d939be38291d7c431c4712df8"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:008b839781d6c9bf3b6a8984d1d8e56f0ec46dc56df61fd669c49b58ae800400"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:a55b9132bb1ade6c734ddd2759c8dc132aa63687d259e725221f106b83a0e485"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a46fdec0083a26415f11d5f236b79fa1291c32aaa4a17684d82f7017a1f818b1"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8a63b640a7845f2bdd232eb0d0a4a2dd939bcdd6c57e6bb134526487f3160ec5"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7e32721e5d4922deaaf963469d795d5bde6093207c52fec719bd22e5d1bedbc4"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2c426b99a068601b5f4623573df7a7c3d72e87533a2dd2253353a03e7502566c"},
- {file = "rpds_py-0.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4fc9b7fe29478824361ead6e14e4f5aed570d477e06088826537e202d25fe859"},
- {file = "rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8"},
-]
-
-[[package]]
-name = "ruff"
-version = "0.13.2"
-description = "An extremely fast Python linter and code formatter, written in Rust."
-optional = false
-python-versions = ">=3.7"
-groups = ["dev"]
-files = [
- {file = "ruff-0.13.2-py3-none-linux_armv6l.whl", hash = "sha256:3796345842b55f033a78285e4f1641078f902020d8450cade03aad01bffd81c3"},
- {file = "ruff-0.13.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ff7e4dda12e683e9709ac89e2dd436abf31a4d8a8fc3d89656231ed808e231d2"},
- {file = "ruff-0.13.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c75e9d2a2fafd1fdd895d0e7e24b44355984affdde1c412a6f6d3f6e16b22d46"},
- {file = "ruff-0.13.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cceac74e7bbc53ed7d15d1042ffe7b6577bf294611ad90393bf9b2a0f0ec7cb6"},
- {file = "ruff-0.13.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae3f469b5465ba6d9721383ae9d49310c19b452a161b57507764d7ef15f4b07"},
- {file = "ruff-0.13.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f8f9e3cd6714358238cd6626b9d43026ed19c0c018376ac1ef3c3a04ffb42d8"},
- {file = "ruff-0.13.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c6ed79584a8f6cbe2e5d7dbacf7cc1ee29cbdb5df1172e77fbdadc8bb85a1f89"},
- {file = "ruff-0.13.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aed130b2fde049cea2019f55deb939103123cdd191105f97a0599a3e753d61b0"},
- {file = "ruff-0.13.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1887c230c2c9d65ed1b4e4cfe4d255577ea28b718ae226c348ae68df958191aa"},
- {file = "ruff-0.13.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bcb10276b69b3cfea3a102ca119ffe5c6ba3901e20e60cf9efb53fa417633c3"},
- {file = "ruff-0.13.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:afa721017aa55a555b2ff7944816587f1cb813c2c0a882d158f59b832da1660d"},
- {file = "ruff-0.13.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dbc875cf3720c64b3990fef8939334e74cb0ca65b8dbc61d1f439201a38101b"},
- {file = "ruff-0.13.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939a1b2a960e9742e9a347e5bbc9b3c3d2c716f86c6ae273d9cbd64f193f22"},
- {file = "ruff-0.13.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:50e2d52acb8de3804fc5f6e2fa3ae9bdc6812410a9e46837e673ad1f90a18736"},
- {file = "ruff-0.13.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3196bc13ab2110c176b9a4ae5ff7ab676faaa1964b330a1383ba20e1e19645f2"},
- {file = "ruff-0.13.2-py3-none-win32.whl", hash = "sha256:7c2a0b7c1e87795fec3404a485096bcd790216c7c146a922d121d8b9c8f1aaac"},
- {file = "ruff-0.13.2-py3-none-win_amd64.whl", hash = "sha256:17d95fb32218357c89355f6f6f9a804133e404fc1f65694372e02a557edf8585"},
- {file = "ruff-0.13.2-py3-none-win_arm64.whl", hash = "sha256:da711b14c530412c827219312b7d7fbb4877fb31150083add7e8c5336549cea7"},
- {file = "ruff-0.13.2.tar.gz", hash = "sha256:cb12fffd32fb16d32cef4ed16d8c7cdc27ed7c944eaa98d99d01ab7ab0b710ff"},
-]
-
-[[package]]
-name = "taskipy"
-version = "1.14.1"
-description = "tasks runner for python projects"
-optional = false
-python-versions = "<4.0,>=3.6"
-groups = ["dev"]
-files = [
- {file = "taskipy-1.14.1-py3-none-any.whl", hash = "sha256:6e361520f29a0fd2159848e953599f9c75b1d0b047461e4965069caeb94908f1"},
- {file = "taskipy-1.14.1.tar.gz", hash = "sha256:410fbcf89692dfd4b9f39c2b49e1750b0a7b81affd0e2d7ea8c35f9d6a4774ed"},
-]
-
-[package.dependencies]
-colorama = ">=0.4.4,<0.5.0"
-mslex = {version = ">=1.1.0,<2.0.0", markers = "sys_platform == \"win32\""}
-psutil = ">=5.7.2,<7"
-tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version >= \"3.7\" and python_version < \"4.0\""}
-
-[[package]]
-name = "tinycss2"
-version = "1.4.0"
-description = "A tiny CSS parser"
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
- {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"},
- {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"},
-]
-
-[package.dependencies]
-webencodings = ">=0.4"
-
-[package.extras]
-doc = ["sphinx", "sphinx_rtd_theme"]
-test = ["pytest", "ruff"]
-
-[[package]]
-name = "tinyhtml5"
-version = "2.0.0"
-description = "HTML parser based on the WHATWG HTML specification"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "tinyhtml5-2.0.0-py3-none-any.whl", hash = "sha256:13683277c5b176d070f82d099d977194b7a1e26815b016114f581a74bbfbf47e"},
- {file = "tinyhtml5-2.0.0.tar.gz", hash = "sha256:086f998833da24c300c414d9fe81d9b368fd04cb9d2596a008421cbc705fcfcc"},
-]
-
-[package.dependencies]
-webencodings = ">=0.5.1"
-
-[package.extras]
-doc = ["sphinx", "sphinx_rtd_theme"]
-test = ["pytest", "ruff"]
-
-[[package]]
-name = "tomli"
-version = "2.2.1"
-description = "A lil' TOML parser"
-optional = false
-python-versions = ">=3.8"
-groups = ["dev"]
-files = [
- {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
- {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
- {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"},
- {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"},
- {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"},
- {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"},
- {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"},
- {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"},
- {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"},
- {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"},
- {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"},
- {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"},
- {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"},
- {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"},
- {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"},
- {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"},
- {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"},
- {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"},
- {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"},
- {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"},
- {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"},
- {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"},
- {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"},
- {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"},
- {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"},
- {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"},
- {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"},
- {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"},
- {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"},
- {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"},
- {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"},
- {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
-]
-
-[[package]]
-name = "typing-extensions"
-version = "4.15.0"
-description = "Backported and Experimental Type Hints for Python 3.9+"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-markers = "python_version < \"3.13\""
-files = [
- {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"},
- {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"},
-]
-
-[[package]]
-name = "weasyprint"
-version = "66.0"
-description = "The Awesome Document Factory"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "weasyprint-66.0-py3-none-any.whl", hash = "sha256:82b0783b726fcd318e2c977dcdddca76515b30044bc7a830cc4fbe717582a6d0"},
- {file = "weasyprint-66.0.tar.gz", hash = "sha256:da71dc87dc129ac9cffdc65e5477e90365ab9dbae45c744014ec1d06303dde40"},
-]
-
-[package.dependencies]
-cffi = ">=0.6"
-cssselect2 = ">=0.8.0"
-fonttools = {version = ">=4.0.0", extras = ["woff"]}
-Pillow = ">=9.1.0"
-pydyf = ">=0.11.0"
-Pyphen = ">=0.9.1"
-tinycss2 = ">=1.4.0"
-tinyhtml5 = ">=2.0.0b1"
-
-[package.extras]
-doc = ["furo", "sphinx"]
-test = ["pytest", "ruff"]
-
-[[package]]
-name = "webencodings"
-version = "0.5.1"
-description = "Character encoding aliases for legacy web content"
-optional = false
-python-versions = "*"
-groups = ["main"]
-files = [
- {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
- {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
-]
-
-[[package]]
-name = "zopfli"
-version = "0.2.3.post1"
-description = "Zopfli module for python"
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
- {file = "zopfli-0.2.3.post1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0137dd64a493ba6a4be37405cfd6febe650a98cc1e9dca8f6b8c63b1db11b41"},
- {file = "zopfli-0.2.3.post1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aa588b21044f8a74e423d8c8a4c7fc9988501878aacced793467010039c50734"},
- {file = "zopfli-0.2.3.post1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9f4a7ec2770e6af05f5a02733fd3900f30a9cd58e5d6d3727e14c5bcd6e7d587"},
- {file = "zopfli-0.2.3.post1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f7d69c1a7168ad0e9cb864e8663acb232986a0c9c9cb9801f56bf6214f53a54d"},
- {file = "zopfli-0.2.3.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2d2bc8129707e34c51f9352c4636ca313b52350bbb7e04637c46c1818a2a70"},
- {file = "zopfli-0.2.3.post1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:39e576f93576c5c223b41d9c780bbb91fd6db4babf3223d2a4fe7bf568e2b5a8"},
- {file = "zopfli-0.2.3.post1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cbe6df25807227519debd1a57ab236f5f6bad441500e85b13903e51f93a43214"},
- {file = "zopfli-0.2.3.post1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7cce242b5df12b2b172489daf19c32e5577dd2fac659eb4b17f6a6efb446fd5c"},
- {file = "zopfli-0.2.3.post1-cp310-cp310-win32.whl", hash = "sha256:f815fcc2b2a457977724bad97fb4854022980f51ce7b136925e336b530545ae1"},
- {file = "zopfli-0.2.3.post1-cp310-cp310-win_amd64.whl", hash = "sha256:0cc20b02a9531559945324c38302fd4ba763311632d0ec8a1a0aa9c10ea363e6"},
- {file = "zopfli-0.2.3.post1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:518f1f4ed35dd69ce06b552f84e6d081f07c552b4c661c5312d950a0b764a58a"},
- {file = "zopfli-0.2.3.post1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:615a8ac9dda265e9cc38b2a76c3142e4a9f30fea4a79c85f670850783bc6feb4"},
- {file = "zopfli-0.2.3.post1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a82fc2dbebe6eb908b9c665e71496f8525c1bc4d2e3a7a7722ef2b128b6227c8"},
- {file = "zopfli-0.2.3.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37d011e92f7b9622742c905fdbed9920a1d0361df84142807ea2a528419dea7f"},
- {file = "zopfli-0.2.3.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e63d558847166543c2c9789e6f985400a520b7eacc4b99181668b2c3aeadd352"},
- {file = "zopfli-0.2.3.post1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60db20f06c3d4c5934b16cfa62a2cc5c3f0686bffe0071ed7804d3c31ab1a04e"},
- {file = "zopfli-0.2.3.post1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:716cdbfc57bfd3d3e31a58e6246e8190e6849b7dbb7c4ce39ef8bbf0edb8f6d5"},
- {file = "zopfli-0.2.3.post1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3a89277ed5f8c0fb2d0b46d669aa0633123aa7381f1f6118c12f15e0fb48f8ca"},
- {file = "zopfli-0.2.3.post1-cp311-cp311-win32.whl", hash = "sha256:75a26a2307b10745a83b660c404416e984ee6fca515ec7f0765f69af3ce08072"},
- {file = "zopfli-0.2.3.post1-cp311-cp311-win_amd64.whl", hash = "sha256:81c341d9bb87a6dbbb0d45d6e272aca80c7c97b4b210f9b6e233bf8b87242f29"},
- {file = "zopfli-0.2.3.post1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3f0197b6aa6eb3086ae9e66d6dd86c4d502b6c68b0ec490496348ae8c05ecaef"},
- {file = "zopfli-0.2.3.post1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5fcfc0dc2761e4fcc15ad5d273b4d58c2e8e059d3214a7390d4d3c8e2aee644e"},
- {file = "zopfli-0.2.3.post1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cac2b37ab21c2b36a10b685b1893ebd6b0f83ae26004838ac817680881576567"},
- {file = "zopfli-0.2.3.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d5ab297d660b75c159190ce6d73035502310e40fd35170aed7d1a1aea7ddd65"},
- {file = "zopfli-0.2.3.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ba214f4f45bec195ee8559651154d3ac2932470b9d91c5715fc29c013349f8c"},
- {file = "zopfli-0.2.3.post1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c1e0ed5d84ffa2d677cc9582fc01e61dab2e7ef8b8996e055f0a76167b1b94df"},
- {file = "zopfli-0.2.3.post1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bfa1eb759e07d8b7aa7a310a2bc535e127ee70addf90dc8d4b946b593c3e51a8"},
- {file = "zopfli-0.2.3.post1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cd2c002f160502608dcc822ed2441a0f4509c52e86fcfd1a09e937278ed1ca14"},
- {file = "zopfli-0.2.3.post1-cp312-cp312-win32.whl", hash = "sha256:7be5cc6732eb7b4df17305d8a7b293223f934a31783a874a01164703bc1be6cd"},
- {file = "zopfli-0.2.3.post1-cp312-cp312-win_amd64.whl", hash = "sha256:4e50ffac74842c1c1018b9b73875a0d0a877c066ab06bf7cccbaa84af97e754f"},
- {file = "zopfli-0.2.3.post1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecb7572df5372abce8073df078207d9d1749f20b8b136089916a4a0868d56051"},
- {file = "zopfli-0.2.3.post1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1cf720896d2ce998bc8e051d4b4ce0d8bec007aab6243102e8e1d22a0b2fb3f"},
- {file = "zopfli-0.2.3.post1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aad740b4d4fcbaaae4887823925166ffd062db3b248b3f432198fc287381d1a"},
- {file = "zopfli-0.2.3.post1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6617fb10f9e4393b331941861d73afb119cd847e88e4974bdbe8068ceef3f73f"},
- {file = "zopfli-0.2.3.post1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a53b18797cdef27e019db595d66c4b077325afe2fd62145953275f53d84ce40c"},
- {file = "zopfli-0.2.3.post1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b78008a69300d929ca2efeffec951b64a312e9a811e265ea4a907ab546d79fa6"},
- {file = "zopfli-0.2.3.post1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa5f90d6298bda02a95bc8dc8c3c19004d5a4e44bda00b67ca7431d857b4b54"},
- {file = "zopfli-0.2.3.post1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2768c877f76c8a0e7519b1c86c93757f3c01492ddde55751e9988afb7eff64e1"},
- {file = "zopfli-0.2.3.post1-cp313-cp313-win32.whl", hash = "sha256:71390dbd3fbf6ebea9a5d85ffed8c26ee1453ee09248e9b88486e30e0397b775"},
- {file = "zopfli-0.2.3.post1-cp313-cp313-win_amd64.whl", hash = "sha256:a86eb88e06bd87e1fff31dac878965c26b0c26db59ddcf78bb0379a954b120de"},
- {file = "zopfli-0.2.3.post1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3827170de28faf144992d3d4dcf8f3998fe3c8a6a6f4a08f1d42c2ec6119d2bb"},
- {file = "zopfli-0.2.3.post1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b0ec13f352ea5ae0fc91f98a48540512eed0767d0ec4f7f3cb92d92797983d18"},
- {file = "zopfli-0.2.3.post1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f272186e03ad55e7af09ab78055535c201b1a0bcc2944edb1768298d9c483a4"},
- {file = "zopfli-0.2.3.post1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:29ea74e72ffa6e291b8c6f2504ce6c146b4fe990c724c1450eb8e4c27fd31431"},
- {file = "zopfli-0.2.3.post1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eb45a34f23da4f8bc712b6376ca5396914b0b7c09adbb001dad964eb7f3132f8"},
- {file = "zopfli-0.2.3.post1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6482db9876c68faac2d20a96b566ffbf65ddaadd97b222e4e73641f4f8722fc4"},
- {file = "zopfli-0.2.3.post1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:95a260cafd56b8fffa679918937401c80bb38e1681c448b988022e4c3610965d"},
- {file = "zopfli-0.2.3.post1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:676919fba7311125244eb0c4393679ac5fe856e5864a15d122bd815205369fa0"},
- {file = "zopfli-0.2.3.post1-cp38-cp38-win32.whl", hash = "sha256:b9026a21b6d41eb0e2e63f5bc1242c3fcc43ecb770963cda99a4307863dac12e"},
- {file = "zopfli-0.2.3.post1-cp38-cp38-win_amd64.whl", hash = "sha256:3c163911f8bad94b3e1db0a572e7c28ba681a0c91d0002ea1e4fa9264c21ef17"},
- {file = "zopfli-0.2.3.post1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b05296e8bc88c92e2b21e0a9bae4740c1551ee613c1d93a51fd28a7a0b2b6fbb"},
- {file = "zopfli-0.2.3.post1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12000a6accdd4bf0a3fa6eaa1b1c7a7bc80af0a2edf3f89d770d3dcce1d0e22"},
- {file = "zopfli-0.2.3.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a241a68581d34d67b40c425cce3d1fd211c092f99d9250947824ccba9f491949"},
- {file = "zopfli-0.2.3.post1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3657e416ffb8f31d9d3424af12122bb251befae109f2e271d87d825c92fc5b7b"},
- {file = "zopfli-0.2.3.post1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4915a41375bdee4db749ecd07d985a0486eb688a6619f713b7bf6fbfd145e960"},
- {file = "zopfli-0.2.3.post1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bbe429fc50686bb2a2608a30843e36fbaa123462a5284f136c7d9e0145220bfd"},
- {file = "zopfli-0.2.3.post1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2345e713260a350bea0b01a816a469ea356bc2d63d009a0d777691ecbbcf7493"},
- {file = "zopfli-0.2.3.post1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:fc39f5c27f962ec8660d8d20c24762431131b5d8c672b44b0a54cf2b5bcde9b9"},
- {file = "zopfli-0.2.3.post1-cp39-cp39-win32.whl", hash = "sha256:9a6aec38a989bad7ddd1ef53f1265699e49e294d08231b5313d61293f3cd6237"},
- {file = "zopfli-0.2.3.post1-cp39-cp39-win_amd64.whl", hash = "sha256:b3df42f52502438ee973042cc551877d24619fa1cd38ef7b7e9ac74200daca8b"},
- {file = "zopfli-0.2.3.post1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4c1226a7e2c7105ac31503a9bb97454743f55d88164d6d46bc138051b77f609b"},
- {file = "zopfli-0.2.3.post1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48dba9251060289101343110ab47c0756f66f809bb4d1ddbb6d5c7e7752115c5"},
- {file = "zopfli-0.2.3.post1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89899641d4de97dbad8e0cde690040d078b6aea04066dacaab98e0b5a23573f2"},
- {file = "zopfli-0.2.3.post1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3654bfc927bc478b1c3f3ff5056ed7b20a1a37fa108ca503256d0a699c03bbb1"},
- {file = "zopfli-0.2.3.post1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c4278d1873ce6e803e5d4f8d702fd3026bd67fca744aa98881324d1157ddf748"},
- {file = "zopfli-0.2.3.post1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:1d8cc06605519e82b16df090e17cb3990d1158861b2872c3117f1168777b81e4"},
- {file = "zopfli-0.2.3.post1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1f990634fd5c5c8ced8edddd8bd45fab565123b4194d6841e01811292650acae"},
- {file = "zopfli-0.2.3.post1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91a2327a4d7e77471fa4fbb26991c6de4a738c6fc6a33e09bb25f56a870a4b7b"},
- {file = "zopfli-0.2.3.post1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbe5bcf10d01aab3513550f284c09fef32f342b36f56bfae2120a9c4d12c130"},
- {file = "zopfli-0.2.3.post1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:34a99592f3d9eb6f737616b5bd74b48a589fdb3cb59a01a50d636ea81d6af272"},
- {file = "zopfli-0.2.3.post1.tar.gz", hash = "sha256:96484dc0f48be1c5d7ae9f38ed1ce41e3675fd506b27c11a6607f14b49101e99"},
-]
-
-[package.extras]
-test = ["pytest"]
-
-[metadata]
-lock-version = "2.1"
-python-versions = "^3.11"
-content-hash = "10d66250510675424f4becad4e83d9785e2d3b0282dc30eb6d8c2c17bdd2df13"
diff --git a/pyproject.toml b/pyproject.toml
index e57b0ec..d2ef3ff 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,13 +1,13 @@
-[tool.poetry]
+[project]
name = "naminter"
version = "1.0.7"
description = "A Python package and command-line interface (CLI) tool for asynchronous OSINT username enumeration using the WhatsMyName dataset"
-authors = ["3xp0rt "]
-license = "MIT"
+authors = [
+ {name = "3xp0rt", email = "contact@3xp0rt.com"}
+]
+license = {text = "MIT"}
readme = "README.md"
-packages = [{include = "naminter"}]
-include = ["naminter/cli/templates/*.html"]
-
+requires-python = ">=3.11"
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
@@ -21,7 +21,6 @@ classifiers = [
"Topic :: Internet",
"Topic :: Utilities"
]
-
keywords = [
"osint",
"username",
@@ -33,40 +32,36 @@ keywords = [
"investigation",
"naminter"
]
+dependencies = [
+ "click>=8.3.0",
+ "curl-cffi>=0.13.0",
+ "aiofiles>=25.1.0",
+ "jinja2>=3.1.6",
+ "jsonschema>=4.25.1",
+ "rich>=14.2.0",
+ "rich-click>=1.9.4",
+ "weasyprint>=66.0",
+]
-[tool.poetry.urls]
+[project.optional-dependencies]
+dev = [
+ "ruff>=0.14.4",
+]
+
+[project.urls]
Homepage = "https://github.com/3xp0rt/naminter"
Repository = "https://github.com/3xp0rt/naminter"
-BugTracker = "https://github.com/3xp0rt/naminter/issues"
+"Bug Tracker" = "https://github.com/3xp0rt/naminter/issues"
Documentation = "https://github.com/3xp0rt/naminter#readme"
Source = "https://github.com/3xp0rt/naminter"
-[tool.poetry.dependencies]
-python = "^3.11"
-click = "^8.3.0"
-curl-cffi = "^0.13.0"
-aiofiles = "^24.1.0"
-jinja2 = "^3.1.6"
-jsonschema = "^4.25.1"
-rich = "^14.1.0"
-rich-click = "^1.9.1"
-weasyprint = "^66.0"
-
-[tool.poetry.group.dev.dependencies]
-ruff = "^0.13.2"
-taskipy = "^1.14.1"
-
-[tool.poetry.scripts]
+[project.scripts]
naminter = "naminter.cli.main:entry_point"
[build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
+requires = ["hatchling"]
+build-backend = "hatchling.build"
-# Taskipy tasks
-[tool.taskipy.tasks]
-lint = "ruff check .; ruff check . --diff"
-format = "ruff check . --fix; ruff format ."
# Ruff configuration
[tool.ruff]
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000..2f751e5
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,847 @@
+version = 1
+revision = 3
+requires-python = ">=3.11"
+
+[[package]]
+name = "aiofiles"
+version = "25.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" },
+]
+
+[[package]]
+name = "attrs"
+version = "25.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" },
+]
+
+[[package]]
+name = "brotli"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2f/c2/f9e977608bdf958650638c3f1e28f85a1b075f075ebbe77db8555463787b/Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724", size = 7372270, upload-time = "2023-09-07T14:05:41.643Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/96/12/ad41e7fadd5db55459c4c401842b47f7fee51068f86dd2894dd0dcfc2d2a/Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc", size = 873068, upload-time = "2023-09-07T14:03:37.779Z" },
+ { url = "https://files.pythonhosted.org/packages/95/4e/5afab7b2b4b61a84e9c75b17814198ce515343a44e2ed4488fac314cd0a9/Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6", size = 446244, upload-time = "2023-09-07T14:03:39.223Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/e6/f305eb61fb9a8580c525478a4a34c5ae1a9bcb12c3aee619114940bc513d/Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd", size = 2906500, upload-time = "2023-09-07T14:03:40.858Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/4f/af6846cfbc1550a3024e5d3775ede1e00474c40882c7bf5b37a43ca35e91/Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf", size = 2943950, upload-time = "2023-09-07T14:03:42.896Z" },
+ { url = "https://files.pythonhosted.org/packages/b3/e7/ca2993c7682d8629b62630ebf0d1f3bb3d579e667ce8e7ca03a0a0576a2d/Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61", size = 2918527, upload-time = "2023-09-07T14:03:44.552Z" },
+ { url = "https://files.pythonhosted.org/packages/b3/96/da98e7bedc4c51104d29cc61e5f449a502dd3dbc211944546a4cc65500d3/Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327", size = 2845489, upload-time = "2023-09-07T14:03:46.594Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/ef/ccbc16947d6ce943a7f57e1a40596c75859eeb6d279c6994eddd69615265/Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd", size = 2914080, upload-time = "2023-09-07T14:03:48.204Z" },
+ { url = "https://files.pythonhosted.org/packages/80/d6/0bd38d758d1afa62a5524172f0b18626bb2392d717ff94806f741fcd5ee9/Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9", size = 2813051, upload-time = "2023-09-07T14:03:50.348Z" },
+ { url = "https://files.pythonhosted.org/packages/14/56/48859dd5d129d7519e001f06dcfbb6e2cf6db92b2702c0c2ce7d97e086c1/Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265", size = 2938172, upload-time = "2023-09-07T14:03:52.395Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/77/a236d5f8cd9e9f4348da5acc75ab032ab1ab2c03cc8f430d24eea2672888/Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8", size = 2933023, upload-time = "2023-09-07T14:03:53.96Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/87/3b283efc0f5cb35f7f84c0c240b1e1a1003a5e47141a4881bf87c86d0ce2/Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f", size = 2935871, upload-time = "2024-10-18T12:32:16.688Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/eb/2be4cc3e2141dc1a43ad4ca1875a72088229de38c68e842746b342667b2a/Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757", size = 2847784, upload-time = "2024-10-18T12:32:18.459Z" },
+ { url = "https://files.pythonhosted.org/packages/66/13/b58ddebfd35edde572ccefe6890cf7c493f0c319aad2a5badee134b4d8ec/Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0", size = 3034905, upload-time = "2024-10-18T12:32:20.192Z" },
+ { url = "https://files.pythonhosted.org/packages/84/9c/bc96b6c7db824998a49ed3b38e441a2cae9234da6fa11f6ed17e8cf4f147/Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b", size = 2929467, upload-time = "2024-10-18T12:32:21.774Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/71/8f161dee223c7ff7fea9d44893fba953ce97cf2c3c33f78ba260a91bcff5/Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50", size = 333169, upload-time = "2023-09-07T14:03:55.404Z" },
+ { url = "https://files.pythonhosted.org/packages/02/8a/fece0ee1057643cb2a5bbf59682de13f1725f8482b2c057d4e799d7ade75/Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1", size = 357253, upload-time = "2023-09-07T14:03:56.643Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/d0/5373ae13b93fe00095a58efcbce837fd470ca39f703a235d2a999baadfbc/Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28", size = 815693, upload-time = "2024-10-18T12:32:23.824Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/48/f6e1cdf86751300c288c1459724bfa6917a80e30dbfc326f92cea5d3683a/Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f", size = 422489, upload-time = "2024-10-18T12:32:25.641Z" },
+ { url = "https://files.pythonhosted.org/packages/06/88/564958cedce636d0f1bed313381dfc4b4e3d3f6015a63dae6146e1b8c65c/Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409", size = 873081, upload-time = "2023-09-07T14:03:57.967Z" },
+ { url = "https://files.pythonhosted.org/packages/58/79/b7026a8bb65da9a6bb7d14329fd2bd48d2b7f86d7329d5cc8ddc6a90526f/Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2", size = 446244, upload-time = "2023-09-07T14:03:59.319Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/18/c18c32ecea41b6c0004e15606e274006366fe19436b6adccc1ae7b2e50c2/Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451", size = 2906505, upload-time = "2023-09-07T14:04:01.327Z" },
+ { url = "https://files.pythonhosted.org/packages/08/c8/69ec0496b1ada7569b62d85893d928e865df29b90736558d6c98c2031208/Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91", size = 2944152, upload-time = "2023-09-07T14:04:03.033Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/fb/0517cea182219d6768113a38167ef6d4eb157a033178cc938033a552ed6d/Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408", size = 2919252, upload-time = "2023-09-07T14:04:04.675Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/53/73a3431662e33ae61a5c80b1b9d2d18f58dfa910ae8dd696e57d39f1a2f5/Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0", size = 2845955, upload-time = "2023-09-07T14:04:06.585Z" },
+ { url = "https://files.pythonhosted.org/packages/55/ac/bd280708d9c5ebdbf9de01459e625a3e3803cce0784f47d633562cf40e83/Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc", size = 2914304, upload-time = "2023-09-07T14:04:08.668Z" },
+ { url = "https://files.pythonhosted.org/packages/76/58/5c391b41ecfc4527d2cc3350719b02e87cb424ef8ba2023fb662f9bf743c/Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180", size = 2814452, upload-time = "2023-09-07T14:04:10.736Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/4e/91b8256dfe99c407f174924b65a01f5305e303f486cc7a2e8a5d43c8bec3/Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248", size = 2938751, upload-time = "2023-09-07T14:04:12.875Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/a6/e2a39a5d3b412938362bbbeba5af904092bf3f95b867b4a3eb856104074e/Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966", size = 2933757, upload-time = "2023-09-07T14:04:14.551Z" },
+ { url = "https://files.pythonhosted.org/packages/13/f0/358354786280a509482e0e77c1a5459e439766597d280f28cb097642fc26/Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9", size = 2936146, upload-time = "2024-10-18T12:32:27.257Z" },
+ { url = "https://files.pythonhosted.org/packages/80/f7/daf538c1060d3a88266b80ecc1d1c98b79553b3f117a485653f17070ea2a/Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb", size = 2848055, upload-time = "2024-10-18T12:32:29.376Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/cf/0eaa0585c4077d3c2d1edf322d8e97aabf317941d3a72d7b3ad8bce004b0/Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111", size = 3035102, upload-time = "2024-10-18T12:32:31.371Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/63/1c1585b2aa554fe6dbce30f0c18bdbc877fa9a1bf5ff17677d9cca0ac122/Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839", size = 2930029, upload-time = "2024-10-18T12:32:33.293Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/3b/4e3fd1893eb3bbfef8e5a80d4508bec17a57bb92d586c85c12d28666bb13/Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0", size = 333276, upload-time = "2023-09-07T14:04:16.49Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/d5/942051b45a9e883b5b6e98c041698b1eb2012d25e5948c58d6bf85b1bb43/Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951", size = 357255, upload-time = "2023-09-07T14:04:17.83Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/9f/fb37bb8ffc52a8da37b1c03c459a8cd55df7a57bdccd8831d500e994a0ca/Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5", size = 815681, upload-time = "2024-10-18T12:32:34.942Z" },
+ { url = "https://files.pythonhosted.org/packages/06/b3/dbd332a988586fefb0aa49c779f59f47cae76855c2d00f450364bb574cac/Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8", size = 422475, upload-time = "2024-10-18T12:32:36.485Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/80/6aaddc2f63dbcf2d93c2d204e49c11a9ec93a8c7c63261e2b4bd35198283/Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f", size = 2906173, upload-time = "2024-10-18T12:32:37.978Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/1d/e6ca79c96ff5b641df6097d299347507d39a9604bde8915e76bf026d6c77/Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648", size = 2943803, upload-time = "2024-10-18T12:32:39.606Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/a3/d98d2472e0130b7dd3acdbb7f390d478123dbf62b7d32bda5c830a96116d/Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0", size = 2918946, upload-time = "2024-10-18T12:32:41.679Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/a5/c69e6d272aee3e1423ed005d8915a7eaa0384c7de503da987f2d224d0721/Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089", size = 2845707, upload-time = "2024-10-18T12:32:43.478Z" },
+ { url = "https://files.pythonhosted.org/packages/58/9f/4149d38b52725afa39067350696c09526de0125ebfbaab5acc5af28b42ea/Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368", size = 2936231, upload-time = "2024-10-18T12:32:45.224Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/5a/145de884285611838a16bebfdb060c231c52b8f84dfbe52b852a15780386/Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c", size = 2848157, upload-time = "2024-10-18T12:32:46.894Z" },
+ { url = "https://files.pythonhosted.org/packages/50/ae/408b6bfb8525dadebd3b3dd5b19d631da4f7d46420321db44cd99dcf2f2c/Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284", size = 3035122, upload-time = "2024-10-18T12:32:48.844Z" },
+ { url = "https://files.pythonhosted.org/packages/af/85/a94e5cfaa0ca449d8f91c3d6f78313ebf919a0dbd55a100c711c6e9655bc/Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7", size = 2930206, upload-time = "2024-10-18T12:32:51.198Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/f0/a61d9262cd01351df22e57ad7c34f66794709acab13f34be2675f45bf89d/Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0", size = 333804, upload-time = "2024-10-18T12:32:52.661Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/c1/ec214e9c94000d1c1974ec67ced1c970c148aa6b8d8373066123fc3dbf06/Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b", size = 358517, upload-time = "2024-10-18T12:32:54.066Z" },
+]
+
+[[package]]
+name = "brotlicffi"
+version = "1.1.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/95/9d/70caa61192f570fcf0352766331b735afa931b4c6bc9a348a0925cc13288/brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13", size = 465192, upload-time = "2023-09-14T14:22:40.707Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a2/11/7b96009d3dcc2c931e828ce1e157f03824a69fb728d06bfd7b2fc6f93718/brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851", size = 453786, upload-time = "2023-09-14T14:21:57.72Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/e6/a8f46f4a4ee7856fbd6ac0c6fb0dc65ed181ba46cd77875b8d9bbe494d9e/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b", size = 2911165, upload-time = "2023-09-14T14:21:59.613Z" },
+ { url = "https://files.pythonhosted.org/packages/be/20/201559dff14e83ba345a5ec03335607e47467b6633c210607e693aefac40/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814", size = 2927895, upload-time = "2023-09-14T14:22:01.22Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/15/695b1409264143be3c933f708a3f81d53c4a1e1ebbc06f46331decbf6563/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820", size = 2851834, upload-time = "2023-09-14T14:22:03.571Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/40/b961a702463b6005baf952794c2e9e0099bde657d0d7e007f923883b907f/brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb", size = 341731, upload-time = "2023-09-14T14:22:05.74Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/fa/5408a03c041114ceab628ce21766a4ea882aa6f6f0a800e04ee3a30ec6b9/brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613", size = 366783, upload-time = "2023-09-14T14:22:07.096Z" },
+]
+
+[[package]]
+name = "certifi"
+version = "2025.10.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" },
+]
+
+[[package]]
+name = "cffi"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pycparser", marker = "implementation_name != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" },
+ { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" },
+ { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" },
+ { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" },
+ { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" },
+ { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" },
+ { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" },
+ { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" },
+ { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
+ { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
+ { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
+ { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" },
+ { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" },
+ { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" },
+ { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
+ { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
+ { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" },
+ { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
+ { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
+ { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
+]
+
+[[package]]
+name = "click"
+version = "8.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
+]
+
+[[package]]
+name = "cssselect2"
+version = "0.8.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "tinycss2" },
+ { name = "webencodings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9f/86/fd7f58fc498b3166f3a7e8e0cddb6e620fe1da35b02248b1bd59e95dbaaa/cssselect2-0.8.0.tar.gz", hash = "sha256:7674ffb954a3b46162392aee2a3a0aedb2e14ecf99fcc28644900f4e6e3e9d3a", size = 35716, upload-time = "2025-03-05T14:46:07.988Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0f/e7/aa315e6a749d9b96c2504a1ba0ba031ba2d0517e972ce22682e3fccecb09/cssselect2-0.8.0-py3-none-any.whl", hash = "sha256:46fc70ebc41ced7a32cd42d58b1884d72ade23d21e5a4eaaf022401c13f0e76e", size = 15454, upload-time = "2025-03-05T14:46:06.463Z" },
+]
+
+[[package]]
+name = "curl-cffi"
+version = "0.13.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "cffi" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4e/3d/f39ca1f8fdf14408888e7c25e15eed63eac5f47926e206fb93300d28378c/curl_cffi-0.13.0.tar.gz", hash = "sha256:62ecd90a382bd5023750e3606e0aa7cb1a3a8ba41c14270b8e5e149ebf72c5ca", size = 151303, upload-time = "2025-08-06T13:05:42.988Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/19/d1/acabfd460f1de26cad882e5ef344d9adde1507034528cb6f5698a2e6a2f1/curl_cffi-0.13.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:434cadbe8df2f08b2fc2c16dff2779fb40b984af99c06aa700af898e185bb9db", size = 5686337, upload-time = "2025-08-06T13:05:28.985Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/1c/cdb4fb2d16a0e9de068e0e5bc02094e105ce58a687ff30b4c6f88e25a057/curl_cffi-0.13.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:59afa877a9ae09efa04646a7d068eeea48915a95d9add0a29854e7781679fcd7", size = 2994613, upload-time = "2025-08-06T13:05:31.027Z" },
+ { url = "https://files.pythonhosted.org/packages/04/3e/fdf617c1ec18c3038b77065d484d7517bb30f8fb8847224eb1f601a4e8bc/curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d06ed389e45a7ca97b17c275dbedd3d6524560270e675c720e93a2018a766076", size = 7931353, upload-time = "2025-08-06T13:05:32.273Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/10/6f30c05d251cf03ddc2b9fd19880f3cab8c193255e733444a2df03b18944/curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4e0de45ab3b7a835c72bd53640c2347415111b43421b5c7a1a0b18deae2e541", size = 7486378, upload-time = "2025-08-06T13:05:33.672Z" },
+ { url = "https://files.pythonhosted.org/packages/77/81/5bdb7dd0d669a817397b2e92193559bf66c3807f5848a48ad10cf02bf6c7/curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8eb4083371bbb94e9470d782de235fb5268bf43520de020c9e5e6be8f395443f", size = 8328585, upload-time = "2025-08-06T13:05:35.28Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/c1/df5c6b4cfad41c08442e0f727e449f4fb5a05f8aa564d1acac29062e9e8e/curl_cffi-0.13.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:28911b526e8cd4aa0e5e38401bfe6887e8093907272f1f67ca22e6beb2933a51", size = 8739831, upload-time = "2025-08-06T13:05:37.078Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/91/6dd1910a212f2e8eafe57877bcf97748eb24849e1511a266687546066b8a/curl_cffi-0.13.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d433ffcb455ab01dd0d7bde47109083aa38b59863aa183d29c668ae4c96bf8e", size = 8711908, upload-time = "2025-08-06T13:05:38.741Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/e4/15a253f9b4bf8d008c31e176c162d2704a7e0c5e24d35942f759df107b68/curl_cffi-0.13.0-cp39-abi3-win_amd64.whl", hash = "sha256:66a6b75ce971de9af64f1b6812e275f60b88880577bac47ef1fa19694fa21cd3", size = 1614510, upload-time = "2025-08-06T13:05:40.451Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/0f/9c5275f17ad6ff5be70edb8e0120fdc184a658c9577ca426d4230f654beb/curl_cffi-0.13.0-cp39-abi3-win_arm64.whl", hash = "sha256:d438a3b45244e874794bc4081dc1e356d2bb926dcc7021e5a8fef2e2105ef1d8", size = 1365753, upload-time = "2025-08-06T13:05:41.879Z" },
+]
+
+[[package]]
+name = "fonttools"
+version = "4.60.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4b/42/97a13e47a1e51a5a7142475bbcf5107fe3a68fc34aef331c897d5fb98ad0/fonttools-4.60.1.tar.gz", hash = "sha256:ef00af0439ebfee806b25f24c8f92109157ff3fac5731dc7867957812e87b8d9", size = 3559823, upload-time = "2025-09-29T21:13:27.129Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ea/85/639aa9bface1537e0fb0f643690672dde0695a5bbbc90736bc571b0b1941/fonttools-4.60.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7b4c32e232a71f63a5d00259ca3d88345ce2a43295bb049d21061f338124246f", size = 2831872, upload-time = "2025-09-29T21:11:20.329Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/47/3c63158459c95093be9618794acb1067b3f4d30dcc5c3e8114b70e67a092/fonttools-4.60.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3630e86c484263eaac71d117085d509cbcf7b18f677906824e4bace598fb70d2", size = 2356990, upload-time = "2025-09-29T21:11:22.754Z" },
+ { url = "https://files.pythonhosted.org/packages/94/dd/1934b537c86fcf99f9761823f1fc37a98fbd54568e8e613f29a90fed95a9/fonttools-4.60.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5c1015318e4fec75dd4943ad5f6a206d9727adf97410d58b7e32ab644a807914", size = 5042189, upload-time = "2025-09-29T21:11:25.061Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/d2/9f4e4c4374dd1daa8367784e1bd910f18ba886db1d6b825b12edf6db3edc/fonttools-4.60.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e6c58beb17380f7c2ea181ea11e7db8c0ceb474c9dd45f48e71e2cb577d146a1", size = 4978683, upload-time = "2025-09-29T21:11:27.693Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/c4/0fb2dfd1ecbe9a07954cc13414713ed1eab17b1c0214ef07fc93df234a47/fonttools-4.60.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec3681a0cb34c255d76dd9d865a55f260164adb9fa02628415cdc2d43ee2c05d", size = 5021372, upload-time = "2025-09-29T21:11:30.257Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/d5/495fc7ae2fab20223cc87179a8f50f40f9a6f821f271ba8301ae12bb580f/fonttools-4.60.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4b5c37a5f40e4d733d3bbaaef082149bee5a5ea3156a785ff64d949bd1353fa", size = 5132562, upload-time = "2025-09-29T21:11:32.737Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/fa/021dab618526323c744e0206b3f5c8596a2e7ae9aa38db5948a131123e83/fonttools-4.60.1-cp311-cp311-win32.whl", hash = "sha256:398447f3d8c0c786cbf1209711e79080a40761eb44b27cdafffb48f52bcec258", size = 2230288, upload-time = "2025-09-29T21:11:35.015Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/78/0e1a6d22b427579ea5c8273e1c07def2f325b977faaf60bb7ddc01456cb1/fonttools-4.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:d066ea419f719ed87bc2c99a4a4bfd77c2e5949cb724588b9dd58f3fd90b92bf", size = 2278184, upload-time = "2025-09-29T21:11:37.434Z" },
+ { url = "https://files.pythonhosted.org/packages/e3/f7/a10b101b7a6f8836a5adb47f2791f2075d044a6ca123f35985c42edc82d8/fonttools-4.60.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7b0c6d57ab00dae9529f3faf187f2254ea0aa1e04215cf2f1a8ec277c96661bc", size = 2832953, upload-time = "2025-09-29T21:11:39.616Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/fe/7bd094b59c926acf2304d2151354ddbeb74b94812f3dc943c231db09cb41/fonttools-4.60.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:839565cbf14645952d933853e8ade66a463684ed6ed6c9345d0faf1f0e868877", size = 2352706, upload-time = "2025-09-29T21:11:41.826Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/ca/4bb48a26ed95a1e7eba175535fe5805887682140ee0a0d10a88e1de84208/fonttools-4.60.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8177ec9676ea6e1793c8a084a90b65a9f778771998eb919d05db6d4b1c0b114c", size = 4923716, upload-time = "2025-09-29T21:11:43.893Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/9f/2cb82999f686c1d1ddf06f6ae1a9117a880adbec113611cc9d22b2fdd465/fonttools-4.60.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:996a4d1834524adbb423385d5a629b868ef9d774670856c63c9a0408a3063401", size = 4968175, upload-time = "2025-09-29T21:11:46.439Z" },
+ { url = "https://files.pythonhosted.org/packages/18/79/be569699e37d166b78e6218f2cde8c550204f2505038cdd83b42edc469b9/fonttools-4.60.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a46b2f450bc79e06ef3b6394f0c68660529ed51692606ad7f953fc2e448bc903", size = 4911031, upload-time = "2025-09-29T21:11:48.977Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/9f/89411cc116effaec5260ad519162f64f9c150e5522a27cbb05eb62d0c05b/fonttools-4.60.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6ec722ee589e89a89f5b7574f5c45604030aa6ae24cb2c751e2707193b466fed", size = 5062966, upload-time = "2025-09-29T21:11:54.344Z" },
+ { url = "https://files.pythonhosted.org/packages/62/a1/f888221934b5731d46cb9991c7a71f30cb1f97c0ef5fcf37f8da8fce6c8e/fonttools-4.60.1-cp312-cp312-win32.whl", hash = "sha256:b2cf105cee600d2de04ca3cfa1f74f1127f8455b71dbad02b9da6ec266e116d6", size = 2218750, upload-time = "2025-09-29T21:11:56.601Z" },
+ { url = "https://files.pythonhosted.org/packages/88/8f/a55b5550cd33cd1028601df41acd057d4be20efa5c958f417b0c0613924d/fonttools-4.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:992775c9fbe2cf794786fa0ffca7f09f564ba3499b8fe9f2f80bd7197db60383", size = 2267026, upload-time = "2025-09-29T21:11:58.852Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/5b/cdd2c612277b7ac7ec8c0c9bc41812c43dc7b2d5f2b0897e15fdf5a1f915/fonttools-4.60.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f68576bb4bbf6060c7ab047b1574a1ebe5c50a17de62830079967b211059ebb", size = 2825777, upload-time = "2025-09-29T21:12:01.22Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/8a/de9cc0540f542963ba5e8f3a1f6ad48fa211badc3177783b9d5cadf79b5d/fonttools-4.60.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:eedacb5c5d22b7097482fa834bda0dafa3d914a4e829ec83cdea2a01f8c813c4", size = 2348080, upload-time = "2025-09-29T21:12:03.785Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/8b/371ab3cec97ee3fe1126b3406b7abd60c8fec8975fd79a3c75cdea0c3d83/fonttools-4.60.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b33a7884fabd72bdf5f910d0cf46be50dce86a0362a65cfc746a4168c67eb96c", size = 4903082, upload-time = "2025-09-29T21:12:06.382Z" },
+ { url = "https://files.pythonhosted.org/packages/04/05/06b1455e4bc653fcb2117ac3ef5fa3a8a14919b93c60742d04440605d058/fonttools-4.60.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2409d5fb7b55fd70f715e6d34e7a6e4f7511b8ad29a49d6df225ee76da76dd77", size = 4960125, upload-time = "2025-09-29T21:12:09.314Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/37/f3b840fcb2666f6cb97038793606bdd83488dca2d0b0fc542ccc20afa668/fonttools-4.60.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c8651e0d4b3bdeda6602b85fdc2abbefc1b41e573ecb37b6779c4ca50753a199", size = 4901454, upload-time = "2025-09-29T21:12:11.931Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/9e/eb76f77e82f8d4a46420aadff12cec6237751b0fb9ef1de373186dcffb5f/fonttools-4.60.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:145daa14bf24824b677b9357c5e44fd8895c2a8f53596e1b9ea3496081dc692c", size = 5044495, upload-time = "2025-09-29T21:12:15.241Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/b3/cede8f8235d42ff7ae891bae8d619d02c8ac9fd0cfc450c5927a6200c70d/fonttools-4.60.1-cp313-cp313-win32.whl", hash = "sha256:2299df884c11162617a66b7c316957d74a18e3758c0274762d2cc87df7bc0272", size = 2217028, upload-time = "2025-09-29T21:12:17.96Z" },
+ { url = "https://files.pythonhosted.org/packages/75/4d/b022c1577807ce8b31ffe055306ec13a866f2337ecee96e75b24b9b753ea/fonttools-4.60.1-cp313-cp313-win_amd64.whl", hash = "sha256:a3db56f153bd4c5c2b619ab02c5db5192e222150ce5a1bc10f16164714bc39ac", size = 2266200, upload-time = "2025-09-29T21:12:20.14Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/83/752ca11c1aa9a899b793a130f2e466b79ea0cf7279c8d79c178fc954a07b/fonttools-4.60.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:a884aef09d45ba1206712c7dbda5829562d3fea7726935d3289d343232ecb0d3", size = 2822830, upload-time = "2025-09-29T21:12:24.406Z" },
+ { url = "https://files.pythonhosted.org/packages/57/17/bbeab391100331950a96ce55cfbbff27d781c1b85ebafb4167eae50d9fe3/fonttools-4.60.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8a44788d9d91df72d1a5eac49b31aeb887a5f4aab761b4cffc4196c74907ea85", size = 2345524, upload-time = "2025-09-29T21:12:26.819Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/2e/d4831caa96d85a84dd0da1d9f90d81cec081f551e0ea216df684092c6c97/fonttools-4.60.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e852d9dda9f93ad3651ae1e3bb770eac544ec93c3807888798eccddf84596537", size = 4843490, upload-time = "2025-09-29T21:12:29.123Z" },
+ { url = "https://files.pythonhosted.org/packages/49/13/5e2ea7c7a101b6fc3941be65307ef8df92cbbfa6ec4804032baf1893b434/fonttools-4.60.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:154cb6ee417e417bf5f7c42fe25858c9140c26f647c7347c06f0cc2d47eff003", size = 4944184, upload-time = "2025-09-29T21:12:31.414Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/2b/cf9603551c525b73fc47c52ee0b82a891579a93d9651ed694e4e2cd08bb8/fonttools-4.60.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5664fd1a9ea7f244487ac8f10340c4e37664675e8667d6fee420766e0fb3cf08", size = 4890218, upload-time = "2025-09-29T21:12:33.936Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/2f/933d2352422e25f2376aae74f79eaa882a50fb3bfef3c0d4f50501267101/fonttools-4.60.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:583b7f8e3c49486e4d489ad1deacfb8d5be54a8ef34d6df824f6a171f8511d99", size = 4999324, upload-time = "2025-09-29T21:12:36.637Z" },
+ { url = "https://files.pythonhosted.org/packages/38/99/234594c0391221f66216bc2c886923513b3399a148defaccf81dc3be6560/fonttools-4.60.1-cp314-cp314-win32.whl", hash = "sha256:66929e2ea2810c6533a5184f938502cfdaea4bc3efb7130d8cc02e1c1b4108d6", size = 2220861, upload-time = "2025-09-29T21:12:39.108Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/1d/edb5b23726dde50fc4068e1493e4fc7658eeefcaf75d4c5ffce067d07ae5/fonttools-4.60.1-cp314-cp314-win_amd64.whl", hash = "sha256:f3d5be054c461d6a2268831f04091dc82753176f6ea06dc6047a5e168265a987", size = 2270934, upload-time = "2025-09-29T21:12:41.339Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/da/1392aaa2170adc7071fe7f9cfd181a5684a7afcde605aebddf1fb4d76df5/fonttools-4.60.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:b6379e7546ba4ae4b18f8ae2b9bc5960936007a1c0e30b342f662577e8bc3299", size = 2894340, upload-time = "2025-09-29T21:12:43.774Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/a7/3b9f16e010d536ce567058b931a20b590d8f3177b2eda09edd92e392375d/fonttools-4.60.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9d0ced62b59e0430b3690dbc5373df1c2aa7585e9a8ce38eff87f0fd993c5b01", size = 2375073, upload-time = "2025-09-29T21:12:46.437Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/b5/e9bcf51980f98e59bb5bb7c382a63c6f6cac0eec5f67de6d8f2322382065/fonttools-4.60.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:875cb7764708b3132637f6c5fb385b16eeba0f7ac9fa45a69d35e09b47045801", size = 4849758, upload-time = "2025-09-29T21:12:48.694Z" },
+ { url = "https://files.pythonhosted.org/packages/e3/dc/1d2cf7d1cba82264b2f8385db3f5960e3d8ce756b4dc65b700d2c496f7e9/fonttools-4.60.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a184b2ea57b13680ab6d5fbde99ccef152c95c06746cb7718c583abd8f945ccc", size = 5085598, upload-time = "2025-09-29T21:12:51.081Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/4d/279e28ba87fb20e0c69baf72b60bbf1c4d873af1476806a7b5f2b7fac1ff/fonttools-4.60.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:026290e4ec76583881763fac284aca67365e0be9f13a7fb137257096114cb3bc", size = 4957603, upload-time = "2025-09-29T21:12:53.423Z" },
+ { url = "https://files.pythonhosted.org/packages/78/d4/ff19976305e0c05aa3340c805475abb00224c954d3c65e82c0a69633d55d/fonttools-4.60.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f0e8817c7d1a0c2eedebf57ef9a9896f3ea23324769a9a2061a80fe8852705ed", size = 4974184, upload-time = "2025-09-29T21:12:55.962Z" },
+ { url = "https://files.pythonhosted.org/packages/63/22/8553ff6166f5cd21cfaa115aaacaa0dc73b91c079a8cfd54a482cbc0f4f5/fonttools-4.60.1-cp314-cp314t-win32.whl", hash = "sha256:1410155d0e764a4615774e5c2c6fc516259fe3eca5882f034eb9bfdbee056259", size = 2282241, upload-time = "2025-09-29T21:12:58.179Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cb/fa7b4d148e11d5a72761a22e595344133e83a9507a4c231df972e657579b/fonttools-4.60.1-cp314-cp314t-win_amd64.whl", hash = "sha256:022beaea4b73a70295b688f817ddc24ed3e3418b5036ffcd5658141184ef0d0c", size = 2345760, upload-time = "2025-09-29T21:13:00.375Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/93/0dd45cd283c32dea1545151d8c3637b4b8c53cdb3a625aeb2885b184d74d/fonttools-4.60.1-py3-none-any.whl", hash = "sha256:906306ac7afe2156fcf0042173d6ebbb05416af70f6b370967b47f8f00103bbb", size = 1143175, upload-time = "2025-09-29T21:13:24.134Z" },
+]
+
+[package.optional-dependencies]
+woff = [
+ { name = "brotli", marker = "platform_python_implementation == 'CPython'" },
+ { name = "brotlicffi", marker = "platform_python_implementation != 'CPython'" },
+ { name = "zopfli" },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
+]
+
+[[package]]
+name = "jsonschema"
+version = "4.25.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "attrs" },
+ { name = "jsonschema-specifications" },
+ { name = "referencing" },
+ { name = "rpds-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" },
+]
+
+[[package]]
+name = "jsonschema-specifications"
+version = "2025.9.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "referencing" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" },
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "4.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mdurl" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" },
+ { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" },
+ { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" },
+ { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" },
+ { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" },
+ { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" },
+ { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" },
+ { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" },
+ { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" },
+ { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" },
+ { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" },
+ { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" },
+ { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" },
+ { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" },
+ { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" },
+ { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" },
+ { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" },
+ { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" },
+ { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" },
+ { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" },
+ { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" },
+ { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" },
+ { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" },
+ { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" },
+ { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" },
+ { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" },
+ { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" },
+ { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" },
+ { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" },
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
+]
+
+[[package]]
+name = "naminter"
+version = "1.0.7"
+source = { editable = "." }
+dependencies = [
+ { name = "aiofiles" },
+ { name = "click" },
+ { name = "curl-cffi" },
+ { name = "jinja2" },
+ { name = "jsonschema" },
+ { name = "rich" },
+ { name = "rich-click" },
+ { name = "weasyprint" },
+]
+
+[package.optional-dependencies]
+dev = [
+ { name = "ruff" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "aiofiles", specifier = ">=25.1.0" },
+ { name = "click", specifier = ">=8.3.0" },
+ { name = "curl-cffi", specifier = ">=0.13.0" },
+ { name = "jinja2", specifier = ">=3.1.6" },
+ { name = "jsonschema", specifier = ">=4.25.1" },
+ { name = "rich", specifier = ">=14.2.0" },
+ { name = "rich-click", specifier = ">=1.9.4" },
+ { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.14.4" },
+ { name = "weasyprint", specifier = ">=66.0" },
+]
+provides-extras = ["dev"]
+
+[[package]]
+name = "pillow"
+version = "12.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0e/5a/a2f6773b64edb921a756eb0729068acad9fc5208a53f4a349396e9436721/pillow-12.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0fd00cac9c03256c8b2ff58f162ebcd2587ad3e1f2e397eab718c47e24d231cc", size = 5289798, upload-time = "2025-10-15T18:21:47.763Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/05/069b1f8a2e4b5a37493da6c5868531c3f77b85e716ad7a590ef87d58730d/pillow-12.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3475b96f5908b3b16c47533daaa87380c491357d197564e0ba34ae75c0f3257", size = 4650589, upload-time = "2025-10-15T18:21:49.515Z" },
+ { url = "https://files.pythonhosted.org/packages/61/e3/2c820d6e9a36432503ead175ae294f96861b07600a7156154a086ba7111a/pillow-12.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:110486b79f2d112cf6add83b28b627e369219388f64ef2f960fef9ebaf54c642", size = 6230472, upload-time = "2025-10-15T18:21:51.052Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/89/63427f51c64209c5e23d4d52071c8d0f21024d3a8a487737caaf614a5795/pillow-12.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5269cc1caeedb67e6f7269a42014f381f45e2e7cd42d834ede3c703a1d915fe3", size = 8033887, upload-time = "2025-10-15T18:21:52.604Z" },
+ { url = "https://files.pythonhosted.org/packages/f6/1b/c9711318d4901093c15840f268ad649459cd81984c9ec9887756cca049a5/pillow-12.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa5129de4e174daccbc59d0a3b6d20eaf24417d59851c07ebb37aeb02947987c", size = 6343964, upload-time = "2025-10-15T18:21:54.619Z" },
+ { url = "https://files.pythonhosted.org/packages/41/1e/db9470f2d030b4995083044cd8738cdd1bf773106819f6d8ba12597d5352/pillow-12.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bee2a6db3a7242ea309aa7ee8e2780726fed67ff4e5b40169f2c940e7eb09227", size = 7034756, upload-time = "2025-10-15T18:21:56.151Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/b0/6177a8bdd5ee4ed87cba2de5a3cc1db55ffbbec6176784ce5bb75aa96798/pillow-12.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:90387104ee8400a7b4598253b4c406f8958f59fcf983a6cea2b50d59f7d63d0b", size = 6458075, upload-time = "2025-10-15T18:21:57.759Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/5e/61537aa6fa977922c6a03253a0e727e6e4a72381a80d63ad8eec350684f2/pillow-12.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc91a56697869546d1b8f0a3ff35224557ae7f881050e99f615e0119bf934b4e", size = 7125955, upload-time = "2025-10-15T18:21:59.372Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/3d/d5033539344ee3cbd9a4d69e12e63ca3a44a739eb2d4c8da350a3d38edd7/pillow-12.0.0-cp311-cp311-win32.whl", hash = "sha256:27f95b12453d165099c84f8a8bfdfd46b9e4bda9e0e4b65f0635430027f55739", size = 6298440, upload-time = "2025-10-15T18:22:00.982Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/42/aaca386de5cc8bd8a0254516957c1f265e3521c91515b16e286c662854c4/pillow-12.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b583dc9070312190192631373c6c8ed277254aa6e6084b74bdd0a6d3b221608e", size = 6999256, upload-time = "2025-10-15T18:22:02.617Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/f1/9197c9c2d5708b785f631a6dfbfa8eb3fb9672837cb92ae9af812c13b4ed/pillow-12.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:759de84a33be3b178a64c8ba28ad5c135900359e85fb662bc6e403ad4407791d", size = 2436025, upload-time = "2025-10-15T18:22:04.598Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377, upload-time = "2025-10-15T18:22:05.993Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343, upload-time = "2025-10-15T18:22:07.718Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981, upload-time = "2025-10-15T18:22:09.287Z" },
+ { url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399, upload-time = "2025-10-15T18:22:10.872Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740, upload-time = "2025-10-15T18:22:12.769Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201, upload-time = "2025-10-15T18:22:14.813Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/4d/435c8ac688c54d11755aedfdd9f29c9eeddf68d150fe42d1d3dbd2365149/pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79", size = 6462334, upload-time = "2025-10-15T18:22:16.375Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/f2/ad34167a8059a59b8ad10bc5c72d4d9b35acc6b7c0877af8ac885b5f2044/pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba", size = 7134162, upload-time = "2025-10-15T18:22:17.996Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/b1/a7391df6adacf0a5c2cf6ac1cf1fcc1369e7d439d28f637a847f8803beb3/pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0", size = 6298769, upload-time = "2025-10-15T18:22:19.923Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/0b/d87733741526541c909bbf159e338dcace4f982daac6e5a8d6be225ca32d/pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a", size = 7001107, upload-time = "2025-10-15T18:22:21.644Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/96/aaa61ce33cc98421fb6088af2a03be4157b1e7e0e87087c888e2370a7f45/pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad", size = 2436012, upload-time = "2025-10-15T18:22:23.621Z" },
+ { url = "https://files.pythonhosted.org/packages/62/f2/de993bb2d21b33a98d031ecf6a978e4b61da207bef02f7b43093774c480d/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643", size = 4045493, upload-time = "2025-10-15T18:22:25.758Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/b6/bc8d0c4c9f6f111a783d045310945deb769b806d7574764234ffd50bc5ea/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4", size = 4120461, upload-time = "2025-10-15T18:22:27.286Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/57/d60d343709366a353dc56adb4ee1e7d8a2cc34e3fbc22905f4167cfec119/pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399", size = 3576912, upload-time = "2025-10-15T18:22:28.751Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/a4/a0a31467e3f83b94d37568294b01d22b43ae3c5d85f2811769b9c66389dd/pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5", size = 5249132, upload-time = "2025-10-15T18:22:30.641Z" },
+ { url = "https://files.pythonhosted.org/packages/83/06/48eab21dd561de2914242711434c0c0eb992ed08ff3f6107a5f44527f5e9/pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b", size = 4650099, upload-time = "2025-10-15T18:22:32.73Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/bd/69ed99fd46a8dba7c1887156d3572fe4484e3f031405fcc5a92e31c04035/pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3", size = 6230808, upload-time = "2025-10-15T18:22:34.337Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/94/8fad659bcdbf86ed70099cb60ae40be6acca434bbc8c4c0d4ef356d7e0de/pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07", size = 8037804, upload-time = "2025-10-15T18:22:36.402Z" },
+ { url = "https://files.pythonhosted.org/packages/20/39/c685d05c06deecfd4e2d1950e9a908aa2ca8bc4e6c3b12d93b9cafbd7837/pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e", size = 6345553, upload-time = "2025-10-15T18:22:38.066Z" },
+ { url = "https://files.pythonhosted.org/packages/38/57/755dbd06530a27a5ed74f8cb0a7a44a21722ebf318edbe67ddbd7fb28f88/pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344", size = 7037729, upload-time = "2025-10-15T18:22:39.769Z" },
+ { url = "https://files.pythonhosted.org/packages/ca/b6/7e94f4c41d238615674d06ed677c14883103dce1c52e4af16f000338cfd7/pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27", size = 6459789, upload-time = "2025-10-15T18:22:41.437Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/14/4448bb0b5e0f22dd865290536d20ec8a23b64e2d04280b89139f09a36bb6/pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79", size = 7130917, upload-time = "2025-10-15T18:22:43.152Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/ca/16c6926cc1c015845745d5c16c9358e24282f1e588237a4c36d2b30f182f/pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098", size = 6302391, upload-time = "2025-10-15T18:22:44.753Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/2a/dd43dcfd6dae9b6a49ee28a8eedb98c7d5ff2de94a5d834565164667b97b/pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905", size = 7007477, upload-time = "2025-10-15T18:22:46.838Z" },
+ { url = "https://files.pythonhosted.org/packages/77/f0/72ea067f4b5ae5ead653053212af05ce3705807906ba3f3e8f58ddf617e6/pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a", size = 2435918, upload-time = "2025-10-15T18:22:48.399Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/5e/9046b423735c21f0487ea6cb5b10f89ea8f8dfbe32576fe052b5ba9d4e5b/pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3", size = 5251406, upload-time = "2025-10-15T18:22:49.905Z" },
+ { url = "https://files.pythonhosted.org/packages/12/66/982ceebcdb13c97270ef7a56c3969635b4ee7cd45227fa707c94719229c5/pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced", size = 4653218, upload-time = "2025-10-15T18:22:51.587Z" },
+ { url = "https://files.pythonhosted.org/packages/16/b3/81e625524688c31859450119bf12674619429cab3119eec0e30a7a1029cb/pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b", size = 6266564, upload-time = "2025-10-15T18:22:53.215Z" },
+ { url = "https://files.pythonhosted.org/packages/98/59/dfb38f2a41240d2408096e1a76c671d0a105a4a8471b1871c6902719450c/pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d", size = 8069260, upload-time = "2025-10-15T18:22:54.933Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/3d/378dbea5cd1874b94c312425ca77b0f47776c78e0df2df751b820c8c1d6c/pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a", size = 6379248, upload-time = "2025-10-15T18:22:56.605Z" },
+ { url = "https://files.pythonhosted.org/packages/84/b0/d525ef47d71590f1621510327acec75ae58c721dc071b17d8d652ca494d8/pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe", size = 7066043, upload-time = "2025-10-15T18:22:58.53Z" },
+ { url = "https://files.pythonhosted.org/packages/61/2c/aced60e9cf9d0cde341d54bf7932c9ffc33ddb4a1595798b3a5150c7ec4e/pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee", size = 6490915, upload-time = "2025-10-15T18:23:00.582Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/26/69dcb9b91f4e59f8f34b2332a4a0a951b44f547c4ed39d3e4dcfcff48f89/pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef", size = 7157998, upload-time = "2025-10-15T18:23:02.627Z" },
+ { url = "https://files.pythonhosted.org/packages/61/2b/726235842220ca95fa441ddf55dd2382b52ab5b8d9c0596fe6b3f23dafe8/pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9", size = 6306201, upload-time = "2025-10-15T18:23:04.709Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/3d/2afaf4e840b2df71344ababf2f8edd75a705ce500e5dc1e7227808312ae1/pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b", size = 7013165, upload-time = "2025-10-15T18:23:06.46Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/75/3fa09aa5cf6ed04bee3fa575798ddf1ce0bace8edb47249c798077a81f7f/pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47", size = 2437834, upload-time = "2025-10-15T18:23:08.194Z" },
+ { url = "https://files.pythonhosted.org/packages/54/2a/9a8c6ba2c2c07b71bec92cf63e03370ca5e5f5c5b119b742bcc0cde3f9c5/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:beeae3f27f62308f1ddbcfb0690bf44b10732f2ef43758f169d5e9303165d3f9", size = 4045531, upload-time = "2025-10-15T18:23:10.121Z" },
+ { url = "https://files.pythonhosted.org/packages/84/54/836fdbf1bfb3d66a59f0189ff0b9f5f666cee09c6188309300df04ad71fa/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d4827615da15cd59784ce39d3388275ec093ae3ee8d7f0c089b76fa87af756c2", size = 4120554, upload-time = "2025-10-15T18:23:12.14Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/cd/16aec9f0da4793e98e6b54778a5fbce4f375c6646fe662e80600b8797379/pillow-12.0.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:3e42edad50b6909089750e65c91aa09aaf1e0a71310d383f11321b27c224ed8a", size = 3576812, upload-time = "2025-10-15T18:23:13.962Z" },
+ { url = "https://files.pythonhosted.org/packages/f6/b7/13957fda356dc46339298b351cae0d327704986337c3c69bb54628c88155/pillow-12.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e5d8efac84c9afcb40914ab49ba063d94f5dbdf5066db4482c66a992f47a3a3b", size = 5252689, upload-time = "2025-10-15T18:23:15.562Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/f5/eae31a306341d8f331f43edb2e9122c7661b975433de5e447939ae61c5da/pillow-12.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:266cd5f2b63ff316d5a1bba46268e603c9caf5606d44f38c2873c380950576ad", size = 4650186, upload-time = "2025-10-15T18:23:17.379Z" },
+ { url = "https://files.pythonhosted.org/packages/86/62/2a88339aa40c4c77e79108facbd307d6091e2c0eb5b8d3cf4977cfca2fe6/pillow-12.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58eea5ebe51504057dd95c5b77d21700b77615ab0243d8152793dc00eb4faf01", size = 6230308, upload-time = "2025-10-15T18:23:18.971Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/33/5425a8992bcb32d1cb9fa3dd39a89e613d09a22f2c8083b7bf43c455f760/pillow-12.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13711b1a5ba512d647a0e4ba79280d3a9a045aaf7e0cc6fbe96b91d4cdf6b0c", size = 8039222, upload-time = "2025-10-15T18:23:20.909Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/61/3f5d3b35c5728f37953d3eec5b5f3e77111949523bd2dd7f31a851e50690/pillow-12.0.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6846bd2d116ff42cba6b646edf5bf61d37e5cbd256425fa089fee4ff5c07a99e", size = 6346657, upload-time = "2025-10-15T18:23:23.077Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/be/ee90a3d79271227e0f0a33c453531efd6ed14b2e708596ba5dd9be948da3/pillow-12.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c98fa880d695de164b4135a52fd2e9cd7b7c90a9d8ac5e9e443a24a95ef9248e", size = 7038482, upload-time = "2025-10-15T18:23:25.005Z" },
+ { url = "https://files.pythonhosted.org/packages/44/34/a16b6a4d1ad727de390e9bd9f19f5f669e079e5826ec0f329010ddea492f/pillow-12.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa3ed2a29a9e9d2d488b4da81dcb54720ac3104a20bf0bd273f1e4648aff5af9", size = 6461416, upload-time = "2025-10-15T18:23:27.009Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/39/1aa5850d2ade7d7ba9f54e4e4c17077244ff7a2d9e25998c38a29749eb3f/pillow-12.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d034140032870024e6b9892c692fe2968493790dd57208b2c37e3fb35f6df3ab", size = 7131584, upload-time = "2025-10-15T18:23:29.752Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/db/4fae862f8fad0167073a7733973bfa955f47e2cac3dc3e3e6257d10fab4a/pillow-12.0.0-cp314-cp314-win32.whl", hash = "sha256:1b1b133e6e16105f524a8dec491e0586d072948ce15c9b914e41cdadd209052b", size = 6400621, upload-time = "2025-10-15T18:23:32.06Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/24/b350c31543fb0107ab2599464d7e28e6f856027aadda995022e695313d94/pillow-12.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8dc232e39d409036af549c86f24aed8273a40ffa459981146829a324e0848b4b", size = 7142916, upload-time = "2025-10-15T18:23:34.71Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/9b/0ba5a6fd9351793996ef7487c4fdbde8d3f5f75dbedc093bb598648fddf0/pillow-12.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:d52610d51e265a51518692045e372a4c363056130d922a7351429ac9f27e70b0", size = 2523836, upload-time = "2025-10-15T18:23:36.967Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/7a/ceee0840aebc579af529b523d530840338ecf63992395842e54edc805987/pillow-12.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1979f4566bb96c1e50a62d9831e2ea2d1211761e5662afc545fa766f996632f6", size = 5255092, upload-time = "2025-10-15T18:23:38.573Z" },
+ { url = "https://files.pythonhosted.org/packages/44/76/20776057b4bfd1aef4eeca992ebde0f53a4dce874f3ae693d0ec90a4f79b/pillow-12.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b2e4b27a6e15b04832fe9bf292b94b5ca156016bbc1ea9c2c20098a0320d6cf6", size = 4653158, upload-time = "2025-10-15T18:23:40.238Z" },
+ { url = "https://files.pythonhosted.org/packages/82/3f/d9ff92ace07be8836b4e7e87e6a4c7a8318d47c2f1463ffcf121fc57d9cb/pillow-12.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb3096c30df99fd01c7bf8e544f392103d0795b9f98ba71a8054bcbf56b255f1", size = 6267882, upload-time = "2025-10-15T18:23:42.434Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/7a/4f7ff87f00d3ad33ba21af78bfcd2f032107710baf8280e3722ceec28cda/pillow-12.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7438839e9e053ef79f7112c881cef684013855016f928b168b81ed5835f3e75e", size = 8071001, upload-time = "2025-10-15T18:23:44.29Z" },
+ { url = "https://files.pythonhosted.org/packages/75/87/fcea108944a52dad8cca0715ae6247e271eb80459364a98518f1e4f480c1/pillow-12.0.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d5c411a8eaa2299322b647cd932586b1427367fd3184ffbb8f7a219ea2041ca", size = 6380146, upload-time = "2025-10-15T18:23:46.065Z" },
+ { url = "https://files.pythonhosted.org/packages/91/52/0d31b5e571ef5fd111d2978b84603fce26aba1b6092f28e941cb46570745/pillow-12.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7e091d464ac59d2c7ad8e7e08105eaf9dafbc3883fd7265ffccc2baad6ac925", size = 7067344, upload-time = "2025-10-15T18:23:47.898Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/f4/2dd3d721f875f928d48e83bb30a434dee75a2531bca839bb996bb0aa5a91/pillow-12.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:792a2c0be4dcc18af9d4a2dfd8a11a17d5e25274a1062b0ec1c2d79c76f3e7f8", size = 6491864, upload-time = "2025-10-15T18:23:49.607Z" },
+ { url = "https://files.pythonhosted.org/packages/30/4b/667dfcf3d61fc309ba5a15b141845cece5915e39b99c1ceab0f34bf1d124/pillow-12.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:afbefa430092f71a9593a99ab6a4e7538bc9eabbf7bf94f91510d3503943edc4", size = 7158911, upload-time = "2025-10-15T18:23:51.351Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/2f/16cabcc6426c32218ace36bf0d55955e813f2958afddbf1d391849fee9d1/pillow-12.0.0-cp314-cp314t-win32.whl", hash = "sha256:3830c769decf88f1289680a59d4f4c46c72573446352e2befec9a8512104fa52", size = 6408045, upload-time = "2025-10-15T18:23:53.177Z" },
+ { url = "https://files.pythonhosted.org/packages/35/73/e29aa0c9c666cf787628d3f0dcf379f4791fba79f4936d02f8b37165bdf8/pillow-12.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:905b0365b210c73afb0ebe9101a32572152dfd1c144c7e28968a331b9217b94a", size = 7148282, upload-time = "2025-10-15T18:23:55.316Z" },
+ { url = "https://files.pythonhosted.org/packages/c1/70/6b41bdcddf541b437bbb9f47f94d2db5d9ddef6c37ccab8c9107743748a4/pillow-12.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:99353a06902c2e43b43e8ff74ee65a7d90307d82370604746738a1e0661ccca7", size = 2525630, upload-time = "2025-10-15T18:23:57.149Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/b3/582327e6c9f86d037b63beebe981425d6811104cb443e8193824ef1a2f27/pillow-12.0.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b22bd8c974942477156be55a768f7aa37c46904c175be4e158b6a86e3a6b7ca8", size = 5215068, upload-time = "2025-10-15T18:23:59.594Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/d6/67748211d119f3b6540baf90f92fae73ae51d5217b171b0e8b5f7e5d558f/pillow-12.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:805ebf596939e48dbb2e4922a1d3852cfc25c38160751ce02da93058b48d252a", size = 4614994, upload-time = "2025-10-15T18:24:01.669Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/e1/f8281e5d844c41872b273b9f2c34a4bf64ca08905668c8ae730eedc7c9fa/pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cae81479f77420d217def5f54b5b9d279804d17e982e0f2fa19b1d1e14ab5197", size = 5246639, upload-time = "2025-10-15T18:24:03.403Z" },
+ { url = "https://files.pythonhosted.org/packages/94/5a/0d8ab8ffe8a102ff5df60d0de5af309015163bf710c7bb3e8311dd3b3ad0/pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aeaefa96c768fc66818730b952a862235d68825c178f1b3ffd4efd7ad2edcb7c", size = 6986839, upload-time = "2025-10-15T18:24:05.344Z" },
+ { url = "https://files.pythonhosted.org/packages/20/2e/3434380e8110b76cd9eb00a363c484b050f949b4bbe84ba770bb8508a02c/pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09f2d0abef9e4e2f349305a4f8cc784a8a6c2f58a8c4892eea13b10a943bd26e", size = 5313505, upload-time = "2025-10-15T18:24:07.137Z" },
+ { url = "https://files.pythonhosted.org/packages/57/ca/5a9d38900d9d74785141d6580950fe705de68af735ff6e727cb911b64740/pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bdee52571a343d721fb2eb3b090a82d959ff37fc631e3f70422e0c2e029f3e76", size = 5963654, upload-time = "2025-10-15T18:24:09.579Z" },
+ { url = "https://files.pythonhosted.org/packages/95/7e/f896623c3c635a90537ac093c6a618ebe1a90d87206e42309cb5d98a1b9e/pillow-12.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b290fd8aa38422444d4b50d579de197557f182ef1068b75f5aa8558638b8d0a5", size = 6997850, upload-time = "2025-10-15T18:24:11.495Z" },
+]
+
+[[package]]
+name = "pycparser"
+version = "2.23"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" },
+]
+
+[[package]]
+name = "pydyf"
+version = "0.11.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2e/c2/97fc6ce4ce0045080dc99446def812081b57750ed8aa67bfdfafa4561fe5/pydyf-0.11.0.tar.gz", hash = "sha256:394dddf619cca9d0c55715e3c55ea121a9bf9cbc780cdc1201a2427917b86b64", size = 17769, upload-time = "2024-07-12T12:26:51.95Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c9/ac/d5db977deaf28c6ecbc61bbca269eb3e8f0b3a1f55c8549e5333e606e005/pydyf-0.11.0-py3-none-any.whl", hash = "sha256:0aaf9e2ebbe786ec7a78ec3fbffa4cdcecde53fd6f563221d53c6bc1328848a3", size = 8104, upload-time = "2024-07-12T12:26:49.896Z" },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+]
+
+[[package]]
+name = "pyphen"
+version = "0.17.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/69/56/e4d7e1bd70d997713649c5ce530b2d15a5fc2245a74ca820fc2d51d89d4d/pyphen-0.17.2.tar.gz", hash = "sha256:f60647a9c9b30ec6c59910097af82bc5dd2d36576b918e44148d8b07ef3b4aa3", size = 2079470, upload-time = "2025-01-20T13:18:36.296Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7b/1f/c2142d2edf833a90728e5cdeb10bdbdc094dde8dbac078cee0cf33f5e11b/pyphen-0.17.2-py3-none-any.whl", hash = "sha256:3a07fb017cb2341e1d9ff31b8634efb1ae4dc4b130468c7c39dd3d32e7c3affd", size = 2079358, upload-time = "2025-01-20T13:18:29.629Z" },
+]
+
+[[package]]
+name = "referencing"
+version = "0.37.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "attrs" },
+ { name = "rpds-py" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" },
+]
+
+[[package]]
+name = "rich"
+version = "14.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown-it-py" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" },
+]
+
+[[package]]
+name = "rich-click"
+version = "1.9.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "rich" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bf/d8/f2c1b7e9a645ba40f756d7a5b195fc104729bc6b19061ba3ab385f342931/rich_click-1.9.4.tar.gz", hash = "sha256:af73dc68e85f3bebb80ce302a642b9fe3b65f3df0ceb42eb9a27c467c1b678c8", size = 73632, upload-time = "2025-10-25T01:08:49.142Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5b/6a/1f03adcb3cc7beb6f63aecc21565e9d515ccee653187fc4619cd0b42713b/rich_click-1.9.4-py3-none-any.whl", hash = "sha256:d70f39938bcecaf5543e8750828cbea94ef51853f7d0e174cda1e10543767389", size = 70245, upload-time = "2025-10-25T01:08:47.939Z" },
+]
+
+[[package]]
+name = "rpds-py"
+version = "0.28.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/48/dc/95f074d43452b3ef5d06276696ece4b3b5d696e7c9ad7173c54b1390cd70/rpds_py-0.28.0.tar.gz", hash = "sha256:abd4df20485a0983e2ca334a216249b6186d6e3c1627e106651943dbdb791aea", size = 27419, upload-time = "2025-10-22T22:24:29.327Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a6/34/058d0db5471c6be7bef82487ad5021ff8d1d1d27794be8730aad938649cf/rpds_py-0.28.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:03065002fd2e287725d95fbc69688e0c6daf6c6314ba38bdbaa3895418e09296", size = 362344, upload-time = "2025-10-22T22:21:39.713Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/67/9503f0ec8c055a0782880f300c50a2b8e5e72eb1f94dfc2053da527444dd/rpds_py-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28ea02215f262b6d078daec0b45344c89e161eab9526b0d898221d96fdda5f27", size = 348440, upload-time = "2025-10-22T22:21:41.056Z" },
+ { url = "https://files.pythonhosted.org/packages/68/2e/94223ee9b32332a41d75b6f94b37b4ce3e93878a556fc5f152cbd856a81f/rpds_py-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25dbade8fbf30bcc551cb352376c0ad64b067e4fc56f90e22ba70c3ce205988c", size = 379068, upload-time = "2025-10-22T22:21:42.593Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/25/54fd48f9f680cfc44e6a7f39a5fadf1d4a4a1fd0848076af4a43e79f998c/rpds_py-0.28.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c03002f54cc855860bfdc3442928ffdca9081e73b5b382ed0b9e8efe6e5e205", size = 390518, upload-time = "2025-10-22T22:21:43.998Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/85/ac258c9c27f2ccb1bd5d0697e53a82ebcf8088e3186d5d2bf8498ee7ed44/rpds_py-0.28.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9699fa7990368b22032baf2b2dce1f634388e4ffc03dfefaaac79f4695edc95", size = 525319, upload-time = "2025-10-22T22:21:45.645Z" },
+ { url = "https://files.pythonhosted.org/packages/40/cb/c6734774789566d46775f193964b76627cd5f42ecf246d257ce84d1912ed/rpds_py-0.28.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9b06fe1a75e05e0713f06ea0c89ecb6452210fd60e2f1b6ddc1067b990e08d9", size = 404896, upload-time = "2025-10-22T22:21:47.544Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/53/14e37ce83202c632c89b0691185dca9532288ff9d390eacae3d2ff771bae/rpds_py-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9f83e7b326a3f9ec3ef84cda98fb0a74c7159f33e692032233046e7fd15da2", size = 382862, upload-time = "2025-10-22T22:21:49.176Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/83/f3642483ca971a54d60caa4449f9d6d4dbb56a53e0072d0deff51b38af74/rpds_py-0.28.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:0d3259ea9ad8743a75a43eb7819324cdab393263c91be86e2d1901ee65c314e0", size = 398848, upload-time = "2025-10-22T22:21:51.024Z" },
+ { url = "https://files.pythonhosted.org/packages/44/09/2d9c8b2f88e399b4cfe86efdf2935feaf0394e4f14ab30c6c5945d60af7d/rpds_py-0.28.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a7548b345f66f6695943b4ef6afe33ccd3f1b638bd9afd0f730dd255c249c9e", size = 412030, upload-time = "2025-10-22T22:21:52.665Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/f5/e1cec473d4bde6df1fd3738be8e82d64dd0600868e76e92dfeaebbc2d18f/rpds_py-0.28.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9a40040aa388b037eb39416710fbcce9443498d2eaab0b9b45ae988b53f5c67", size = 559700, upload-time = "2025-10-22T22:21:54.123Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/be/73bb241c1649edbf14e98e9e78899c2c5e52bbe47cb64811f44d2cc11808/rpds_py-0.28.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f60c7ea34e78c199acd0d3cda37a99be2c861dd2b8cf67399784f70c9f8e57d", size = 584581, upload-time = "2025-10-22T22:21:56.102Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/9c/ffc6e9218cd1eb5c2c7dbd276c87cd10e8c2232c456b554169eb363381df/rpds_py-0.28.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1571ae4292649100d743b26d5f9c63503bb1fedf538a8f29a98dce2d5ba6b4e6", size = 549981, upload-time = "2025-10-22T22:21:58.253Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/50/da8b6d33803a94df0149345ee33e5d91ed4d25fc6517de6a25587eae4133/rpds_py-0.28.0-cp311-cp311-win32.whl", hash = "sha256:5cfa9af45e7c1140af7321fa0bef25b386ee9faa8928c80dc3a5360971a29e8c", size = 214729, upload-time = "2025-10-22T22:21:59.625Z" },
+ { url = "https://files.pythonhosted.org/packages/12/fd/b0f48c4c320ee24c8c20df8b44acffb7353991ddf688af01eef5f93d7018/rpds_py-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd8d86b5d29d1b74100982424ba53e56033dc47720a6de9ba0259cf81d7cecaa", size = 223977, upload-time = "2025-10-22T22:22:01.092Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/21/c8e77a2ac66e2ec4e21f18a04b4e9a0417ecf8e61b5eaeaa9360a91713b4/rpds_py-0.28.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e27d3a5709cc2b3e013bf93679a849213c79ae0573f9b894b284b55e729e120", size = 217326, upload-time = "2025-10-22T22:22:02.944Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/5c/6c3936495003875fe7b14f90ea812841a08fca50ab26bd840e924097d9c8/rpds_py-0.28.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6b4f28583a4f247ff60cd7bdda83db8c3f5b05a7a82ff20dd4b078571747708f", size = 366439, upload-time = "2025-10-22T22:22:04.525Z" },
+ { url = "https://files.pythonhosted.org/packages/56/f9/a0f1ca194c50aa29895b442771f036a25b6c41a35e4f35b1a0ea713bedae/rpds_py-0.28.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d678e91b610c29c4b3d52a2c148b641df2b4676ffe47c59f6388d58b99cdc424", size = 348170, upload-time = "2025-10-22T22:22:06.397Z" },
+ { url = "https://files.pythonhosted.org/packages/18/ea/42d243d3a586beb72c77fa5def0487daf827210069a95f36328e869599ea/rpds_py-0.28.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e819e0e37a44a78e1383bf1970076e2ccc4dc8c2bbaa2f9bd1dc987e9afff628", size = 378838, upload-time = "2025-10-22T22:22:07.932Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/78/3de32e18a94791af8f33601402d9d4f39613136398658412a4e0b3047327/rpds_py-0.28.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5ee514e0f0523db5d3fb171f397c54875dbbd69760a414dccf9d4d7ad628b5bd", size = 393299, upload-time = "2025-10-22T22:22:09.435Z" },
+ { url = "https://files.pythonhosted.org/packages/13/7e/4bdb435afb18acea2eb8a25ad56b956f28de7c59f8a1d32827effa0d4514/rpds_py-0.28.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3fa06d27fdcee47f07a39e02862da0100cb4982508f5ead53ec533cd5fe55e", size = 518000, upload-time = "2025-10-22T22:22:11.326Z" },
+ { url = "https://files.pythonhosted.org/packages/31/d0/5f52a656875cdc60498ab035a7a0ac8f399890cc1ee73ebd567bac4e39ae/rpds_py-0.28.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46959ef2e64f9e4a41fc89aa20dbca2b85531f9a72c21099a3360f35d10b0d5a", size = 408746, upload-time = "2025-10-22T22:22:13.143Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/cd/49ce51767b879cde77e7ad9fae164ea15dce3616fe591d9ea1df51152706/rpds_py-0.28.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8455933b4bcd6e83fde3fefc987a023389c4b13f9a58c8d23e4b3f6d13f78c84", size = 386379, upload-time = "2025-10-22T22:22:14.602Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/99/e4e1e1ee93a98f72fc450e36c0e4d99c35370220e815288e3ecd2ec36a2a/rpds_py-0.28.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:ad50614a02c8c2962feebe6012b52f9802deec4263946cddea37aaf28dd25a66", size = 401280, upload-time = "2025-10-22T22:22:16.063Z" },
+ { url = "https://files.pythonhosted.org/packages/61/35/e0c6a57488392a8b319d2200d03dad2b29c0db9996f5662c3b02d0b86c02/rpds_py-0.28.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5deca01b271492553fdb6c7fd974659dce736a15bae5dad7ab8b93555bceb28", size = 412365, upload-time = "2025-10-22T22:22:17.504Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/6a/841337980ea253ec797eb084665436007a1aad0faac1ba097fb906c5f69c/rpds_py-0.28.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:735f8495a13159ce6a0d533f01e8674cec0c57038c920495f87dcb20b3ddb48a", size = 559573, upload-time = "2025-10-22T22:22:19.108Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/5e/64826ec58afd4c489731f8b00729c5f6afdb86f1df1df60bfede55d650bb/rpds_py-0.28.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:961ca621ff10d198bbe6ba4957decca61aa2a0c56695384c1d6b79bf61436df5", size = 583973, upload-time = "2025-10-22T22:22:20.768Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/ee/44d024b4843f8386a4eeaa4c171b3d31d55f7177c415545fd1a24c249b5d/rpds_py-0.28.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2374e16cc9131022e7d9a8f8d65d261d9ba55048c78f3b6e017971a4f5e6353c", size = 553800, upload-time = "2025-10-22T22:22:22.25Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/89/33e675dccff11a06d4d85dbb4d1865f878d5020cbb69b2c1e7b2d3f82562/rpds_py-0.28.0-cp312-cp312-win32.whl", hash = "sha256:d15431e334fba488b081d47f30f091e5d03c18527c325386091f31718952fe08", size = 216954, upload-time = "2025-10-22T22:22:24.105Z" },
+ { url = "https://files.pythonhosted.org/packages/af/36/45f6ebb3210887e8ee6dbf1bc710ae8400bb417ce165aaf3024b8360d999/rpds_py-0.28.0-cp312-cp312-win_amd64.whl", hash = "sha256:a410542d61fc54710f750d3764380b53bf09e8c4edbf2f9141a82aa774a04f7c", size = 227844, upload-time = "2025-10-22T22:22:25.551Z" },
+ { url = "https://files.pythonhosted.org/packages/57/91/f3fb250d7e73de71080f9a221d19bd6a1c1eb0d12a1ea26513f6c1052ad6/rpds_py-0.28.0-cp312-cp312-win_arm64.whl", hash = "sha256:1f0cfd1c69e2d14f8c892b893997fa9a60d890a0c8a603e88dca4955f26d1edd", size = 217624, upload-time = "2025-10-22T22:22:26.914Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/03/ce566d92611dfac0085c2f4b048cd53ed7c274a5c05974b882a908d540a2/rpds_py-0.28.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e9e184408a0297086f880556b6168fa927d677716f83d3472ea333b42171ee3b", size = 366235, upload-time = "2025-10-22T22:22:28.397Z" },
+ { url = "https://files.pythonhosted.org/packages/00/34/1c61da1b25592b86fd285bd7bd8422f4c9d748a7373b46126f9ae792a004/rpds_py-0.28.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:edd267266a9b0448f33dc465a97cfc5d467594b600fe28e7fa2f36450e03053a", size = 348241, upload-time = "2025-10-22T22:22:30.171Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/00/ed1e28616848c61c493a067779633ebf4b569eccaacf9ccbdc0e7cba2b9d/rpds_py-0.28.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85beb8b3f45e4e32f6802fb6cd6b17f615ef6c6a52f265371fb916fae02814aa", size = 378079, upload-time = "2025-10-22T22:22:31.644Z" },
+ { url = "https://files.pythonhosted.org/packages/11/b2/ccb30333a16a470091b6e50289adb4d3ec656fd9951ba8c5e3aaa0746a67/rpds_py-0.28.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d2412be8d00a1b895f8ad827cc2116455196e20ed994bb704bf138fe91a42724", size = 393151, upload-time = "2025-10-22T22:22:33.453Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/d0/73e2217c3ee486d555cb84920597480627d8c0240ff3062005c6cc47773e/rpds_py-0.28.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf128350d384b777da0e68796afdcebc2e9f63f0e9f242217754e647f6d32491", size = 517520, upload-time = "2025-10-22T22:22:34.949Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/91/23efe81c700427d0841a4ae7ea23e305654381831e6029499fe80be8a071/rpds_py-0.28.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2036d09b363aa36695d1cc1a97b36865597f4478470b0697b5ee9403f4fe399", size = 408699, upload-time = "2025-10-22T22:22:36.584Z" },
+ { url = "https://files.pythonhosted.org/packages/ca/ee/a324d3198da151820a326c1f988caaa4f37fc27955148a76fff7a2d787a9/rpds_py-0.28.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8e1e9be4fa6305a16be628959188e4fd5cd6f1b0e724d63c6d8b2a8adf74ea6", size = 385720, upload-time = "2025-10-22T22:22:38.014Z" },
+ { url = "https://files.pythonhosted.org/packages/19/ad/e68120dc05af8b7cab4a789fccd8cdcf0fe7e6581461038cc5c164cd97d2/rpds_py-0.28.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0a403460c9dd91a7f23fc3188de6d8977f1d9603a351d5db6cf20aaea95b538d", size = 401096, upload-time = "2025-10-22T22:22:39.869Z" },
+ { url = "https://files.pythonhosted.org/packages/99/90/c1e070620042459d60df6356b666bb1f62198a89d68881816a7ed121595a/rpds_py-0.28.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d7366b6553cdc805abcc512b849a519167db8f5e5c3472010cd1228b224265cb", size = 411465, upload-time = "2025-10-22T22:22:41.395Z" },
+ { url = "https://files.pythonhosted.org/packages/68/61/7c195b30d57f1b8d5970f600efee72a4fad79ec829057972e13a0370fd24/rpds_py-0.28.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b43c6a3726efd50f18d8120ec0551241c38785b68952d240c45ea553912ac41", size = 558832, upload-time = "2025-10-22T22:22:42.871Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/3d/06f3a718864773f69941d4deccdf18e5e47dd298b4628062f004c10f3b34/rpds_py-0.28.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0cb7203c7bc69d7c1585ebb33a2e6074492d2fc21ad28a7b9d40457ac2a51ab7", size = 583230, upload-time = "2025-10-22T22:22:44.877Z" },
+ { url = "https://files.pythonhosted.org/packages/66/df/62fc783781a121e77fee9a21ead0a926f1b652280a33f5956a5e7833ed30/rpds_py-0.28.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7a52a5169c664dfb495882adc75c304ae1d50df552fbd68e100fdc719dee4ff9", size = 553268, upload-time = "2025-10-22T22:22:46.441Z" },
+ { url = "https://files.pythonhosted.org/packages/84/85/d34366e335140a4837902d3dea89b51f087bd6a63c993ebdff59e93ee61d/rpds_py-0.28.0-cp313-cp313-win32.whl", hash = "sha256:2e42456917b6687215b3e606ab46aa6bca040c77af7df9a08a6dcfe8a4d10ca5", size = 217100, upload-time = "2025-10-22T22:22:48.342Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/1c/f25a3f3752ad7601476e3eff395fe075e0f7813fbb9862bd67c82440e880/rpds_py-0.28.0-cp313-cp313-win_amd64.whl", hash = "sha256:e0a0311caedc8069d68fc2bf4c9019b58a2d5ce3cd7cb656c845f1615b577e1e", size = 227759, upload-time = "2025-10-22T22:22:50.219Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/d6/5f39b42b99615b5bc2f36ab90423ea404830bdfee1c706820943e9a645eb/rpds_py-0.28.0-cp313-cp313-win_arm64.whl", hash = "sha256:04c1b207ab8b581108801528d59ad80aa83bb170b35b0ddffb29c20e411acdc1", size = 217326, upload-time = "2025-10-22T22:22:51.647Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/8b/0c69b72d1cee20a63db534be0df271effe715ef6c744fdf1ff23bb2b0b1c/rpds_py-0.28.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f296ea3054e11fc58ad42e850e8b75c62d9a93a9f981ad04b2e5ae7d2186ff9c", size = 355736, upload-time = "2025-10-22T22:22:53.211Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/6d/0c2ee773cfb55c31a8514d2cece856dd299170a49babd50dcffb15ddc749/rpds_py-0.28.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5a7306c19b19005ad98468fcefeb7100b19c79fc23a5f24a12e06d91181193fa", size = 342677, upload-time = "2025-10-22T22:22:54.723Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/1c/22513ab25a27ea205144414724743e305e8153e6abe81833b5e678650f5a/rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5d9b86aa501fed9862a443c5c3116f6ead8bc9296185f369277c42542bd646b", size = 371847, upload-time = "2025-10-22T22:22:56.295Z" },
+ { url = "https://files.pythonhosted.org/packages/60/07/68e6ccdb4b05115ffe61d31afc94adef1833d3a72f76c9632d4d90d67954/rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e5bbc701eff140ba0e872691d573b3d5d30059ea26e5785acba9132d10c8c31d", size = 381800, upload-time = "2025-10-22T22:22:57.808Z" },
+ { url = "https://files.pythonhosted.org/packages/73/bf/6d6d15df80781d7f9f368e7c1a00caf764436518c4877fb28b029c4624af/rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5690671cd672a45aa8616d7374fdf334a1b9c04a0cac3c854b1136e92374fe", size = 518827, upload-time = "2025-10-22T22:22:59.826Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/d3/2decbb2976cc452cbf12a2b0aaac5f1b9dc5dd9d1f7e2509a3ee00421249/rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f1d92ecea4fa12f978a367c32a5375a1982834649cdb96539dcdc12e609ab1a", size = 399471, upload-time = "2025-10-22T22:23:01.968Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/2c/f30892f9e54bd02e5faca3f6a26d6933c51055e67d54818af90abed9748e/rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d252db6b1a78d0a3928b6190156042d54c93660ce4d98290d7b16b5296fb7cc", size = 377578, upload-time = "2025-10-22T22:23:03.52Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/5d/3bce97e5534157318f29ac06bf2d279dae2674ec12f7cb9c12739cee64d8/rpds_py-0.28.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d61b355c3275acb825f8777d6c4505f42b5007e357af500939d4a35b19177259", size = 390482, upload-time = "2025-10-22T22:23:05.391Z" },
+ { url = "https://files.pythonhosted.org/packages/e3/f0/886bd515ed457b5bd93b166175edb80a0b21a210c10e993392127f1e3931/rpds_py-0.28.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:acbe5e8b1026c0c580d0321c8aae4b0a1e1676861d48d6e8c6586625055b606a", size = 402447, upload-time = "2025-10-22T22:23:06.93Z" },
+ { url = "https://files.pythonhosted.org/packages/42/b5/71e8777ac55e6af1f4f1c05b47542a1eaa6c33c1cf0d300dca6a1c6e159a/rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8aa23b6f0fc59b85b4c7d89ba2965af274346f738e8d9fc2455763602e62fd5f", size = 552385, upload-time = "2025-10-22T22:23:08.557Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/cb/6ca2d70cbda5a8e36605e7788c4aa3bea7c17d71d213465a5a675079b98d/rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7b14b0c680286958817c22d76fcbca4800ddacef6f678f3a7c79a1fe7067fe37", size = 575642, upload-time = "2025-10-22T22:23:10.348Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/d4/407ad9960ca7856d7b25c96dcbe019270b5ffdd83a561787bc682c797086/rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bcf1d210dfee61a6c86551d67ee1031899c0fdbae88b2d44a569995d43797712", size = 544507, upload-time = "2025-10-22T22:23:12.434Z" },
+ { url = "https://files.pythonhosted.org/packages/51/31/2f46fe0efcac23fbf5797c6b6b7e1c76f7d60773e525cb65fcbc582ee0f2/rpds_py-0.28.0-cp313-cp313t-win32.whl", hash = "sha256:3aa4dc0fdab4a7029ac63959a3ccf4ed605fee048ba67ce89ca3168da34a1342", size = 205376, upload-time = "2025-10-22T22:23:13.979Z" },
+ { url = "https://files.pythonhosted.org/packages/92/e4/15947bda33cbedfc134490a41841ab8870a72a867a03d4969d886f6594a2/rpds_py-0.28.0-cp313-cp313t-win_amd64.whl", hash = "sha256:7b7d9d83c942855e4fdcfa75d4f96f6b9e272d42fffcb72cd4bb2577db2e2907", size = 215907, upload-time = "2025-10-22T22:23:15.5Z" },
+ { url = "https://files.pythonhosted.org/packages/08/47/ffe8cd7a6a02833b10623bf765fbb57ce977e9a4318ca0e8cf97e9c3d2b3/rpds_py-0.28.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:dcdcb890b3ada98a03f9f2bb108489cdc7580176cb73b4f2d789e9a1dac1d472", size = 353830, upload-time = "2025-10-22T22:23:17.03Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/9f/890f36cbd83a58491d0d91ae0db1702639edb33fb48eeb356f80ecc6b000/rpds_py-0.28.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f274f56a926ba2dc02976ca5b11c32855cbd5925534e57cfe1fda64e04d1add2", size = 341819, upload-time = "2025-10-22T22:23:18.57Z" },
+ { url = "https://files.pythonhosted.org/packages/09/e3/921eb109f682aa24fb76207698fbbcf9418738f35a40c21652c29053f23d/rpds_py-0.28.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fe0438ac4a29a520ea94c8c7f1754cdd8feb1bc490dfda1bfd990072363d527", size = 373127, upload-time = "2025-10-22T22:23:20.216Z" },
+ { url = "https://files.pythonhosted.org/packages/23/13/bce4384d9f8f4989f1a9599c71b7a2d877462e5fd7175e1f69b398f729f4/rpds_py-0.28.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a358a32dd3ae50e933347889b6af9a1bdf207ba5d1a3f34e1a38cd3540e6733", size = 382767, upload-time = "2025-10-22T22:23:21.787Z" },
+ { url = "https://files.pythonhosted.org/packages/23/e1/579512b2d89a77c64ccef5a0bc46a6ef7f72ae0cf03d4b26dcd52e57ee0a/rpds_py-0.28.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e80848a71c78aa328fefaba9c244d588a342c8e03bda518447b624ea64d1ff56", size = 517585, upload-time = "2025-10-22T22:23:23.699Z" },
+ { url = "https://files.pythonhosted.org/packages/62/3c/ca704b8d324a2591b0b0adcfcaadf9c862375b11f2f667ac03c61b4fd0a6/rpds_py-0.28.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f586db2e209d54fe177e58e0bc4946bea5fb0102f150b1b2f13de03e1f0976f8", size = 399828, upload-time = "2025-10-22T22:23:25.713Z" },
+ { url = "https://files.pythonhosted.org/packages/da/37/e84283b9e897e3adc46b4c88bb3f6ec92a43bd4d2f7ef5b13459963b2e9c/rpds_py-0.28.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ae8ee156d6b586e4292491e885d41483136ab994e719a13458055bec14cf370", size = 375509, upload-time = "2025-10-22T22:23:27.32Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/c2/a980beab869d86258bf76ec42dec778ba98151f253a952b02fe36d72b29c/rpds_py-0.28.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:a805e9b3973f7e27f7cab63a6b4f61d90f2e5557cff73b6e97cd5b8540276d3d", size = 392014, upload-time = "2025-10-22T22:23:29.332Z" },
+ { url = "https://files.pythonhosted.org/packages/da/b5/b1d3c5f9d3fa5aeef74265f9c64de3c34a0d6d5cd3c81c8b17d5c8f10ed4/rpds_py-0.28.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5d3fd16b6dc89c73a4da0b4ac8b12a7ecc75b2864b95c9e5afed8003cb50a728", size = 402410, upload-time = "2025-10-22T22:23:31.14Z" },
+ { url = "https://files.pythonhosted.org/packages/74/ae/cab05ff08dfcc052afc73dcb38cbc765ffc86f94e966f3924cd17492293c/rpds_py-0.28.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6796079e5d24fdaba6d49bda28e2c47347e89834678f2bc2c1b4fc1489c0fb01", size = 553593, upload-time = "2025-10-22T22:23:32.834Z" },
+ { url = "https://files.pythonhosted.org/packages/70/80/50d5706ea2a9bfc9e9c5f401d91879e7c790c619969369800cde202da214/rpds_py-0.28.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:76500820c2af232435cbe215e3324c75b950a027134e044423f59f5b9a1ba515", size = 576925, upload-time = "2025-10-22T22:23:34.47Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/12/85a57d7a5855a3b188d024b099fd09c90db55d32a03626d0ed16352413ff/rpds_py-0.28.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bbdc5640900a7dbf9dd707fe6388972f5bbd883633eb68b76591044cfe346f7e", size = 542444, upload-time = "2025-10-22T22:23:36.093Z" },
+ { url = "https://files.pythonhosted.org/packages/6c/65/10643fb50179509150eb94d558e8837c57ca8b9adc04bd07b98e57b48f8c/rpds_py-0.28.0-cp314-cp314-win32.whl", hash = "sha256:adc8aa88486857d2b35d75f0640b949759f79dc105f50aa2c27816b2e0dd749f", size = 207968, upload-time = "2025-10-22T22:23:37.638Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/84/0c11fe4d9aaea784ff4652499e365963222481ac647bcd0251c88af646eb/rpds_py-0.28.0-cp314-cp314-win_amd64.whl", hash = "sha256:66e6fa8e075b58946e76a78e69e1a124a21d9a48a5b4766d15ba5b06869d1fa1", size = 218876, upload-time = "2025-10-22T22:23:39.179Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/e0/3ab3b86ded7bb18478392dc3e835f7b754cd446f62f3fc96f4fe2aca78f6/rpds_py-0.28.0-cp314-cp314-win_arm64.whl", hash = "sha256:a6fe887c2c5c59413353b7c0caff25d0e566623501ccfff88957fa438a69377d", size = 212506, upload-time = "2025-10-22T22:23:40.755Z" },
+ { url = "https://files.pythonhosted.org/packages/51/ec/d5681bb425226c3501eab50fc30e9d275de20c131869322c8a1729c7b61c/rpds_py-0.28.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7a69df082db13c7070f7b8b1f155fa9e687f1d6aefb7b0e3f7231653b79a067b", size = 355433, upload-time = "2025-10-22T22:23:42.259Z" },
+ { url = "https://files.pythonhosted.org/packages/be/ec/568c5e689e1cfb1ea8b875cffea3649260955f677fdd7ddc6176902d04cd/rpds_py-0.28.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b1cde22f2c30ebb049a9e74c5374994157b9b70a16147d332f89c99c5960737a", size = 342601, upload-time = "2025-10-22T22:23:44.372Z" },
+ { url = "https://files.pythonhosted.org/packages/32/fe/51ada84d1d2a1d9d8f2c902cfddd0133b4a5eb543196ab5161d1c07ed2ad/rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5338742f6ba7a51012ea470bd4dc600a8c713c0c72adaa0977a1b1f4327d6592", size = 372039, upload-time = "2025-10-22T22:23:46.025Z" },
+ { url = "https://files.pythonhosted.org/packages/07/c1/60144a2f2620abade1a78e0d91b298ac2d9b91bc08864493fa00451ef06e/rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1460ebde1bcf6d496d80b191d854adedcc619f84ff17dc1c6d550f58c9efbba", size = 382407, upload-time = "2025-10-22T22:23:48.098Z" },
+ { url = "https://files.pythonhosted.org/packages/45/ed/091a7bbdcf4038a60a461df50bc4c82a7ed6d5d5e27649aab61771c17585/rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e3eb248f2feba84c692579257a043a7699e28a77d86c77b032c1d9fbb3f0219c", size = 518172, upload-time = "2025-10-22T22:23:50.16Z" },
+ { url = "https://files.pythonhosted.org/packages/54/dd/02cc90c2fd9c2ef8016fd7813bfacd1c3a1325633ec8f244c47b449fc868/rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3bbba5def70b16cd1c1d7255666aad3b290fbf8d0fe7f9f91abafb73611a91", size = 399020, upload-time = "2025-10-22T22:23:51.81Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/81/5d98cc0329bbb911ccecd0b9e19fbf7f3a5de8094b4cda5e71013b2dd77e/rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3114f4db69ac5a1f32e7e4d1cbbe7c8f9cf8217f78e6e002cedf2d54c2a548ed", size = 377451, upload-time = "2025-10-22T22:23:53.711Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/07/4d5bcd49e3dfed2d38e2dcb49ab6615f2ceb9f89f5a372c46dbdebb4e028/rpds_py-0.28.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:4b0cb8a906b1a0196b863d460c0222fb8ad0f34041568da5620f9799b83ccf0b", size = 390355, upload-time = "2025-10-22T22:23:55.299Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/79/9f14ba9010fee74e4f40bf578735cfcbb91d2e642ffd1abe429bb0b96364/rpds_py-0.28.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf681ac76a60b667106141e11a92a3330890257e6f559ca995fbb5265160b56e", size = 403146, upload-time = "2025-10-22T22:23:56.929Z" },
+ { url = "https://files.pythonhosted.org/packages/39/4c/f08283a82ac141331a83a40652830edd3a4a92c34e07e2bbe00baaea2f5f/rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1e8ee6413cfc677ce8898d9cde18cc3a60fc2ba756b0dec5b71eb6eb21c49fa1", size = 552656, upload-time = "2025-10-22T22:23:58.62Z" },
+ { url = "https://files.pythonhosted.org/packages/61/47/d922fc0666f0dd8e40c33990d055f4cc6ecff6f502c2d01569dbed830f9b/rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b3072b16904d0b5572a15eb9d31c1954e0d3227a585fc1351aa9878729099d6c", size = 576782, upload-time = "2025-10-22T22:24:00.312Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/0c/5bafdd8ccf6aa9d3bfc630cfece457ff5b581af24f46a9f3590f790e3df2/rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b670c30fd87a6aec281c3c9896d3bae4b205fd75d79d06dc87c2503717e46092", size = 544671, upload-time = "2025-10-22T22:24:02.297Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/37/dcc5d8397caa924988693519069d0beea077a866128719351a4ad95e82fc/rpds_py-0.28.0-cp314-cp314t-win32.whl", hash = "sha256:8014045a15b4d2b3476f0a287fcc93d4f823472d7d1308d47884ecac9e612be3", size = 205749, upload-time = "2025-10-22T22:24:03.848Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/69/64d43b21a10d72b45939a28961216baeb721cc2a430f5f7c3bfa21659a53/rpds_py-0.28.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7a4e59c90d9c27c561eb3160323634a9ff50b04e4f7820600a2beb0ac90db578", size = 216233, upload-time = "2025-10-22T22:24:05.471Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/bc/b43f2ea505f28119bd551ae75f70be0c803d2dbcd37c1b3734909e40620b/rpds_py-0.28.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f5e7101145427087e493b9c9b959da68d357c28c562792300dd21a095118ed16", size = 363913, upload-time = "2025-10-22T22:24:07.129Z" },
+ { url = "https://files.pythonhosted.org/packages/28/f2/db318195d324c89a2c57dc5195058cbadd71b20d220685c5bd1da79ee7fe/rpds_py-0.28.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:31eb671150b9c62409a888850aaa8e6533635704fe2b78335f9aaf7ff81eec4d", size = 350452, upload-time = "2025-10-22T22:24:08.754Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/f2/1391c819b8573a4898cedd6b6c5ec5bc370ce59e5d6bdcebe3c9c1db4588/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b55c1f64482f7d8bd39942f376bfdf2f6aec637ee8c805b5041e14eeb771db", size = 380957, upload-time = "2025-10-22T22:24:10.826Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/5c/e5de68ee7eb7248fce93269833d1b329a196d736aefb1a7481d1e99d1222/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24743a7b372e9a76171f6b69c01aedf927e8ac3e16c474d9fe20d552a8cb45c7", size = 391919, upload-time = "2025-10-22T22:24:12.559Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/4f/2376336112cbfeb122fd435d608ad8d5041b3aed176f85a3cb32c262eb80/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:389c29045ee8bbb1627ea190b4976a310a295559eaf9f1464a1a6f2bf84dde78", size = 528541, upload-time = "2025-10-22T22:24:14.197Z" },
+ { url = "https://files.pythonhosted.org/packages/68/53/5ae232e795853dd20da7225c5dd13a09c0a905b1a655e92bdf8d78a99fd9/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23690b5827e643150cf7b49569679ec13fe9a610a15949ed48b85eb7f98f34ec", size = 405629, upload-time = "2025-10-22T22:24:16.001Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/2d/351a3b852b683ca9b6b8b38ed9efb2347596973849ba6c3a0e99877c10aa/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f0c9266c26580e7243ad0d72fc3e01d6b33866cfab5084a6da7576bcf1c4f72", size = 384123, upload-time = "2025-10-22T22:24:17.585Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/15/870804daa00202728cc91cb8e2385fa9f1f4eb49857c49cfce89e304eae6/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4c6c4db5d73d179746951486df97fd25e92396be07fc29ee8ff9a8f5afbdfb27", size = 400923, upload-time = "2025-10-22T22:24:19.512Z" },
+ { url = "https://files.pythonhosted.org/packages/53/25/3706b83c125fa2a0bccceac951de3f76631f6bd0ee4d02a0ed780712ef1b/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3b695a8fa799dd2cfdb4804b37096c5f6dba1ac7f48a7fbf6d0485bcd060316", size = 413767, upload-time = "2025-10-22T22:24:21.316Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/f9/ce43dbe62767432273ed2584cef71fef8411bddfb64125d4c19128015018/rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6aa1bfce3f83baf00d9c5fcdbba93a3ab79958b4c7d7d1f55e7fe68c20e63912", size = 561530, upload-time = "2025-10-22T22:24:22.958Z" },
+ { url = "https://files.pythonhosted.org/packages/46/c9/ffe77999ed8f81e30713dd38fd9ecaa161f28ec48bb80fa1cd9118399c27/rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b0f9dceb221792b3ee6acb5438eb1f02b0cb2c247796a72b016dcc92c6de829", size = 585453, upload-time = "2025-10-22T22:24:24.779Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/d2/4a73b18821fd4669762c855fd1f4e80ceb66fb72d71162d14da58444a763/rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5d0145edba8abd3db0ab22b5300c99dc152f5c9021fab861be0f0544dc3cbc5f", size = 552199, upload-time = "2025-10-22T22:24:26.54Z" },
+]
+
+[[package]]
+name = "ruff"
+version = "0.14.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/df/55/cccfca45157a2031dcbb5a462a67f7cf27f8b37d4b3b1cd7438f0f5c1df6/ruff-0.14.4.tar.gz", hash = "sha256:f459a49fe1085a749f15414ca76f61595f1a2cc8778ed7c279b6ca2e1fd19df3", size = 5587844, upload-time = "2025-11-06T22:07:45.033Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/17/b9/67240254166ae1eaa38dec32265e9153ac53645a6c6670ed36ad00722af8/ruff-0.14.4-py3-none-linux_armv6l.whl", hash = "sha256:e6604613ffbcf2297cd5dcba0e0ac9bd0c11dc026442dfbb614504e87c349518", size = 12606781, upload-time = "2025-11-06T22:07:01.841Z" },
+ { url = "https://files.pythonhosted.org/packages/46/c8/09b3ab245d8652eafe5256ab59718641429f68681ee713ff06c5c549f156/ruff-0.14.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d99c0b52b6f0598acede45ee78288e5e9b4409d1ce7f661f0fa36d4cbeadf9a4", size = 12946765, upload-time = "2025-11-06T22:07:05.858Z" },
+ { url = "https://files.pythonhosted.org/packages/14/bb/1564b000219144bf5eed2359edc94c3590dd49d510751dad26202c18a17d/ruff-0.14.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9358d490ec030f1b51d048a7fd6ead418ed0826daf6149e95e30aa67c168af33", size = 11928120, upload-time = "2025-11-06T22:07:08.023Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/92/d5f1770e9988cc0742fefaa351e840d9aef04ec24ae1be36f333f96d5704/ruff-0.14.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b40d27924f1f02dfa827b9c0712a13c0e4b108421665322218fc38caf615c2", size = 12370877, upload-time = "2025-11-06T22:07:10.015Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/29/e9282efa55f1973d109faf839a63235575519c8ad278cc87a182a366810e/ruff-0.14.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f5e649052a294fe00818650712083cddc6cc02744afaf37202c65df9ea52efa5", size = 12408538, upload-time = "2025-11-06T22:07:13.085Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/01/930ed6ecfce130144b32d77d8d69f5c610e6d23e6857927150adf5d7379a/ruff-0.14.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa082a8f878deeba955531f975881828fd6afd90dfa757c2b0808aadb437136e", size = 13141942, upload-time = "2025-11-06T22:07:15.386Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/46/a9c89b42b231a9f487233f17a89cbef9d5acd538d9488687a02ad288fa6b/ruff-0.14.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1043c6811c2419e39011890f14d0a30470f19d47d197c4858b2787dfa698f6c8", size = 14544306, upload-time = "2025-11-06T22:07:17.631Z" },
+ { url = "https://files.pythonhosted.org/packages/78/96/9c6cf86491f2a6d52758b830b89b78c2ae61e8ca66b86bf5a20af73d20e6/ruff-0.14.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a9f3a936ac27fb7c2a93e4f4b943a662775879ac579a433291a6f69428722649", size = 14210427, upload-time = "2025-11-06T22:07:19.832Z" },
+ { url = "https://files.pythonhosted.org/packages/71/f4/0666fe7769a54f63e66404e8ff698de1dcde733e12e2fd1c9c6efb689cb5/ruff-0.14.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95643ffd209ce78bc113266b88fba3d39e0461f0cbc8b55fb92505030fb4a850", size = 13658488, upload-time = "2025-11-06T22:07:22.32Z" },
+ { url = "https://files.pythonhosted.org/packages/ee/79/6ad4dda2cfd55e41ac9ed6d73ef9ab9475b1eef69f3a85957210c74ba12c/ruff-0.14.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:456daa2fa1021bc86ca857f43fe29d5d8b3f0e55e9f90c58c317c1dcc2afc7b5", size = 13354908, upload-time = "2025-11-06T22:07:24.347Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/60/f0b6990f740bb15c1588601d19d21bcc1bd5de4330a07222041678a8e04f/ruff-0.14.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:f911bba769e4a9f51af6e70037bb72b70b45a16db5ce73e1f72aefe6f6d62132", size = 13587803, upload-time = "2025-11-06T22:07:26.327Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/da/eaaada586f80068728338e0ef7f29ab3e4a08a692f92eb901a4f06bbff24/ruff-0.14.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76158a7369b3979fa878612c623a7e5430c18b2fd1c73b214945c2d06337db67", size = 12279654, upload-time = "2025-11-06T22:07:28.46Z" },
+ { url = "https://files.pythonhosted.org/packages/66/d4/b1d0e82cf9bf8aed10a6d45be47b3f402730aa2c438164424783ac88c0ed/ruff-0.14.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f3b8f3b442d2b14c246e7aeca2e75915159e06a3540e2f4bed9f50d062d24469", size = 12357520, upload-time = "2025-11-06T22:07:31.468Z" },
+ { url = "https://files.pythonhosted.org/packages/04/f4/53e2b42cc82804617e5c7950b7079d79996c27e99c4652131c6a1100657f/ruff-0.14.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c62da9a06779deecf4d17ed04939ae8b31b517643b26370c3be1d26f3ef7dbde", size = 12719431, upload-time = "2025-11-06T22:07:33.831Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/94/80e3d74ed9a72d64e94a7b7706b1c1ebaa315ef2076fd33581f6a1cd2f95/ruff-0.14.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5a443a83a1506c684e98acb8cb55abaf3ef725078be40237463dae4463366349", size = 13464394, upload-time = "2025-11-06T22:07:35.905Z" },
+ { url = "https://files.pythonhosted.org/packages/54/1a/a49f071f04c42345c793d22f6cf5e0920095e286119ee53a64a3a3004825/ruff-0.14.4-py3-none-win32.whl", hash = "sha256:643b69cb63cd996f1fc7229da726d07ac307eae442dd8974dbc7cf22c1e18fff", size = 12493429, upload-time = "2025-11-06T22:07:38.43Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/22/e58c43e641145a2b670328fb98bc384e20679b5774258b1e540207580266/ruff-0.14.4-py3-none-win_amd64.whl", hash = "sha256:26673da283b96fe35fa0c939bf8411abec47111644aa9f7cfbd3c573fb125d2c", size = 13635380, upload-time = "2025-11-06T22:07:40.496Z" },
+ { url = "https://files.pythonhosted.org/packages/30/bd/4168a751ddbbf43e86544b4de8b5c3b7be8d7167a2a5cb977d274e04f0a1/ruff-0.14.4-py3-none-win_arm64.whl", hash = "sha256:dd09c292479596b0e6fec8cd95c65c3a6dc68e9ad17b8f2382130f87ff6a75bb", size = 12663065, upload-time = "2025-11-06T22:07:42.603Z" },
+]
+
+[[package]]
+name = "tinycss2"
+version = "1.4.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "webencodings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" },
+]
+
+[[package]]
+name = "tinyhtml5"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "webencodings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fd/03/6111ed99e9bf7dfa1c30baeef0e0fb7e0bd387bd07f8e5b270776fe1de3f/tinyhtml5-2.0.0.tar.gz", hash = "sha256:086f998833da24c300c414d9fe81d9b368fd04cb9d2596a008421cbc705fcfcc", size = 179507, upload-time = "2024-10-29T15:37:14.078Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5c/de/27c57899297163a4a84104d5cec0af3b1ac5faf62f44667e506373c6b8ce/tinyhtml5-2.0.0-py3-none-any.whl", hash = "sha256:13683277c5b176d070f82d099d977194b7a1e26815b016114f581a74bbfbf47e", size = 39793, upload-time = "2024-10-29T15:37:11.743Z" },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
+]
+
+[[package]]
+name = "weasyprint"
+version = "66.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi" },
+ { name = "cssselect2" },
+ { name = "fonttools", extra = ["woff"] },
+ { name = "pillow" },
+ { name = "pydyf" },
+ { name = "pyphen" },
+ { name = "tinycss2" },
+ { name = "tinyhtml5" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/32/99/480b5430b7eb0916e7d5df1bee7d9508b28b48fee28da894d0a050e0e930/weasyprint-66.0.tar.gz", hash = "sha256:da71dc87dc129ac9cffdc65e5477e90365ab9dbae45c744014ec1d06303dde40", size = 504224, upload-time = "2025-07-24T11:44:42.771Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0f/d1/c5d9b341bf3d556c1e4c6566b3efdda0b1bb175510aa7b09dd3eee246923/weasyprint-66.0-py3-none-any.whl", hash = "sha256:82b0783b726fcd318e2c977dcdddca76515b30044bc7a830cc4fbe717582a6d0", size = 301965, upload-time = "2025-07-24T11:44:40.968Z" },
+]
+
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" },
+]
+
+[[package]]
+name = "zopfli"
+version = "0.2.3.post1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5e/7c/a8f6696e694709e2abcbccd27d05ef761e9b6efae217e11d977471555b62/zopfli-0.2.3.post1.tar.gz", hash = "sha256:96484dc0f48be1c5d7ae9f38ed1ce41e3675fd506b27c11a6607f14b49101e99", size = 175629, upload-time = "2024-10-18T15:42:05.946Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/92/6d/c8224a8fc77c1dff6caaa2dc63794a40ea284c82ac20030fb2521092dca6/zopfli-0.2.3.post1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:518f1f4ed35dd69ce06b552f84e6d081f07c552b4c661c5312d950a0b764a58a", size = 296334, upload-time = "2024-10-18T15:40:44.684Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/da/df0f87a489d223f184d69e9e88c80c1314be43b2361acffefdc09659e00d/zopfli-0.2.3.post1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:615a8ac9dda265e9cc38b2a76c3142e4a9f30fea4a79c85f670850783bc6feb4", size = 163886, upload-time = "2024-10-18T15:40:45.812Z" },
+ { url = "https://files.pythonhosted.org/packages/39/b7/14529a7ae608cedddb2f791cbc13a392a246e2e6d9c9b4b8bcda707d08d8/zopfli-0.2.3.post1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a82fc2dbebe6eb908b9c665e71496f8525c1bc4d2e3a7a7722ef2b128b6227c8", size = 823654, upload-time = "2024-10-18T15:40:46.969Z" },
+ { url = "https://files.pythonhosted.org/packages/57/48/217c7bd720553d9e68b96926c02820e8b6184ef6dbac937823abad85b154/zopfli-0.2.3.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37d011e92f7b9622742c905fdbed9920a1d0361df84142807ea2a528419dea7f", size = 826188, upload-time = "2024-10-18T15:40:48.147Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/8b/5ab8c4c6db2564a0c3369e584090c101ffad4f9d0a39396e0d3e80c98413/zopfli-0.2.3.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e63d558847166543c2c9789e6f985400a520b7eacc4b99181668b2c3aeadd352", size = 850573, upload-time = "2024-10-18T15:40:49.481Z" },
+ { url = "https://files.pythonhosted.org/packages/33/f8/f52ec5c713f3325c852f19af7c8e3f98109ddcd1ce400dc39005072a2fea/zopfli-0.2.3.post1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60db20f06c3d4c5934b16cfa62a2cc5c3f0686bffe0071ed7804d3c31ab1a04e", size = 1754164, upload-time = "2024-10-18T15:40:50.952Z" },
+ { url = "https://files.pythonhosted.org/packages/92/24/6a6018125e1cc6ee5880a0ae60456fdc8a2da43f2f14b487cf49439a3448/zopfli-0.2.3.post1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:716cdbfc57bfd3d3e31a58e6246e8190e6849b7dbb7c4ce39ef8bbf0edb8f6d5", size = 1906135, upload-time = "2024-10-18T15:40:52.484Z" },
+ { url = "https://files.pythonhosted.org/packages/87/ad/697521dac8b46f0e0d081a3da153687d7583f3a2cd5466af1ddb9928394f/zopfli-0.2.3.post1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3a89277ed5f8c0fb2d0b46d669aa0633123aa7381f1f6118c12f15e0fb48f8ca", size = 1835047, upload-time = "2024-10-18T15:40:54.453Z" },
+ { url = "https://files.pythonhosted.org/packages/95/00/042c0cdba957343d7a83e572fc5ffe62de03d57c43075c8cf920b8b542e6/zopfli-0.2.3.post1-cp311-cp311-win32.whl", hash = "sha256:75a26a2307b10745a83b660c404416e984ee6fca515ec7f0765f69af3ce08072", size = 82635, upload-time = "2024-10-18T15:40:55.632Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/cc/07119cba00db12d7ef0472637b7d71a95f2c8e9a20ed460d759acd274887/zopfli-0.2.3.post1-cp311-cp311-win_amd64.whl", hash = "sha256:81c341d9bb87a6dbbb0d45d6e272aca80c7c97b4b210f9b6e233bf8b87242f29", size = 99345, upload-time = "2024-10-18T15:40:56.965Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/ce/b6441cc01881d06e0b5883f32c44e7cc9772e0d04e3e59277f59f80b9a19/zopfli-0.2.3.post1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3f0197b6aa6eb3086ae9e66d6dd86c4d502b6c68b0ec490496348ae8c05ecaef", size = 295489, upload-time = "2024-10-18T15:40:57.96Z" },
+ { url = "https://files.pythonhosted.org/packages/93/f0/24dd708f00ae0a925bc5c9edae858641c80f6a81a516810dc4d21688a930/zopfli-0.2.3.post1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5fcfc0dc2761e4fcc15ad5d273b4d58c2e8e059d3214a7390d4d3c8e2aee644e", size = 163010, upload-time = "2024-10-18T15:40:59.444Z" },
+ { url = "https://files.pythonhosted.org/packages/65/57/0378eeeb5e3e1e83b1b0958616b2bf954f102ba5b0755b9747dafbd8cb72/zopfli-0.2.3.post1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cac2b37ab21c2b36a10b685b1893ebd6b0f83ae26004838ac817680881576567", size = 823649, upload-time = "2024-10-18T15:41:00.642Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/8a/3ab8a616d4655acf5cf63c40ca84e434289d7d95518a1a42d28b4a7228f8/zopfli-0.2.3.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d5ab297d660b75c159190ce6d73035502310e40fd35170aed7d1a1aea7ddd65", size = 826557, upload-time = "2024-10-18T15:41:02.431Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/4d/7f6820af119c4fec6efaf007bffee7bc9052f695853a711a951be7afd26b/zopfli-0.2.3.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ba214f4f45bec195ee8559651154d3ac2932470b9d91c5715fc29c013349f8c", size = 851127, upload-time = "2024-10-18T15:41:04.259Z" },
+ { url = "https://files.pythonhosted.org/packages/e1/db/1ef5353ab06f9f2fb0c25ed0cddf1418fe275cc2ee548bc4a29340c44fe1/zopfli-0.2.3.post1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c1e0ed5d84ffa2d677cc9582fc01e61dab2e7ef8b8996e055f0a76167b1b94df", size = 1754183, upload-time = "2024-10-18T15:41:05.808Z" },
+ { url = "https://files.pythonhosted.org/packages/39/03/44f8f39950354d330fa798e4bab1ac8e38ec787d3fde25d5b9c7770065a2/zopfli-0.2.3.post1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bfa1eb759e07d8b7aa7a310a2bc535e127ee70addf90dc8d4b946b593c3e51a8", size = 1905945, upload-time = "2024-10-18T15:41:07.136Z" },
+ { url = "https://files.pythonhosted.org/packages/74/7b/94b920c33cc64255f59e3cfc77c829b5c6e60805d189baeada728854a342/zopfli-0.2.3.post1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cd2c002f160502608dcc822ed2441a0f4509c52e86fcfd1a09e937278ed1ca14", size = 1835885, upload-time = "2024-10-18T15:41:08.705Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/89/c869ac844351e285a6165e2da79b715b0619a122e3160d183805adf8ab45/zopfli-0.2.3.post1-cp312-cp312-win32.whl", hash = "sha256:7be5cc6732eb7b4df17305d8a7b293223f934a31783a874a01164703bc1be6cd", size = 82743, upload-time = "2024-10-18T15:41:10.377Z" },
+ { url = "https://files.pythonhosted.org/packages/29/e6/c98912fd3a589d8a7316c408fd91519f72c237805c4400b753e3942fda0b/zopfli-0.2.3.post1-cp312-cp312-win_amd64.whl", hash = "sha256:4e50ffac74842c1c1018b9b73875a0d0a877c066ab06bf7cccbaa84af97e754f", size = 99403, upload-time = "2024-10-18T15:41:11.547Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/24/0e552e2efce9a20625b56e9609d1e33c2966be33fc008681121ec267daec/zopfli-0.2.3.post1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecb7572df5372abce8073df078207d9d1749f20b8b136089916a4a0868d56051", size = 295485, upload-time = "2024-10-18T15:41:12.57Z" },
+ { url = "https://files.pythonhosted.org/packages/08/83/b2564369fb98797a617fe2796097b1d719a4937234375757ad2a3febc04b/zopfli-0.2.3.post1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1cf720896d2ce998bc8e051d4b4ce0d8bec007aab6243102e8e1d22a0b2fb3f", size = 163000, upload-time = "2024-10-18T15:41:13.743Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/55/81d419739c2aab35e19b58bce5498dcb58e6446e5eb69f2d3c748b1c9151/zopfli-0.2.3.post1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aad740b4d4fcbaaae4887823925166ffd062db3b248b3f432198fc287381d1a", size = 823699, upload-time = "2024-10-18T15:41:14.874Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/91/89f07c8ea3c9bc64099b3461627b07a8384302235ee0f357eaa86f98f509/zopfli-0.2.3.post1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6617fb10f9e4393b331941861d73afb119cd847e88e4974bdbe8068ceef3f73f", size = 826612, upload-time = "2024-10-18T15:41:16.069Z" },
+ { url = "https://files.pythonhosted.org/packages/41/31/46670fc0c7805d42bc89702440fa9b73491d68abbc39e28d687180755178/zopfli-0.2.3.post1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a53b18797cdef27e019db595d66c4b077325afe2fd62145953275f53d84ce40c", size = 851148, upload-time = "2024-10-18T15:41:17.403Z" },
+ { url = "https://files.pythonhosted.org/packages/22/00/71ad39277bbb88f9fd20fb786bd3ff2ea4025c53b31652a0da796fb546cd/zopfli-0.2.3.post1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b78008a69300d929ca2efeffec951b64a312e9a811e265ea4a907ab546d79fa6", size = 1754215, upload-time = "2024-10-18T15:41:18.661Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/4e/e542c508d20c3dfbef1b90fcf726f824f505e725747f777b0b7b7d1deb95/zopfli-0.2.3.post1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa5f90d6298bda02a95bc8dc8c3c19004d5a4e44bda00b67ca7431d857b4b54", size = 1905988, upload-time = "2024-10-18T15:41:19.933Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/a5/817ac1ecc888723e91dc172e8c6eeab9f48a1e52285803b965084e11bbd5/zopfli-0.2.3.post1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2768c877f76c8a0e7519b1c86c93757f3c01492ddde55751e9988afb7eff64e1", size = 1835907, upload-time = "2024-10-18T15:41:21.582Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/35/2525f90c972d8aafc39784a8c00244eeee8e8221b26cbc576748ee9dc1cd/zopfli-0.2.3.post1-cp313-cp313-win32.whl", hash = "sha256:71390dbd3fbf6ebea9a5d85ffed8c26ee1453ee09248e9b88486e30e0397b775", size = 82742, upload-time = "2024-10-18T15:41:23.362Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/c6/49b27570923956d52d37363e8f5df3a31a61bd7719bb8718527a9df3ae5f/zopfli-0.2.3.post1-cp313-cp313-win_amd64.whl", hash = "sha256:a86eb88e06bd87e1fff31dac878965c26b0c26db59ddcf78bb0379a954b120de", size = 99408, upload-time = "2024-10-18T15:41:24.377Z" },
+]
From 9aebed975308cae7753f01648c3c9792f00c486f Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Mon, 12 Jan 2026 02:59:45 +0200
Subject: [PATCH 18/19] feat: add docs and formatter/validator modules
---
.github/workflows/docs.yml | 40 ++
Dockerfile | 6 +-
README.md | 208 ++-----
docs/api/core/constants.md | 35 ++
docs/api/core/exceptions.md | 81 +++
docs/api/core/formatter.md | 54 ++
docs/api/core/main.md | 29 +
docs/api/core/models.md | 23 +
docs/api/core/network.md | 159 +++++
docs/api/core/validator.md | 49 ++
docs/contributing.md | 55 ++
docs/getting-started/installation.md | 66 ++
docs/getting-started/usage.md | 163 +++++
docs/index.md | 43 ++
docs/license.md | 6 +
mkdocs.yml | 134 ++++
naminter/__init__.py | 38 +-
naminter/cli/config.py | 327 +++++++---
naminter/cli/console.py | 288 ++++++---
naminter/cli/constants.py | 49 +-
naminter/cli/exceptions.py | 47 +-
naminter/cli/exporters.py | 272 ++++----
naminter/cli/main.py | 808 ++++++++++++++----------
naminter/cli/progress.py | 136 ++--
naminter/cli/utils.py | 338 +++++++---
naminter/core/constants.py | 35 +-
naminter/core/exceptions.py | 93 +++
naminter/core/formatter.py | 215 +++++++
naminter/core/main.py | 896 +++++++++++++--------------
naminter/core/models.py | 344 +++++++---
naminter/core/network.py | 339 ++++++++--
naminter/core/utils.py | 138 ++---
naminter/core/validator.py | 282 +++++++++
pyproject.toml | 87 ++-
uv.lock | 570 +++++++++++++++--
35 files changed, 4706 insertions(+), 1747 deletions(-)
create mode 100644 .github/workflows/docs.yml
create mode 100644 docs/api/core/constants.md
create mode 100644 docs/api/core/exceptions.md
create mode 100644 docs/api/core/formatter.md
create mode 100644 docs/api/core/main.md
create mode 100644 docs/api/core/models.md
create mode 100644 docs/api/core/network.md
create mode 100644 docs/api/core/validator.md
create mode 100644 docs/contributing.md
create mode 100644 docs/getting-started/installation.md
create mode 100644 docs/getting-started/usage.md
create mode 100644 docs/index.md
create mode 100644 docs/license.md
create mode 100644 mkdocs.yml
create mode 100644 naminter/core/formatter.py
create mode 100644 naminter/core/validator.py
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 0000000..afdf6df
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,40 @@
+name: Deploy Documentation
+
+on:
+ push:
+ branches:
+ - main
+ paths:
+ - 'docs/**'
+ - 'mkdocs.yml'
+ - '.github/workflows/docs.yml'
+ workflow_dispatch:
+
+permissions:
+ contents: write
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.11'
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+          pip install mkdocs mkdocs-material "mkdocstrings[python]"
+
+ - name: Build documentation
+ run: mkdocs build
+
+ - name: Deploy to GitHub Pages
+ uses: peaceiris/actions-gh-pages@v3
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ publish_dir: ./site
+
diff --git a/Dockerfile b/Dockerfile
index 49423a1..46c7197 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -16,12 +16,8 @@ RUN apt-get update && apt-get install -y \
libcairo2 \
libpango-1.0-0 \
libpangocairo-1.0-0 \
- libgdk-pixbuf2.0-0 \
- libffi-dev \
+ libgdk-pixbuf-2.0-0 \
ca-certificates \
- curl \
- gcc \
- g++ \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
diff --git a/README.md b/README.md
index a318fe0..8e70de6 100644
--- a/README.md
+++ b/README.md
@@ -14,7 +14,6 @@ Naminter is a Python package and command-line interface (CLI) tool for asynchron
## Table of Contents
-- [Features](#features)
- [Installation](#installation)
- [From PyPI](#from-pypi)
- [From Source](#from-source)
@@ -27,21 +26,6 @@ Naminter is a Python package and command-line interface (CLI) tool for asynchron
- [Contributing](#contributing)
- [License](#license)
-## Features
-
-- **Broad Site Coverage:** Leverages the [WhatsMyName](https://github.com/WebBreacher/WhatsMyName) dataset for extensive username enumeration
-- **Browser Impersonation:** Simulate Chrome, Firefox, Safari, Edge for accurate detection
-- **Real-Time Console UI:** Live progress bar, colored output, and instant feedback
-- **Concurrent & Fast:** High-speed, concurrent checks with adjustable task limits
-- **Validation Modes:** Strict (ALL) or permissive (ANY) matching for detection criteria
-- **Category Filters:** Include or exclude sites by category
-- **Custom Site Lists:** Use your own or remote WhatsMyName-format lists and schemas
-- **Proxy & Network Options:** Full proxy support, SSL verification, and redirect control
-- **Site Validation Mode:** Validate detection methods for reliability
-- **Export Results:** Output to CSV, JSON, HTML, and PDF
-- **Response Handling:** Save/open HTTP responses for analysis
-- **Flexible Filtering:** Filter results by found, not found, errors, or unknown
-
## Installation
### From PyPI
@@ -134,7 +118,7 @@ naminter --username alice_bob \
--html report.html
# Site validation with detailed output
-naminter --validate-sites \
+naminter --test \
--show-details \
--log-level DEBUG \
--log-file debug.log
@@ -142,120 +126,55 @@ naminter --validate-sites \
### Using as a Python Package
-Naminter can be used programmatically in Python projects to enumerate usernames across various platforms. The Naminter class requires WhatsMyName (WMN) data to operate. You can either load this data from local files or fetch it from remote sources.
-
-#### Getting Started - Loading WMN Data
-
-Before using Naminter, you need to load the WhatsMyName dataset:
-
-```python
-import asyncio
-import json
-import aiohttp
-from naminter import Naminter
-
-async def load_wmn_data():
- """Load WhatsMyName data from the official repository."""
- async with aiohttp.ClientSession() as session:
- # Load the main sites data
- async with session.get("https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data.json") as response:
- wmn_data = await response.json()
-
- # Optionally load the schema for validation
- async with session.get("https://raw.githubusercontent.com/WebBreacher/WhatsMyName/main/wmn-data-schema.json") as response:
- wmn_schema = await response.json()
-
- return wmn_data, wmn_schema
-
-# Alternative: Load from local files
-def load_local_wmn_data():
- """Load WhatsMyName data from local files."""
- with open("wmn-data.json", "r") as f:
- wmn_data = json.load(f)
-
- with open("wmn-data-schema.json", "r") as f:
- wmn_schema = json.load(f)
-
- return wmn_data, wmn_schema
-```
+Naminter can be used programmatically in Python projects to enumerate usernames across various platforms.
-#### Basic Asynchronous Example
+#### Basic Example
```python
import asyncio
-from naminter import Naminter
+from naminter import Naminter, CurlCFFISession, WMN_REMOTE_URL
async def main():
- # Load WMN data
- wmn_data, wmn_schema = await load_wmn_data()
-
- # Initialize Naminter with the WMN data
- async with Naminter(wmn_data, wmn_schema) as naminter:
- results = await naminter.enumerate_usernames(["example_username"])
- for result in results:
- if result.status.value == "found":
- print(f"✅ {result.username} found on {result.name}: {result.url}")
- elif result.status.value == "not_found":
- print(f"❌ {result.username} not found on {result.name}")
- elif result.status.value == "error":
- print(f"⚠️ Error enumerating {result.username} on {result.name}: {result.error}")
+ async with CurlCFFISession() as http_client:
+ wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
+
+ async with Naminter(http_client=http_client, wmn_data=wmn_data) as naminter:
+ async for result in naminter.enumerate_usernames(["example_username"]):
+ if result.status.value == "exists":
+ print(f"✅ {result.username} found on {result.name}: {result.url}")
+ elif result.status.value == "missing":
+ print(f"❌ {result.username} not found on {result.name}")
+ elif result.status.value == "error":
+ print(f"⚠️ Error checking {result.username} on {result.name}: {result.error}")
asyncio.run(main())
```
-#### Asynchronous Example with Generator
-
-For more efficient processing, use an asynchronous generator to handle results as they come in:
-
-```python
-import asyncio
-from naminter import Naminter
-
-async def main():
- wmn_data, wmn_schema = await load_wmn_data()
-
- async with Naminter(wmn_data, wmn_schema) as naminter:
- # Use as_generator=True for streaming results
- results = await naminter.enumerate_usernames(["example_username"], as_generator=True)
- async for result in results:
- if result.status.value == "found":
- print(f"✅ {result.username} found on {result.name}: {result.url}")
- elif result.status.value == "not_found":
- print(f"❌ {result.username} not found on {result.name}")
-
-asyncio.run(main())
-```
-#### Multiple Usernames and Advanced Configuration
+#### Advanced Configuration
```python
import asyncio
-from naminter import Naminter
-from naminter.core.models import WMNMode
+from naminter import Naminter, CurlCFFISession, WMNMode, WMN_REMOTE_URL
async def main():
- wmn_data, wmn_schema = await load_wmn_data()
-
- # Advanced configuration with custom settings
- async with Naminter(
- wmn_data=wmn_data,
- wmn_schema=wmn_schema,
- max_tasks=100,
+ async with CurlCFFISession(
timeout=15,
impersonate="chrome",
- verify_ssl=True,
- proxy="http://proxy:8080"
- ) as naminter:
- usernames = ["user1", "user2", "user3"]
- results = await naminter.enumerate_usernames(usernames, mode=WMNMode.ANY)
-
- for result in results:
- if result.status.value == "found":
- print(f"✅ Found: {result.username} on {result.name}")
- print(f" URL: {result.url}")
- print(f" Response time: {result.elapsed:.2f}s")
- else:
- print(f"❌ Not found: {result.username} on {result.name}")
+ verify=True,
+ proxies="http://proxy:8080"
+ ) as http_client:
+ wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
+
+ async with Naminter(
+ http_client=http_client,
+ wmn_data=wmn_data,
+ max_tasks=100
+ ) as naminter:
+ usernames = ["user1", "user2", "user3"]
+ async for result in naminter.enumerate_usernames(usernames, mode=WMNMode.ANY):
+ if result.status.value == "exists":
+ print(f"✅ {result.username} on {result.name}: {result.url}")
asyncio.run(main())
```
@@ -264,43 +183,45 @@ asyncio.run(main())
```python
import asyncio
-from naminter import Naminter
+from naminter import Naminter, CurlCFFISession, WMN_REMOTE_URL
async def main():
- wmn_data, wmn_schema = await load_wmn_data()
-
- async with Naminter(wmn_data, wmn_schema) as naminter:
- # Validate site detection methods using known usernames
- validation_results = await naminter.validate_sites()
-
- for site_result in validation_results:
- if site_result.error:
- print(f"❌ {site_result.name}: {site_result.error}")
- else:
- found_count = sum(1 for r in site_result.results if r.status.value == "found")
- total_count = len(site_result.results)
- print(f"✅ {site_result.name}: {found_count}/{total_count} known accounts found")
+ async with CurlCFFISession() as http_client:
+ wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
+
+ async with Naminter(http_client=http_client, wmn_data=wmn_data) as naminter:
+ async for site_result in naminter.enumerate_test():
+ if site_result.error:
+ print(f"❌ {site_result.name}: {site_result.error}")
+ else:
+ found = sum(1 for r in site_result.results if r.status.value == "exists")
+ total = len(site_result.results)
+ print(f"✅ {site_result.name}: {found}/{total} known accounts found")
asyncio.run(main())
```
-#### Getting WMN Information
+#### Getting WMN Summary
```python
import asyncio
-from naminter import Naminter
+from naminter import Naminter, CurlCFFISession, WMN_REMOTE_URL, WMN_SCHEMA_URL
async def main():
- wmn_data, wmn_schema = await load_wmn_data()
-
- async with Naminter(wmn_data, wmn_schema) as naminter:
- # Get information about the loaded WMN data
- info = await naminter.get_wmn_summary()
- print(f"Total sites: {info['sites_count']}")
- print(f"Categories: {', '.join(info['categories'])}")
-
- # Summaries include sites_count, categories and categories_count
- # Use this data to derive lists as needed.
+ async with CurlCFFISession() as http_client:
+ # Load data and (optionally) schema using public constants
+ wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
+ wmn_schema = (await http_client.get(WMN_SCHEMA_URL)).json()
+
+ async with Naminter(
+ http_client=http_client,
+ wmn_data=wmn_data,
+ wmn_schema=wmn_schema,
+ ) as naminter:
+ summary = naminter.get_wmn_summary()
+ print(f"Total sites: {summary.sites_count}")
+ print(f"Total categories: {summary.categories_count}")
+ print(f"Known accounts: {summary.known_count}")
asyncio.run(main())
```
@@ -328,7 +249,7 @@ asyncio.run(main())
### Site Validation
| Option | Description |
|-----------------------------|------------------------------------------------------------|
-| `--validate-sites` | Validate site detection methods by checking known usernames |
+| `--test` | Validate site detection methods by checking known usernames |
### Category Filters
| Option | Description |
@@ -373,10 +294,11 @@ asyncio.run(main())
| Option | Description |
|-----------------------------|------------------------------------------------------------|
| `--filter-all` | Include all results in console and exports |
-| `--filter-found` | Show only found results in console and exports |
-| `--filter-ambiguous` | Show only ambiguous results in console and exports |
+| `--filter-exists` | Show only existing username results in console and exports |
+| `--filter-partial` | Show only partial match results in console and exports |
+| `--filter-conflicting` | Show only conflicting results in console and exports |
| `--filter-unknown` | Show only unknown results in console and exports |
-| `--filter-not-found` | Show only not found results in console and exports |
+| `--filter-missing` | Show only missing username results in console and exports |
| `--filter-not-valid` | Show only not valid results in console and exports |
| `--filter-errors` | Show only error results in console and exports |
diff --git a/docs/api/core/constants.md b/docs/api/core/constants.md
new file mode 100644
index 0000000..dff5ce7
--- /dev/null
+++ b/docs/api/core/constants.md
@@ -0,0 +1,35 @@
+# Constants
+
+Public constants used throughout Naminter.
+
+## Overview
+
+This module provides public constants for URLs and configuration values that are commonly used when working with Naminter.
+
+## Available Constants
+
+### URLs
+
+- **`WMN_REMOTE_URL`**: Default URL for the remote WhatsMyName dataset
+- **`WMN_SCHEMA_URL`**: Default URL for the WhatsMyName JSON schema
+
+These constants are exported from the main `naminter` package and can be imported directly.
+
+## Usage
+
+```python
+from naminter import CurlCFFISession, WMN_REMOTE_URL, WMN_SCHEMA_URL
+
+async with CurlCFFISession() as http_client:
+ # Fetch data using the public constant
+ wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
+ wmn_schema = (await http_client.get(WMN_SCHEMA_URL)).json()
+```
+
+## Internal Constants
+
+The `naminter.core.constants` module also contains internal constants used throughout the codebase for configuration, HTTP settings, and data structure keys. These are primarily for internal use and are not exported from the main package.
+
+## API Reference
+
+::: naminter.core.constants
diff --git a/docs/api/core/exceptions.md b/docs/api/core/exceptions.md
new file mode 100644
index 0000000..805f37e
--- /dev/null
+++ b/docs/api/core/exceptions.md
@@ -0,0 +1,81 @@
+# Exceptions
+
+Exception classes used throughout Naminter for error handling.
+
+## Overview
+
+Naminter uses a hierarchical exception structure with `NaminterError` as the base exception class. All exceptions inherit from this base class, allowing for consistent error handling across the codebase.
+
+## Core Exceptions
+
+### Base Exception
+
+::: naminter.core.exceptions.NaminterError
+
+### Network/HTTP Errors
+
+::: naminter.core.exceptions.HttpError
+::: naminter.core.exceptions.HttpSessionError
+::: naminter.core.exceptions.HttpTimeoutError
+::: naminter.core.exceptions.HttpStatusError
+
+### Data Processing Errors
+
+::: naminter.core.exceptions.WMNDataError
+::: naminter.core.exceptions.WMNUninitializedError
+::: naminter.core.exceptions.WMNUnknownSiteError
+::: naminter.core.exceptions.WMNUnknownCategoriesError
+::: naminter.core.exceptions.WMNSchemaError
+::: naminter.core.exceptions.WMNValidationError
+
+## Common Exception Patterns
+
+### Handling Network Errors
+
+```python
+from naminter import HttpError, HttpTimeoutError
+from naminter.core.exceptions import HttpStatusError
+
+try:
+ # Network operation
+ pass
+except HttpTimeoutError:
+ # Handle timeout specifically (e.g., retry with backoff)
+ pass
+except HttpStatusError as e:
+ # Handle HTTP error status codes (access e.status_code, e.url)
+ if e.status_code == 404:
+ # Handle not found
+ pass
+except HttpError:
+ # Handle any other HTTP error
+ pass
+```
+
+### Handling Data Errors
+
+```python
+from naminter.core.exceptions import (
+ WMNDataError,
+ WMNUninitializedError,
+ WMNUnknownSiteError,
+ WMNValidationError,
+)
+
+try:
+ # WMN data operation
+ pass
+except WMNUninitializedError:
+ # Data not loaded
+ pass
+except WMNUnknownSiteError as e:
+ # Access unknown site names
+ print(f"Unknown sites: {e.site_names}")
+except WMNValidationError as e:
+ # Access validation errors
+ for error in e.errors:
+ print(f"Validation error: {error}")
+except WMNDataError:
+ # Handle any other data error
+ pass
+```
diff --git a/docs/api/core/formatter.md b/docs/api/core/formatter.md
new file mode 100644
index 0000000..a0a72f1
--- /dev/null
+++ b/docs/api/core/formatter.md
@@ -0,0 +1,54 @@
+# Formatter
+
+Formatter for WhatsMyName JSON data.
+
+## Overview
+
+The `WMNFormatter` class provides functionality to format and sort WhatsMyName JSON data according to a JSON schema. It ensures consistent ordering of keys, alphabetical sorting of arrays, and proper formatting of site data.
+
+## Basic Usage
+
+```python
+import json
+from pathlib import Path
+from naminter import WMNFormatter
+
+# Load data and schema
+with open("wmn-data.json", encoding="utf-8") as f:
+ data = json.load(f)
+
+with open("wmn-schema.json", encoding="utf-8") as f:
+ schema = json.load(f)
+
+# Read original content for comparison
+input_path = Path("wmn-data.json")
+original_content = input_path.read_text(encoding="utf-8")
+
+# Create formatter with schema
+formatter = WMNFormatter(schema)
+# Format data (data is not modified)
+formatted_content = formatter.format_data(data)
+
+# Compare and write if changed
+if original_content != formatted_content:
+ output_path = Path("wmn-data-formatted.json")
+ output_path.write_text(formatted_content, encoding="utf-8")
+ print("File was formatted and saved")
+else:
+ print("File was already properly formatted")
+```
+
+## CLI Usage
+
+The formatter is also available via the CLI:
+
+```bash
+naminter format \
+ --local-schema schema.json \
+ --local-data data.json \
+ --output formatted-data.json
+```
+
+## API Reference
+
+::: naminter.core.formatter.WMNFormatter
diff --git a/docs/api/core/main.md b/docs/api/core/main.md
new file mode 100644
index 0000000..2961670
--- /dev/null
+++ b/docs/api/core/main.md
@@ -0,0 +1,29 @@
+# Naminter
+
+The main class for username enumeration across multiple platforms.
+
+## Overview
+
+The `Naminter` class provides asynchronous username enumeration functionality using the WhatsMyName dataset. It supports concurrent requests, custom filtering, and multiple validation modes.
+
+## Basic Usage
+
+```python
+import asyncio
+from naminter import Naminter, CurlCFFISession, WMN_REMOTE_URL
+
+async def main():
+ async with CurlCFFISession() as http_client:
+ wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
+
+ async with Naminter(http_client=http_client, wmn_data=wmn_data) as naminter:
+ async for result in naminter.enumerate_usernames(["username"]):
+ print(f"{result.name}: {result.status.value}")
+
+asyncio.run(main())
+```
+
+## API Reference
+
+::: naminter.core.main.Naminter
+
diff --git a/docs/api/core/models.md b/docs/api/core/models.md
new file mode 100644
index 0000000..d99b866
--- /dev/null
+++ b/docs/api/core/models.md
@@ -0,0 +1,23 @@
+# Models
+
+Data models and types used throughout Naminter.
+
+## Overview
+
+This module defines the core data structures for:
+- Enumeration results (`WMNResult`)
+- Site datasets (`WMNDataset`)
+- Validation modes (`WMNMode`)
+- Summary statistics (`WMNSummary`)
+
+## Common Models
+
+- **`WMNResult`**: Represents the result of checking a username on a specific site
+- **`WMNDataset`**: Container for WhatsMyName site data
+- **`WMNMode`**: Enumeration mode (ALL for strict matching, ANY for permissive)
+- **`WMNStatus`**: Status of enumeration (exists, missing, error, etc.)
+
+## API Reference
+
+::: naminter.core.models
+
diff --git a/docs/api/core/network.md b/docs/api/core/network.md
new file mode 100644
index 0000000..8ed1968
--- /dev/null
+++ b/docs/api/core/network.md
@@ -0,0 +1,159 @@
+# Network
+
+HTTP client and network utilities for making requests.
+
+## Overview
+
+The network module provides HTTP session management with support for:
+- Browser impersonation
+- Proxy configuration
+- SSL verification
+- Custom timeouts and redirects
+- Custom session implementations via `BaseSession` protocol
+
+## Basic Usage
+
+```python
+from naminter import CurlCFFISession
+
+async with CurlCFFISession(
+ timeout=30,
+ impersonate="chrome",
+ proxies="http://proxy:8080"
+) as http_client:
+ response = await http_client.get("https://example.com")
+```
+
+## Custom Session Implementation
+
+You can create your own HTTP session implementation by implementing the `BaseSession` protocol. This allows you to use any HTTP client library (aiohttp, httpx, etc.) with Naminter.
+
+### Implementing BaseSession
+
+The `BaseSession` protocol requires the following methods:
+
+- `async open() -> None`: Initialize/open the HTTP session
+- `async close() -> None`: Clean up/close the HTTP session
+- `async get(url: str, headers: Mapping[str, str] | None = None) -> WMNResponse`: Perform HTTP GET request
+- `async post(url: str, headers: Mapping[str, str] | None = None, data: str | bytes | None = None) -> WMNResponse`: Perform HTTP POST request
+- `async request(method: str, url: str, headers: Mapping[str, str] | None = None, data: str | bytes | None = None) -> WMNResponse`: Generic HTTP request
+- `async __aenter__() -> BaseSession`: Async context manager entry
+- `async __aexit__(exc_type, exc_val, exc_tb) -> None`: Async context manager exit
+
+### Error Handling
+
+Your implementation should raise the following exceptions:
+- `HttpSessionError`: For session initialization/management errors
+- `HttpTimeoutError`: For request timeouts
+- `HttpError`: For other network-related errors
+
+### Example: aiohttp Implementation
+
+```python
+import asyncio
+import aiohttp
+from collections.abc import Mapping
+from naminter import (
+ BaseSession,
+ HttpError,
+ HttpSessionError,
+ HttpTimeoutError,
+ Naminter,
+ WMNResponse,
+)
+
+class AiohttpSession:
+ """Custom aiohttp-based session implementation."""
+
+ def __init__(self, timeout: int = 30, **kwargs):
+ self._timeout = aiohttp.ClientTimeout(total=timeout)
+ self._session: aiohttp.ClientSession | None = None
+ self._kwargs = kwargs
+
+ async def open(self) -> None:
+ """Open the aiohttp session."""
+ if self._session is None:
+ try:
+ self._session = aiohttp.ClientSession(
+ timeout=self._timeout,
+ **self._kwargs
+ )
+ except Exception as e:
+ raise HttpSessionError("Failed to create session", cause=e) from e
+
+ async def close(self) -> None:
+ """Close the aiohttp session."""
+ if self._session:
+ await self._session.close()
+ self._session = None
+
+ async def get(
+ self, url: str, headers: Mapping[str, str] | None = None
+ ) -> WMNResponse:
+ """Perform HTTP GET request."""
+ return await self.request("GET", url, headers=headers)
+
+ async def post(
+ self,
+ url: str,
+ headers: Mapping[str, str] | None = None,
+ data: str | bytes | None = None,
+ ) -> WMNResponse:
+ """Perform HTTP POST request."""
+ return await self.request("POST", url, headers=headers, data=data)
+
+ async def request(
+ self,
+ method: str,
+ url: str,
+ headers: Mapping[str, str] | None = None,
+ data: str | bytes | None = None,
+ ) -> WMNResponse:
+ """Perform generic HTTP request."""
+ await self.open()
+
+ if self._session is None:
+ raise HttpSessionError("Session not initialized")
+
+ try:
+ async with self._session.request(
+ method=method,
+ url=url,
+ headers=dict(headers) if headers else None,
+ data=data,
+ ) as response:
+ text = await response.text()
+ return WMNResponse(
+ status_code=response.status,
+ text=text,
+ elapsed=0.0, # aiohttp doesn't provide elapsed time directly
+ )
+ except asyncio.TimeoutError as e:
+ raise HttpTimeoutError(f"{method} timeout for {url}", cause=e) from e
+ except aiohttp.ClientError as e:
+ raise HttpError(f"{method} failed for {url}: {e}", cause=e) from e
+ except Exception as e:
+ raise HttpError(f"Unexpected error: {e}", cause=e) from e
+
+ async def __aenter__(self) -> "AiohttpSession":
+ """Async context manager entry."""
+ await self.open()
+ return self
+
+ async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
+ """Async context manager exit."""
+ await self.close()
+
+# Usage with Naminter
+async with AiohttpSession() as http_client:
+ async with Naminter(http_client=http_client, wmn_data=wmn_data) as naminter:
+ async for result in naminter.enumerate_usernames(["username"]):
+ print(f"{result.name}: {result.status.value}")
+```
+
+## API Reference
+
+::: naminter.core.network.BaseSession
+
+::: naminter.core.network.CurlCFFISession
+
diff --git a/docs/api/core/validator.md b/docs/api/core/validator.md
new file mode 100644
index 0000000..a70f5e6
--- /dev/null
+++ b/docs/api/core/validator.md
@@ -0,0 +1,49 @@
+# Validator
+
+Validator for WhatsMyName JSON data.
+
+## Overview
+
+The `WMNValidator` class validates WhatsMyName JSON data against a JSON Schema. It uses the Draft7 JSON Schema validator to check data structure, types, and constraints.
+
+## Basic Usage
+
+```python
+import json
+from pathlib import Path
+from naminter import WMNValidator
+
+# Load data and schema
+with open("wmn-data.json", encoding="utf-8") as f:
+ data = json.load(f)
+
+with open("wmn-schema.json", encoding="utf-8") as f:
+ schema = json.load(f)
+
+# Create validator with schema
+validator = WMNValidator(schema)
+
+# Validate data (data is not modified)
+errors = validator.validate(data)
+
+if errors:
+ print(f"Validation failed with {len(errors)} errors:")
+ for error in errors:
+ print(f" - {error.path}: {error.message}")
+else:
+ print("Validation passed!")
+```
+
+## CLI Usage
+
+The validator is also available via the CLI:
+
+```bash
+naminter validate \
+ --local-schema schema.json \
+ --local-data data.json
+```
+
+## API Reference
+
+::: naminter.core.validator.WMNValidator
diff --git a/docs/contributing.md b/docs/contributing.md
new file mode 100644
index 0000000..763fd7c
--- /dev/null
+++ b/docs/contributing.md
@@ -0,0 +1,55 @@
+# Contributing
+
+Contributions are always welcome! Please submit a pull request with your improvements or open an issue to discuss.
+
+## Development Setup
+
+1. Clone the repository:
+```bash
+git clone https://github.com/3xp0rt/naminter.git
+cd naminter
+```
+
+2. Install in editable mode with dev dependencies:
+
+ ```bash
+ uv sync --extra dev
+ ```
+
+ Alternatively, using `uv pip`:
+
+ ```bash
+ uv pip install -e ".[dev]"
+ ```
+
+3. Run linting:
+```bash
+uv run ruff format
+uv run ruff check
+```
+
+## Code Style
+
+This project uses:
+
+- **Ruff** for linting and formatting
+- **Google-style** docstrings
+- **Type hints** for all function signatures
+
+## Submitting Changes
+
+1. Fork the repository
+2. Create a feature branch (`git checkout -b feature/amazing-feature`)
+3. Make your changes following the code style guidelines
+4. Run linting and ensure all checks pass
+5. Commit your changes with clear, descriptive messages
+6. Push to your fork (`git push origin feature/amazing-feature`)
+7. Open a pull request with a detailed description of your changes
+
+## Pull Request Guidelines
+
+- Provide a clear description of what the PR does
+- Reference any related issues
+- Ensure code follows the project's style guidelines
+- Update documentation if needed
+
diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md
new file mode 100644
index 0000000..f994c9c
--- /dev/null
+++ b/docs/getting-started/installation.md
@@ -0,0 +1,66 @@
+# Installation
+
+## From PyPI
+
+Install Naminter with pip or uv:
+
+```bash
+# Using pip
+pip install naminter
+
+# Using uv
+uv tool install naminter
+```
+
+## From Source
+
+Clone the repository and install in editable mode:
+
+```bash
+git clone https://github.com/3xp0rt/naminter.git
+cd naminter
+pip install -e .
+
+# Or with uv
+uv pip install -e .
+```
+
+For development with dev dependencies:
+
+```bash
+uv sync --extra dev
+```
+
+## Using Docker
+
+All required folders are mounted automatically the first time you start the container with `docker compose run`.
+
+```bash
+# Using the prebuilt docker image from the GitHub registry
+docker run --rm -it ghcr.io/3xp0rt/naminter --username john_doe
+
+# Build the docker from the source yourself
+git clone https://github.com/3xp0rt/naminter.git && cd naminter
+docker build -t naminter .
+docker compose run --rm naminter --username john_doe
+```
+
+## Requirements
+
+- Python 3.11 or higher
+- See `pyproject.toml` for full dependency list
+
+## Verification
+
+After installation, verify that Naminter is correctly installed:
+
+```bash
+naminter --version
+```
+
+You should see the version number displayed. If you encounter any issues, ensure that:
+
+1. Python 3.11+ is installed: `python --version`
+2. The installation completed without errors
+3. Your PATH includes the Python scripts directory
+
diff --git a/docs/getting-started/usage.md b/docs/getting-started/usage.md
new file mode 100644
index 0000000..2566f29
--- /dev/null
+++ b/docs/getting-started/usage.md
@@ -0,0 +1,163 @@
+# Usage
+
+## Basic CLI Usage
+
+Enumerate a single username:
+
+```bash
+naminter --username john_doe
+```
+
+Enumerate multiple usernames:
+
+```bash
+naminter --username user1 --username user2 --username user3
+```
+
+## Advanced CLI Options
+
+Customize the enumerator with various command-line arguments:
+
+```bash
+# Basic username enumeration with custom settings
+naminter --username john_doe \
+ --max-tasks 100 \
+ --timeout 15 \
+ --impersonate chrome \
+ --include-categories social coding
+
+# Using proxy and saving responses
+naminter --username jane_smith \
+ --proxy http://proxy:8080 \
+ --save-response \
+ --open-response
+
+# Using custom schema validation
+naminter --username alice_bob \
+ --local-schema ./custom-schema.json \
+ --local-list ./my-sites.json
+
+# Using remote schema with custom list
+naminter --username test_user \
+ --remote-schema https://example.com/custom-schema.json \
+ --remote-list https://example.com/my-sites.json
+
+# Export results in multiple formats
+naminter --username alice_bob \
+ --csv \
+ --json \
+ --html \
+ --filter-all
+
+# Export with custom paths using merged flags
+naminter --username alice_bob \
+ --csv results.csv \
+ --json results.json \
+ --html report.html
+
+# Site validation with detailed output
+naminter --test \
+ --show-details \
+ --log-level DEBUG \
+ --log-file debug.log
+```
+
+## Using as a Python Package
+
+### Basic Example
+
+```python
+import asyncio
+from naminter import Naminter, CurlCFFISession, WMN_REMOTE_URL
+
+async def main():
+ async with CurlCFFISession() as http_client:
+ wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
+
+ async with Naminter(http_client=http_client, wmn_data=wmn_data) as naminter:
+ async for result in naminter.enumerate_usernames(["example_username"]):
+ if result.status.value == "exists":
+ print(f"✅ {result.username} found on {result.name}: {result.url}")
+ elif result.status.value == "missing":
+ print(f"❌ {result.username} not found on {result.name}")
+ elif result.status.value == "error":
+ print(f"⚠️ Error checking {result.username} on {result.name}: {result.error}")
+
+asyncio.run(main())
+```
+
+### Advanced Configuration
+
+```python
+import asyncio
+from naminter import Naminter, CurlCFFISession, WMNMode, WMN_REMOTE_URL
+
+async def main():
+ async with CurlCFFISession(
+ timeout=15,
+ impersonate="chrome",
+ verify=True,
+ proxies="http://proxy:8080"
+ ) as http_client:
+ wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
+
+ async with Naminter(
+ http_client=http_client,
+ wmn_data=wmn_data,
+ max_tasks=100
+ ) as naminter:
+ usernames = ["user1", "user2", "user3"]
+ async for result in naminter.enumerate_usernames(usernames, mode=WMNMode.ANY):
+ if result.status.value == "exists":
+ print(f"✅ {result.username} on {result.name}: {result.url}")
+
+asyncio.run(main())
+```
+
+### Site Validation
+
+```python
+import asyncio
+from naminter import Naminter, CurlCFFISession, WMN_REMOTE_URL
+
+async def main():
+ async with CurlCFFISession() as http_client:
+ wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
+
+ async with Naminter(http_client=http_client, wmn_data=wmn_data) as naminter:
+ async for site_result in naminter.enumerate_test():
+ if site_result.error:
+ print(f"❌ {site_result.name}: {site_result.error}")
+ else:
+ found = sum(1 for r in site_result.results if r.status.value == "exists")
+ total = len(site_result.results)
+ print(f"✅ {site_result.name}: {found}/{total} known accounts found")
+
+asyncio.run(main())
+```
+
+### Getting WMN Summary
+
+```python
+import asyncio
+from naminter import Naminter, CurlCFFISession, WMN_REMOTE_URL, WMN_SCHEMA_URL
+
+async def main():
+ async with CurlCFFISession() as http_client:
+ # Load data and (optionally) schema using public constants
+ wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
+ wmn_schema = (await http_client.get(WMN_SCHEMA_URL)).json()
+
+ async with Naminter(
+ http_client=http_client,
+ wmn_data=wmn_data,
+ wmn_schema=wmn_schema,
+ ) as naminter:
+ summary = naminter.get_wmn_summary()
+ print(f"Total sites: {summary.sites_count}")
+ print(f"Total categories: {summary.categories_count}")
+ print(f"Known accounts: {summary.known_count}")
+
+asyncio.run(main())
+```
+
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..7c00eda
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,43 @@
+# Naminter
+
+A Python package and command-line interface (CLI) tool for asynchronous OSINT username enumeration using the WhatsMyName dataset.
+
+## Overview
+
+Naminter leverages the comprehensive [WhatsMyName](https://github.com/WebBreacher/WhatsMyName) list to enumerate usernames across hundreds of websites. With advanced features like browser impersonation, asynchronous enumeration, and customizable filtering, it can be used both as a command-line tool and as a library in your Python projects.
+
+## Quick Start
+
+```bash
+# Install from PyPI
+pip install naminter
+
+# Basic usage
+naminter --username john_doe
+```
+
+## Documentation
+
+### Getting Started
+- [Installation Guide](getting-started/installation.md) - Installation instructions for all platforms
+- [Usage Examples](getting-started/usage.md) - CLI and Python API usage examples
+
+### API Reference
+- [Naminter Class](api/core/main.md) - Main enumeration class
+- [Models](api/core/models.md) - Data models and types
+- [Network](api/core/network.md) - HTTP client and network utilities
+- [Formatter](api/core/formatter.md) - JSON data formatting utilities
+- [Validator](api/core/validator.md) - JSON schema validation utilities
+- [Constants](api/core/constants.md) - Public constants and URLs
+- [Exceptions](api/core/exceptions.md) - Exception classes
+
+### Additional Resources
+- [Contributing](contributing.md) - Guidelines for contributing to the project
+- [License](license.md) - Project license information
+
+## Links
+
+- [GitHub Repository](https://github.com/3xp0rt/naminter)
+- [PyPI Package](https://pypi.org/project/naminter/)
+- [Issue Tracker](https://github.com/3xp0rt/naminter/issues)
+
diff --git a/docs/license.md b/docs/license.md
new file mode 100644
index 0000000..06f4f1a
--- /dev/null
+++ b/docs/license.md
@@ -0,0 +1,6 @@
+# License
+
+This project is licensed under the MIT License.
+
+See the [LICENSE](https://github.com/3xp0rt/naminter/blob/main/LICENSE) file for details.
+
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 0000000..91ed3d0
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,134 @@
+site_name: Naminter Documentation
+site_description: A Python package and CLI tool for asynchronous OSINT username enumeration
+site_author: 3xp0rt
+site_url: https://naminter.github.io
+copyright: Copyright © 2025 3xp0rt
+
+repo_name: 3xp0rt/naminter
+repo_url: https://github.com/3xp0rt/naminter
+edit_uri: edit/main/docs/
+
+theme:
+ name: material
+ palette:
+ - scheme: default
+ primary: blue
+ accent: blue
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ - scheme: slate
+ primary: blue
+ accent: blue
+ toggle:
+ icon: material/brightness-4
+ name: Switch to light mode
+ features:
+ - navigation.tabs
+ - navigation.tabs.sticky
+ - navigation.sections
+ - navigation.indexes
+ - navigation.expand
+ - navigation.path
+ - navigation.top
+ - toc.follow
+ - content.code.copy
+ - content.code.select
+ - content.tooltips
+ - search.highlight
+ - search.suggest
+ - content.code.annotate
+ icon:
+ repo: fontawesome/brands/github
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/3xp0rt/naminter
+ - icon: fontawesome/brands/python
+ link: https://pypi.org/project/naminter/
+ toc:
+ depth: 4
+
+markdown_extensions:
+ - pymdownx.highlight:
+ anchor_linenums: true
+ line_spans: __span
+ pygments_lang_class: true
+ - pymdownx.inlinehilite
+ - pymdownx.snippets
+ - pymdownx.superfences:
+ custom_fences:
+ - name: mermaid
+ class: mermaid
+ format: !!python/name:pymdownx.superfences.fence_code_format
+ - admonition
+ - pymdownx.details
+ - pymdownx.tabbed:
+ alternate_style: true
+ - pymdownx.tasklist:
+ custom_checkbox: true
+ - pymdownx.emoji:
+ emoji_index: !!python/name:material.extensions.emoji.twemoji
+ emoji_generator: !!python/name:material.extensions.emoji.to_svg
+ - tables
+ - attr_list
+ - md_in_html
+ - pymdownx.arithmatex:
+ generic: true
+ - pymdownx.caret
+ - pymdownx.mark
+ - pymdownx.tilde
+
+nav:
+ - Home: index.md
+ - Getting Started:
+ - Installation: getting-started/installation.md
+ - Usage: getting-started/usage.md
+ - API Reference:
+ - Naminter: api/core/main.md
+ - Models: api/core/models.md
+ - Network: api/core/network.md
+ - Formatter: api/core/formatter.md
+ - Validator: api/core/validator.md
+ - Constants: api/core/constants.md
+ - Exceptions: api/core/exceptions.md
+ - Contributing: contributing.md
+ - License: license.md
+
+plugins:
+ - search
+ - mkdocstrings:
+ handlers:
+ python:
+ paths: [naminter]
+ options:
+ show_root_heading: true
+ show_if_no_docstring: true
+ inherited_members: true
+ members_order: source
+ separate_signature: true
+ filters:
+ - '!^_'
+ - '!^__'
+ merge_init_into_class: true
+ docstring_section_style: spacy
+ signature_crossrefs: true
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_source: false
+ show_bases: true
+ show_root_toc_entry: true
+ heading_level: 2
+ show_submodules: false
+
+# Uncomment to add Google Analytics
+# extra:
+# analytics:
+# provider: google
+# property: G-XXXXXXXXXX
+
+# Uncomment to add custom CSS/JS
+# extra_css:
+# - stylesheets/extra.css
+# extra_javascript:
+# - javascripts/extra.js
+
diff --git a/naminter/__init__.py b/naminter/__init__.py
index 944ca07..cb2e9d3 100644
--- a/naminter/__init__.py
+++ b/naminter/__init__.py
@@ -1,4 +1,21 @@
-from .core.main import Naminter
+from naminter.core.constants import WMN_REMOTE_URL, WMN_SCHEMA_URL
+from naminter.core.exceptions import (
+ HttpError,
+ HttpSessionError,
+ HttpTimeoutError,
+)
+from naminter.core.formatter import WMNFormatter
+from naminter.core.main import Naminter
+from naminter.core.models import (
+ WMNMode,
+ WMNResponse,
+ WMNResult,
+ WMNStatus,
+ WMNSummary,
+ WMNTestResult,
+)
+from naminter.core.network import BaseSession, CurlCFFISession
+from naminter.core.validator import WMNValidator
__version__ = "1.0.7"
__name__ = "naminter"
@@ -11,4 +28,21 @@
__license__ = "MIT"
__email__ = "contact@3xp0rt.com"
__url__ = "https://github.com/3xp0rt/Naminter"
-__all__ = ["Naminter"]
+__all__ = [
+ "WMN_REMOTE_URL",
+ "WMN_SCHEMA_URL",
+ "BaseSession",
+ "CurlCFFISession",
+ "HttpError",
+ "HttpSessionError",
+ "HttpTimeoutError",
+ "Naminter",
+ "WMNFormatter",
+ "WMNMode",
+ "WMNResponse",
+ "WMNResult",
+ "WMNStatus",
+ "WMNSummary",
+ "WMNTestResult",
+ "WMNValidator",
+]
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index f6b379e..af75dce 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -1,24 +1,27 @@
+from functools import cached_property
import json
-from dataclasses import dataclass, field
from pathlib import Path
-from typing import Any
-
-from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
+from typing import TYPE_CHECKING, Any
from naminter.cli.console import display_warning
+from naminter.cli.constants import OPTION_AUTO_VALUE
from naminter.cli.exceptions import ConfigurationError
from naminter.core.constants import (
BROWSER_IMPERSONATE_AGENT,
BROWSER_IMPERSONATE_NONE,
+ HTTP_ALLOW_REDIRECTS,
HTTP_REQUEST_TIMEOUT_SECONDS,
+ HTTP_SSL_VERIFY,
MAX_CONCURRENT_TASKS,
WMN_REMOTE_URL,
WMN_SCHEMA_URL,
)
from naminter.core.models import WMNMode
+if TYPE_CHECKING:
+ from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
+
-@dataclass
class NaminterConfig:
"""Configuration for Naminter CLI tool.
@@ -27,114 +30,268 @@ class NaminterConfig:
parameters.
"""
- # Required parameters
- usernames: list[str]
- sites: list[str] | None = None
- logger: object | None = None
-
- # List and schema sources
- local_list_path: Path | str | None = None
- remote_list_url: str | None = None
- local_schema_path: Path | str | None = None
- remote_schema_url: str | None = WMN_SCHEMA_URL
-
- # Validation and filtering
- skip_validation: bool = False
- include_categories: list[str] = field(default_factory=lambda: []) # noqa: PIE807
- exclude_categories: list[str] = field(default_factory=lambda: []) # noqa: PIE807
- filter_all: bool = False
- filter_found: bool = False
- filter_ambiguous: bool = False
- filter_unknown: bool = False
- filter_not_found: bool = False
- filter_not_valid: bool = False
- filter_errors: bool = False
-
- # Network and concurrency
- max_tasks: int = MAX_CONCURRENT_TASKS
- timeout: int = HTTP_REQUEST_TIMEOUT_SECONDS
- proxy: str | None = None
- allow_redirects: bool = False
- verify_ssl: bool = False
- impersonate: BrowserTypeLiteral | str | None = BROWSER_IMPERSONATE_AGENT
- ja3: str | None = None
- akamai: str | None = None
- extra_fp: ExtraFingerprints | dict[str, Any] | str | None = None
- browse: bool = False
- mode: WMNMode = WMNMode.ALL
- validate_sites: bool = False
- no_progressbar: bool = False
-
- # Logging
- log_level: str | None = None
- log_file: str | None = None
- show_details: bool = False
-
- # Response saving
- save_response: bool = False
- response_path: str | None = None
- open_response: bool = False
-
- # Export options
- csv_export: bool = False
- csv_path: str | None = None
- pdf_export: bool = False
- pdf_path: str | None = None
- html_export: bool = False
- html_path: str | None = None
- json_export: bool = False
- json_path: str | None = None
+ def __init__(self, /, **kwargs: object) -> None:
+ """Initialize config from kwargs (CLI or direct parameters).
+
+ Args:
+ **kwargs: Configuration parameters. Can be either:
+ - Direct field names (usernames, sites, etc.)
+ - CLI-specific names (username, site, csv_opt, etc.)
+
+ Note:
+            At least one keyword argument must be provided. Positional
+            arguments are not possible because the signature accepts only **kwargs.
+ """
+ if not kwargs:
+ msg = "NaminterConfig requires at least one keyword argument"
+ raise ConfigurationError(msg)
+
+ # Parse CLI-specific kwargs if present
+ parsed = self._parse_cli_kwargs(kwargs)
+
+ # Set all fields with defaults
+ self.usernames: list[str] = parsed.get("usernames", [])
+ self.sites: list[str] | None = parsed.get("sites")
+ self.local_list_path: Path | str | None = parsed.get("local_list")
+ self.remote_list_url: str | None = parsed.get("remote_list")
+ self.local_schema_path: Path | str | None = parsed.get("local_schema")
+ self.remote_schema_url: str | None = parsed.get(
+ "remote_schema",
+ WMN_SCHEMA_URL,
+ )
+ self.skip_validation: bool = parsed.get("skip_validation", False)
+ self.include_categories: list[str] = parsed.get("include_categories", [])
+ self.exclude_categories: list[str] = parsed.get("exclude_categories", [])
+ self.filter_all: bool = parsed.get("filter_all", False)
+ self.filter_exists: bool = parsed.get("filter_exists", False)
+ self.filter_partial: bool = parsed.get("filter_partial", False)
+ self.filter_conflicting: bool = parsed.get("filter_conflicting", False)
+ self.filter_unknown: bool = parsed.get("filter_unknown", False)
+ self.filter_missing: bool = parsed.get("filter_missing", False)
+ self.filter_not_valid: bool = parsed.get("filter_not_valid", False)
+ self.filter_errors: bool = parsed.get("filter_errors", False)
+ self.max_tasks: int = parsed.get("max_tasks", MAX_CONCURRENT_TASKS)
+ self.timeout: int = parsed.get("timeout", HTTP_REQUEST_TIMEOUT_SECONDS)
+ self.proxy: str | None = parsed.get("proxy")
+ self.allow_redirects: bool = parsed.get("allow_redirects", HTTP_ALLOW_REDIRECTS)
+ self.verify_ssl: bool = parsed.get("verify_ssl", HTTP_SSL_VERIFY)
+ self.impersonate: BrowserTypeLiteral | str | None = parsed.get(
+ "impersonate",
+ BROWSER_IMPERSONATE_AGENT,
+ )
+ self.ja3: str | None = parsed.get("ja3")
+ self.akamai: str | None = parsed.get("akamai")
+ self.extra_fp: ExtraFingerprints | dict[str, Any] | str | None = parsed.get(
+ "extra_fp",
+ )
+ self.browse: bool = parsed.get("browse", False)
+ self.mode: WMNMode = parsed.get("mode", WMNMode.ALL)
+ self.test: bool = parsed.get("test", False)
+ self.no_progressbar: bool = parsed.get("no_progressbar", False)
+ self.log_level: str | None = parsed.get("log_level")
+ self.log_file: str | None = parsed.get("log_file")
+ self.show_details: bool = parsed.get("show_details", False)
+ self.save_response: bool = parsed.get("save_response", False)
+ self.response_path: str | None = parsed.get("response_path")
+ self.open_response: bool = parsed.get("open_response", False)
+ self.csv_export: bool = parsed.get("csv_export", False)
+ self.csv_path: str | None = parsed.get("csv_path")
+ self.pdf_export: bool = parsed.get("pdf_export", False)
+ self.pdf_path: str | None = parsed.get("pdf_path")
+ self.html_export: bool = parsed.get("html_export", False)
+ self.html_path: str | None = parsed.get("html_path")
+ self.json_export: bool = parsed.get("json_export", False)
+ self.json_path: str | None = parsed.get("json_path")
+
+ self.__post_init__()
+
+ @staticmethod
+ def _parse_option_path(option_value: str | None) -> str | None:
+ """Parse export/response option value, returning None for auto or unset.
+
+ Args:
+ option_value: The option value to parse. Can be None, OPTION_AUTO_VALUE,
+ or a path string.
+
+ Returns:
+ None if the option is unset or set to auto mode, otherwise the path string.
+ """
+ if option_value in {None, OPTION_AUTO_VALUE}:
+ return None
+ return option_value
+
+ @staticmethod
+ def _parse_cli_kwargs(kwargs: dict[str, Any]) -> dict[str, Any]:
+ """Parse CLI-specific kwargs into config field names.
+
+ Args:
+ kwargs: Raw kwargs from CLI or direct parameters.
+
+ Returns:
+ Dictionary with parsed configuration values.
+ """
+ parsed = kwargs.copy()
+
+ # Handle CLI-specific username/site parameters (tuples from click)
+ if "username" in parsed:
+ parsed["usernames"] = list(parsed.pop("username") or [])
+ if "site" in parsed:
+ sites = list(parsed.pop("site") or [])
+ parsed["sites"] = sites if sites else None
+
+ # Handle include/exclude categories
+ if "include_categories" in parsed and isinstance(
+ parsed["include_categories"],
+ tuple,
+ ):
+ parsed["include_categories"] = list(parsed["include_categories"])
+ if "exclude_categories" in parsed and isinstance(
+ parsed["exclude_categories"],
+ tuple,
+ ):
+ parsed["exclude_categories"] = list(parsed["exclude_categories"])
+
+ # Parse export format options (csv_opt -> csv_export + csv_path)
+ for fmt in ["csv", "pdf", "html", "json"]:
+ opt_key = f"{fmt}_opt"
+ if opt_key in parsed:
+ opt_value = parsed.pop(opt_key)
+ parsed[f"{fmt}_export"] = opt_value is not None
+ parsed[f"{fmt}_path"] = NaminterConfig._parse_option_path(opt_value)
+
+ # Parse response saving option
+ if "save_response_opt" in parsed:
+ opt_value = parsed.pop("save_response_opt")
+ parsed["save_response"] = opt_value is not None
+ parsed["response_path"] = NaminterConfig._parse_option_path(opt_value)
+
+ # Convert mode string to WMNMode enum if needed
+ if "mode" in parsed and isinstance(parsed["mode"], str):
+ parsed["mode"] = WMNMode(parsed["mode"])
+
+ # Convert boolean strings to actual booleans
+ bool_fields = [
+ "skip_validation",
+ "allow_redirects",
+ "verify_ssl",
+ "browse",
+ "test",
+ "show_details",
+ "open_response",
+ "no_progressbar",
+ "filter_all",
+ "filter_exists",
+ "filter_partial",
+ "filter_conflicting",
+ "filter_unknown",
+ "filter_missing",
+ "filter_not_valid",
+ "filter_errors",
+ ]
+ for field_name in bool_fields:
+ if field_name in parsed and not isinstance(parsed[field_name], bool):
+ parsed[field_name] = bool(parsed[field_name])
+
+ return parsed
def __post_init__(self) -> None:
"""Validate and normalize configuration after initialization."""
- if self.validate_sites and self.usernames:
+ self._validate_usernames()
+ self._validate_mode()
+ self._validate_sources()
+ self._normalize_filters()
+ self._normalize_impersonate()
+ self._normalize_fingerprint()
+
+ def _validate_usernames(self) -> None:
+ """Ensure usernames are provided when not running in test mode."""
+ if not self.usernames and not self.test:
+ msg = (
+ "At least one --username/-u is required unless --test is used. "
+ "Provide a username or run in validation mode with --test."
+ )
+ raise ConfigurationError(msg)
+
+ def _validate_mode(self) -> None:
+ """Validate and warn about site validation mode configuration."""
+ if self.test and self.usernames:
display_warning(
"Site validation mode enabled: provided usernames will be ignored, "
- "using known usernames from site configurations instead."
+ "using known usernames from site configurations instead.",
)
+ def _validate_sources(self) -> None:
+ """Validate data source configuration (list and schema sources)."""
+ # Validate list sources
if self.local_list_path and self.remote_list_url:
- msg = "Both local and remote list sources provided; only one is allowed"
+ msg = (
+ "Conflicting list sources: both local_list_path and remote_list_url "
+ "are provided. Please specify only one."
+ )
raise ConfigurationError(msg)
if not self.local_list_path and not self.remote_list_url:
self.remote_list_url = WMN_REMOTE_URL
- if self.local_schema_path and self.remote_schema_url:
- msg = "Both local and remote schema sources provided; only one is allowed"
+ # Validate schema sources
+ if self.local_schema_path and self.remote_schema_url != WMN_SCHEMA_URL:
+ msg = (
+ "Conflicting schema sources: both local_schema_path and "
+ "remote_schema_url are provided. Please specify only one."
+ )
raise ConfigurationError(msg)
if not self.local_schema_path and not self.remote_schema_url:
self.remote_schema_url = WMN_SCHEMA_URL
- filter_fields = [
+ def _normalize_filters(self) -> None:
+ """Normalize filter settings to ensure at least one filter is active."""
+ has_any_filter = any([
self.filter_all,
- self.filter_ambiguous,
+ self.filter_exists,
+ self.filter_partial,
+ self.filter_conflicting,
self.filter_unknown,
- self.filter_not_found,
+ self.filter_missing,
self.filter_not_valid,
self.filter_errors,
- ]
- if not any(filter_fields):
- self.filter_found = True
+ ])
+ if not has_any_filter:
+ self.filter_exists = True
+
+ def _normalize_impersonate(self) -> None:
+ """Normalize impersonate setting to handle 'none' string value."""
if (
isinstance(self.impersonate, str)
and self.impersonate.lower() == BROWSER_IMPERSONATE_NONE
):
self.impersonate = None
- if isinstance(self.extra_fp, str):
- try:
- self.extra_fp = json.loads(self.extra_fp)
- except json.JSONDecodeError as e:
- msg = f"Invalid JSON in extra_fp: {e}"
- raise ConfigurationError(msg) from e
- except TypeError as e:
- msg = f"Invalid data type in extra_fp: {e}"
- raise ConfigurationError(msg) from e
-
- @property
+ def _normalize_fingerprint(self) -> None:
+ """Parse and normalize extra_fp from JSON string to dict if needed."""
+ if not isinstance(self.extra_fp, str):
+ return
+
+ extra_fp_str = self.extra_fp.strip()
+ if not extra_fp_str:
+ self.extra_fp = None
+ return
+
+ try:
+ parsed = json.loads(extra_fp_str)
+ if not isinstance(parsed, dict):
+ msg = (
+ f"Invalid extra_fp format: expected JSON object, "
+ f"got {type(parsed).__name__}"
+ )
+ raise ConfigurationError(msg)
+ self.extra_fp = parsed
+ except json.JSONDecodeError as e:
+ msg = f"Invalid JSON in extra_fp parameter: {e}"
+ raise ConfigurationError(msg) from e
+
+ @cached_property
def response_dir(self) -> Path | None:
"""Return response directory Path if save_response is enabled."""
if not self.save_response:
@@ -145,7 +302,7 @@ def response_dir(self) -> Path | None:
return Path.cwd() / "responses"
- @property
+ @cached_property
def export_formats(self) -> dict[str, str | None]:
"""Return enabled export formats with their custom paths."""
export_configs = [
diff --git a/naminter/cli/console.py b/naminter/cli/console.py
index 545ae45..c0c6ef3 100644
--- a/naminter/cli/console.py
+++ b/naminter/cli/console.py
@@ -1,3 +1,6 @@
+from dataclasses import dataclass
+from datetime import timedelta
+import difflib
from pathlib import Path
from typing import Any
@@ -17,66 +20,96 @@
__url__,
__version__,
)
-from naminter.core.models import WMNResult, WMNStatus, WMNValidationResult
+from naminter.cli.constants import (
+ STATUS_STYLES,
+ STATUS_SYMBOLS,
+)
+from naminter.core.models import WMNResult, WMNStatus, WMNTestResult
console: Console = Console()
-THEME: dict[str, str] = {
- "primary": "bright_blue",
- "success": "bright_green",
- "error": "bright_red",
- "warning": "bright_yellow",
- "info": "bright_cyan",
- "muted": "bright_black",
-}
-
-_STATUS_SYMBOLS: dict[WMNStatus, str] = {
- WMNStatus.FOUND: "+",
- WMNStatus.AMBIGUOUS: "*",
- WMNStatus.UNKNOWN: "?",
- WMNStatus.NOT_FOUND: "-",
- WMNStatus.NOT_VALID: "X",
- WMNStatus.ERROR: "!",
-}
-
-_STATUS_STYLES: dict[WMNStatus, Style] = {
- WMNStatus.FOUND: Style(color=THEME["success"], bold=True),
- WMNStatus.AMBIGUOUS: Style(color=THEME["warning"], bold=True),
- WMNStatus.UNKNOWN: Style(color=THEME["warning"]),
- WMNStatus.NOT_FOUND: Style(color=THEME["error"]),
- WMNStatus.NOT_VALID: Style(color=THEME["error"]),
- WMNStatus.ERROR: Style(color=THEME["error"], bold=True),
-}
+
+@dataclass(frozen=True)
+class Theme:
+ """Application color theme configuration."""
+
+ primary: str = "bright_blue"
+ success: str = "bright_green"
+ error: str = "bright_red"
+ warning: str = "bright_yellow"
+ info: str = "bright_cyan"
+ muted: str = "bright_black"
+
+
+THEME = Theme()
+
+
+def _get_status_symbol(status: WMNStatus) -> str:
+ """Get display symbol for a status using constants.
+
+ Args:
+ status: The WMNStatus to get symbol for.
+
+ Returns:
+ Symbol character for the status.
+ """
+ return STATUS_SYMBOLS.get(status.value, "?")
+
+
+def _get_status_style(status: WMNStatus) -> Style:
+ """Get Rich Style for a status using constants.
+
+ Args:
+ status: The WMNStatus to get styling for.
+
+ Returns:
+ Rich Style object with appropriate color and formatting.
+ """
+ style_str = STATUS_STYLES.get(status.value, "white")
+ return Style.parse(style_str)
class ResultFormatter:
"""Formats test results for console output."""
- def __init__(self, show_details: bool = False) -> None:
- """Initialize the result formatter."""
+ def __init__(self, *, show_details: bool = False) -> None:
+ """Initialize the result formatter.
+
+ Args:
+ show_details: Whether to include detailed debug information in output.
+ """
self.show_details = show_details
def format_result(
- self, site_result: WMNResult, response_file_path: Path | None = None
+ self,
+ site_result: WMNResult,
+ response_file_path: Path | None = None,
) -> Tree:
- """Format a single result as a tree-style output."""
+ """Format a single result as a tree-style output.
+ Args:
+ site_result: The result to format.
+ response_file_path: Optional path to the response file for debugging.
+
+ Returns:
+ A Rich Tree object containing the formatted result.
+ """
root_label = Text()
- status_symbol = _STATUS_SYMBOLS.get(site_result.status, "?")
- status_style = _STATUS_STYLES.get(site_result.status, Style())
+ status_symbol = _get_status_symbol(site_result.status)
+ status_style = _get_status_style(site_result.status)
root_label.append(status_symbol, style=status_style)
- root_label.append(" [", style=THEME["muted"])
- root_label.append(site_result.name or "Unknown", style=THEME["info"])
- root_label.append("] ", style=THEME["muted"])
- root_label.append(site_result.url or "No URL", style=THEME["primary"])
+ root_label.append(" [", style=THEME.muted)
+ root_label.append(site_result.name or "Unknown", style=THEME.info)
+ root_label.append("] ", style=THEME.muted)
+ root_label.append(site_result.url or "No URL", style=THEME.primary)
- tree = Tree(root_label, guide_style=THEME["muted"])
+ tree = Tree(root_label, guide_style=THEME.muted)
if self.show_details:
self._add_debug_info(
tree,
- site_result.response_code,
+ site_result.status_code,
site_result.elapsed,
site_result.error,
response_file_path,
@@ -86,32 +119,39 @@ def format_result(
def format_validation(
self,
- validation_result: WMNValidationResult,
+ validation_result: WMNTestResult,
response_files: list[Path | None] | None = None,
) -> Tree:
- """Format validation results into a tree structure."""
+ """Format validation results into a tree structure.
+
+ Args:
+ validation_result: The validation result to format.
+ response_files: Optional list of response file paths for debugging.
+ Returns:
+ A Rich Tree object containing the formatted validation results.
+ """
root_label = Text()
root_label.append(
- _STATUS_SYMBOLS.get(validation_result.status, "?"),
- style=_STATUS_STYLES.get(validation_result.status, Style()),
+ _get_status_symbol(validation_result.status),
+ style=_get_status_style(validation_result.status),
)
- root_label.append(" [", style=THEME["muted"])
- root_label.append(validation_result.name, style=THEME["info"])
- root_label.append("]", style=THEME["muted"])
+ root_label.append(" [", style=THEME.muted)
+ root_label.append(validation_result.name, style=THEME.info)
+ root_label.append("]", style=THEME.muted)
- tree = Tree(root_label, guide_style=THEME["muted"], expanded=True)
+ tree = Tree(root_label, guide_style=THEME.muted, expanded=True)
if validation_result.results:
for i, result in enumerate(validation_result.results):
url_text = Text()
url_text.append(
- _STATUS_SYMBOLS.get(result.status, "?"),
- style=_STATUS_STYLES.get(result.status, Style()),
+ _get_status_symbol(result.status),
+ style=_get_status_style(result.status),
)
- url_text.append(" ", style=THEME["muted"])
- url_text.append(f"{result.username}: ", style=THEME["info"])
- url_text.append(result.url or "No URL", style=THEME["primary"])
+ url_text.append(" ", style=THEME.muted)
+ url_text.append(f"{result.username}: ", style=THEME.info)
+ url_text.append(result.url or "No URL", style=THEME.primary)
result_node = tree.add(url_text)
@@ -123,7 +163,7 @@ def format_validation(
)
self._add_debug_info(
result_node,
- result.response_code,
+ result.status_code,
result.elapsed,
result.error,
response_file,
@@ -134,28 +174,36 @@ def format_validation(
@staticmethod
def _add_debug_info(
node: Tree,
- response_code: int | None = None,
- elapsed: float | None = None,
+ status_code: int | None = None,
+ elapsed: timedelta | None = None,
error: str | None = None,
response_file: Path | None = None,
) -> None:
- """Add debug information to a tree node."""
-
- if response_code is not None:
- node.add(Text(f"Response Code: {response_code}", style=THEME["info"]))
+ """Add debug information to a tree node.
+
+ Args:
+ node: The tree node to add information to.
+ status_code: Optional HTTP status code.
+            elapsed: Optional elapsed duration as a timedelta.
+ error: Optional error message.
+ response_file: Optional path to response file.
+ """
+ if status_code is not None:
+ node.add(Text(f"Status Code: {status_code}", style=THEME.info))
if response_file is not None:
- node.add(Text(f"Response File: {response_file}", style=THEME["info"]))
+ node.add(Text(f"Response File: {response_file}", style=THEME.info))
if elapsed is not None:
- node.add(Text(f"Elapsed: {elapsed:.2f}s", style=THEME["info"]))
+ elapsed_seconds = elapsed.total_seconds()
+ node.add(Text(f"Elapsed: {elapsed_seconds:.2f}s", style=THEME.info))
if error is not None:
- node.add(Text(f"Error: {error}", style=THEME["error"]))
+ node.add(Text(f"Error: {error}", style=THEME.error))
def display_version() -> None:
"""Display version and metadata of the application."""
version_table = Table.grid(padding=(0, 2))
- version_table.add_column(style=THEME["info"])
+ version_table.add_column(style=THEME.info)
version_table.add_column(style="bold")
version_table.add_row("Version:", __version__)
@@ -168,14 +216,20 @@ def display_version() -> None:
panel = Panel(
version_table,
title="[bold]:mag: Naminter[/]",
- border_style=THEME["muted"],
+ border_style=THEME.muted,
box=box.ROUNDED,
)
console.print(panel)
-def _display_message(message: str, style: str, symbol: str, label: str) -> None:
+def _display_message(
+ message: str,
+ style: str,
+ symbol: str,
+ label: str,
+ end: str = "\n",
+) -> None:
"""Display a styled message with symbol and label."""
formatted_message = Text()
@@ -183,14 +237,24 @@ def _display_message(message: str, style: str, symbol: str, label: str) -> None:
formatted_message.append(f" [{label}] ", style=style)
formatted_message.append(message)
- console.print(formatted_message)
+ console.print(formatted_message, end=end)
console.file.flush()
-def display_error(message: str, show_traceback: bool = False) -> None:
- """Display an error message."""
-
- _display_message(message, THEME["error"], "!", "ERROR")
+def display_error(
+ message: str,
+ *,
+ show_traceback: bool = False,
+ end: str = "\n",
+) -> None:
+ """Display an error message.
+
+ Args:
+ message: The error message to display.
+ show_traceback: Whether to print the full traceback.
+ end: String to append after the message (default: newline).
+ """
+ _display_message(message, THEME.error, "!", "ERROR", end=end)
if show_traceback:
console.print_exception()
@@ -198,47 +262,89 @@ def display_error(message: str, show_traceback: bool = False) -> None:
def display_warning(message: str) -> None:
"""Display a warning message."""
- _display_message(message, THEME["warning"], "?", "WARNING")
+ _display_message(message, THEME.warning, "?", "WARNING")
def display_info(message: str) -> None:
"""Display an info message."""
- _display_message(message, THEME["info"], "*", "INFO")
+ _display_message(message, THEME.info, "*", "INFO")
def display_success(message: str) -> None:
"""Display a success message."""
- _display_message(message, THEME["success"], "+", "SUCCESS")
+ _display_message(message, THEME.success, "+", "SUCCESS")
def display_validation_errors(errors: list[Any]) -> None:
- """Display validation errors in a formatted table."""
+ """Display validation errors in a formatted tree structure.
+
+ Args:
+ errors: List of validation errors to display.
+ """
if not errors:
return
- table = Table(
- title="[bold bright_red]Validation Errors[/bold bright_red]",
- border_style=THEME["error"],
- box=box.ROUNDED,
- show_lines=True,
- )
-
- table.add_column("Path", style=THEME["info"], no_wrap=False)
- table.add_column("Message", style=THEME["warning"])
- table.add_column("Data Preview", style=THEME["muted"], overflow="fold")
+ root_label = Text()
+ tree = Tree(root_label, guide_style=THEME.muted, expanded=True)
for error in errors:
- path = getattr(error, "path", "N/A") or "N/A"
- message = getattr(error, "message", "Unknown error")
+ path = str(getattr(error, "path", "N/A") or "N/A")
+ message = str(getattr(error, "message", "Unknown error"))
data = getattr(error, "data", None)
- data_preview = (
- data[:200] + "..." if data and len(data) > 200 else (data or "N/A")
- )
+ error_text = Text()
+ error_text.append("• ", style=THEME.error)
+ error_text.append(f"{path}: ", style=THEME.info)
+ error_text.append(message, style=THEME.warning)
+
+ error_node = tree.add(error_text)
+
+ if data is not None:
+ error_node.add(Text(f"Data: {data}", style=THEME.muted))
- table.add_row(path, message, data_preview)
+ console.print(tree)
+ console.file.flush()
+
+
+def display_diff(original: str, formatted: str, file_path: Path) -> None:
+ """Display a git-style diff showing changes between original and formatted content.
+
+ Args:
+ original: The original file content.
+ formatted: The formatted file content.
+ file_path: Path to the file being formatted.
+ """
+ original_lines = original.splitlines(keepends=False)
+ formatted_lines = formatted.splitlines(keepends=False)
+
+ diff = difflib.unified_diff(
+ original_lines,
+ formatted_lines,
+ fromfile=str(file_path),
+ tofile=str(file_path),
+ lineterm="",
+ )
+
+ diff_lines = list(diff)
+ if not diff_lines:
+ return
- console.print(table)
+ diff_text = Text()
+ for line in diff_lines:
+ if line.startswith(("---", "+++")):
+ diff_text.append(line, style=THEME.muted)
+ elif line.startswith("@@"):
+ diff_text.append(line, style=THEME.info)
+ elif line.startswith("-"):
+ diff_text.append(line, style=THEME.error)
+ elif line.startswith("+"):
+ diff_text.append(line, style=THEME.success)
+ else:
+ diff_text.append(line)
+
+ diff_text.append("\n")
+
+ console.print(diff_text)
console.file.flush()
diff --git a/naminter/cli/constants.py b/naminter/cli/constants.py
index a05a86c..d3dc175 100644
--- a/naminter/cli/constants.py
+++ b/naminter/cli/constants.py
@@ -1,19 +1,48 @@
+from typing import Final
+
# Constants for file operations
-RESPONSE_FILE_DATE_FORMAT = "%Y%m%d_%H%M%S"
-RESPONSE_FILE_EXTENSION = ".html"
+RESPONSE_FILE_DATE_FORMAT: Final[str] = "%Y%m%d_%H%M%S"
+RESPONSE_FILE_EXTENSION: Final[str] = ".html"
-# Default network timeout (overrides core default for CLI)
-DEFAULT_NETWORK_TIMEOUT: int = 30
+# Default fallback values
+DEFAULT_UNNAMED_VALUE: Final[str] = "unnamed"
# Progress tracking
-PROGRESS_ADVANCE_INCREMENT: int = 1
+PROGRESS_ADVANCE_INCREMENT: Final[int] = 1
# Exit codes
-EXIT_CODE_ERROR: int = 1
-EXIT_CODE_SUCCESS: int = 0
+EXIT_CODE_ERROR: Final[int] = 1
+EXIT_CODE_SUCCESS: Final[int] = 0
+EXIT_CODE_INTERRUPTED: Final[int] = 130
# Filename constraints
-MAX_FILENAME_LENGTH: int = 200
+MAX_FILENAME_LENGTH: Final[int] = 200
+
+# Status Display Configuration (for CLI/UI)
+# Symbol keys match WMNStatus enum values
+STATUS_SYMBOLS: Final[dict[str, str]] = {
+ "exists": "+",
+ "partial": "~",
+ "conflicting": "*",
+ "unknown": "?",
+ "missing": "-",
+ "not_valid": "X",
+ "error": "!",
+}
+
+# Style keys match WMNStatus enum values
+STATUS_STYLES: Final[dict[str, str]] = {
+ "exists": "bright_green bold",
+ "partial": "bright_yellow",
+ "conflicting": "bright_yellow bold",
+ "unknown": "bright_yellow",
+ "missing": "bright_red",
+ "not_valid": "bright_red",
+ "error": "bright_red bold",
+}
+
+# Export field ordering
+HTML_FIELDS_ORDER: Final[list[str]] = ["name", "url", "elapsed"]
-# Supported export formats
-SUPPORTED_FORMATS: list[str] = ["csv", "json", "html", "pdf"]
+# Option parsing
+OPTION_AUTO_VALUE: Final[str] = "__AUTO__"
diff --git a/naminter/cli/exceptions.py b/naminter/cli/exceptions.py
index e0dcd44..47b868a 100644
--- a/naminter/cli/exceptions.py
+++ b/naminter/cli/exceptions.py
@@ -1,38 +1,43 @@
-from naminter.core.exceptions import NaminterError
+class CLIError(Exception):
+ """Base class for all CLI-layer errors."""
-# Configuration errors
-class ConfigurationError(NaminterError):
- """Raised when there's an error in the configuration parameters.
+class FileError(CLIError):
+ """File-related errors (paths, permissions, encoding, JSON content, etc.)."""
- This includes invalid configuration values, missing required settings,
- configuration file parsing errors, or invalid URLs.
- """
+class NetworkError(CLIError):
+ """Network-related errors (URLs, HTTP failures, invalid remote JSON, etc.)."""
-# File/IO errors
-class FileIOError(NaminterError):
- """Raised when file operations fail.
- This includes reading/writing local lists, responses, exports,
- and other file system operations.
- """
+class BrowserError(CLIError):
+ """Browser-related errors (invalid URL, browser launch problems, etc.)."""
+
+
+class ExportError(CLIError):
+ """Errors raised during export operations in the CLI layer."""
-# Browser errors
-class BrowserError(NaminterError):
- """Raised when browser operations fail in the CLI layer."""
+class ConfigurationError(CLIError):
+ """Configuration validation errors.
+
+ Invalid CLI arguments, conflicting options, etc.
+ """
-# Export errors
-class ExportError(NaminterError):
- """Raised when export operations fail in the CLI layer."""
+class ValidationError(CLIError):
+ """Input format validation errors.
+
+ Invalid username format, site name format, etc.
+ """
__all__ = [
"BrowserError",
+ "CLIError",
"ConfigurationError",
"ExportError",
- "FileIOError",
- "NaminterError",
+ "FileError",
+ "NetworkError",
+ "ValidationError",
]
diff --git a/naminter/cli/exporters.py b/naminter/cli/exporters.py
index a22fc96..872707e 100644
--- a/naminter/cli/exporters.py
+++ b/naminter/cli/exporters.py
@@ -1,39 +1,37 @@
import csv
+from datetime import UTC, datetime
import importlib.resources
+from io import StringIO
import json
-from datetime import UTC, datetime
from pathlib import Path
-from typing import Any, Literal, Protocol
+from typing import Any, Literal, Protocol, get_args
import jinja2
from weasyprint import HTML
from naminter import __version__
-from naminter.cli.constants import SUPPORTED_FORMATS
+from naminter.cli.constants import HTML_FIELDS_ORDER
+from naminter.cli.exceptions import ExportError, FileError
+from naminter.cli.utils import read_file, write_file
from naminter.core.constants import (
- DEFAULT_JSON_ENCODING,
DEFAULT_JSON_ENSURE_ASCII,
DEFAULT_JSON_INDENT,
EMPTY_STRING,
)
-from naminter.core.models import WMNResult, WMNValidationResult
-
-from .exceptions import ConfigurationError, ExportError, FileIOError
+from naminter.core.models import WMNResult, WMNTestResult
-FormatName = Literal["csv", "json", "html", "pdf"]
+FormatName = Literal["json", "csv", "html", "pdf"]
ResultDict = dict[str, Any]
class ExportMethod(Protocol):
"""Protocol for export method callables."""
- def __call__(self, results: list[ResultDict], output_path: Path) -> None: ...
+ async def __call__(self, results: list[ResultDict], output_path: Path) -> None: ...
class Exporter:
- """
- Unified exporter for CSV, JSON, HTML, and PDF formats.
- """
+ """Unified exporter for CSV, JSON, HTML, and PDF formats."""
def __init__(self, usernames: list[str] | None = None) -> None:
self.usernames = usernames or []
@@ -44,120 +42,150 @@ def __init__(self, usernames: list[str] | None = None) -> None:
"pdf": self._export_pdf,
}
- def export(
+ async def export(
self,
- results: list[WMNResult | WMNValidationResult],
+ results: list[WMNResult | WMNTestResult],
formats: dict[FormatName, str | Path | None],
) -> None:
- """
- Export results in the given formats.
+ """Export results in the given formats.
+
+ Args:
+ results: List of results to export.
+ formats: Dictionary mapping format names to output paths (None for auto).
+
+ Raises:
+ ExportError: If export operation fails.
"""
if not results:
msg = "No results to export"
raise ExportError(msg)
- dict_results = [
- result.to_dict(exclude_response_text=True) for result in results
- ]
-
- for format_name, path in formats.items():
- if format_name not in SUPPORTED_FORMATS:
- msg = f"Unsupported export format: {format_name}"
- raise ExportError(msg)
-
- try:
- out_path = self._resolve_path(format_name, path)
- out_path.parent.mkdir(parents=True, exist_ok=True)
- self.export_methods[format_name](dict_results, out_path)
- except FileIOError as e:
- msg = f"File access error during {format_name} export: {e}"
- raise ExportError(msg) from e
- except Exception as e:
- msg = f"Unexpected error exporting {format_name}: {e}"
- raise ExportError(msg) from e
+ try:
+ dict_results = [result.to_dict(exclude_text=True) for result in results]
+ except (AttributeError, TypeError, ValueError) as e:
+ msg = f"Failed to convert results to dictionary format: {e}"
+ raise ExportError(msg) from e
+
+ for format_name in get_args(FormatName):
+ if format_name not in formats:
+ continue
+
+ path = formats[format_name]
+ out_path = self._resolve_path(format_name, path)
+
+ await self.export_methods[format_name](dict_results, out_path)
@staticmethod
- def _export_csv(results: list[ResultDict], output_path: Path) -> None:
- """Export results to CSV format."""
- fieldnames: list[str] = []
- seen: set[str] = set()
- for result in results:
- for key in result:
- if key not in seen:
- fieldnames.append(key)
- seen.add(key)
+ async def _export_csv(results: list[ResultDict], output_path: Path) -> None:
+ """Export results to CSV format.
+
+ Args:
+ results: List of result dictionaries to export.
+ output_path: Path where CSV file will be written.
+
+ Raises:
+ ExportError: If CSV serialization fails or unexpected error occurs.
+ """
+ fieldnames = list(dict.fromkeys(key for result in results for key in result))
+
+ if not fieldnames:
+ msg = "CSV data error: no fields found in results"
+ raise ExportError(msg)
try:
- with output_path.open("w", newline=EMPTY_STRING, encoding="utf-8") as f:
- writer = csv.DictWriter(f, fieldnames=fieldnames)
+ with StringIO(newline=EMPTY_STRING) as csv_buffer:
+ writer = csv.DictWriter(
+ csv_buffer,
+ fieldnames=fieldnames,
+ lineterminator="\n",
+ extrasaction="raise",
+ )
writer.writeheader()
writer.writerows(results)
- except PermissionError as e:
- msg = f"Permission denied writing CSV file: {e}"
- raise FileIOError(msg) from e
- except OSError as e:
- msg = f"OS error writing CSV file: {e}"
- raise FileIOError(msg) from e
+ csv_content = csv_buffer.getvalue()
+
+ await write_file(output_path, csv_content)
+ except FileError as e:
+ msg = f"File access error during CSV export: {e}"
+ raise ExportError(msg) from e
+ except csv.Error as e:
+ msg = f"CSV serialization error: {e}"
+ raise ExportError(msg) from e
+ except (TypeError, ValueError, AttributeError, KeyError) as e:
+ msg = f"CSV data error: {e}"
+ raise ExportError(msg) from e
except Exception as e:
msg = f"Unexpected error during CSV export: {e}"
raise ExportError(msg) from e
@staticmethod
- def _export_json(results: list[ResultDict], output_path: Path) -> None:
- """Export results to JSON format."""
+ async def _export_json(results: list[ResultDict], output_path: Path) -> None:
+ """Export results to JSON format.
+
+ Args:
+ results: List of result dictionaries to export.
+ output_path: Path where JSON file will be written.
+
+ Raises:
+ ExportError: If JSON serialization fails or unexpected error occurs.
+ """
try:
- output_path.write_text(
- json.dumps(
- results,
- ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
- indent=DEFAULT_JSON_INDENT,
- ),
- encoding=DEFAULT_JSON_ENCODING,
+ json_content = json.dumps(
+ results,
+ ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
+ indent=DEFAULT_JSON_INDENT,
)
- except PermissionError as e:
- msg = f"Permission denied writing JSON file: {e}"
- raise FileIOError(msg) from e
- except OSError as e:
- msg = f"OS error writing JSON file: {e}"
- raise FileIOError(msg) from e
- except (TypeError, ValueError) as e:
+ await write_file(output_path, json_content)
+ except FileError as e:
+ msg = f"File access error during JSON export: {e}"
+ raise ExportError(msg) from e
+ except (TypeError, ValueError, RecursionError) as e:
msg = f"JSON serialization error: {e}"
raise ExportError(msg) from e
except Exception as e:
msg = f"Unexpected error during JSON export: {e}"
raise ExportError(msg) from e
- def _generate_html(self, results: list[ResultDict]) -> str:
- """Generate HTML report from results."""
+ async def _generate_html(self, results: list[ResultDict]) -> str:
+ """Generate HTML report from results.
+
+ Args:
+ results: List of result dictionaries to format as HTML.
+
+ Returns:
+ Generated HTML string.
+
+ Raises:
+ ExportError: If template loading or rendering fails.
+ """
grouped: dict[str, list[ResultDict]] = {}
for item in results:
- cat = item.get("category", "uncategorized")
+ cat = item.get("category") or "uncategorized"
grouped.setdefault(cat, []).append(item)
- display_fields = ["name", "url", "elapsed"]
+ available_fields = {key for item in results for key in item}
+ display_fields = [
+ field for field in HTML_FIELDS_ORDER if field in available_fields
+ ] + sorted(available_fields - set(HTML_FIELDS_ORDER))
try:
- with (
- importlib.resources.files("naminter.cli.templates")
- .joinpath("report.html")
- .open("r", encoding="utf-8") as f
- ):
- template_source = f.read()
- except FileNotFoundError as e:
- msg = f"HTML template not found: {e}"
- raise ConfigurationError(msg) from e
- except PermissionError as e:
- msg = f"Permission denied reading HTML template: {e}"
- raise FileIOError(msg) from e
- except OSError as e:
- msg = f"OS error reading HTML template: {e}"
- raise FileIOError(msg) from e
+ template_resource = importlib.resources.files(
+ "naminter.cli.templates",
+ ).joinpath("report.html")
+ with importlib.resources.as_file(template_resource) as template_path:
+ template_source = await read_file(template_path)
+ except FileError as e:
+ msg = f"File access error loading HTML template: {e}"
+ raise ExportError(msg) from e
except Exception as e:
msg = f"Unexpected error loading HTML template: {e}"
- raise ConfigurationError(msg) from e
+ raise ExportError(msg) from e
try:
- template = jinja2.Template(template_source, autoescape=True)
+ env = jinja2.Environment(
+ autoescape=jinja2.select_autoescape(["html", "xml"]),
+ )
+ template = env.from_string(template_source)
return template.render(
grouped_results=grouped,
display_fields=display_fields,
@@ -171,45 +199,59 @@ def _generate_html(self, results: list[ResultDict]) -> str:
msg = f"Template rendering error: {e}"
raise ExportError(msg) from e
- def _export_html(self, results: list[ResultDict], output_path: Path) -> None:
- """Export results to HTML format."""
+ async def _export_html(self, results: list[ResultDict], output_path: Path) -> None:
+ """Export results to HTML format.
+
+ Args:
+ results: List of result dictionaries to export.
+ output_path: Path where HTML file will be written.
+
+ Raises:
+ ExportError: If template rendering or file writing fails.
+ """
try:
- html = self._generate_html(results)
- output_path.write_text(html, encoding="utf-8")
- except PermissionError as e:
- msg = f"Permission denied writing HTML file: {e}"
- raise FileIOError(msg) from e
- except OSError as e:
- msg = f"OS error writing HTML file: {e}"
- raise FileIOError(msg) from e
+ html = await self._generate_html(results)
+ await write_file(output_path, html)
+ except FileError as e:
+ msg = f"File access error during HTML export: {e}"
+ raise ExportError(msg) from e
except Exception as e:
msg = f"Unexpected error during HTML export: {e}"
raise ExportError(msg) from e
- def _export_pdf(self, results: list[ResultDict], output_path: Path) -> None:
- """Export results to PDF format."""
- if not results:
- msg = "No results to export to PDF"
- raise ExportError(msg)
+ async def _export_pdf(self, results: list[ResultDict], output_path: Path) -> None:
+ """Export results to PDF format.
+
+ Args:
+ results: List of result dictionaries to export.
+ output_path: Path where PDF file will be written.
+ Raises:
+ ExportError: If PDF generation fails or unexpected error occurs.
+ """
try:
- html = self._generate_html(results)
- HTML(string=html).write_pdf(str(output_path))
- except PermissionError as e:
- msg = f"Permission denied writing PDF file: {e}"
- raise FileIOError(msg) from e
- except OSError as e:
- msg = f"OS error writing PDF file: {e}"
- raise FileIOError(msg) from e
- except (ValueError, TypeError) as e:
- msg = f"PDF generation error: {e}"
+ html = await self._generate_html(results)
+ weasyprint_html = HTML(string=html)
+ pdf_bytes = weasyprint_html.write_pdf()
+ await write_file(output_path, pdf_bytes)
+ except FileError as e:
+ msg = f"File access error during PDF export: {e}"
raise ExportError(msg) from e
except Exception as e:
- msg = f"Unexpected error during PDF export: {e}"
+ msg = f"PDF generation error: {e}"
raise ExportError(msg) from e
@staticmethod
def _resolve_path(format_name: FormatName, custom: str | Path | None) -> Path:
+ """Resolve output path for export format.
+
+ Args:
+ format_name: Export format name (csv, json, html, pdf).
+ custom: Custom path if provided, None for auto-generated path.
+
+ Returns:
+ Resolved Path object for the output file.
+ """
if custom:
return Path(custom)
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index c0654c9..d254601 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -1,16 +1,16 @@
import asyncio
import logging
-import typing
from pathlib import Path
-from typing import Any, Final
+from typing import Any, Final, get_args
-import rich_click as click
from curl_cffi import BrowserTypeLiteral
+import rich_click as click
from naminter.cli.config import NaminterConfig
from naminter.cli.console import (
ResultFormatter,
console,
+ display_diff,
display_error,
display_validation_errors,
display_version,
@@ -18,14 +18,25 @@
)
from naminter.cli.constants import (
EXIT_CODE_ERROR,
- PROGRESS_ADVANCE_INCREMENT,
+ EXIT_CODE_INTERRUPTED,
+ OPTION_AUTO_VALUE,
+)
+from naminter.cli.exceptions import (
+ BrowserError,
+ CLIError,
+ ConfigurationError,
+ ExportError,
+ FileError,
+ NetworkError,
+ ValidationError,
)
from naminter.cli.exporters import Exporter
-from naminter.cli.progress import ProgressManager, ResultsTracker
+from naminter.cli.progress import ProgressBar
from naminter.cli.utils import (
fetch_json,
- generate_response_filename,
- open_browser,
+ get_response_filename,
+ open_url,
+ read_file,
read_json,
write_file,
)
@@ -40,15 +51,24 @@
MAX_CONCURRENT_TASKS,
WMN_SCHEMA_URL,
)
-from naminter.core.exceptions import HttpError, WMNDataError, WMNValidationError
+from naminter.core.exceptions import (
+ HttpError,
+ WMNDataError,
+ WMNFormatError,
+ WMNValidationError,
+)
+from naminter.core.formatter import WMNFormatter
from naminter.core.main import Naminter
-from naminter.core.models import WMNMode, WMNResult, WMNStatus, WMNValidationResult
+from naminter.core.models import WMNMode, WMNResult, WMNStatus, WMNTestResult
from naminter.core.network import CurlCFFISession
+from naminter.core.validator import WMNValidator
-from .exceptions import BrowserError, ConfigurationError, ExportError, FileIOError
-
-def _version_callback(ctx: click.Context, _param: click.Parameter, value: bool) -> None:
+def _version_callback(
+ ctx: click.Context,
+ _param: click.Parameter,
+ value: bool, # noqa: FBT001
+) -> None:
"""Eager callback to display version and exit."""
if not value or ctx.resilient_parsing:
return
@@ -60,42 +80,58 @@ class NaminterCLI:
"""Handles username enumeration operations."""
def __init__(self, config: NaminterConfig) -> None:
- self.config: NaminterConfig = config
+ self._config: NaminterConfig = config
self._formatter: ResultFormatter = ResultFormatter(
- show_details=config.show_details
+ show_details=config.show_details,
)
self._response_dir: Path | None = self._setup_response_dir()
- self._status_filters: Final[dict[WMNStatus, bool]] = {
- WMNStatus.FOUND: config.filter_found,
- WMNStatus.AMBIGUOUS: config.filter_ambiguous,
- WMNStatus.UNKNOWN: config.filter_unknown,
- WMNStatus.NOT_FOUND: config.filter_not_found,
- WMNStatus.NOT_VALID: config.filter_not_valid,
- WMNStatus.ERROR: config.filter_errors,
+ self._status_filters: Final[dict[WMNStatus, bool]] = (
+ self._create_status_filters()
+ )
+
+ def _create_status_filters(self) -> dict[WMNStatus, bool]:
+ """Create status filter mapping from config."""
+ return {
+ WMNStatus.EXISTS: self._config.filter_exists,
+ WMNStatus.PARTIAL: self._config.filter_partial,
+ WMNStatus.CONFLICTING: self._config.filter_conflicting,
+ WMNStatus.UNKNOWN: self._config.filter_unknown,
+ WMNStatus.MISSING: self._config.filter_missing,
+ WMNStatus.NOT_VALID: self._config.filter_not_valid,
+ WMNStatus.ERROR: self._config.filter_errors,
}
def _setup_response_dir(self) -> Path | None:
- """Setup response directory if response saving is enabled."""
- if not self.config.save_response:
+ """Setup response directory if response saving is enabled.
+
+ Returns:
+ Path to response directory if enabled, None otherwise.
+ """
+ if not self._config.save_response:
return None
- try:
- dir_path = self.config.response_dir
- if dir_path is not None:
- dir_path.mkdir(parents=True, exist_ok=True)
- return dir_path
+ dir_path = self._config.response_dir
+ if not dir_path:
+ display_warning("Response saving enabled but no directory configured")
+ self._config.save_response = False
return None
+
+ try:
+ dir_path.mkdir(parents=True, exist_ok=True)
except PermissionError as e:
- display_error(
- f"Permission denied creating/accessing response directory: {e}"
+ display_warning(
+ f"Permission denied creating response directory, disabling: {e}",
)
+ self._config.save_response = False
return None
except OSError as e:
- display_error(f"OS error creating/accessing response directory: {e}")
- return None
- except Exception as e:
- display_error(f"Unexpected error setting up response directory: {e}")
+ display_warning(
+ f"OS error creating response directory, disabling: {e}",
+ )
+ self._config.save_response = False
return None
+ else:
+ return dir_path
@staticmethod
def setup_logging(config: NaminterConfig) -> None:
@@ -108,229 +144,253 @@ def setup_logging(config: NaminterConfig) -> None:
log_path.parent.mkdir(parents=True, exist_ok=True)
except (PermissionError, OSError) as e:
msg = f"Failed to create log directory {log_path.parent}: {e}"
- raise FileIOError(msg) from e
+ raise OSError(msg) from e
level_value = getattr(
- logging, str(config.log_level or "INFO").upper(), logging.INFO
+ logging,
+ str(config.log_level or "INFO").upper(),
+ logging.INFO,
)
logger = logging.getLogger("naminter")
logger.setLevel(level_value)
logger.propagate = False
- has_file_handler = any(
- isinstance(handler, logging.FileHandler) for handler in logger.handlers
- )
- if not has_file_handler:
- try:
- file_handler = logging.FileHandler(
- str(log_path),
- mode="a",
- encoding=DEFAULT_FILE_ENCODING,
- )
- formatter = logging.Formatter(LOGGING_FORMAT)
- file_handler.setFormatter(formatter)
- file_handler.setLevel(level_value)
- logger.addHandler(file_handler)
- except (PermissionError, OSError) as e:
- msg = f"Failed to create log file {log_path}: {e}"
- raise FileIOError(msg) from e
+ for handler in logger.handlers[:]:
+ if isinstance(handler, logging.FileHandler):
+ handler.close()
+ logger.removeHandler(handler)
+
+ try:
+ file_handler = logging.FileHandler(
+ str(log_path),
+ mode="a",
+ encoding=DEFAULT_FILE_ENCODING,
+ )
+ formatter = logging.Formatter(LOGGING_FORMAT)
+ file_handler.setFormatter(formatter)
+ file_handler.setLevel(level_value)
+ logger.addHandler(file_handler)
+ except (PermissionError, OSError) as e:
+ msg = f"Failed to create log file {log_path}: {e}"
+ raise OSError(msg) from e
async def run(self) -> None:
"""Main execution method with progress tracking."""
- http_client = CurlCFFISession(
- proxies=self.config.proxy,
- verify=self.config.verify_ssl,
- timeout=self.config.timeout,
- allow_redirects=self.config.allow_redirects,
- impersonate=self.config.impersonate,
- ja3=self.config.ja3,
- akamai=self.config.akamai,
- extra_fp=self.config.extra_fp,
- )
-
- wmn_data: dict[str, Any] | None = None
- if self.config.local_list_path:
- wmn_data = await read_json(self.config.local_list_path)
- elif self.config.remote_list_url:
- wmn_data = await fetch_json(http_client, self.config.remote_list_url)
-
- wmn_schema: dict[str, Any] | None = None
- if not self.config.skip_validation:
- if self.config.local_schema_path:
- wmn_schema = await read_json(self.config.local_schema_path)
- elif self.config.remote_schema_url:
- wmn_schema = await fetch_json(
- http_client, self.config.remote_schema_url
- )
-
- async with Naminter(
- http_client=http_client,
- wmn_data=wmn_data,
- wmn_schema=wmn_schema,
- max_tasks=self.config.max_tasks,
- ) as naminter:
- if self.config.validate_sites:
- results = await self._run_validation(naminter)
- else:
- results = await self._run_check(naminter)
+ async with CurlCFFISession(
+ proxies=self._config.proxy,
+ verify=self._config.verify_ssl,
+ timeout=self._config.timeout,
+ allow_redirects=self._config.allow_redirects,
+ impersonate=self._config.impersonate,
+ ja3=self._config.ja3,
+ akamai=self._config.akamai,
+ extra_fp=self._config.extra_fp,
+ ) as http_client:
+ wmn_data: dict[str, Any] | None = None
+ if self._config.local_list_path:
+ wmn_data = await read_json(self._config.local_list_path)
+ elif self._config.remote_list_url:
+ wmn_data = await fetch_json(http_client, self._config.remote_list_url)
+
+ wmn_schema: dict[str, Any] | None = None
+ if not self._config.skip_validation:
+ if self._config.local_schema_path:
+ wmn_schema = await read_json(self._config.local_schema_path)
+ elif self._config.remote_schema_url:
+ wmn_schema = await fetch_json(
+ http_client,
+ self._config.remote_schema_url,
+ )
- if self.config.export_formats and results:
- exporter = Exporter(self.config.usernames or [])
- exporter.export(results, self.config.export_formats)
+ async with Naminter(
+ http_client=http_client,
+ wmn_data=wmn_data,
+ wmn_schema=wmn_schema,
+ max_tasks=self._config.max_tasks,
+ ) as naminter:
+ if self._config.test:
+ results = await self._run_validation(naminter)
+ else:
+ results = await self._run_check(naminter)
+
+ if self._config.export_formats and results:
+ exporter = Exporter(self._config.usernames or [])
+ await exporter.export(results, self._config.export_formats)
async def _run_check(self, naminter: Naminter) -> list[WMNResult]:
"""Run the username enumeration functionality."""
- summary = await naminter.get_wmn_summary(
- site_names=self.config.sites,
- include_categories=self.config.include_categories,
- exclude_categories=self.config.exclude_categories,
+ summary = naminter.get_wmn_summary(
+ site_names=self._config.sites,
+ include_categories=self._config.include_categories,
+ exclude_categories=self._config.exclude_categories,
)
actual_site_count = summary.sites_count
- total_sites = actual_site_count * len(self.config.usernames)
+ username_count = len(self._config.usernames) if self._config.usernames else 0
+ total_sites = actual_site_count * username_count
- tracker = ResultsTracker(total_sites)
results: list[WMNResult] = []
- with ProgressManager(
- console, disabled=self.config.no_progressbar
- ) as progress_mgr:
- progress_mgr.start(
- total_sites, "[bright_cyan]Enumerating usernames...[/bright_cyan]"
- )
+ if total_sites == 0:
+ return results
- result_stream = await naminter.enumerate_usernames(
- usernames=self.config.usernames,
- site_names=self.config.sites,
- include_categories=self.config.include_categories,
- exclude_categories=self.config.exclude_categories,
- mode=self.config.mode,
- as_generator=True,
- )
+ progress_bar = ProgressBar(console, disabled=self._config.no_progressbar)
+ progress_bar.start(
+ total_sites,
+ "[bright_cyan]Enumerating usernames...[/bright_cyan]",
+ )
- async for result in result_stream:
- tracker.add_result(result)
-
- if self._filter_result(result):
- try:
- file_path = await self._process_result(result)
- formatted_output = self._formatter.format_result(
- result, file_path
- )
- console.print(formatted_output)
- results.append(result)
- except Exception as e:
- display_error(f"Error processing result for {result.name}: {e}")
-
- progress_mgr.update(
- advance=PROGRESS_ADVANCE_INCREMENT,
- description=tracker.get_progress_text(),
- )
+ async for result in naminter.enumerate_usernames(
+ usernames=self._config.usernames,
+ site_names=self._config.sites,
+ include_categories=self._config.include_categories,
+ exclude_categories=self._config.exclude_categories,
+ mode=self._config.mode,
+ ):
+ progress_bar.add_result(result)
+
+ if self._filter_result(result):
+ try:
+ file_path = await self._save_response_file(result)
+ await self._open_in_browser(result, file_path)
+ formatted_output = self._formatter.format_result(result, file_path)
+ console.print(formatted_output)
+ results.append(result)
+ except (FileError, BrowserError) as e:
+ display_error(
+ f"Error processing result for {result.name} "
+ f"(status={result.status.value}): {e}",
+ )
+ progress_bar.stop()
return results
- async def _run_validation(self, naminter: Naminter) -> list[WMNValidationResult]:
+ async def _run_validation(self, naminter: Naminter) -> list[WMNTestResult]:
"""Run the site validation functionality."""
- summary = await naminter.get_wmn_summary(
- site_names=self.config.sites,
- include_categories=self.config.include_categories,
- exclude_categories=self.config.exclude_categories,
+ summary = naminter.get_wmn_summary(
+ site_names=self._config.sites,
+ include_categories=self._config.include_categories,
+ exclude_categories=self._config.exclude_categories,
)
total_tests = summary.known_count
- tracker = ResultsTracker(total_tests)
- results: list[WMNValidationResult] = []
+ results: list[WMNTestResult] = []
- with ProgressManager(
- console, disabled=self.config.no_progressbar
- ) as progress_mgr:
- progress_mgr.start(
- total_tests,
- "[bright_cyan]Validating sites...[/bright_cyan]",
- )
+ if total_tests == 0:
+ return results
- result_stream = await naminter.validate_sites(
- site_names=self.config.sites,
- include_categories=self.config.include_categories,
- exclude_categories=self.config.exclude_categories,
- mode=self.config.mode,
- as_generator=True,
- )
+ progress_bar = ProgressBar(console, disabled=self._config.no_progressbar)
+ progress_bar.start(
+ total_tests,
+            "[bright_cyan]Running tests...[/bright_cyan]",
+ )
- async for result in result_stream:
+ async for result in naminter.enumerate_test(
+ site_names=self._config.sites,
+ include_categories=self._config.include_categories,
+ exclude_categories=self._config.exclude_categories,
+ mode=self._config.mode,
+ ):
+ if result.results:
for site_result in result.results:
- tracker.add_result(site_result)
- progress_mgr.update(
- advance=PROGRESS_ADVANCE_INCREMENT,
- description=tracker.get_progress_text(),
- )
+ progress_bar.add_result(site_result)
- if self._filter_result(result):
- try:
- response_files: list[Path | None] = []
+ if self._filter_result(result):
+ try:
+ response_files: list[Path | None] = []
+ if result.results:
for site_result in result.results:
- response_file_path = await self._process_result(site_result)
- if response_file_path:
- response_files.append(response_file_path)
- else:
- response_files.append(None)
- formatted_output = self._formatter.format_validation(
- result, response_files
- )
- console.print(formatted_output)
- results.append(result)
- except Exception as e:
- display_error(
- f"Error processing validation result for {result.name}: {e}"
- )
+ file_path = await self._save_response_file(site_result)
+ await self._open_in_browser(site_result, file_path)
+ response_files.append(file_path)
+ formatted_output = self._formatter.format_validation(
+ result,
+ response_files,
+ )
+ console.print(formatted_output)
+ results.append(result)
+ except (FileError, BrowserError) as e:
+ display_error(
+ f"Error processing validation result for {result.name}: {e}",
+ )
+ progress_bar.stop()
return results
- def _filter_result(self, result: WMNResult | WMNValidationResult) -> bool:
+ def _filter_result(self, result: WMNResult | WMNTestResult) -> bool:
"""Determine if a result should be included based on filter settings."""
- if self.config.filter_all:
+ if self._config.filter_all:
return True
return self._status_filters.get(result.status, False)
- async def _process_result(self, result: WMNResult) -> Path | None:
- """Handle browser opening, response saving, and console output for a result."""
- if result.url and self.config.browse:
+ async def _open_in_browser(self, result: WMNResult, file_path: Path | None) -> None:
+ """Open result URL and saved response file in browser if configured.
+
+ Args:
+ result: The WMN result containing URL information.
+ file_path: Path to saved response file, if any.
+ """
+ if self._config.browse and result.url:
+ try:
+ await open_url(result.url)
+ except BrowserError as e:
+ display_error(f"Browser error opening {result.url}: {e}")
+
+ if self._config.open_response and file_path:
+ file_uri = await asyncio.to_thread(
+ lambda: file_path.resolve().as_uri(), # noqa: ASYNC240
+ )
try:
- await open_browser(result.url)
+ await open_url(file_uri)
except BrowserError as e:
- display_error(str(e))
+ display_error(f"Browser error opening response file {file_uri}: {e}")
- if not self.config.save_response:
+ async def _save_response_file(self, result: WMNResult) -> Path | None:
+ """Save HTTP response to file if configured."""
+ if not self._config.save_response:
return None
- if not result.response_text or not self._response_dir:
+ if not result.text or not self._response_dir:
return None
- filename = generate_response_filename(result)
+ filename = get_response_filename(result)
file_path = self._response_dir / filename
try:
- await write_file(file_path, result.response_text)
- except FileIOError as e:
- display_error(str(e))
+ await write_file(file_path, result.text)
+ except FileError as e:
+ display_error(f"Failed to save response file {file_path}: {e}")
return None
- if self.config.open_response:
- file_uri = file_path.resolve().as_uri()
- try:
- await open_browser(file_uri)
- except BrowserError as e:
- display_error(str(e))
-
return file_path
+def _handle_cli_error(ctx: click.Context, error: BaseException) -> None:
+ """Handle CLI errors and exit with appropriate code.
+
+ Args:
+ ctx: Click context.
+ error: The exception that was raised.
+ """
+ if isinstance(error, WMNValidationError):
+ display_error(str(error), end="" if error.errors else "\n")
+ if error.errors:
+ display_validation_errors(error.errors)
+ elif isinstance(error, CLIError):
+ display_error(str(error))
+ else:
+ display_error(f"Unexpected error: {error}")
+
+ ctx.exit(EXIT_CODE_ERROR)
+
+
@click.group(
invoke_without_command=True,
no_args_is_help=True,
context_settings={"help_option_names": ["-h", "--help"]},
)
+# Version & Help
@click.option(
"--version",
is_flag=True,
@@ -339,15 +399,28 @@ async def _process_result(self, result: WMNResult) -> Path | None:
callback=_version_callback,
help="Show version information and exit",
)
+# Display & Output Formatting
@click.option("--no-color", is_flag=True, help="Disable colored console output")
@click.option(
- "--no-progressbar", is_flag=True, help="Disable progress bar during execution"
+ "--no-progressbar",
+ is_flag=True,
+ help="Disable progress bar during execution",
+)
+@click.option(
+ "--show-details",
+ is_flag=True,
+ help="Show detailed information in console output",
)
+# Input Specification
@click.option(
"--username",
"-u",
multiple=True,
- help="Username(s) to search for across social media platforms",
+ required=False,
+ help=(
+ "Username(s) to search for across social media platforms "
+ "(required unless --test)"
+ ),
)
@click.option(
"--site",
@@ -355,17 +428,19 @@ async def _process_result(self, result: WMNResult) -> Path | None:
multiple=True,
help='Specific site name(s) to enumerate (e.g., "GitHub", "X")',
)
+# Data Sources - Local
@click.option(
"--local-list",
type=click.Path(exists=True, path_type=Path),
help="Path to a local JSON file containing WhatsMyName site data",
)
-@click.option("--remote-list", help="URL to fetch remote WhatsMyName site data")
@click.option(
"--local-schema",
type=click.Path(exists=True, path_type=Path),
help="Path to local WhatsMyName JSON schema file for validation",
)
+# Data Sources - Remote
+@click.option("--remote-list", help="URL to fetch remote WhatsMyName site data")
@click.option(
"--remote-schema",
default=WMN_SCHEMA_URL,
@@ -374,16 +449,18 @@ async def _process_result(self, result: WMNResult) -> Path | None:
"(ignored with --skip-validation)"
),
)
+# Validation
@click.option(
"--skip-validation",
is_flag=True,
help="Skip JSON schema validation of WhatsMyName data",
)
@click.option(
- "--validate-sites",
+ "--test",
is_flag=True,
help="Validate site detection methods by checking known usernames",
)
+# Category Filtering
@click.option(
"--include-categories",
multiple=True,
@@ -394,6 +471,7 @@ async def _process_result(self, result: WMNResult) -> Path | None:
multiple=True,
help='Exclude sites from specified categories (e.g., "adult", "gaming")',
)
+# Network Configuration
@click.option(
"--proxy",
help="Proxy server to use for requests (e.g., http://proxy:port, socks5://proxy:port)",
@@ -405,26 +483,26 @@ async def _process_result(self, result: WMNResult) -> Path | None:
help="Maximum time in seconds to wait for each HTTP request",
)
@click.option(
- "--allow-redirects",
- is_flag=True,
+ "--allow-redirects/--no-allow-redirects",
default=HTTP_ALLOW_REDIRECTS,
help="Whether to follow HTTP redirects automatically",
)
@click.option(
- "--verify-ssl",
- is_flag=True,
+ "--verify-ssl/--no-verify-ssl",
default=HTTP_SSL_VERIFY,
help="Whether to verify SSL/TLS certificates for HTTPS requests",
)
@click.option(
"--impersonate",
- type=click.Choice([BROWSER_IMPERSONATE_NONE, *typing.get_args(BrowserTypeLiteral)]),
+ type=click.Choice([BROWSER_IMPERSONATE_NONE, *get_args(BrowserTypeLiteral)]),
default=BROWSER_IMPERSONATE_AGENT,
help='Browser to impersonate in HTTP requests (use "none" to disable)',
)
+# Fingerprinting Options
@click.option("--ja3", help="JA3 fingerprint string for TLS fingerprinting")
@click.option(
- "--akamai", help="Akamai fingerprint string for Akamai bot detection bypass"
+ "--akamai",
+ help="Akamai fingerprint string for Akamai bot detection bypass",
)
@click.option(
"--extra-fp",
@@ -434,6 +512,7 @@ async def _process_result(self, result: WMNResult) -> Path | None:
")"
),
)
+# Concurrency & Debugging
@click.option(
"--max-tasks",
type=int,
@@ -452,67 +531,74 @@ async def _process_result(self, result: WMNResult) -> Path | None:
help="Set logging level",
)
@click.option("--log-file", help="Path to log file for debug output")
-@click.option(
- "--show-details", is_flag=True, help="Show detailed information in console output"
-)
-@click.option("--browse", is_flag=True, help="Open found profiles in web browser")
+# Response Handling
@click.option(
"--save-response",
"save_response_opt",
type=str,
- flag_value="__AUTO__",
+ flag_value=OPTION_AUTO_VALUE,
default=None,
help="Save HTTP responses; optionally specify directory path",
)
@click.option(
- "--open-response", is_flag=True, help="Open saved response files in web browser"
+ "--open-response",
+ is_flag=True,
+ help="Open saved response files in web browser",
)
+@click.option("--browse", is_flag=True, help="Open found profiles in web browser")
+# Export Options
@click.option(
"--csv",
"csv_opt",
type=str,
- flag_value="__AUTO__",
+ flag_value=OPTION_AUTO_VALUE,
default=None,
help="Export results to CSV; optionally specify a custom path",
)
@click.option(
- "--pdf",
- "pdf_opt",
+ "--json",
+ "json_opt",
type=str,
- flag_value="__AUTO__",
+ flag_value=OPTION_AUTO_VALUE,
default=None,
- help="Export results to PDF; optionally specify a custom path",
+ help="Export results to JSON; optionally specify a custom path",
)
@click.option(
"--html",
"html_opt",
type=str,
- flag_value="__AUTO__",
+ flag_value=OPTION_AUTO_VALUE,
default=None,
help="Export results to HTML; optionally specify a custom path",
)
@click.option(
- "--json",
- "json_opt",
+ "--pdf",
+ "pdf_opt",
type=str,
- flag_value="__AUTO__",
+ flag_value=OPTION_AUTO_VALUE,
default=None,
- help="Export results to JSON; optionally specify a custom path",
+ help="Export results to PDF; optionally specify a custom path",
)
+# Result Filtering
@click.option(
"--filter-all",
is_flag=True,
help="Include all results in console output and exports",
)
@click.option(
- "--filter-found",
+ "--filter-exists",
+ is_flag=True,
+ help="Show only existing username results in console output and exports",
+)
+@click.option(
+ "--filter-partial",
is_flag=True,
- help="Show only found results in console output and exports",
+ help="Show only partial match results in console output and exports",
)
@click.option(
- "--filter-ambiguous",
+ "--filter-conflicting",
is_flag=True,
- help="Show only ambiguous results in console output and exports",
+ help="Show only conflicting results in console output and exports",
)
@click.option(
"--filter-unknown",
@@ -520,9 +606,9 @@ async def _process_result(self, result: WMNResult) -> Path | None:
help="Show only unknown results in console output and exports",
)
@click.option(
- "--filter-not-found",
+ "--filter-missing",
is_flag=True,
- help="Show only not found results in console output and exports",
+ help="Show only missing username results in console output and exports",
)
@click.option(
"--filter-not-valid",
@@ -535,10 +621,10 @@ async def _process_result(self, result: WMNResult) -> Path | None:
help="Show only error results in console output and exports",
)
@click.pass_context
-def main(ctx: click.Context, **kwargs: Any) -> None:
- """A Python package and command-line interface (CLI) tool.
- For asynchronous OSINT username enumeration using the
- WhatsMyName dataset.
+def main(ctx: click.Context, **kwargs: dict[str, Any]) -> None:
+ """A Python package and CLI tool for asynchronous OSINT username enumeration.
+
+ Uses the WhatsMyName dataset.
"""
if ctx.invoked_subcommand is not None:
@@ -548,114 +634,178 @@ def main(ctx: click.Context, **kwargs: Any) -> None:
console.no_color = True
try:
- csv_export = kwargs.get("csv_opt") is not None
- csv_path = (
- None
- if kwargs.get("csv_opt") in {None, "__AUTO__"}
- else kwargs.get("csv_opt")
- )
+ config = NaminterConfig(**kwargs)
+ NaminterCLI.setup_logging(config)
+ naminter_cli = NaminterCLI(config)
+ asyncio.run(naminter_cli.run())
+ except KeyboardInterrupt:
+ display_warning("Operation interrupted")
+ ctx.exit(EXIT_CODE_INTERRUPTED)
+ except (
+ ConfigurationError,
+ ValidationError,
+ FileError,
+ NetworkError,
+ HttpError,
+ WMNValidationError,
+ WMNDataError,
+ BrowserError,
+ ExportError,
+ CLIError,
+ ) as e:
+ _handle_cli_error(ctx, e)
+
+
+@main.command(name="validate")
+@click.option(
+ "--local-schema",
+ type=click.Path(exists=True, path_type=Path),
+ required=True,
+ help="Path to local WhatsMyName JSON schema file for validation",
+)
+@click.option(
+ "--local-data",
+ type=click.Path(exists=True, path_type=Path),
+ required=True,
+ help="Path to local WhatsMyName JSON data file to validate",
+)
+@click.option("--no-color", is_flag=True, help="Disable colored console output")
+@click.pass_context
+def validator_command(
+ ctx: click.Context,
+ local_schema: Path,
+ local_data: Path,
+ *,
+ no_color: bool,
+) -> None:
+ """Validate WhatsMyName JSON data against a JSON schema."""
+ if no_color:
+ console.no_color = True
- pdf_export = kwargs.get("pdf_opt") is not None
- pdf_path = (
- None
- if kwargs.get("pdf_opt") in {None, "__AUTO__"}
- else kwargs.get("pdf_opt")
- )
+ async def run_validator() -> None:
+ """Run validation asynchronously."""
+ try:
+ schema = await read_json(local_schema)
+ data = await read_json(local_data)
- html_export = kwargs.get("html_opt") is not None
- html_path = (
- None
- if kwargs.get("html_opt") in {None, "__AUTO__"}
- else kwargs.get("html_opt")
- )
+ validator = WMNValidator(schema)
+ errors = validator.validate(data)
- json_export = kwargs.get("json_opt") is not None
- json_path = (
- None
- if kwargs.get("json_opt") in {None, "__AUTO__"}
- else kwargs.get("json_opt")
- )
+ if errors:
+ display_validation_errors(errors)
+ ctx.exit(EXIT_CODE_ERROR)
+ else:
+ console.print(
+ "[green]+ [Validator] Validation passed: No errors found[/green]",
+ )
+ except (
+ FileError,
+ WMNValidationError,
+ WMNDataError,
+ ) as e:
+ _handle_cli_error(ctx, e)
- save_response = kwargs.get("save_response_opt") is not None
- response_path = (
- None
- if kwargs.get("save_response_opt") in {None, "__AUTO__"}
- else kwargs.get("save_response_opt")
- )
+ try:
+ asyncio.run(run_validator())
+ except KeyboardInterrupt:
+ display_warning("Operation interrupted")
+ ctx.exit(EXIT_CODE_INTERRUPTED)
+ except (
+ ConfigurationError,
+ ValidationError,
+ FileError,
+ WMNValidationError,
+ WMNDataError,
+ BrowserError,
+ ExportError,
+ CLIError,
+ ) as e:
+ _handle_cli_error(ctx, e)
+
+
+@main.command(name="format")
+@click.option(
+ "--local-schema",
+ type=click.Path(exists=True, path_type=Path),
+ required=True,
+ help="Path to local WhatsMyName JSON schema file",
+)
+@click.option(
+ "--local-data",
+ type=click.Path(exists=True, path_type=Path),
+ required=True,
+ help="Path to local WhatsMyName JSON data file to format",
+)
+@click.option(
+ "--output",
+ "-o",
+ type=click.Path(path_type=Path),
+ help="Output file path (defaults to overwriting input file)",
+)
+@click.option("--no-color", is_flag=True, help="Disable colored console output")
+@click.pass_context
+def format_command(
+ ctx: click.Context,
+ local_schema: Path,
+ local_data: Path,
+ output: Path | None,
+ *,
+ no_color: bool,
+) -> None:
+ """Format WhatsMyName JSON data according to schema ordering and sorting."""
+ if no_color:
+ console.no_color = True
- config = NaminterConfig(
- usernames=list(kwargs.get("username") or []),
- sites=kwargs.get("site"),
- local_list_path=kwargs.get("local_list"),
- remote_list_url=kwargs.get("remote_list"),
- local_schema_path=kwargs.get("local_schema"),
- remote_schema_url=kwargs.get("remote_schema"),
- skip_validation=kwargs.get("skip_validation"),
- include_categories=kwargs.get("include_categories"),
- exclude_categories=kwargs.get("exclude_categories"),
- max_tasks=kwargs.get("max_tasks"),
- timeout=kwargs.get("timeout"),
- proxy=kwargs.get("proxy"),
- allow_redirects=bool(kwargs.get("allow_redirects")),
- verify_ssl=bool(kwargs.get("verify_ssl")),
- impersonate=kwargs.get("impersonate"),
- ja3=kwargs.get("ja3"),
- akamai=kwargs.get("akamai"),
- extra_fp=kwargs.get("extra_fp"),
- mode=WMNMode(kwargs.get("mode", WMNMode.ALL.value)),
- validate_sites=bool(kwargs.get("validate_sites")),
- log_level=kwargs.get("log_level"),
- log_file=kwargs.get("log_file"),
- show_details=bool(kwargs.get("show_details")),
- browse=bool(kwargs.get("browse")),
- save_response=save_response,
- response_path=response_path,
- open_response=bool(kwargs.get("open_response")),
- csv_export=csv_export,
- csv_path=csv_path,
- pdf_export=pdf_export,
- pdf_path=pdf_path,
- html_export=html_export,
- html_path=html_path,
- json_export=json_export,
- json_path=json_path,
- filter_all=bool(kwargs.get("filter_all")),
- filter_found=bool(kwargs.get("filter_found")),
- filter_ambiguous=bool(kwargs.get("filter_ambiguous")),
- filter_unknown=bool(kwargs.get("filter_unknown")),
- filter_not_found=bool(kwargs.get("filter_not_found")),
- filter_not_valid=bool(kwargs.get("filter_not_valid")),
- filter_errors=bool(kwargs.get("filter_errors")),
- no_progressbar=bool(kwargs.get("no_progressbar")),
- )
+ async def run_formatter() -> None:
+ """Run formatting asynchronously."""
+ try:
+ schema_data = await read_json(local_schema)
+ data = await read_json(local_data)
- NaminterCLI.setup_logging(config)
+ original_content = await read_file(local_data)
- naminter_cli = NaminterCLI(config)
- asyncio.run(naminter_cli.run())
+ formatter = WMNFormatter(schema_data)
+ formatted_content = formatter.format_dataset(data)
+
+ output_path = output or local_data
+
+ if original_content != formatted_content:
+ await write_file(output_path, formatted_content)
+ display_diff(original_content, formatted_content, output_path)
+ msg = (
+ f"[green]+ [Formatter] Formatted data written to: "
+ f"{output_path}[/green]"
+ )
+ console.print(msg)
+ else:
+ console.print("[green]+ [Formatter] Data is already formatted[/green]")
+ except (
+ FileError,
+ WMNFormatError,
+ WMNValidationError,
+ WMNDataError,
+ ) as e:
+ _handle_cli_error(ctx, e)
+
+ try:
+ asyncio.run(run_formatter())
except KeyboardInterrupt:
display_warning("Operation interrupted")
- ctx.exit(EXIT_CODE_ERROR)
- except ConfigurationError as e:
- display_error(f"Configuration error: {e}")
- ctx.exit(EXIT_CODE_ERROR)
- except HttpError as e:
- display_error(f"Network error: {e}")
- ctx.exit(EXIT_CODE_ERROR)
- except WMNDataError as e:
- display_error(f"Data error: {e}")
- if isinstance(e, WMNValidationError) and e.errors:
- display_validation_errors(e.errors)
- ctx.exit(EXIT_CODE_ERROR)
- except FileIOError as e:
- display_error(f"File I/O error: {e}")
- ctx.exit(EXIT_CODE_ERROR)
- except ExportError as e:
- display_error(f"Export error: {e}")
- ctx.exit(EXIT_CODE_ERROR)
- except Exception as e:
- display_error(f"Unexpected error: {e}")
- ctx.exit(EXIT_CODE_ERROR)
+ ctx.exit(EXIT_CODE_INTERRUPTED)
+ except (
+ ConfigurationError,
+ ValidationError,
+ FileError,
+ NetworkError,
+ HttpError,
+ WMNFormatError,
+ WMNValidationError,
+ WMNDataError,
+ BrowserError,
+ ExportError,
+ CLIError,
+ ) as e:
+ _handle_cli_error(ctx, e)
def entry_point() -> None:
diff --git a/naminter/cli/progress.py b/naminter/cli/progress.py
index 135df90..3818378 100644
--- a/naminter/cli/progress.py
+++ b/naminter/cli/progress.py
@@ -1,5 +1,5 @@
import time
-from typing import Any
+from types import TracebackType
from rich.console import Console
from rich.progress import (
@@ -14,80 +14,96 @@
)
from naminter.cli.console import THEME
-from naminter.cli.constants import PROGRESS_ADVANCE_INCREMENT
+from naminter.cli.constants import PROGRESS_ADVANCE_INCREMENT, STATUS_SYMBOLS
from naminter.core.models import WMNResult, WMNStatus
-class ResultsTracker:
- """Tracks results for the username enumeration operations."""
+class ProgressBar:
+ """Manages progress bar and result tracking for CLI applications."""
- def __init__(self, total_sites: int) -> None:
- """Initialize the results tracker."""
- self.total_sites = max(total_sites, 0)
- self.results_count = 0
- self.start_time = time.time()
+ def __init__(self, console: Console, *, disabled: bool = False) -> None:
+ """Initialize the progress bar.
+
+ Args:
+ console: Rich Console instance for output.
+ disabled: Whether to disable progress bar display.
+ """
+ self.console: Console = console
+ self.disabled: bool = disabled
+ self.progress: Progress | None = None
+ self.task_id: TaskID | None = None
+
+ self.total_sites: int = 0
+ self.results_count: int = 0
+ self.start_time: float | None = None
self.status_counts: dict[WMNStatus, int] = dict.fromkeys(WMNStatus, 0)
def add_result(self, result: WMNResult) -> None:
- """Update counters with a new result."""
+ """Update counters with a new result and refresh progress display."""
self.results_count += 1
self.status_counts[result.status] += 1
+ self.update(
+ advance=PROGRESS_ADVANCE_INCREMENT,
+ description=self._get_progress_text(),
+ )
- def get_progress_text(self) -> str:
+ def _get_progress_text(self) -> str:
"""Get formatted progress text with request speed and statistics."""
elapsed = time.time() - self.start_time if self.start_time else 0.0
- found = self.status_counts[WMNStatus.FOUND]
- ambiguous = self.status_counts[WMNStatus.AMBIGUOUS]
+ exists = self.status_counts[WMNStatus.EXISTS]
+ partial = self.status_counts[WMNStatus.PARTIAL]
+ conflicting = self.status_counts[WMNStatus.CONFLICTING]
unknown = self.status_counts[WMNStatus.UNKNOWN]
- not_found = self.status_counts[WMNStatus.NOT_FOUND]
+ missing = self.status_counts[WMNStatus.MISSING]
not_valid = self.status_counts[WMNStatus.NOT_VALID]
errors = self.status_counts[WMNStatus.ERROR]
- valid_count = self.results_count - errors - not_valid
- valid_count = max(valid_count, 0)
- rate = valid_count / elapsed if elapsed > 0 else 0.0
+ valid_count = max(self.results_count - errors - not_valid, 0)
+ rate = valid_count / elapsed if elapsed > 0.0 else 0.0
sections = [
- f"[{THEME['primary']}]{rate:.1f} req/s[/]",
- f"[{THEME['success']}]+ {found}[/]",
- f"[{THEME['error']}]- {not_found}[/]",
+ f"[{THEME.primary}]{rate:.1f} req/s[/]",
+ f"[{THEME.success}]{STATUS_SYMBOLS['exists']} {exists}[/]",
+ f"[{THEME.error}]{STATUS_SYMBOLS['missing']} {missing}[/]",
]
if unknown > 0:
- sections.append(f"[{THEME['warning']}]? {unknown}[/]")
- if ambiguous > 0:
- sections.append(f"[{THEME['warning']}]* {ambiguous}[/]")
+ sections.append(
+ f"[{THEME.warning}]{STATUS_SYMBOLS['unknown']} {unknown}[/]",
+ )
+ if partial > 0:
+ sections.append(
+ f"[{THEME.warning}]{STATUS_SYMBOLS['partial']} {partial}[/]",
+ )
+ if conflicting > 0:
+ sections.append(
+ f"[{THEME.warning}]{STATUS_SYMBOLS['conflicting']} {conflicting}[/]",
+ )
if errors > 0:
- sections.append(f"[{THEME['error']}]! {errors}[/]")
+ sections.append(
+ f"[{THEME.error}]{STATUS_SYMBOLS['error']} {errors}[/]",
+ )
if not_valid > 0:
- sections.append(f"[{THEME['warning']}]x {not_valid}[/]")
+ sections.append(
+ f"[{THEME.warning}]{STATUS_SYMBOLS['not_valid']} {not_valid}[/]",
+ )
- total = max(self.total_sites, self.results_count)
- sections.append(
- f"[{THEME['primary']}]{self.results_count}/{total}[/]"
- )
+ sections.append(f"[{THEME.primary}]{self.results_count}/{self.total_sites}[/]")
return " │ ".join(sections)
+ def _create_progress_bar(self) -> Progress:
+ """Create a new progress bar with configured styling.
-class ProgressManager:
- """Manages progress bar and tracking for CLI applications."""
-
- def __init__(self, console: Console, disabled: bool = False) -> None:
- """Initialize the progress manager."""
- self.console: Console = console
- self.disabled: bool = disabled
- self.progress: Progress | None = None
- self.task_id: TaskID | None = None
-
- def create_progress_bar(self) -> Progress:
- """Create a new progress bar."""
+ Returns:
+ Configured Progress instance ready for display.
+ """
return Progress(
SpinnerColumn(),
TextColumn("[progress.description]{task.description}"),
BarColumn(
- complete_style=THEME["primary"],
- finished_style=THEME["success"],
+ complete_style=THEME.primary,
+ finished_style=THEME.success,
),
TaskProgressColumn(),
TimeElapsedColumn(),
@@ -97,21 +113,32 @@ def create_progress_bar(self) -> Progress:
)
def start(self, total: int, description: str) -> None:
- """Start the progress bar."""
+ """Start the progress bar and result tracking.
+
+ Args:
+ total: Total number of tasks to track.
+ description: Initial description text for the progress bar.
+ """
+ self.total_sites = max(total, 0)
+ self.start_time = time.time()
if not self.disabled:
- self.progress = self.create_progress_bar()
+ self.progress = self._create_progress_bar()
self.progress.start()
self.task_id = self.progress.add_task(description, total=total)
def update(
- self, advance: int = PROGRESS_ADVANCE_INCREMENT, description: str | None = None
+ self,
+ advance: int = PROGRESS_ADVANCE_INCREMENT,
+ description: str | None = None,
) -> None:
- """Update the progress bar."""
+ """Update the progress bar.
+
+ Args:
+ advance: Number of steps to advance the progress.
+ description: Optional new description to display.
+ """
if self.progress and self.task_id is not None:
- update_kwargs: dict[str, Any] = {"advance": advance}
- if description is not None:
- update_kwargs["description"] = description
- self.progress.update(self.task_id, **update_kwargs)
+ self.progress.update(self.task_id, advance=advance, description=description)
def stop(self) -> None:
"""Stop and close the progress bar."""
@@ -120,12 +147,15 @@ def stop(self) -> None:
self.progress = None
self.task_id = None
- def __enter__(self) -> "ProgressManager":
+ def __enter__(self) -> "ProgressBar":
"""Enter context manager."""
return self
def __exit__(
- self, exc_type: type | None, exc_val: BaseException | None, exc_tb: Any | None
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
) -> None:
"""Exit context manager and stop progress bar."""
self.stop()
diff --git a/naminter/cli/utils.py b/naminter/cli/utils.py
index 3e18979..44812c8 100644
--- a/naminter/cli/utils.py
+++ b/naminter/cli/utils.py
@@ -1,147 +1,297 @@
import asyncio
import json
-import webbrowser
from pathlib import Path
from typing import Any
+import webbrowser
import aiofiles
from naminter.cli.constants import (
+ DEFAULT_UNNAMED_VALUE,
MAX_FILENAME_LENGTH,
+ OPTION_AUTO_VALUE,
RESPONSE_FILE_DATE_FORMAT,
RESPONSE_FILE_EXTENSION,
)
-from naminter.cli.exceptions import BrowserError, ConfigurationError, FileIOError
+from naminter.cli.exceptions import (
+ BrowserError,
+ FileError,
+ NetworkError,
+ ValidationError,
+)
from naminter.core.constants import (
+ ASCII_CONTROL_CHAR_THRESHOLD,
DEFAULT_FILE_ENCODING,
EMPTY_STRING,
+ HTTP_STATUS_OK,
)
-from naminter.core.exceptions import (
- HttpError,
- HttpSessionError,
- HttpTimeoutError,
- WMNDataError,
-)
+from naminter.core.exceptions import HttpError
from naminter.core.models import WMNResult
from naminter.core.network import BaseSession
-def sanitize_filename(filename: str) -> str:
- """Sanitize filename for cross-platform compatibility."""
- if not filename or not str(filename).strip():
- return "unnamed"
+# Option parsing utilities
+def parse_option_path(option_value: str | None) -> str | None:
+ """Parse export/response option value, returning None for auto or unset.
+
+ Args:
+ option_value: The option value to parse. Can be None, OPTION_AUTO_VALUE, or
+ a path string.
+
+ Returns:
+ None if the option is unset or set to auto mode, otherwise the path string.
+ """
+ if option_value in {None, OPTION_AUTO_VALUE}:
+ return None
+ return option_value
+
+
+# Filename utilities
+def sanitize_filename(filename: str) -> str | None:
+ """Sanitize filename for cross-platform compatibility.
+
+ Removes or replaces invalid characters that are not allowed in filenames
+ on various operating systems (Windows, macOS, Linux).
+
+ Args:
+ filename: The filename to sanitize.
+
+ Returns:
+ A sanitized filename safe for all platforms, or None if invalid.
+
+ Raises:
+ ValidationError: If filename cannot be converted to string.
+ """
+ if not filename:
+ return None
+
+ try:
+ filename_str = str(filename).strip()
+ except (TypeError, ValueError) as e:
+ msg = f"Failed to convert filename to string: {e}"
+ raise ValidationError(msg) from e
+
+ if not filename_str:
+ return None
invalid_chars = '<>:"|?*\\/\0'
+ translation_table = str.maketrans(invalid_chars, "_" * len(invalid_chars))
sanitized = EMPTY_STRING.join(
- "_" if c in invalid_chars or ord(c) < 32 else c for c in str(filename)
+ "_" if ord(c) < ASCII_CONTROL_CHAR_THRESHOLD else c
+ for c in filename_str.translate(translation_table)
)
- sanitized = (
- sanitized.strip(" .")[:MAX_FILENAME_LENGTH]
- if sanitized.strip(" .")
- else "unnamed"
- )
- return sanitized
+ sanitized = sanitized.strip(" .")
+ if len(sanitized) > MAX_FILENAME_LENGTH:
+ sanitized = sanitized[:MAX_FILENAME_LENGTH].rstrip(" .")
-async def fetch_json(http_client: BaseSession, url: str) -> dict[str, Any]:
- """Fetch and parse JSON from a URL."""
- if not url or not url.strip():
- msg = f"Invalid URL: {url}"
- raise ConfigurationError(msg)
+ return sanitized or None
- try:
- response = await http_client.get(url)
- except (HttpError, HttpTimeoutError, HttpSessionError):
- raise
- except Exception as e:
- msg = f"Network error while fetching from {url}: {e}"
- raise HttpError(msg, cause=e) from e
- if response.status_code != 200:
- msg = f"Failed to fetch from {url}: HTTP {response.status_code}"
- raise HttpError(msg)
+def get_response_filename(result: WMNResult) -> str:
+ """Generate a sanitized filename for saving response data.
+ Args:
+ result: The WMNResult containing response data.
+
+ Returns:
+        A sanitized filename of the form status_statuscode_site_username_timestamp with the response file extension.
+
+ Raises:
+ ValidationError: If WMNResult is missing required attributes.
+ """
try:
- return response.json()
- except (ValueError, json.JSONDecodeError) as e:
- msg = f"Failed to parse JSON from {url}: {e}"
- raise WMNDataError(msg, cause=e) from e
- except Exception as e:
- msg = f"Unexpected error parsing response from {url}: {e}"
- raise WMNDataError(msg, cause=e) from e
+ safe_site_name = sanitize_filename(result.name) or DEFAULT_UNNAMED_VALUE
+ safe_username = sanitize_filename(result.username) or DEFAULT_UNNAMED_VALUE
+ status_str = result.status.value
+ created_at_str = result.created_at.strftime(RESPONSE_FILE_DATE_FORMAT)
+ status_code = result.status_code
+ except AttributeError as e:
+ msg = f"WMNResult missing required attribute: {e}"
+ raise ValidationError(msg) from e
+ base_name = (
+ f"{status_str}_{status_code}_{safe_site_name}_{safe_username}_{created_at_str}"
+ )
+ safe_base_name = sanitize_filename(base_name) or DEFAULT_UNNAMED_VALUE
+ return f"{safe_base_name}{RESPONSE_FILE_EXTENSION}"
+
+
+# File operations
+async def read_file(file_path: str | Path) -> str:
+ """Read text content from a file asynchronously with error handling.
+
+ Args:
+ file_path: Path to the file to read.
+
+ Returns:
+ Text content of the file.
+
+ Raises:
+ ValidationError: If file_path is missing or invalid.
+ FileError: For any problem reading the file.
+ """
+ if not file_path:
+ msg = "File path is required"
+ raise ValidationError(msg)
+
+ path_obj = Path(file_path)
-async def read_json(path: str | Path) -> dict[str, Any]:
- """Read JSON from a local file without blocking the event loop."""
try:
- async with aiofiles.open(path, encoding=DEFAULT_FILE_ENCODING) as file:
- content = await file.read()
+ async with aiofiles.open(path_obj, encoding=DEFAULT_FILE_ENCODING) as f:
+ content = await f.read()
except FileNotFoundError as e:
- msg = f"File not found: {path}"
- raise FileIOError(msg) from e
+ msg = f"File not found: {path_obj}"
+ raise FileError(msg) from e
except PermissionError as e:
- msg = f"Permission denied accessing file: {path}"
- raise FileIOError(msg) from e
+ msg = f"Permission denied reading file: {path_obj}"
+ raise FileError(msg) from e
except UnicodeDecodeError as e:
- msg = f"Encoding error reading file {path}: {e}"
- raise FileIOError(msg) from e
+ msg = f"Encoding error reading file {path_obj}: {e}"
+ raise FileError(msg) from e
except OSError as e:
- msg = f"Error reading file {path}: {e}"
- raise FileIOError(msg) from e
+ msg = f"OS error reading file {path_obj}: {e}"
+ raise FileError(msg) from e
+
+ if not content or not content.strip():
+ msg = f"File is empty: {path_obj}"
+ raise FileError(msg)
+
+ return content
+
+
+async def read_json(path: str | Path) -> dict[str, Any]:
+ """Read JSON from a local file without blocking the event loop.
+ Args:
+ path: Path to the JSON file.
+
+ Returns:
+ Parsed JSON data as dictionary.
+
+ Raises:
+ ValidationError: If path is missing or invalid.
+ FileError: For any problem reading or parsing the JSON file.
+ """
+ content = await read_file(path)
try:
return json.loads(content)
except json.JSONDecodeError as e:
- msg = f"Invalid JSON in file {path}: {e}"
- raise WMNDataError(msg, cause=e) from e
+ path_obj = Path(path)
+ msg = (
+ f"Invalid JSON in file {path_obj} at line {e.lineno}, "
+ f"column {e.colno}: {e.msg}"
+ )
+ raise FileError(msg) from e
+
+
+async def write_file(file_path: str | Path, data: str | bytes) -> None:
+ """Write data to a file asynchronously with error handling.
+
+ Args:
+ file_path: Path to the file to write.
+ data: Text or binary data to write to the file.
+ Raises:
+ ValidationError: If file_path is missing or invalid.
+ FileError: For any problem creating directories or writing the file.
+ """
+ if not file_path:
+ msg = "File path is required"
+ raise ValidationError(msg)
-async def open_browser(url: str) -> None:
- """Open a URL in the browser with error handling."""
- if not url or not url.strip():
- msg = "Invalid URL provided to browser"
- raise BrowserError(msg)
+ path_obj = Path(file_path)
try:
- await asyncio.to_thread(webbrowser.open, url)
- except webbrowser.Error as e:
- msg = f"Browser error opening {url}: {e}"
- raise BrowserError(msg) from e
+ path_obj.parent.mkdir(parents=True, exist_ok=True)
+ except PermissionError as e:
+ msg = f"Permission denied creating directory for {path_obj}"
+ raise FileError(msg) from e
except OSError as e:
- msg = f"OS error opening browser for {url}: {e}"
- raise BrowserError(msg) from e
- except Exception as e:
- msg = f"Unexpected error opening browser for {url}: {e}"
- raise BrowserError(msg) from e
-
+ msg = f"OS error creating directory for {path_obj}: {e}"
+ raise FileError(msg) from e
-async def write_file(file_path: Path, content: str) -> None:
- """Write content to a file with error handling."""
try:
- async with aiofiles.open(
- file_path, mode="w", encoding=DEFAULT_FILE_ENCODING
- ) as file:
- await file.write(content)
+ mode = "wb" if isinstance(data, bytes) else "w"
+ encoding = None if isinstance(data, bytes) else DEFAULT_FILE_ENCODING
+ async with aiofiles.open(path_obj, mode=mode, encoding=encoding) as f:
+ await f.write(data)
except PermissionError as e:
- msg = f"Permission denied writing to {file_path}: {e}"
- raise FileIOError(msg) from e
+ msg = f"Permission denied writing to {path_obj}"
+ raise FileError(msg) from e
+ except UnicodeEncodeError as e:
+ msg = f"Encoding error writing to {path_obj}: {e}"
+ raise FileError(msg) from e
except OSError as e:
- msg = f"OS error writing to {file_path}: {e}"
- raise FileIOError(msg) from e
- except Exception as e:
- msg = f"Unexpected error writing to {file_path}: {e}"
- raise FileIOError(msg) from e
+ msg = f"OS error writing to {path_obj}: {e}"
+ raise FileError(msg) from e
-def generate_response_filename(result: WMNResult) -> str:
- """Generate a sanitized filename for saving response data."""
- safe_site_name = sanitize_filename(result.name)
- safe_username = sanitize_filename(result.username)
- status_str = result.status.value
- created_at_str = result.created_at.strftime(RESPONSE_FILE_DATE_FORMAT)
+# Network operations
+async def fetch_json(http_client: BaseSession, url: str) -> dict[str, Any]:
+ """Fetch and parse JSON from a URL.
- return (
- f"{status_str}_{result.response_code}_"
- f"{safe_site_name}_{safe_username}_{created_at_str}"
- f"{RESPONSE_FILE_EXTENSION}"
- )
+ Args:
+ http_client: HTTP client session to use for the request.
+ url: URL to fetch JSON from.
+
+ Returns:
+ Parsed JSON data as dictionary.
+
+ Raises:
+ ValidationError: If http_client or url is missing or invalid.
+        NetworkError: For HTTP errors, non-OK status, empty responses, or JSON parsing issues.
+ """
+ url_stripped = url.strip() if url else ""
+ if not url_stripped:
+ msg = "URL is required and cannot be empty"
+ raise ValidationError(msg)
+
+ try:
+ response = await http_client.get(url_stripped)
+ except HttpError as e:
+ msg = f"Network error fetching {url_stripped}: {e}"
+ raise NetworkError(msg) from e
+
+ if response.status_code != HTTP_STATUS_OK:
+ msg = f"Failed to fetch from {url_stripped}: HTTP {response.status_code}"
+ raise NetworkError(msg)
+
+ if not response.text or not response.text.strip():
+ msg = f"Empty response from {url_stripped}"
+ raise NetworkError(msg)
+
+ try:
+ return response.json()
+ except (ValueError, json.JSONDecodeError) as e:
+ msg = f"Failed to parse JSON from {url_stripped}: {e}"
+ raise NetworkError(msg) from e
+
+
+# Browser operations
+async def open_url(url: str) -> None:
+ """Open a URL in the browser with error handling.
+
+ Args:
+ url: URL to open in the default browser.
+
+ Raises:
+ ValidationError: If url is missing or invalid.
+ BrowserError: For any issue with the browser operation.
+ """
+ url_stripped = url.strip() if url else ""
+ if not url_stripped:
+ msg = "URL is required and cannot be empty"
+ raise ValidationError(msg)
+
+ try:
+ await asyncio.to_thread(webbrowser.open, url_stripped)
+ except webbrowser.Error as e:
+ msg = f"Browser error opening {url_stripped}: {e}"
+ raise BrowserError(msg) from e
+ except OSError as e:
+ msg = f"OS error opening browser for {url_stripped}: {e}"
+ raise BrowserError(msg) from e
+ except Exception as e:
+ msg = f"Unexpected error opening browser for {url_stripped}: {e}"
+ raise BrowserError(msg) from e
diff --git a/naminter/core/constants.py b/naminter/core/constants.py
index 9a7c025..8445830 100644
--- a/naminter/core/constants.py
+++ b/naminter/core/constants.py
@@ -1,4 +1,4 @@
-from typing import Final
+from typing import Final, Literal
# Remote Data Source Configuration
WMN_REMOTE_URL: Final[str] = (
@@ -54,31 +54,28 @@
SITE_KEY_M_CODE: Final[str] = "m_code"
SITE_KEY_KNOWN: Final[str] = "known"
-# Validation Requirements
-REQUIRED_KEYS_ENUMERATE: Final[tuple[str, ...]] = (
- SITE_KEY_NAME,
- SITE_KEY_URI_CHECK,
- SITE_KEY_E_CODE,
- SITE_KEY_E_STRING,
- SITE_KEY_M_STRING,
- SITE_KEY_M_CODE,
- SITE_KEY_CATEGORY,
-)
-
-REQUIRED_KEYS_SELF_ENUM: Final[tuple[str, ...]] = (
- SITE_KEY_NAME,
- SITE_KEY_CATEGORY,
- SITE_KEY_KNOWN,
-)
-
# JSON Configuration
DEFAULT_JSON_INDENT: Final[int] = 2
-DEFAULT_JSON_ENCODING: Final[str] = "utf-8"
DEFAULT_JSON_ENSURE_ASCII: Final[bool] = False
+# JSON Schema Keys
+SCHEMA_KEY_PROPERTIES: Final[str] = "properties"
+SCHEMA_KEY_ITEMS: Final[str] = "items"
+
# File Operations
DEFAULT_FILE_ENCODING: Final[str] = "utf-8"
# Default Values and String Processing
DEFAULT_UNKNOWN_VALUE: Final[str] = "unknown"
EMPTY_STRING: Final[str] = ""
+
+# Character constants
+ASCII_CONTROL_CHAR_THRESHOLD: Final[int] = 32
+
+# HTTP Status codes
+HTTP_STATUS_OK: Final[int] = 200
+
+# HTTP Methods
+HTTP_METHOD_GET: Final[str] = "GET"
+HTTP_METHOD_POST: Final[str] = "POST"
+HttpMethod = Literal["GET", "POST"]
diff --git a/naminter/core/exceptions.py b/naminter/core/exceptions.py
index 7e4915c..0116d5c 100644
--- a/naminter/core/exceptions.py
+++ b/naminter/core/exceptions.py
@@ -41,6 +41,26 @@ class HttpTimeoutError(HttpError):
"""
+class HttpStatusError(HttpError):
+ """Raised when an HTTP request returns an error status code.
+
+ Attributes:
+ status_code: The HTTP status code that caused the error.
+ url: The URL that returned the error status.
+ """
+
+ def __init__(
+ self,
+ message: str,
+ status_code: int | None = None,
+ url: str | None = None,
+ cause: Exception | None = None,
+ ) -> None:
+ super().__init__(message, cause)
+ self.status_code: int | None = status_code
+ self.url: str | None = url
+
+
# Data processing errors
class WMNDataError(NaminterError):
"""Raised when there are issues with WMN data processing or validation.
@@ -49,6 +69,48 @@ class WMNDataError(NaminterError):
"""
+class WMNUninitializedError(WMNDataError):
+ """Raised when WMN data is not initialized or missing.
+
+ This occurs when operations require WMN data but it hasn't been provided
+ or loaded yet.
+ """
+
+
+class WMNUnknownSiteError(WMNDataError):
+ """Raised when a requested site name doesn't exist in the WMN dataset.
+
+ Attributes:
+ site_names: List of unknown site names that were requested.
+ """
+
+ def __init__(
+ self,
+ message: str,
+ site_names: list[str] | None = None,
+ cause: Exception | None = None,
+ ) -> None:
+ super().__init__(message, cause)
+ self.site_names: list[str] = site_names or []
+
+
+class WMNUnknownCategoriesError(WMNDataError):
+ """Raised when requested categories don't exist in the WMN dataset.
+
+ Attributes:
+ categories: List of unknown category names that were requested.
+ """
+
+ def __init__(
+ self,
+ message: str,
+ categories: list[str] | None = None,
+ cause: Exception | None = None,
+ ) -> None:
+ super().__init__(message, cause)
+ self.categories: list[str] = categories or []
+
+
class WMNSchemaError(WMNDataError):
"""Raised when the WMN JSON Schema itself is invalid or cannot be used."""
@@ -70,12 +132,43 @@ def __init__(
self.errors: list[Any] = errors or []
+class WMNArgumentError(WMNDataError):
+ """Raised when invalid arguments are passed to Naminter core APIs.
+
+ This is used for programmer / caller mistakes such as providing an empty
+ username list where at least one username is required.
+ """
+
+
+class WMNEnumerationError(WMNDataError):
+ """Raised when site enumeration fails due to configuration errors.
+
+ This includes invalid headers, strip_bad_char configuration errors,
+ and other site-specific configuration issues.
+ """
+
+
+class WMNFormatError(WMNDataError):
+ """Raised when WMN data formatting fails.
+
+ This includes JSON serialization errors, invalid data structure,
+ and other formatting-related issues.
+ """
+
+
__all__ = [
"HttpError",
"HttpSessionError",
+ "HttpStatusError",
"HttpTimeoutError",
"NaminterError",
+ "WMNArgumentError",
"WMNDataError",
+ "WMNEnumerationError",
+ "WMNFormatError",
"WMNSchemaError",
+ "WMNUninitializedError",
+ "WMNUnknownCategoriesError",
+ "WMNUnknownSiteError",
"WMNValidationError",
]
diff --git a/naminter/core/formatter.py b/naminter/core/formatter.py
new file mode 100644
index 0000000..a6f952c
--- /dev/null
+++ b/naminter/core/formatter.py
@@ -0,0 +1,215 @@
+from collections.abc import Mapping, Sequence
+import json
+from typing import Any
+
+from naminter.core.constants import (
+ DEFAULT_JSON_ENSURE_ASCII,
+ DEFAULT_JSON_INDENT,
+ SCHEMA_KEY_ITEMS,
+ SCHEMA_KEY_PROPERTIES,
+ SITE_KEY_HEADERS,
+ SITE_KEY_NAME,
+ WMN_KEY_AUTHORS,
+ WMN_KEY_CATEGORIES,
+ WMN_KEY_SITES,
+)
+from naminter.core.exceptions import WMNFormatError, WMNSchemaError
+from naminter.core.models import WMNDataset
+
+
+class WMNFormatter:
+ """Formatter for WhatsMyName JSON data."""
+
+ def __init__(self, schema: Mapping[str, Any]) -> None:
+ """Initialize formatter with schema.
+
+ Args:
+ schema: JSON Schema for the dataset.
+ """
+ self.schema = schema
+ self._site_key_order: list[str] | None = None
+ self._site_key_order_set: set[str] | None = None
+
+ @staticmethod
+ def _sort_array_alphabetically(array: list[str]) -> list[str]:
+ """Sort strings alphabetically case-insensitively."""
+ return sorted(array, key=str.casefold)
+
+ @staticmethod
+ def _sort_sites_by_name(sites: Sequence[Any]) -> list[dict[str, Any]]:
+ """Sort sites by name case-insensitively."""
+ site_dicts: list[dict[str, Any]] = []
+ for i, site in enumerate(sites):
+ if not isinstance(site, Mapping):
+ msg = (
+ f"Each site must be an object, "
+ f"got {type(site).__name__} at index {i}"
+ )
+ raise WMNFormatError(msg)
+ site_dicts.append(dict(site))
+
+ return sorted(
+ site_dicts,
+ key=lambda site: str(site.get(SITE_KEY_NAME, "")).casefold(),
+ )
+
+ @staticmethod
+ def _sort_site_headers(
+ site_data: Mapping[str, Any],
+ ) -> dict[str, Any]:
+ """Return new site dict with headers sorted by name."""
+ result = dict(site_data)
+
+ headers = result.get(SITE_KEY_HEADERS)
+ if headers is not None:
+ if not isinstance(headers, dict):
+ msg = (
+ f"'{SITE_KEY_HEADERS}' must be an object, "
+ f"got {type(headers).__name__}"
+ )
+ raise WMNFormatError(msg)
+ result[SITE_KEY_HEADERS] = dict(
+ sorted(
+ headers.items(),
+ key=lambda item: str(item[0]).casefold(),
+ ),
+ )
+
+ return result
+
+ def _reorder_site_keys(
+ self,
+ site_data: Mapping[str, Any],
+ key_order: list[str],
+ ) -> dict[str, Any]:
+ """Return site dict with keys in schema-defined order."""
+ if self._site_key_order_set is None:
+ self._site_key_order_set = set(key_order)
+ allowed = self._site_key_order_set
+ unknown = set(site_data) - allowed
+ if unknown:
+ msg = f"Unknown keys found in site data: {sorted(unknown)}"
+ raise WMNFormatError(msg)
+
+ return {key: site_data[key] for key in key_order if key in site_data}
+
+ @staticmethod
+ def _dumps(obj: object, *, what: str) -> str:
+ """Serialize object to JSON string with consistent error handling."""
+ try:
+ return json.dumps(
+ obj,
+ indent=DEFAULT_JSON_INDENT,
+ ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
+ )
+ except (TypeError, ValueError, RecursionError) as error:
+ msg = f"{what} is not JSON-serializable: {error}"
+ raise WMNFormatError(msg) from error
+
+ def format_schema(self) -> str:
+ """Return formatted schema JSON string."""
+ return self._dumps(self.schema, what="Schema")
+
+ def format_dataset(self, data: WMNDataset) -> str:
+ """Return formatted data JSON string per schema.
+
+ Args:
+ data: WMN dataset to format. This will not be modified.
+
+ Returns:
+ Formatted JSON string.
+
+ Raises:
+ WMNFormatError: If data is not JSON-serializable or invalid.
+ """
+ formatted_authors = self._format_string_array(data, WMN_KEY_AUTHORS)
+ formatted_categories = self._format_string_array(data, WMN_KEY_CATEGORIES)
+ formatted_sites = self._format_sites(data)
+
+ excluded_keys = {WMN_KEY_AUTHORS, WMN_KEY_CATEGORIES, WMN_KEY_SITES}
+ other_keys = {
+ key: value for key, value in data.items() if key not in excluded_keys
+ }
+
+ formatted_data = {
+ WMN_KEY_AUTHORS: formatted_authors,
+ WMN_KEY_CATEGORIES: formatted_categories,
+ WMN_KEY_SITES: formatted_sites,
+ **other_keys,
+ }
+ return self._dumps(formatted_data, what="Data")
+
+ def _get_site_key_order(self) -> list[str]:
+ """Extract key order from schema for site objects.
+
+ Returns:
+ List of keys in the order they appear in the schema.
+
+ Raises:
+ WMNSchemaError: If site schema properties are not found or invalid.
+ """
+ if self._site_key_order is not None:
+ return self._site_key_order
+
+ site_schema = (
+ self.schema
+ .get(SCHEMA_KEY_PROPERTIES, {})
+ .get(WMN_KEY_SITES, {})
+ .get(SCHEMA_KEY_ITEMS, {})
+ .get(SCHEMA_KEY_PROPERTIES)
+ )
+
+ if site_schema is None:
+ msg = "Site schema properties not found in schema"
+ raise WMNSchemaError(msg)
+ if not isinstance(site_schema, dict):
+ msg = (
+ f"Site schema properties must be an object, "
+ f"got {type(site_schema).__name__}"
+ )
+ raise WMNSchemaError(msg)
+
+ self._site_key_order = list(site_schema.keys())
+ self._site_key_order_set = set(self._site_key_order)
+ return self._site_key_order
+
+ def _format_string_array(self, data: Mapping[str, Any], key: str) -> list[str]:
+ """Sort string array alphabetically if present."""
+ array_data = data.get(key)
+ if array_data is None:
+ msg = f"'{key}' is required but not found"
+ raise WMNFormatError(msg)
+ if not isinstance(array_data, list):
+ msg = f"'{key}' must be a list, got {type(array_data).__name__}"
+ raise WMNFormatError(msg)
+ if not array_data:
+ msg = f"'{key}' must be a non-empty list"
+ raise WMNFormatError(msg)
+ for item in array_data:
+ if not isinstance(item, str):
+ msg = f"'{key}' must contain only strings, got {type(item).__name__}"
+ raise WMNFormatError(msg)
+ if not item.strip():
+ msg = f"'{key}' must contain non-empty strings"
+ raise WMNFormatError(msg)
+ return self._sort_array_alphabetically(array_data)
+
+ def _format_site(
+ self,
+ site_data: Mapping[str, Any],
+ key_order: list[str],
+ ) -> dict[str, Any]:
+ """Format one site with sorted headers and ordered keys."""
+ formatted_site = self._sort_site_headers(site_data)
+ return self._reorder_site_keys(formatted_site, key_order)
+
+ def _format_sites(self, data: Mapping[str, Any]) -> list[dict[str, Any]]:
+ """Sort and format site data per schema."""
+ sites = data.get(WMN_KEY_SITES)
+ if not isinstance(sites, list):
+ msg = f"'{WMN_KEY_SITES}' must be a list, got {type(sites).__name__}"
+ raise WMNFormatError(msg)
+
+ sorted_sites = self._sort_sites_by_name(sites)
+ key_order = self._get_site_key_order()
+ return [self._format_site(site_data, key_order) for site_data in sorted_sites]
diff --git a/naminter/core/main.py b/naminter/core/main.py
index 491ffe4..e837eeb 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -1,16 +1,16 @@
import asyncio
+from collections.abc import AsyncGenerator, Awaitable
import logging
-from collections.abc import AsyncGenerator, Awaitable, Callable
-from functools import wraps
-from typing import Any, Literal, TypeVar, overload
+from typing import Any
from naminter.core.constants import (
ACCOUNT_PLACEHOLDER,
- DEFAULT_UNKNOWN_VALUE,
+ DEFAULT_JSON_ENSURE_ASCII,
+ DEFAULT_JSON_INDENT,
EMPTY_STRING,
+ HTTP_METHOD_GET,
+ HTTP_METHOD_POST,
MAX_CONCURRENT_TASKS,
- REQUIRED_KEYS_ENUMERATE,
- REQUIRED_KEYS_SELF_ENUM,
SITE_KEY_CATEGORY,
SITE_KEY_E_CODE,
SITE_KEY_E_STRING,
@@ -30,26 +30,28 @@
from naminter.core.exceptions import (
HttpError,
HttpSessionError,
- HttpTimeoutError,
+ WMNArgumentError,
WMNDataError,
+ WMNEnumerationError,
WMNSchemaError,
+ WMNUninitializedError,
+ WMNUnknownCategoriesError,
+ WMNUnknownSiteError,
WMNValidationError,
)
from naminter.core.models import (
WMNDataset,
+ WMNError,
WMNMode,
- WMNResult,
WMNResponse,
+ WMNResult,
+ WMNSite,
WMNSummary,
- WMNValidationResult,
+ WMNTestResult,
)
from naminter.core.network import BaseSession
-from naminter.core.utils import (
- get_missing_keys,
- validate_dataset,
-)
-
-T = TypeVar("T")
+from naminter.core.utils import execute_tasks
+from naminter.core.validator import WMNValidator
class Naminter:
@@ -62,148 +64,207 @@ def __init__(
wmn_schema: dict[str, Any] | None = None,
max_tasks: int = MAX_CONCURRENT_TASKS,
) -> None:
- """Initialize Naminter with configuration parameters."""
+ """Initialize Naminter with configuration parameters.
+
+ Raises:
+ WMNSchemaError: If the JSON schema is invalid.
+ """
self._logger = logging.getLogger(__name__)
- self._logger.addHandler(logging.NullHandler())
+ if not self._logger.handlers:
+ self._logger.addHandler(logging.NullHandler())
self._wmn_data: WMNDataset | None = wmn_data
self._wmn_schema: dict[str, Any] | None = wmn_schema
self._semaphore = asyncio.Semaphore(max_tasks)
self._http: BaseSession = http_client
- self._session_open: bool = False
- self._session_lock = asyncio.Lock()
- self._dataset_ready: bool = False
-
- async def _open_session(self) -> None:
- """Open the HTTP session."""
- if self._session_open:
- return
-
- async with self._session_lock:
- if self._session_open:
- return
- try:
- await self._http.open()
- self._session_open = True
- self._logger.info("HTTP session opened")
- except HttpSessionError as e:
- self._logger.error("Failed to open HTTP session: %s", e)
- msg = f"HTTP session initialization failed: {e}"
- raise WMNDataError(msg) from e
-
- async def _close_session(self) -> None:
- """Close the HTTP session if open."""
- async with self._session_lock:
- if not self._session_open:
- return
+ self._validator: WMNValidator | None = None
+ if self._wmn_schema:
try:
- await self._http.close()
- except asyncio.CancelledError:
- self._logger.debug("HTTP client close cancelled")
+ self._validator = WMNValidator(self._wmn_schema)
+ except WMNSchemaError as e:
+ self._logger.exception("WMN schema error during initialization")
raise
- except Exception as e:
- self._logger.exception(
- "Unexpected error during HTTP client close: %s", e
- )
- finally:
- self._session_open = False
- async def _ensure_ready(self) -> None:
- """Ensure HTTP session is open and dataset is loaded."""
- if not self._session_open:
- await self._open_session()
+ async def open(self) -> None:
+ """Initialize the HTTP session and validate the WMN dataset.
- if self._dataset_ready:
- return
+ Use this method for long-running services where you need explicit
+ lifecycle control. For scripts and CLI usage, prefer the context
+ manager pattern: `async with Naminter(...) as naminter:`.
+
+ Example:
+ ```python
+ # Long-running service (FastAPI, etc.)
+ naminter = Naminter(http_client, wmn_data)
+ await naminter.open() # Call once at startup
+ # ... handle many requests ...
+
+ await naminter.close() # Call once at shutdown
+ ```
+
+ Raises:
+ HttpSessionError: If HTTP session initialization fails.
+ WMNUninitializedError: If WMN data is not provided.
+ WMNDataError: If WMN data loading fails.
+ WMNValidationError: If dataset validation fails.
+ """
+ try:
+ await self._http.open()
+ self._logger.info("HTTP session opened")
+ except HttpSessionError:
+ self._logger.exception("Failed to open HTTP session")
+ raise
+
+ try:
+ self._validate_dataset()
+ except Exception:
+ await self.close()
+ raise
+
+ def _validate_dataset(self) -> None:
+ """Validate WMN data and schema after HTTP session is opened.
+
+ Raises:
+ WMNUninitializedError: If WMN data is not provided.
+ WMNDataError: If WMN data loading fails.
+ WMNValidationError: If dataset validation fails.
+ """
if not self._wmn_data:
msg = "WMN data must be provided to Naminter constructor"
- raise WMNDataError(msg)
+ raise WMNUninitializedError(msg)
- async with self._session_lock:
- if self._dataset_ready:
- return
+ validation_errors: list[WMNError] = []
+ try:
+ if self._validator:
+ validation_errors = self._validator.validate(self._wmn_data)
+ except (TypeError, ValueError, KeyError, AttributeError) as e:
+ self._logger.exception("Unexpected error loading WMN data")
+ msg = f"Unexpected error loading WMN data: {e}"
+ raise WMNDataError(msg) from e
- try:
- if self._wmn_schema:
- errors = validate_dataset(self._wmn_data, self._wmn_schema)
- if errors:
- msg = "WMN dataset validation failed"
- raise WMNValidationError(msg, errors=errors)
-
- self._dataset_ready = True
- self._logger.info(
- "Dataset loaded: %d sites",
- len(self._wmn_data.get(WMN_KEY_SITES, [])),
- )
- except WMNSchemaError as e:
- msg = f"WMN schema error: {e}"
- raise WMNDataError(msg) from e
- except WMNValidationError:
- raise
- except Exception as e:
- msg = f"Unexpected error loading WMN data: {e}"
- raise WMNDataError(msg) from e
+ if validation_errors:
+ msg = "WMN dataset validation failed"
+ raise WMNValidationError(msg, errors=validation_errors)
+
+ sites = self._wmn_data.get(WMN_KEY_SITES, [])
+ site_errors: list[WMNError] = []
+ if self._validator:
+ site_errors = self._validator.validate_sites(sites)
+
+ if site_errors:
+ msg = f"Site validation failed for {len(site_errors)} site(s)"
+ raise WMNValidationError(msg, errors=site_errors)
+
+ self._logger.info("Dataset loaded: %d sites", len(sites))
- @staticmethod
- def _ensure_initialized(
- method: Callable[..., Any],
- ) -> Callable[..., Any]:
- """Decorator to ensure the instance is ready before calling a method."""
+ async def close(self) -> None:
+ """Close the HTTP session and release resources.
- @wraps(method)
- async def wrapper(self: "Naminter", *args: Any, **kwargs: Any) -> Any:
- await self._ensure_ready()
- return await method(self, *args, **kwargs)
+ Use this method for long-running services to clean up at shutdown.
+ For scripts and CLI usage, prefer the context manager pattern.
- return wrapper
+ Handles errors gracefully during cleanup. CancelledError is propagated
+ to allow proper cancellation handling.
+ """
+ try:
+ await self._http.close()
+ except asyncio.CancelledError:
+ self._logger.debug("HTTP client close cancelled")
+ raise
+ except (HttpSessionError, OSError, RuntimeError):
+ self._logger.exception(
+ "Unexpected error during HTTP client close",
+ )
async def __aenter__(self) -> "Naminter":
- await self._ensure_ready()
+ """Async context manager entry."""
+ await self.open()
return self
async def __aexit__(
- self, exc_type: type | None, exc_val: BaseException | None, exc_tb: Any | None
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: object,
) -> None:
"""Async context manager exit."""
- await self._close_session()
+ await self.close()
def _filter_sites(
self,
site_names: list[str] | None,
include_categories: list[str] | None = None,
exclude_categories: list[str] | None = None,
- ) -> list[dict[str, Any]]:
- """Filter sites by names and categories for the current WMN dataset."""
- assert self._wmn_data is not None
- sites: list[dict[str, Any]] = self._wmn_data.get(WMN_KEY_SITES, [])
+ ) -> list[WMNSite]:
+ """Filter sites by names and categories for the current WMN dataset.
+
+ Args:
+ site_names: Optional list of site names to filter by.
+ include_categories: Optional list of categories to include.
+ exclude_categories: Optional list of categories to exclude.
+
+ Returns:
+ Filtered list of site dictionaries.
+
+ Raises:
+ WMNUninitializedError: If WMN data is not initialized.
+ WMNUnknownSiteError: If unknown site names are provided.
+ WMNUnknownCategoriesError: If unknown categories are provided.
+ """
+ if self._wmn_data is None:
+ msg = "WMN data not initialized"
+ raise WMNUninitializedError(msg)
+
+ sites: list[WMNSite] = self._wmn_data.get(WMN_KEY_SITES, [])
- if not any((site_names, include_categories, exclude_categories)):
+ if not (site_names or include_categories or exclude_categories):
return sites
- filtered_names: frozenset[str] = frozenset()
+ filtered_names: frozenset[str] | None = None
if site_names:
filtered_names = frozenset(site_names)
- available_names: frozenset[str] = frozenset({
- name for site in sites if (name := site.get(SITE_KEY_NAME)) is not None
- })
- missing_names: frozenset[str] = filtered_names - available_names
- if missing_names:
+ available_names = frozenset(
+ site.get(SITE_KEY_NAME)
+ for site in sites
+ if site.get(SITE_KEY_NAME) is not None
+ )
+ if missing_names := filtered_names - available_names:
msg = f"Unknown site names: {sorted(missing_names)}"
- raise WMNDataError(msg)
+ raise WMNUnknownSiteError(msg, site_names=sorted(missing_names))
- include_set: frozenset[str] = (
+ include_set = (
frozenset(include_categories) if include_categories else frozenset()
)
- exclude_set: frozenset[str] = (
+ exclude_set = (
frozenset(exclude_categories) if exclude_categories else frozenset()
)
+ if include_set and include_set.issubset(exclude_set):
+ self._logger.debug(
+ "All included categories are excluded, returning empty list",
+ )
+ return []
+
+ if include_set or exclude_set:
+ available_categories = frozenset(
+ category
+ for site in sites
+ if (category := site.get(SITE_KEY_CATEGORY)) is not None
+ )
+ requested_categories = include_set | exclude_set
+ if unknown_categories := requested_categories - available_categories:
+ msg = f"Unknown categories: {sorted(unknown_categories)}"
+ raise WMNUnknownCategoriesError(
+ msg,
+ categories=sorted(unknown_categories),
+ )
+
filtered_sites = [
site
for site in sites
- if (not filtered_names or site.get(SITE_KEY_NAME) in filtered_names)
+ if (filtered_names is None or site.get(SITE_KEY_NAME) in filtered_names)
and (not include_set or site.get(SITE_KEY_CATEGORY) in include_set)
and (not exclude_set or site.get(SITE_KEY_CATEGORY) not in exclude_set)
]
@@ -215,8 +276,120 @@ def _filter_sites(
)
return filtered_sites
- @_ensure_initialized
- async def get_wmn_summary(
+ def _prepare_request(
+ self,
+ site: WMNSite,
+ username: str,
+ ) -> tuple[str, str, dict[str, str], str | None]:
+ """Prepare all request data for site enumeration.
+
+ Args:
+ site: Site configuration.
+ username: Username to substitute.
+
+ Returns:
+ Tuple of (uri_check, uri_pretty, headers, post_body).
+
+ Raises:
+ WMNEnumerationError: If strip_bad_char configuration is invalid.
+ """
+ clean_username = self._prepare_username(username, site)
+
+ uri_check_template = site[SITE_KEY_URI_CHECK]
+ uri_check = uri_check_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
+
+ uri_pretty_template = site.get(SITE_KEY_URI_PRETTY, uri_check_template)
+ uri_pretty = uri_pretty_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
+
+ headers = site.get(SITE_KEY_HEADERS) or {}
+
+ post_body_template = site.get(SITE_KEY_POST_BODY)
+ post_body = (
+ post_body_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
+ if post_body_template
+ else None
+ )
+
+ return uri_check, uri_pretty, headers, post_body
+
+ def _prepare_username(
+ self,
+ username: str,
+ site: WMNSite,
+ ) -> str:
+ """Prepare username by stripping bad characters.
+
+ Args:
+ username: Raw username to process.
+ site: Site configuration containing strip_bad_char.
+
+ Returns:
+ Cleaned username.
+
+ Raises:
+ WMNEnumerationError: If strip_bad_char configuration is invalid.
+ """
+ strip_bad_char = site.get(SITE_KEY_STRIP_BAD_CHAR, EMPTY_STRING)
+ if not strip_bad_char:
+ return username
+
+ try:
+ return username.translate(
+ str.maketrans(dict.fromkeys(strip_bad_char)),
+ )
+ except (ValueError, TypeError) as e:
+ self._logger.warning(
+ "Invalid strip_bad_char for site: %s - %s",
+ site,
+ e,
+ )
+ msg = f"Invalid strip_bad_char configuration: {e}"
+ raise WMNEnumerationError(msg) from e
+
+ async def _perform_request(
+ self,
+ uri_check: str,
+ headers: dict[str, str],
+ post_body: str | None,
+ site: WMNSite,
+ ) -> WMNResponse:
+ """Perform HTTP request for site enumeration.
+
+ Args:
+ uri_check: URL to check.
+ headers: HTTP headers to send.
+ post_body: Optional POST body data.
+ site: Site configuration for logging.
+
+ Returns:
+ HTTP response object.
+
+ Raises:
+ asyncio.CancelledError: If the request is cancelled.
+ HttpError: If an HTTP error occurs.
+ """
+ async with self._semaphore:
+ method = HTTP_METHOD_POST if post_body else HTTP_METHOD_GET
+ response = await self._http.request(
+ method=method,
+ url=uri_check,
+ headers=headers,
+ data=post_body,
+ )
+
+ self._logger.debug(
+ "%s %s -> %d (%.2fs) | headers=%s | data=%s | site=%s",
+ method,
+ uri_check,
+ response.status_code,
+ response.elapsed,
+ headers,
+ post_body,
+ site,
+ )
+ return response
+
+ def get_wmn_summary(
self,
site_names: list[str] | None = None,
include_categories: list[str] | None = None,
@@ -242,23 +415,24 @@ async def get_wmn_summary(
categories, and known usernames count.
Raises:
- WMNDataError: If site_names contains unknown site names.
+ WMNUnknownSiteError: If site_names contains unknown site names.
+ WMNUnknownCategoriesError: If include_categories or exclude_categories
+ contains unknown categories.
Example:
```python
async with Naminter(wmn_data, wmn_schema) as naminter:
# Get summary of all sites
- summary = await naminter.get_wmn_summary()
+ summary = naminter.get_wmn_summary()
print(f"Total sites: {summary.sites_count}")
# Get summary for specific categories
- summary = await naminter.get_wmn_summary(
- include_categories=["social", "coding"]
+ summary = naminter.get_wmn_summary(
+ include_categories=["social", "coding"],
)
print(f"Social/coding sites: {summary.sites_count}")
```
"""
- assert self._wmn_data is not None
sites = self._filter_sites(
site_names,
include_categories=include_categories,
@@ -266,38 +440,39 @@ async def get_wmn_summary(
)
category_list = [
- site.get(SITE_KEY_CATEGORY) for site in sites if site.get(SITE_KEY_CATEGORY)
+ category
+ for site in sites
+ if (category := site.get(SITE_KEY_CATEGORY)) is not None
]
site_name_list = [
- site.get(SITE_KEY_NAME) for site in sites if site.get(SITE_KEY_NAME)
+ name for site in sites if (name := site.get(SITE_KEY_NAME)) is not None
]
known_count = sum(
- len(site.get(SITE_KEY_KNOWN, []))
+ len(known)
for site in sites
- if isinstance(site.get(SITE_KEY_KNOWN), list)
+ if isinstance((known := site.get(SITE_KEY_KNOWN)), list)
)
- wmn_summary = WMNSummary(
+ summary = WMNSummary(
license=tuple(self._wmn_data.get(WMN_KEY_LICENSE, [])),
authors=tuple(self._wmn_data.get(WMN_KEY_AUTHORS, [])),
- site_names=tuple(str(name) for name in site_name_list),
+ site_names=tuple(site_name_list),
sites_count=len(sites),
- categories=tuple(str(cat) for cat in category_list),
+ categories=tuple(category_list),
categories_count=len(set(category_list)),
known_count=known_count,
)
self._logger.debug(
"WMN summary computed (sites=%d, categories=%d)",
- wmn_summary.sites_count,
- wmn_summary.categories_count,
+ summary.sites_count,
+ summary.categories_count,
)
- return wmn_summary
+ return summary
- @_ensure_initialized
async def enumerate_site(
self,
- site: dict[str, Any],
+ site: WMNSite,
username: str,
mode: WMNMode = WMNMode.ALL,
) -> WMNResult:
@@ -312,20 +487,17 @@ async def enumerate_site(
Args:
site:
A single site configuration dictionary from the WMN dataset. This dict
- must contain, at minimum, the following keys:
- - "name": site name
- - "cat": site category
- - "uri_check": URL template with "{account}" placeholder
- - "e_code": expected HTTP status for a "found" account
- - "e_string": expected string in body for a "found" account
- - "m_code": expected HTTP status for a "missing" account
- - "m_string": expected string in body for a "missing" account
- Optional keys include:
- - "headers": dict of HTTP headers to send with the request.
- - "post_body": POST body template containing "{account}".
- - "strip_bad_char": characters to strip from the username
- before substitution in the URL/body.
- - "uri_pretty": an optional "pretty" URL template for reporting.
+ must contain, at minimum, the following keys: "name" (site name),
+ "cat" (site category), "uri_check" (URL template with "{account}"
+ placeholder), "e_code" (expected HTTP status for an existing account),
+ "e_string" (expected string in body for an existing account),
+ "m_code" (expected HTTP status for a missing account), and
+            "m_string" (expected string in body for a missing account).
+ Optional keys include "headers" (dict of HTTP headers to send with
+ the request), "post_body" (POST body template containing "{account}"),
+ "strip_bad_char" (characters to strip from the username before
+ substitution in the URL/body), and "uri_pretty" (an optional "pretty"
+ URL template for reporting).
username:
The raw username to test on this site. It is used to build the
request URL and optional POST body. If the site defines
@@ -333,33 +505,25 @@ async def enumerate_site(
username before substitution.
mode:
Detection mode that controls how the "expected" (E) and "missing" (M)
- criteria are interpreted when classifying the HTTP response:
- - WMNMode.ALL: All configured conditions for a state must match
- (strict AND logic).
- - WMNMode.ANY: Any matching condition is sufficient
- (looser OR logic).
+ criteria are interpreted when classifying the HTTP response.
+ WMNMode.ALL requires all configured conditions for a state to match
+ (strict AND logic), while WMNMode.ANY allows any matching condition
+ to be sufficient (looser OR logic).
Returns:
WMNResult:
- A single WMNResult instance that encapsulates:
- - name: site name (from "name"),
- - category: site category (from "cat"),
- - username: the username that was tested,
- - url: the final URL used for reporting (may be "uri_pretty"),
- - status: high-level classification, e.g. FOUND, NOT_FOUND,
- AMBIGUOUS, UNKNOWN, ERROR, or NOT_VALID,
- - response_code / response_text / elapsed (if the HTTP request
- completed successfully),
- - error message (if an error occurred).
+ A single WMNResult instance that encapsulates the site name (from
+ "name"), category (from "cat"), the username that was tested, the
+ final URL used for reporting (may be "uri_pretty"), a high-level
+ status classification (e.g. EXISTS, PARTIAL, CONFLICTING, MISSING,
+ UNKNOWN, ERROR, or NOT_VALID), status_code, text, and elapsed time
+ (if the HTTP request completed successfully), and an error message
+ (if an error occurred).
Raises:
asyncio.CancelledError:
Propagated if the caller cancels the task while the HTTP request
is in progress.
- WMNDataError:
- Not raised directly from this method, but may be raised earlier
- when initializing the Naminter instance or when validating the
- underlying dataset.
Example:
```python
@@ -378,164 +542,66 @@ async def enumerate_site(
print(result.name, result.username, result.status, result.url)
```
"""
- missing_keys = get_missing_keys(site, REQUIRED_KEYS_ENUMERATE)
- if missing_keys:
- site_name = site.get(SITE_KEY_NAME, DEFAULT_UNKNOWN_VALUE)
- self._logger.warning(
- "Site '%s' is missing required keys: %s",
- site_name,
- missing_keys,
+ try:
+ uri_check, uri_pretty, headers, post_body = self._prepare_request(
+ site,
+ username,
)
+ except WMNEnumerationError as e:
return WMNResult.from_error(
- name=site_name,
- category=site.get(SITE_KEY_CATEGORY, DEFAULT_UNKNOWN_VALUE),
username=username,
- message=f"Site entry missing required keys: {missing_keys}",
+ message=e.message,
+ site=site,
)
- name = site[SITE_KEY_NAME]
- category = site[SITE_KEY_CATEGORY]
- strip_bad_char = site.get(SITE_KEY_STRIP_BAD_CHAR, EMPTY_STRING)
- if strip_bad_char:
- clean_username = username.translate(
- str.maketrans(dict.fromkeys(strip_bad_char))
- )
- else:
- clean_username = username
-
- uri_check_template = site[SITE_KEY_URI_CHECK]
- uri_check = uri_check_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
- uri_pretty = site.get(SITE_KEY_URI_PRETTY, uri_check_template).replace(
- ACCOUNT_PLACEHOLDER, clean_username
- )
-
- headers = site.get(SITE_KEY_HEADERS, {})
- post_body = site.get(SITE_KEY_POST_BODY)
- if post_body:
- post_body = post_body.replace(ACCOUNT_PLACEHOLDER, clean_username)
- self._logger.debug("Checking %s with POST request", uri_check)
- else:
- self._logger.debug("Checking %s with GET request", uri_check)
-
- result: WMNResult | None = None
- response: WMNResponse | None = None
try:
- async with self._semaphore:
- if post_body:
- response = await self._http.post(
- uri_check, headers=headers, data=post_body
- )
- else:
- response = await self._http.get(uri_check, headers=headers)
-
- self._logger.debug(
- "Response from %s: status=%d, elapsed=%.2fs",
- name,
- response.status_code,
- response.elapsed,
- )
+ response = await self._perform_request(uri_check, headers, post_body, site)
except asyncio.CancelledError:
- self._logger.debug("Request cancelled")
+ self._logger.debug("Request cancelled for site: %s", site)
raise
- except HttpTimeoutError as e:
- self._logger.warning("Request to '%s' timed out: %s", name, e)
- result = WMNResult.from_error(
- name=name,
- category=category,
- username=username,
- url=uri_pretty,
- message=f"Request timeout: {e}",
+ except HttpError as e:
+ error_type = type(e).__name__
+ self._logger.warning(
+ "%s for site: %s - %s",
+ error_type,
+ site,
+ e,
)
- except HttpSessionError as e:
- self._logger.warning("Session error for '%s': %s", name, e)
- result = WMNResult.from_error(
- name=name,
- category=category,
+ return WMNResult.from_error(
username=username,
+ message=f"{error_type}: {e}",
+ site=site,
url=uri_pretty,
- message=f"Session error: {e}",
)
- except HttpError as e:
- self._logger.warning("Network error for '%s': %s", name, e)
- result = WMNResult.from_error(
- name=name,
- category=category,
- username=username,
- url=uri_pretty,
- message=f"Network error: {e}",
+ except (OSError, RuntimeError, ValueError, TypeError) as e:
+ self._logger.exception(
+ "Unexpected error during enumeration for site: %s",
+ site,
)
- except Exception as e:
- self._logger.exception("Unexpected error during request for '%s'", name)
- result = WMNResult.from_error(
- name=name,
- category=category,
+ return WMNResult.from_error(
username=username,
- url=uri_pretty,
message=f"Unexpected error: {e}",
+ site=site,
+ url=uri_pretty,
)
- if result is not None:
- return result
-
result = WMNResult.from_response(
- name=name,
- category=category,
username=username,
url=uri_pretty,
- response_code=response.status_code,
- response_text=response.text,
- elapsed=response.elapsed,
+ response=response,
+ site=site,
mode=mode,
- e_code=site[SITE_KEY_E_CODE],
- e_string=site[SITE_KEY_E_STRING],
- m_code=site[SITE_KEY_M_CODE],
- m_string=site[SITE_KEY_M_STRING],
)
self._logger.debug(
- "Check result for '%s': %s (HTTP %d)",
- name,
+ "Check result for site: %s (HTTP %d) - %s",
result.status.name,
response.status_code,
+ site,
)
return result
- @staticmethod
- async def _execute_tasks(
- coroutines: list[Awaitable[T]],
- as_generator: bool,
- ) -> list[T] | AsyncGenerator[T, None]:
- """Execute tasks and return results as list or generator."""
- if as_generator:
-
- async def _generator() -> AsyncGenerator[T, None]:
- tasks = [asyncio.create_task(coroutine) for coroutine in coroutines]
- try:
- for task in asyncio.as_completed(tasks):
- yield await task
- finally:
- for task in tasks:
- if not task.done():
- task.cancel()
- if tasks:
- await asyncio.gather(*tasks, return_exceptions=True)
-
- return _generator()
- return list(await asyncio.gather(*coroutines))
-
- @overload
- async def enumerate_usernames(
- self,
- usernames: list[str],
- site_names: list[str] | None = None,
- include_categories: list[str] | None = None,
- exclude_categories: list[str] | None = None,
- mode: WMNMode = WMNMode.ALL,
- as_generator: Literal[True] = ...,
- ) -> AsyncGenerator[WMNResult, None]: ...
-
- @overload
async def enumerate_usernames(
self,
usernames: list[str],
@@ -543,31 +609,15 @@ async def enumerate_usernames(
include_categories: list[str] | None = None,
exclude_categories: list[str] | None = None,
mode: WMNMode = WMNMode.ALL,
- as_generator: Literal[False] = ...,
- ) -> list[WMNResult]: ...
-
- @_ensure_initialized
- async def enumerate_usernames(
- self,
- usernames: list[str],
- site_names: list[str] | None = None,
- include_categories: list[str] | None = None,
- exclude_categories: list[str] | None = None,
- mode: WMNMode = WMNMode.ALL,
- as_generator: bool = False,
- ) -> list[WMNResult] | AsyncGenerator[WMNResult, None]:
+ ) -> AsyncGenerator[WMNResult, None]:
"""Enumerate one or multiple usernames across one or multiple sites.
- This is the high-level method for running bulk username checks. It takes:
- - one list of usernames, and
- - a selection of sites (by name and/or category filters),
+ This is the high-level method for running bulk username checks. It takes one
+ list of usernames and a selection of sites (by name and/or category filters),
then runs enumerate_site for every (site, username) pair.
- The method can operate in two modes:
- - "batch" mode (as_generator=False): returns a list of all WMNResult objects
- once all checks are complete.
- - "streaming" mode (as_generator=True): returns an async generator that yields
- WMNResult objects one by one as they finish, without waiting for all tasks.
+ The method returns an async generator that yields WMNResult objects one by one
+ as they finish, without waiting for all tasks to complete.
Args:
usernames:
@@ -588,157 +638,92 @@ async def enumerate_usernames(
skipped. This filter is also applied in addition to site_names and
include_categories.
mode:
- Detection mode forwarded to enumerate_site for each check:
- - WMNMode.ALL: strict evaluation (all "found" indicators must match).
- - WMNMode.ANY: relaxed evaluation (any "found" indicator can match).
- as_generator:
- Controls the shape of the returned value:
- - If False (default), all checks are scheduled, awaited, and a full
- list[WMNResult] is returned when everything is done.
- - If True, an AsyncGenerator[WMNResult, None] is returned instead.
- The caller can then `async for` over individual WMNResult objects
- as they become available.
+ Detection mode forwarded to enumerate_site for each check.
+ WMNMode.ALL uses strict evaluation where all "exists" indicators must
+ match, while WMNMode.ANY uses relaxed evaluation where any "exists"
+ indicator can match.
Returns:
- Union[list[WMNResult], AsyncGenerator[WMNResult, None]]:
- - If as_generator is False:
- A flat list of WMNResult objects, one per (site, username) pair.
- The list order is not guaranteed to match submission order.
- - If as_generator is True:
- An async generator that yields WMNResult objects one at a time
- as tasks complete. This allows streaming processing of results.
+ AsyncGenerator[WMNResult, None]:
+ An async generator that yields WMNResult objects one at a time
+ as tasks complete. This allows streaming processing of results.
+ The order is not guaranteed to match submission order.
Raises:
- WMNDataError:
- If any requested site name in site_names does not exist in the
- loaded WMN dataset. This validation is performed during site filtering
- before any network requests are made.
- WMNDataError / WMNValidationError:
- May be raised earlier when preparing the dataset (via _ensure_ready),
- before enumeration starts.
+ WMNUnknownSiteError: If any requested site name in site_names does not
+ exist in the loaded WMN dataset.
+ WMNUnknownCategoriesError: If include_categories or exclude_categories
+ contains unknown categories.
+ WMNArgumentError: If usernames list is empty.
"""
+ if not usernames:
+ msg = "At least one username must be provided"
+ raise WMNArgumentError(msg)
+
sites = self._filter_sites(
site_names,
include_categories=include_categories,
exclude_categories=exclude_categories,
)
+ if not sites:
+ self._logger.info("No sites match the given filters, nothing to enumerate")
+ return
+
self._logger.info(
"Starting enumeration for %d username(s) on %d site(s)",
len(usernames),
len(sites),
)
- coroutines = [
+ coroutines: list[Awaitable[WMNResult]] = [
self.enumerate_site(site, username, mode)
for site in sites
for username in usernames
]
- return await self._execute_tasks(coroutines, as_generator)
-
- @overload
- async def validate_sites(
- self,
- site_names: list[str] | None = None,
- include_categories: list[str] | None = None,
- exclude_categories: list[str] | None = None,
- mode: WMNMode = WMNMode.ALL,
- as_generator: Literal[True] = ...,
- ) -> AsyncGenerator[WMNValidationResult, None]: ...
+ try:
+ async for result in execute_tasks(coroutines):
+ yield result
+ except asyncio.CancelledError:
+ self._logger.debug("Enumeration cancelled")
+ raise
- @overload
- async def validate_sites(
+ async def enumerate_test(
self,
site_names: list[str] | None = None,
include_categories: list[str] | None = None,
exclude_categories: list[str] | None = None,
mode: WMNMode = WMNMode.ALL,
- as_generator: Literal[False] = ...,
- ) -> list[WMNValidationResult]: ...
+ ) -> AsyncGenerator[WMNTestResult, None]:
+ """Test site detection rules using known usernames from the dataset.
- @_ensure_initialized
- async def validate_sites(
- self,
- site_names: list[str] | None = None,
- include_categories: list[str] | None = None,
- exclude_categories: list[str] | None = None,
- mode: WMNMode = WMNMode.ALL,
- as_generator: bool = False,
- ) -> list[WMNValidationResult] | AsyncGenerator[WMNValidationResult, None]:
- """Validate site detection rules using known usernames from the dataset.
-
- This method is intended for maintainers and for automated health checks of
- the WMN dataset. Instead of testing arbitrary usernames, it:
- - Selects a subset of sites (optionally filtered by site_names and
- categories).
- - For each selected site, reads its list of "known good" usernames
- from the "known" field.
- - For each (site, known_username) pair, calls enumerate_site.
- - Aggregates all WMNResult objects into a single WMNValidationResult per site.
-
- This allows you to confirm that:
- - The configured detection rules ("e_code", "e_string", "m_code", "m_string")
- still correctly identify accounts, and
- - The site entries themselves are structurally valid and complete.
+ This method is intended for maintainers and automated health checks of
+ the WMN dataset. It selects sites (optionally filtered by names and
+ categories), tests each site using its "known" usernames, and yields
+ a WMNTestResult per site.
Args:
site_names:
- Optional list of site names to validate. If None, all sites from the
- dataset are considered (subject to category filters). If provided,
- all names must exist in the dataset; unknown names lead to a
- WMNDataError raised during site filtering.
+ Optional list of site names to test. If None, all sites are
+ tested (subject to category filters).
include_categories:
- Optional list of categories (values of the "cat" field) to include
- during validation. Only sites whose category is in this list are
- validated. This is combined with site_names if both are provided.
+ Optional list of categories to include. Only sites in these
+ categories are tested.
exclude_categories:
- Optional list of categories (values of the "cat" field) to exclude
- from validation. Any site whose category is in this list is skipped.
- This exclusion is applied after site_names and include_categories.
+ Optional list of categories to exclude from testing.
mode:
- Detection mode passed down to enumerate_site for each known username:
- - WMNMode.ALL: strict evaluation (recommended for validation).
- - WMNMode.ANY: relaxed evaluation (useful for exploratory checks).
- as_generator:
- Controls the return type:
- - If False (default), returns a list[WMNValidationResult] after all
- sites have been validated.
- - If True, returns an AsyncGenerator[WMNValidationResult, None] that
- yields one WMNValidationResult per site as soon as that site's
- validation has finished.
+ Detection mode for each test. WMNMode.ALL uses strict evaluation,
+ WMNMode.ANY uses relaxed evaluation.
- Returns:
- Union[list[WMNValidationResult], AsyncGenerator[WMNValidationResult, None]]:
- - If as_generator is False:
- A list where each item is a WMNValidationResult describing one
- site and the WMNResult objects for all of its known usernames.
- - If as_generator is True:
- An async generator that yields WMNValidationResult objects for
- each validated site in completion order.
-
- Each WMNValidationResult includes:
- - name: site name,
- - category: site category (the value of the "cat" field),
- - results: list[WMNResult] for each known username (may be empty),
- - status: aggregate status derived from underlying WMNResult values
- (e.g. ERROR if any check failed),
- - error: textual description if validation could not be performed
- for that site (e.g. missing required keys or unexpected error).
+ Yields:
+ WMNTestResult for each site in completion order, containing the
+ site name, category, list of WMNResult objects, aggregate status,
+ and error message if testing failed.
Raises:
- WMNDataError:
- If any of the requested site_names does not exist in the dataset.
- WMNDataError / WMNValidationError:
- May be raised earlier from _ensure_ready if the dataset or schema
- is invalid.
-
- Site-level error handling:
- - If a site is missing required keys needed for self-validation
- (as defined by REQUIRED_KEYS_SELF_ENUM in code), a WMNValidationResult
- is returned with `error` populated and `results` left empty.
- - If an unexpected exception occurs when validating a site, it is caught
- and converted into a WMNValidationResult with `error` set accordingly.
+ WMNUnknownSiteError: If site_names contains unknown sites.
+ WMNUnknownCategoriesError: If categories are unknown.
"""
sites = self._filter_sites(
site_names,
@@ -746,51 +731,42 @@ async def validate_sites(
exclude_categories=exclude_categories,
)
+ if not sites:
+ self._logger.info("No sites match the given filters, nothing to test")
+ return
+
self._logger.info(
- "Starting validation for %d site(s) (mode=%s)",
+ "Starting test for %d site(s) (mode=%s)",
len(sites),
mode,
)
- async def _enumerate_known(site: dict[str, Any]) -> WMNValidationResult:
- """Helper function to validate a site with all its known users."""
- site_name = site.get(SITE_KEY_NAME, DEFAULT_UNKNOWN_VALUE)
- site_category = site.get(SITE_KEY_CATEGORY, DEFAULT_UNKNOWN_VALUE)
-
- missing_keys = get_missing_keys(site, REQUIRED_KEYS_SELF_ENUM)
- if missing_keys:
- self._logger.warning(
- "Site '%s' is missing required keys for validation: %s",
- site_name,
- missing_keys,
- )
- return WMNValidationResult(
- name=site_name,
- category=site_category,
- error=f"Site data missing required keys: {missing_keys}",
- )
-
+ async def test_site(site: WMNSite) -> WMNTestResult:
+ """Test a single site using its known usernames."""
known = site[SITE_KEY_KNOWN]
self._logger.debug(
- "Validating '%s' with %d known user(s)",
- site_name,
+ "Testing site with %d known user(s): %s",
len(known),
+ site,
)
+ coroutines: list[Awaitable[WMNResult]] = [
+ self.enumerate_site(site, username, mode) for username in known
+ ]
try:
- results = await asyncio.gather(
- *(self.enumerate_site(site, username, mode) for username in known)
- )
- return WMNValidationResult(
- name=site_name, category=site_category, results=results
- )
- except Exception as e:
- self._logger.exception("Validation failed for site='%s'", site_name)
- return WMNValidationResult(
- name=site_name,
- category=site_category,
- error=f"Unexpected error during site validation: {e}",
- )
+ results: list[WMNResult] = [
+ result async for result in execute_tasks(coroutines)
+ ]
+ except asyncio.CancelledError:
+ self._logger.debug("Test cancelled for site: %s", site)
+ raise
+ return WMNTestResult.from_site(site, results=results)
+
+ coroutines: list[Awaitable[WMNTestResult]] = [test_site(site) for site in sites]
- coroutines = [_enumerate_known(site) for site in sites]
- return await self._execute_tasks(coroutines, as_generator)
+ try:
+ async for result in execute_tasks(coroutines):
+ yield result
+ except asyncio.CancelledError:
+ self._logger.debug("Test enumeration cancelled")
+ raise
diff --git a/naminter/core/models.py b/naminter/core/models.py
index b114af5..842ad21 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -1,13 +1,16 @@
from __future__ import annotations
-import json
from dataclasses import dataclass, field
-from datetime import UTC, datetime
+from datetime import UTC, datetime, timedelta
from enum import StrEnum, auto
-from typing import TYPE_CHECKING, Any, TypedDict
+import json
+from typing import Any, NotRequired, TypedDict
-if TYPE_CHECKING:
- from collections.abc import Sequence
+from naminter.core.constants import (
+ DEFAULT_UNKNOWN_VALUE,
+ SITE_KEY_CATEGORY,
+ SITE_KEY_NAME,
+)
class WMNMode(StrEnum):
@@ -23,21 +26,60 @@ class WMNMode(StrEnum):
class WMNStatus(StrEnum):
"""Status of username search results."""
- FOUND = auto()
- AMBIGUOUS = auto()
- UNKNOWN = auto()
- NOT_FOUND = auto()
- NOT_VALID = auto()
ERROR = auto()
+ NOT_VALID = auto()
+ CONFLICTING = auto()
+ PARTIAL = auto()
+ EXISTS = auto()
+ MISSING = auto()
+ UNKNOWN = auto()
+
+
+class WMNSite(TypedDict):
+ """Type definition for a single site in the WMN dataset structure.
+
+ Required fields per JSON schema: name, uri_check, e_code, e_string,
+ m_string, m_code, known, cat. Other fields are optional.
+ """
+
+ name: str
+ cat: str
+ uri_check: str
+ uri_pretty: NotRequired[str]
+ headers: NotRequired[dict[str, str]]
+ post_body: NotRequired[str]
+ strip_bad_char: NotRequired[str]
+ e_code: int
+ e_string: str
+ m_code: int
+ m_string: str
+ known: list[str]
+ valid: NotRequired[bool]
+ protection: NotRequired[list[str]]
+
+
+WMN_REQUIRED_KEYS: frozenset[str] = frozenset({
+ "name",
+ "cat",
+ "uri_check",
+ "e_code",
+ "e_string",
+ "m_code",
+ "m_string",
+ "known",
+})
class WMNDataset(TypedDict):
- """Type definition for WMN dataset structure."""
+ """Type definition for WMN dataset structure.
- sites: list[dict[str, Any]]
- categories: list[str]
+ All fields are required per JSON schema.
+ """
+
+ license: list[str]
authors: list[str]
- license: str | list[str]
+ categories: list[str]
+ sites: list[WMNSite]
@dataclass(slots=True, frozen=True, kw_only=True)
@@ -73,9 +115,9 @@ class WMNResult:
username: str
status: WMNStatus
url: str | None = None
- response_code: int | None = None
- response_text: str | None = None
- elapsed: float | None = None
+ status_code: int | None = None
+ text: str | None = None
+ elapsed: timedelta | None = None
error: str | None = None
created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
@@ -83,79 +125,143 @@ class WMNResult:
def from_error(
cls,
*,
- name: str,
- category: str,
username: str,
message: str,
+ site: WMNSite,
url: str | None = None,
) -> WMNResult:
+ """Create error result.
+
+ Args:
+ username: Username being checked.
+ message: Error message.
+ site: Site configuration.
+ url: Optional URL.
+
+ Returns:
+ WMNResult with ERROR status.
+ """
return cls(
- name=name,
- category=category,
+ name=site.get("name", "unknown"),
+ category=site.get("cat", "unknown"),
username=username,
url=url,
status=WMNStatus.ERROR,
error=message,
)
+ @staticmethod
+ def _matches_any(
+ status_code: int,
+ text: str,
+ check_code: int,
+ check_string: str,
+ ) -> bool:
+ """Check if response matches criteria using OR logic (any match is
+ sufficient)."""
+ return status_code == check_code or check_string in text
+
+ @staticmethod
+ def _matches_all(
+ status_code: int,
+ text: str,
+ check_code: int,
+ check_string: str,
+ ) -> bool:
+ """Check if response matches criteria using AND logic (all must match)."""
+ return status_code == check_code and check_string in text
+
+ @staticmethod
+ def _determine_status(
+ *,
+ condition_exists: bool,
+ condition_missing: bool,
+ partial_exists: bool = False,
+ partial_missing: bool = False,
+ ) -> WMNStatus:
+ """Determine result status based on exists/missing conditions.
+
+ Priority order:
+ 1. CONFLICTING - if both exists and missing conditions are True
+ 2. PARTIAL - if partial match detected (only code OR only text matched)
+ 3. EXISTS - if only exists condition is True
+ 4. MISSING - if only missing condition is True
+ 5. UNKNOWN - if neither condition is True
+ """
+ if condition_exists and condition_missing:
+ return WMNStatus.CONFLICTING
+ if partial_exists or partial_missing:
+ return WMNStatus.PARTIAL
+ if condition_exists:
+ return WMNStatus.EXISTS
+ if condition_missing:
+ return WMNStatus.MISSING
+ return WMNStatus.UNKNOWN
+
@classmethod
def from_response(
cls,
*,
- name: str,
- category: str,
username: str,
- url: str | None,
- response_code: int,
- response_text: str,
- elapsed: float | None,
+ url: str,
+ response: WMNResponse,
+ site: WMNSite,
mode: WMNMode,
- e_code: int | None,
- e_string: str | None,
- m_code: int | None,
- m_string: str | None,
) -> WMNResult:
- if mode == WMNMode.ANY:
- condition_found = (e_code is not None and response_code == e_code) or (
- e_string is not None and e_string in response_text
- )
- condition_not_found = (m_code is not None and response_code == m_code) or (
- m_string is not None and m_string in response_text
- )
- else:
- condition_found = (
- (e_code is None or response_code == e_code)
- and (e_string is None or e_string in response_text)
- and (e_code is not None or e_string is not None)
- )
- condition_not_found = (
- (m_code is None or response_code == m_code)
- and (m_string is None or m_string in response_text)
- and (m_code is not None or m_string is not None)
- )
-
- if condition_found and condition_not_found:
- status = WMNStatus.AMBIGUOUS
- elif condition_found:
- status = WMNStatus.FOUND
- elif condition_not_found:
- status = WMNStatus.NOT_FOUND
+ """Create WMNResult from HTTP response by evaluating detection criteria.
+
+ Args:
+ username: Username being checked.
+ url: URL that was checked (computed uri_pretty).
+ response: HTTP response object.
+ site: Site configuration dictionary with detection criteria.
+ mode: Detection mode (ANY or ALL).
+
+ Returns:
+ WMNResult with determined status.
+ """
+ exists_code_match = response.status_code == site["e_code"]
+ exists_text_match = site["e_string"] in response.text
+ missing_code_match = response.status_code == site["m_code"]
+ missing_text_match = site["m_string"] in response.text
+
+ partial_exists = (exists_code_match and not exists_text_match) or (
+ exists_text_match and not exists_code_match
+ )
+ partial_missing = (missing_code_match and not missing_text_match) or (
+ missing_text_match and not missing_code_match
+ )
+
+ if mode == WMNMode.ALL:
+ condition_exists = exists_code_match and exists_text_match
+ condition_missing = missing_code_match and missing_text_match
else:
- status = WMNStatus.UNKNOWN
+ condition_exists = exists_code_match or exists_text_match
+ condition_missing = missing_code_match or missing_text_match
+
+ status = cls._determine_status(
+ condition_exists=condition_exists,
+ condition_missing=condition_missing,
+ partial_exists=partial_exists,
+ partial_missing=partial_missing,
+ )
return cls(
- name=name,
- category=category,
+ name=site["name"],
+ category=site["cat"],
username=username,
url=url,
status=status,
- response_code=response_code,
- elapsed=elapsed,
- response_text=response_text,
+ status_code=response.status_code,
+ elapsed=response.elapsed,
+ text=response.text,
)
def to_dict(
- self, *, exclude_response_text: bool = False, include_none: bool = False
+ self,
+ *,
+ exclude_text: bool = False,
+ exclude_none: bool = True,
) -> dict[str, Any]:
result_dict: dict[str, Any] = {
"name": self.name,
@@ -163,14 +269,14 @@ def to_dict(
"username": self.username,
"status": self.status.value,
"url": self.url,
- "response_code": self.response_code,
- "elapsed": self.elapsed,
+ "status_code": self.status_code,
+        "elapsed": self.elapsed.total_seconds() if self.elapsed is not None else None,
"error": self.error,
"created_at": self.created_at.isoformat(),
}
- if not exclude_response_text:
- result_dict["response_text"] = self.response_text
- if not include_none:
+ if not exclude_text:
+ result_dict["text"] = self.text
+ if exclude_none:
result_dict = {
key: value for key, value in result_dict.items() if value is not None
}
@@ -178,12 +284,12 @@ def to_dict(
@dataclass(slots=True, frozen=True, kw_only=True)
-class WMNValidationResult:
+class WMNTestResult:
"""Result of validation testing for a site's detection methods."""
name: str
category: str
- results: Sequence[WMNResult] | None = None
+ results: list[WMNResult] | None = None
error: str | None = None
created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
status: WMNStatus = field(init=False)
@@ -191,34 +297,86 @@ class WMNValidationResult:
def __post_init__(self) -> None:
object.__setattr__(self, "status", self._get_result_status())
+ @classmethod
+ def from_site(
+ cls,
+ site: WMNSite,
+ *,
+ results: list[WMNResult] | None = None,
+ error: str | None = None,
+ ) -> WMNTestResult:
+ """Create WMNTestResult from a site configuration.
+
+ Args:
+ site: Site configuration dictionary.
+ results: Optional list of WMNResult objects.
+ error: Optional error message.
+
+ Returns:
+ WMNTestResult with name and category extracted from site.
+ """
+ return cls(
+ name=site.get(SITE_KEY_NAME, DEFAULT_UNKNOWN_VALUE),
+ category=site.get(SITE_KEY_CATEGORY, DEFAULT_UNKNOWN_VALUE),
+ results=results,
+ error=error,
+ )
+
def _get_result_status(self) -> WMNStatus:
- status = WMNStatus.UNKNOWN
+ """Determine aggregate status from individual results.
+
+ Priority order:
+ 1. ERROR - if error message exists or any result has ERROR status
+ 2. UNKNOWN - if no results exist
+ 3. Return the single status if all results have the same status
+ 4. CONFLICTING - if both EXISTS and MISSING are present
+ 5. PARTIAL - for other mixed statuses
+ (e.g., EXISTS + UNKNOWN, PARTIAL + MISSING)
+ """
if self.error:
- status = WMNStatus.ERROR
- elif not self.results:
- status = WMNStatus.UNKNOWN
- else:
- statuses = {result.status for result in self.results}
- if WMNStatus.ERROR in statuses:
- status = WMNStatus.ERROR
- elif WMNStatus.FOUND in statuses and WMNStatus.NOT_FOUND in statuses:
- status = WMNStatus.AMBIGUOUS
- elif len(statuses) == 1:
- status = next(iter(statuses))
- return status
-
- def to_dict(self, *, exclude_response_text: bool = False) -> dict[str, Any]:
- return {
+ return WMNStatus.ERROR
+
+ if not self.results:
+ return WMNStatus.UNKNOWN
+
+ statuses = {result.status for result in self.results}
+
+ if WMNStatus.ERROR in statuses:
+ return WMNStatus.ERROR
+
+ if len(statuses) == 1:
+ return next(iter(statuses))
+
+ if WMNStatus.EXISTS in statuses and WMNStatus.MISSING in statuses:
+ return WMNStatus.CONFLICTING
+
+ return WMNStatus.PARTIAL
+
+ def to_dict(
+ self,
+ *,
+ exclude_text: bool = False,
+ exclude_none: bool = True,
+ ) -> dict[str, Any]:
+ result_dict: dict[str, Any] = {
"name": self.name,
"category": self.category,
"results": [
- result.to_dict(exclude_response_text=exclude_response_text)
+ result.to_dict(
+ exclude_text=exclude_text,
+ exclude_none=exclude_none,
+ )
for result in (self.results or [])
],
"error": self.error,
"status": self.status.value,
"created_at": self.created_at.isoformat(),
}
+ if exclude_none:
+ result_dict = {
+ key: value for key, value in result_dict.items() if value is not None
+ }
+ return result_dict
@dataclass(slots=True, frozen=True, kw_only=True)
@@ -227,15 +385,19 @@ class WMNResponse:
status_code: int
text: str
- elapsed: float
+ elapsed: timedelta
+
+ def json(self) -> dict[str, Any] | list[Any] | str | int | float | bool | None:
+ """Parse the response body as JSON and return the resulting object.
- def json(self) -> Any:
- """Parse the response body as JSON and return the resulting object."""
+ Raises:
+ json.JSONDecodeError: If the response text is not valid JSON.
+ """
return json.loads(self.text)
@dataclass(frozen=True, slots=True, kw_only=True)
-class WMNValidationModel:
+class WMNError:
"""Structured representation of a validation error."""
path: str
diff --git a/naminter/core/network.py b/naminter/core/network.py
index f903d9b..abf2282 100644
--- a/naminter/core/network.py
+++ b/naminter/core/network.py
@@ -1,38 +1,122 @@
import asyncio
-import logging
from collections.abc import Mapping
-from typing import TYPE_CHECKING, Any, Protocol, cast, runtime_checkable
+import logging
+from typing import Any, Protocol, cast, runtime_checkable
from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
from curl_cffi.requests import AsyncSession, ProxySpec
-from curl_cffi.requests.exceptions import RequestException as CurlRequestException
-from curl_cffi.requests.exceptions import Timeout as CurlTimeout
-
-if TYPE_CHECKING:
- from curl_cffi.requests.session import HttpMethod
+from curl_cffi.requests.exceptions import (
+ CertificateVerifyError as CurlCertificateVerifyError,
+)
+from curl_cffi.requests.exceptions import (
+ ConnectionError as CurlConnectionError,
+)
+from curl_cffi.requests.exceptions import (
+ CookieConflict as CurlCookieConflict,
+)
+from curl_cffi.requests.exceptions import (
+ DNSError as CurlDNSError,
+)
+from curl_cffi.requests.exceptions import (
+ HTTPError as CurlHTTPError,
+)
+from curl_cffi.requests.exceptions import (
+ ImpersonateError as CurlImpersonateError,
+)
+from curl_cffi.requests.exceptions import (
+ IncompleteRead as CurlIncompleteRead,
+)
+from curl_cffi.requests.exceptions import (
+ InterfaceError as CurlInterfaceError,
+)
+from curl_cffi.requests.exceptions import (
+ InvalidProxyURL as CurlInvalidProxyURL,
+)
+from curl_cffi.requests.exceptions import (
+ InvalidURL as CurlInvalidURL,
+)
+from curl_cffi.requests.exceptions import (
+ ProxyError as CurlProxyError,
+)
+from curl_cffi.requests.exceptions import (
+ RequestException as CurlRequestException,
+)
+from curl_cffi.requests.exceptions import (
+ SessionClosed as CurlSessionClosed,
+)
+from curl_cffi.requests.exceptions import (
+ SSLError as CurlSSLError,
+)
+from curl_cffi.requests.exceptions import (
+ Timeout as CurlTimeout,
+)
+from curl_cffi.requests.exceptions import (
+ TooManyRedirects as CurlTooManyRedirects,
+)
-
-from .constants import HTTP_REQUEST_TIMEOUT_SECONDS
-from .exceptions import HttpError, HttpSessionError, HttpTimeoutError
-from .models import WMNResponse
+from naminter.core.constants import (
+ HTTP_METHOD_GET,
+ HTTP_METHOD_POST,
+ HTTP_REQUEST_TIMEOUT_SECONDS,
+ HttpMethod,
+)
+from naminter.core.exceptions import (
+ HttpError,
+ HttpSessionError,
+ HttpStatusError,
+ HttpTimeoutError,
+)
+from naminter.core.models import WMNResponse
@runtime_checkable
class BaseSession(Protocol):
- """Async HTTP client protocol for Naminter adapters."""
+ """Async HTTP client protocol for Naminter adapters.
+
+ Implementations should raise the following exceptions:
+ - HttpSessionError: For session initialization/management errors
+ - HttpTimeoutError: For request timeouts
+ - HttpStatusError: For HTTP error status codes (4xx, 5xx)
+ - HttpError: For other network-related errors
+
+ All exceptions should preserve the underlying cause when available.
+ """
async def open(self) -> None:
- """Open the underlying HTTP session."""
+ """Open the underlying HTTP session.
+
+ Raises:
+ HttpSessionError: If session initialization fails.
+ """
...
async def close(self) -> None:
- """Close the underlying HTTP session."""
+ """Close the underlying HTTP session.
+
+ Should handle errors gracefully and not raise exceptions during cleanup.
+ """
...
async def get(
- self, url: str, headers: Mapping[str, str] | None = None
+ self,
+ url: str,
+ headers: Mapping[str, str] | None = None,
) -> WMNResponse:
- """HTTP GET request (see class docstring for error contract)."""
+ """HTTP GET request.
+
+ Args:
+ url: The URL to request.
+ headers: Optional HTTP headers to include.
+
+ Returns:
+ WMNResponse: Response object with status, text, and elapsed time.
+
+ Raises:
+ HttpSessionError: If session is not initialized or invalid.
+ HttpTimeoutError: If the request times out.
+ HttpStatusError: If HTTP error status code is returned.
+ HttpError: For other network-related errors.
+ """
...
async def post(
@@ -41,21 +125,71 @@ async def post(
headers: Mapping[str, str] | None = None,
data: str | bytes | None = None,
) -> WMNResponse:
- """HTTP POST request (see class docstring for error contract)."""
+ """HTTP POST request.
+
+ Args:
+ url: The URL to request.
+ headers: Optional HTTP headers to include.
+ data: Optional request body data.
+
+ Returns:
+ WMNResponse: Response object with status, text, and elapsed time.
+
+ Raises:
+ HttpSessionError: If session is not initialized or invalid.
+ HttpTimeoutError: If the request times out.
+ HttpStatusError: If HTTP error status code is returned.
+ HttpError: For other network-related errors.
+ """
...
async def request(
self,
- method: str,
+ method: HttpMethod,
url: str,
headers: Mapping[str, str] | None = None,
data: str | bytes | None = None,
) -> WMNResponse:
- """Generic HTTP request (see class docstring for error contract)."""
+ """Generic HTTP request.
+
+ Args:
+ method: HTTP method (GET or POST only).
+ url: The URL to request.
+ headers: Optional HTTP headers to include.
+ data: Optional request body data.
+
+ Returns:
+ WMNResponse: Response object with status, text, and elapsed time.
+
+ Raises:
+ HttpSessionError: If session is not initialized or invalid.
+ HttpTimeoutError: If the request times out.
+ HttpStatusError: If HTTP error status code is returned.
+ HttpError: For other network-related errors.
+ """
+ ...
+
+ async def __aenter__(self) -> "BaseSession":
+ """Async context manager entry."""
+ ...
+
+ async def __aexit__(
+ self,
+ exc_type: type | None,
+ exc_val: BaseException | None,
+ exc_tb: object,
+ ) -> None:
+ """Async context manager exit."""
...
class CurlCFFISession:
+ """HTTP session implementation using curl_cffi library.
+
+ Provides browser impersonation, proxy support, SSL verification,
+ and custom fingerprinting capabilities.
+ """
+
def __init__(
self,
*,
@@ -68,6 +202,18 @@ def __init__(
akamai: str | None = None,
extra_fp: ExtraFingerprints | dict[str, Any] | None = None,
) -> None:
+ """Initialize CurlCFFISession with configuration.
+
+ Args:
+ proxies: Proxy configuration as string or dict.
+ verify: Whether to verify SSL certificates.
+ timeout: Request timeout in seconds.
+ allow_redirects: Whether to follow HTTP redirects.
+ impersonate: Browser to impersonate (e.g., 'chrome', 'firefox').
+ ja3: JA3 fingerprint string for TLS fingerprinting.
+ akamai: Akamai fingerprint string.
+ extra_fp: Additional fingerprinting options.
+ """
self._logger = logging.getLogger(__name__)
self._session: AsyncSession | None = None
@@ -86,44 +232,70 @@ def __init__(
self._lock = asyncio.Lock()
async def open(self) -> None:
+ """Open the HTTP session.
+
+ Raises:
+ HttpSessionError: If session initialization fails.
+ """
if self._session is not None:
return
async with self._lock:
- if self._session is None:
- try:
- proxies_spec: ProxySpec | None = cast(
- "ProxySpec | None", self._proxies
- )
- extra_fp_spec: Any = self._extra_fp
- self._session = AsyncSession(
- proxies=proxies_spec,
- verify=self._verify,
- timeout=self._timeout,
- allow_redirects=self._allow_redirects,
- impersonate=self._impersonate,
- ja3=self._ja3,
- akamai=self._akamai,
- extra_fp=extra_fp_spec,
- )
- except Exception as e:
- msg = "Unexpected error opening HTTP session"
- raise HttpSessionError(msg, cause=e) from e
+ if self._session is not None:
+ return
+
+ try:
+ proxies_spec: ProxySpec | None = cast("ProxySpec | None", self._proxies)
+ extra_fp_spec: Any = self._extra_fp
+ self._session = AsyncSession(
+ proxies=proxies_spec,
+ verify=self._verify,
+ timeout=self._timeout,
+ allow_redirects=self._allow_redirects,
+ impersonate=self._impersonate,
+ ja3=self._ja3,
+ akamai=self._akamai,
+ extra_fp=extra_fp_spec,
+ )
+ except CurlImpersonateError as e:
+ msg = f"Browser impersonation failed: {e}"
+ raise HttpSessionError(msg, cause=e) from e
+ except (CurlInvalidProxyURL, CurlInvalidURL) as e:
+ msg = f"Invalid URL in session configuration: {e}"
+ raise HttpError(msg, cause=e) from e
+ except CurlInterfaceError as e:
+ msg = f"Network interface error during session initialization: {e}"
+ raise HttpSessionError(msg, cause=e) from e
+ except CurlRequestException as e:
+ msg = f"Failed to initialize HTTP session: {e}"
+ raise HttpSessionError(msg, cause=e) from e
+ except Exception as e:
+ msg = "Unexpected error opening HTTP session"
+ raise HttpSessionError(msg, cause=e) from e
async def close(self) -> None:
- if not self._session:
+ """Close the HTTP session.
+
+        Catches common cleanup errors (already-closed sessions, OS/runtime
+        errors) and logs them instead of propagating; other exceptions may raise.
+ """
+ if self._session is None:
return
try:
await self._session.close()
- except Exception as e:
+ except CurlSessionClosed:
+ self._logger.debug("HTTP session was already closed")
+ except (OSError, RuntimeError, AttributeError) as e:
self._logger.warning("Unexpected error closing HTTP session: %s", e)
finally:
self._session = None
async def get(
- self, url: str, headers: Mapping[str, str] | None = None
+ self,
+ url: str,
+ headers: Mapping[str, str] | None = None,
) -> WMNResponse:
- return await self.request("GET", url, headers=headers)
+ return await self.request(HTTP_METHOD_GET, url, headers=headers)
async def post(
self,
@@ -131,24 +303,53 @@ async def post(
headers: Mapping[str, str] | None = None,
data: str | bytes | None = None,
) -> WMNResponse:
- return await self.request("POST", url, headers=headers, data=data)
+ return await self.request(HTTP_METHOD_POST, url, headers=headers, data=data)
async def request(
self,
- method: str,
+ method: HttpMethod,
url: str,
headers: Mapping[str, str] | None = None,
data: str | bytes | None = None,
) -> WMNResponse:
- await self.open()
+ """Perform HTTP request.
+
+ Args:
+ method: HTTP method (GET or POST only).
+ url: The URL to request.
+ headers: Optional HTTP headers.
+ data: Optional request body.
+
+ Returns:
+ WMNResponse: Response with status, text, and elapsed time.
+
+ Raises:
+ HttpSessionError: If session is not initialized or was closed.
+ HttpTimeoutError: If the request times out.
+ HttpStatusError: If HTTP error status code is returned (4xx, 5xx).
+ HttpError: For unsupported HTTP methods or other network-related errors.
+ """
+ if self._session is None:
+ msg = "HTTP session not initialized."
+ raise HttpSessionError(msg)
+
+ method_upper = method.upper()
+ if method_upper not in {HTTP_METHOD_GET, HTTP_METHOD_POST}:
+ msg = (
+ f"Unsupported HTTP method: {method!r}. "
+ f"Only {HTTP_METHOD_GET} and {HTTP_METHOD_POST} are supported."
+ )
+ raise HttpError(msg)
- assert self._session is not None
+ headers_dict: dict[str, str] | None = None
+ if headers is not None:
+ headers_dict = dict(headers) if not isinstance(headers, dict) else headers
try:
response = await self._session.request( # type: ignore[reportUnknownMemberType]
method=cast("HttpMethod", method.upper()),
url=url,
- headers=dict(headers) if headers else None,
+ headers=headers_dict,
data=data,
)
@@ -158,15 +359,53 @@ async def request(
elapsed=response.elapsed,
)
except CurlTimeout as e:
- msg = f"{method} timeout for {url}"
+ msg = f"{method_upper} timeout for {url}"
raise HttpTimeoutError(msg, cause=e) from e
+ except CurlSessionClosed as e:
+ msg = f"HTTP session was closed: {e}"
+ raise HttpSessionError(msg, cause=e) from e
+ except CurlHTTPError as e:
+ status_code: int | None = None
+ if hasattr(e, "response") and e.response is not None:
+ status_code = getattr(e.response, "status_code", None)
+ msg = f"{method_upper} returned error status for {url}"
+ raise HttpStatusError(msg, status_code=status_code, url=url, cause=e) from e
+ except (
+ CurlSSLError,
+ CurlCertificateVerifyError,
+ CurlDNSError,
+ CurlConnectionError,
+ CurlProxyError,
+ CurlInterfaceError,
+ CurlTooManyRedirects,
+ CurlInvalidProxyURL,
+ CurlInvalidURL,
+ CurlIncompleteRead,
+ CurlCookieConflict,
+ ) as e:
+ msg = f"{method_upper} request failed: {e}"
+ raise HttpError(msg, cause=e) from e
except CurlRequestException as e:
- msg = f"{method} failed for {url}: {e}"
+ msg = f"{method_upper} request failed: {e}"
raise HttpError(msg, cause=e) from e
except Exception as e:
- msg = f"Unexpected error during {method} request to {url}: {e}"
+ msg = f"Unexpected error during {method_upper} request: {e}"
raise HttpError(msg, cause=e) from e
+ async def __aenter__(self) -> "CurlCFFISession":
+ """Async context manager entry."""
+ await self.open()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: object,
+ ) -> None:
+ """Async context manager exit."""
+ await self.close()
+
__all__ = [
"BaseSession",
diff --git a/naminter/core/utils.py b/naminter/core/utils.py
index 08a3ba7..f329430 100644
--- a/naminter/core/utils.py
+++ b/naminter/core/utils.py
@@ -1,115 +1,47 @@
-import json
-import logging
-from collections.abc import Sequence
-from typing import Any
+import asyncio
+from collections.abc import AsyncGenerator, Awaitable, Sequence
+from typing import Any, TypeVar
-from jsonschema import Draft7Validator
-from jsonschema.exceptions import SchemaError as JsonSchemaError
+T = TypeVar("T")
-from .constants import (
- DEFAULT_JSON_ENSURE_ASCII,
- DEFAULT_JSON_INDENT,
- SITE_KEY_NAME,
- WMN_KEY_SITES,
-)
-from .exceptions import WMNSchemaError
-from .models import WMNDataset, WMNValidationModel
-logger = logging.getLogger(__name__)
-
-
-def validate_dataset(
- data: WMNDataset, schema: dict[str, Any]
-) -> list[WMNValidationModel]:
- """Validate WMN dataset against JSON Schema and return list of errors.
-
- Raises WMNSchemaError if the provided schema is invalid.
+def get_missing_keys(data: dict[str, Any], keys: Sequence[str]) -> list[str]:
+ """Return a list of required keys missing from a dictionary.
Args:
- data: WMN dataset to validate
- schema: JSON Schema to validate against
- """
- if not schema:
- return []
-
- try:
- validator = Draft7Validator(schema)
- except JsonSchemaError as error:
- msg = f"Invalid JSON schema: {error}"
- raise WMNSchemaError(msg) from error
-
- errors: list[WMNValidationModel] = []
- for error in validator.iter_errors(data): # type: ignore[reportUnknownMemberType]
- message_text = error.message
- path_string = error.json_path
- data_preview: str | None = None
-
- try:
- if error.absolute_path:
- current_data = data
- for segment in error.absolute_path:
- current_data = current_data[segment]
- if current_data is not None:
- data_preview = json.dumps(
- current_data,
- ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
- indent=DEFAULT_JSON_INDENT,
- )
- except Exception:
- data_preview = None
-
- errors.append(
- WMNValidationModel(
- path=path_string,
- data=data_preview,
- message=message_text,
- )
- )
-
- sites_data = data.get(WMN_KEY_SITES, [])
-
- name_indices: dict[str, list[int]] = {}
- for index, site in enumerate(sites_data):
- site_name = site.get(SITE_KEY_NAME)
- if site_name:
- name_indices.setdefault(site_name, []).append(index)
+ data: Dictionary to check for missing keys.
+ keys: Sequence of keys that should be present.
- for site_name, indices in name_indices.items():
- if len(indices) > 1:
- for index in indices:
- path_string = f"$.{WMN_KEY_SITES}[{index}]"
- try:
- site_data = sites_data[index]
- data_preview = json.dumps(
- site_data,
- ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
- indent=DEFAULT_JSON_INDENT,
- )
- except Exception:
- data_preview = None
-
- errors.append(
- WMNValidationModel(
- path=path_string,
- data=data_preview,
- message=(
- f"Duplicate site name found: '{site_name}' "
- f"(appears {len(indices)} times)"
- ),
- )
- )
-
- return errors
+ Returns:
+ List of keys that are missing from the dictionary. Empty list if
+ all keys are present.
+ """
+ return [key for key in keys if key not in data]
-def get_missing_keys(data: dict[str, Any], keys: Sequence[str]) -> list[str]:
- """Return a list of required keys missing from a dictionary.
+async def execute_tasks(
+ awaitables: Sequence[Awaitable[T]],
+) -> AsyncGenerator[T, None]:
+ """Execute awaitables concurrently and yield results as they complete.
Args:
- data: Dictionary to check for missing keys
- keys: Sequence of keys that should be present
+ awaitables: Sequence of awaitables to execute.
- Returns:
- List of keys that are missing from the dictionary
+ Yields:
+ Results from completed awaitables.
"""
- return [key for key in keys if key not in data]
+ if not awaitables:
+ return
+
+ scheduled_futures: list[asyncio.Future[T]] = [
+ asyncio.ensure_future(awaitable) for awaitable in awaitables
+ ]
+
+ try:
+ for completed_future in asyncio.as_completed(scheduled_futures):
+ yield await completed_future
+ finally:
+ for scheduled_future in scheduled_futures:
+ if not scheduled_future.done():
+ scheduled_future.cancel()
+ await asyncio.gather(*scheduled_futures, return_exceptions=True)
diff --git a/naminter/core/validator.py b/naminter/core/validator.py
new file mode 100644
index 0000000..70dd21d
--- /dev/null
+++ b/naminter/core/validator.py
@@ -0,0 +1,282 @@
+import contextlib
+from collections import defaultdict
+from collections.abc import Mapping, Sequence
+import json
+import logging
+from typing import Any
+
+from jsonschema.exceptions import SchemaError as JsonSchemaError
+from jsonschema.validators import validator_for
+
+from naminter.core.constants import (
+ DEFAULT_JSON_ENSURE_ASCII,
+ DEFAULT_JSON_INDENT,
+ SITE_KEY_E_CODE,
+ SITE_KEY_E_STRING,
+ SITE_KEY_HEADERS,
+ SITE_KEY_KNOWN,
+ SITE_KEY_M_CODE,
+ SITE_KEY_M_STRING,
+ SITE_KEY_NAME,
+ SITE_KEY_URI_CHECK,
+ WMN_KEY_SITES,
+)
+from naminter.core.exceptions import WMNSchemaError
+from naminter.core.models import WMN_REQUIRED_KEYS, WMNDataset, WMNError, WMNSite
+
+logger = logging.getLogger(__name__)
+
+
+class WMNValidator:
+ """Validates WMN dataset against JSON Schema."""
+
+ def __init__(self, schema: Mapping[str, Any]) -> None:
+ """Initialize validator with schema.
+
+ Args:
+ schema: JSON Schema to validate against. Must not be empty.
+
+ Raises:
+ WMNSchemaError: If the provided schema is empty, invalid, or cannot be used.
+ """
+ self.schema = dict(schema)
+ if not self.schema:
+ msg = "Schema cannot be empty"
+ raise WMNSchemaError(msg)
+ try:
+ validator_cls = validator_for(self.schema)
+ validator_cls.check_schema(self.schema)
+ self.validator = validator_cls(self.schema)
+ except JsonSchemaError as e:
+ msg = f"Invalid JSON schema: {e}"
+ raise WMNSchemaError(msg) from e
+ except Exception as e:
+ msg = f"Failed to initialize JSON schema validator: {e}"
+ raise WMNSchemaError(msg) from e
+
+ def validate(self, data: WMNDataset) -> list[WMNError]:
+ """Validate dataset and return list of errors.
+
+ Args:
+ data: WMN dataset to validate. This will not be modified.
+
+ Returns:
+ List of validation errors. Empty list if validation passes.
+ """
+ errors: list[WMNError] = []
+ errors.extend(self._validate_schema(data))
+ errors.extend(WMNValidator._validate_duplicates(data))
+ return errors
+
+ def _validate_schema(self, data: WMNDataset) -> list[WMNError]:
+ """Validate dataset against JSON schema and return errors.
+
+ Args:
+ data: WMN dataset to validate.
+
+ Returns:
+ List of schema validation errors.
+ """
+ errors: list[WMNError] = []
+ for error in sorted(
+ self.validator.iter_errors(data),
+ key=lambda err: list(err.absolute_path),
+ ):
+ data_preview = WMNValidator._preview(error.instance)
+ errors.append(
+ WMNError(
+ path=error.json_path,
+ data=data_preview,
+ message=error.message,
+ ),
+ )
+ return errors
+
+ @staticmethod
+ def _preview(value: object) -> str | None:
+ """Generate JSON preview of a value.
+
+ Args:
+ value: Value to preview.
+
+ Returns:
+ JSON string preview or None if generation fails.
+ """
+ try:
+ return json.dumps(
+ value,
+ ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
+ indent=DEFAULT_JSON_INDENT,
+ )
+ except (TypeError, ValueError) as e:
+ logger.debug(
+ "Failed to generate data preview: %s",
+ e,
+ exc_info=True,
+ )
+ return None
+
+ @staticmethod
+ def _get_missing_keys(data: dict[str, Any], keys: Sequence[str]) -> list[str]:
+ """Return a list of required keys missing from a dictionary.
+
+ Args:
+ data: Dictionary to check for missing keys.
+ keys: Sequence of keys that should be present.
+
+ Returns:
+ List of keys that are missing from the dictionary. Empty list if
+ all keys are present.
+ """
+ return [key for key in keys if key not in data]
+
+ @staticmethod
+ def _validate_duplicates(data: WMNDataset) -> list[WMNError]:
+ """Validate that site names are unique and return errors if duplicates found.
+
+ Args:
+ data: WMN dataset to check.
+
+ Returns:
+ List of duplicate site errors.
+ """
+ sites_data = data.get(WMN_KEY_SITES, [])
+ if not isinstance(sites_data, list):
+ return []
+
+ name_indices: dict[str, list[int]] = defaultdict(list)
+ for index, site in enumerate(sites_data):
+ if not isinstance(site, dict):
+ continue
+ raw = site.get(SITE_KEY_NAME)
+ if not isinstance(raw, str):
+ continue
+ name = raw.strip()
+ if not name:
+ continue
+ name_indices[name].append(index)
+
+ errors: list[WMNError] = []
+ for site_name, indices in name_indices.items():
+ if len(indices) > 1:
+ for index in indices:
+ path_string = f"$.{WMN_KEY_SITES}[{index}].{SITE_KEY_NAME}"
+ data_preview = WMNValidator._preview(sites_data[index])
+
+ errors.append(
+ WMNError(
+ path=path_string,
+ data=data_preview,
+ message=(
+ f"Duplicate site name found: '{site_name}' "
+ f"(appears {len(indices)} times)"
+ ),
+ ),
+ )
+ return errors
+
+ def validate_sites(self, sites: list[WMNSite]) -> list[WMNError]:
+ """Validate all site configurations.
+
+ Args:
+ sites: List of site configurations to validate.
+
+ Returns:
+ List of validation errors. Empty if all sites are valid.
+ """
+ errors: list[WMNError] = []
+
+ for index, site in enumerate(sites):
+ base_path = f"$.{WMN_KEY_SITES}[{index}]"
+ site_name = site.get(SITE_KEY_NAME, "unknown")
+ site_errors: list[WMNError] = []
+
+ def _create_error(
+ path_suffix: str,
+ message: str,
+ data: object | None = None,
+ *,
+ _base_path: str = base_path,
+ _site_errors: list[WMNError] = site_errors,
+ ) -> None:
+ """Helper to create WMNError with path and data."""
+ path = (
+ f"{_base_path}.{path_suffix}" if path_suffix else _base_path
+ )
+ data_preview = None
+ if data is not None:
+ with contextlib.suppress(TypeError, ValueError):
+ data_preview = json.dumps(
+ data,
+ ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
+ indent=DEFAULT_JSON_INDENT,
+ )
+ error = WMNError(path=path, data=data_preview, message=message)
+ _site_errors.append(error)
+ errors.append(error)
+
+ missing_keys = WMNValidator._get_missing_keys(site, WMN_REQUIRED_KEYS)
+ if missing_keys:
+ _create_error("", f"Missing required keys: {missing_keys}", site)
+ if site_errors:
+ error_messages = [error.message for error in site_errors]
+ logger.warning(
+ "Invalid site %s: %s",
+ site_name,
+ "; ".join(error_messages),
+ )
+ continue
+
+ uri_check = site[SITE_KEY_URI_CHECK]
+ if not isinstance(uri_check, str) or not uri_check:
+ _create_error(
+ SITE_KEY_URI_CHECK,
+ f"Invalid {SITE_KEY_URI_CHECK}: must be non-empty string",
+ uri_check,
+ )
+
+ for key in (SITE_KEY_E_CODE, SITE_KEY_M_CODE):
+ value = site[key]
+ if not isinstance(value, int):
+ _create_error(
+ key,
+ f"Invalid {key}: must be integer, got {type(value).__name__}",
+ value,
+ )
+
+ for key in (SITE_KEY_E_STRING, SITE_KEY_M_STRING):
+ value = site[key]
+ if not isinstance(value, str):
+ _create_error(
+ key,
+ f"Invalid {key}: must be string, got {type(value).__name__}",
+ value,
+ )
+
+ if SITE_KEY_HEADERS in site:
+ headers = site[SITE_KEY_HEADERS]
+ if headers is not None and not isinstance(headers, dict):
+ _create_error(
+ SITE_KEY_HEADERS,
+ f"Invalid {SITE_KEY_HEADERS}: must be dict or None, "
+ f"got {type(headers).__name__}",
+ headers,
+ )
+
+ known = site[SITE_KEY_KNOWN]
+ if not isinstance(known, list):
+ msg = (
+ f"Invalid {SITE_KEY_KNOWN}: must be list, "
+ f"got {type(known).__name__}"
+ )
+ _create_error(SITE_KEY_KNOWN, msg, known)
+
+ if site_errors:
+ error_messages = [error.message for error in site_errors]
+ logger.warning(
+ "Invalid site %s: %s",
+ site_name,
+ "; ".join(error_messages),
+ )
+
+ return errors
diff --git a/pyproject.toml b/pyproject.toml
index d2ef3ff..b8ba17a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -33,19 +33,22 @@ keywords = [
"naminter"
]
dependencies = [
- "click>=8.3.0",
- "curl-cffi>=0.13.0",
+ "click>=8.3.1",
+ "curl-cffi>=0.14.0",
"aiofiles>=25.1.0",
"jinja2>=3.1.6",
- "jsonschema>=4.25.1",
+ "jsonschema>=4.26.0",
"rich>=14.2.0",
- "rich-click>=1.9.4",
- "weasyprint>=66.0",
+ "rich-click>=1.9.5",
+ "weasyprint>=67.0",
]
[project.optional-dependencies]
dev = [
- "ruff>=0.14.4",
+ "ruff>=0.14.11",
+ "mkdocs>=1.6.1",
+ "mkdocs-material>=9.7.1",
+ "mkdocstrings[python]>=1.0.0",
]
[project.urls]
@@ -62,12 +65,39 @@ naminter = "naminter.cli.main:entry_point"
requires = ["hatchling"]
build-backend = "hatchling.build"
-
# Ruff configuration
[tool.ruff]
line-length = 88
target-version = "py311"
-exclude = []
+exclude = [
+ ".bzr",
+ ".direnv",
+ ".eggs",
+ ".git",
+ ".git-rewrite",
+ ".hg",
+ ".ipynb_checkpoints",
+ ".mypy_cache",
+ ".nox",
+ ".pants.d",
+ ".pyenv",
+ ".pytest_cache",
+ ".pytype",
+ ".ruff_cache",
+ ".svn",
+ ".tox",
+ ".venv",
+ ".vscode",
+ "__pypackages__",
+ "_build",
+ "buck-out",
+ "build",
+ "dist",
+ "node_modules",
+ "site-packages",
+ "venv",
+]
+respect-gitignore = true
[tool.ruff.lint]
preview = true
@@ -80,7 +110,7 @@ select = [
'B', # flake8-bugbear
'C4', # flake8-comprehensions
'UP', # pyupgrade
- 'ARG001', # unused-function-args
+ 'ARG', # flake8-unused-arguments
'SIM', # flake8-simplify
'TCH', # flake8-type-checking
'TID', # flake8-tidy-imports
@@ -96,29 +126,39 @@ select = [
'PIE', # flake8-pie
'T20', # flake8-print
'YTT', # flake8-2020
-]
-ignore = [
- 'PLR0913', # too-many-arguments
- 'PLR0917', # too-many-locals
- 'PLR0904', # too-many-branches
- 'E701', # multiple-statements-on-one-line
- 'B008', # do-not-perform-function-calls-in-argument-defaults
- 'C901', # too-complex
- 'PLR2004', # magic-value-comparison
- 'PLR0912', # too-many-branches
- 'PLR0915', # too-many-statements
- 'PLR0916', # too-many-boolean-expressions
- 'COM812', # missing-trailing-comma (conflicts with formatter)
+ 'ANN', # flake8-annotations
+ 'ASYNC', # flake8-async
+ 'S', # flake8-bandit
+ 'BLE', # flake8-blind-except
+ 'FBT', # flake8-boolean-trap
+ 'B904', # raise-without-from-inside-except
+ 'ERA', # eradicate
+ 'PD', # pandas-vet
+ 'PGH', # pygrep-hooks
+ 'PT', # flake8-pytest-style
+ 'RET', # flake8-return
+ 'SLF', # flake8-self
+ 'SLOT', # flake8-slots
+ 'TRY', # tryceratops
+ 'FA', # flake8-future-annotations
+ 'ISC', # flake8-implicit-str-concat
+ 'PTH', # flake8-use-pathlib
+ 'PERF', # perflint
+ 'NPY', # numpy
]
[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"] # unused imports
+"tests/**/*.py" = ["S101", "PLR2004"] # allow assert and magic values in tests
[tool.ruff.lint.isort]
known-first-party = ["naminter"]
+force-sort-within-sections = true
+split-on-trailing-comma = true
[tool.ruff.lint.flake8-quotes]
docstring-quotes = "double"
+inline-quotes = "double"
[tool.ruff.lint.mccabe]
max-complexity = 10
@@ -132,5 +172,8 @@ max-statements = 50
[tool.ruff.format]
preview = true
quote-style = "double"
+indent-style = "space"
+skip-magic-trailing-comma = false
+line-ending = "auto"
docstring-code-format = true
docstring-code-line-length = 79
diff --git a/uv.lock b/uv.lock
index 2f751e5..06ad3a6 100644
--- a/uv.lock
+++ b/uv.lock
@@ -20,6 +20,29 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" },
]
+[[package]]
+name = "babel"
+version = "2.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" },
+]
+
+[[package]]
+name = "backrefs"
+version = "6.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/86/e3/bb3a439d5cb255c4774724810ad8073830fac9c9dee123555820c1bcc806/backrefs-6.1.tar.gz", hash = "sha256:3bba1749aafe1db9b915f00e0dd166cba613b6f788ffd63060ac3485dc9be231", size = 7011962, upload-time = "2025-11-15T14:52:08.323Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3b/ee/c216d52f58ea75b5e1841022bbae24438b19834a29b163cb32aa3a2a7c6e/backrefs-6.1-py310-none-any.whl", hash = "sha256:2a2ccb96302337ce61ee4717ceacfbf26ba4efb1d55af86564b8bbaeda39cac1", size = 381059, upload-time = "2025-11-15T14:51:59.758Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/9a/8da246d988ded941da96c7ed945d63e94a445637eaad985a0ed88787cb89/backrefs-6.1-py311-none-any.whl", hash = "sha256:e82bba3875ee4430f4de4b6db19429a27275d95a5f3773c57e9e18abc23fd2b7", size = 392854, upload-time = "2025-11-15T14:52:01.194Z" },
+ { url = "https://files.pythonhosted.org/packages/37/c9/fd117a6f9300c62bbc33bc337fd2b3c6bfe28b6e9701de336b52d7a797ad/backrefs-6.1-py312-none-any.whl", hash = "sha256:c64698c8d2269343d88947c0735cb4b78745bd3ba590e10313fbf3f78c34da5a", size = 398770, upload-time = "2025-11-15T14:52:02.584Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/95/7118e935b0b0bd3f94dfec2d852fd4e4f4f9757bdb49850519acd245cd3a/backrefs-6.1-py313-none-any.whl", hash = "sha256:4c9d3dc1e2e558965202c012304f33d4e0e477e1c103663fd2c3cc9bb18b0d05", size = 400726, upload-time = "2025-11-15T14:52:04.093Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/72/6296bad135bfafd3254ae3648cd152980a424bd6fed64a101af00cc7ba31/backrefs-6.1-py314-none-any.whl", hash = "sha256:13eafbc9ccd5222e9c1f0bec563e6d2a6d21514962f11e7fc79872fd56cbc853", size = 412584, upload-time = "2025-11-15T14:52:05.233Z" },
+ { url = "https://files.pythonhosted.org/packages/02/e3/a4fa1946722c4c7b063cc25043a12d9ce9b4323777f89643be74cef2993c/backrefs-6.1-py39-none-any.whl", hash = "sha256:a9e99b8a4867852cad177a6430e31b0f6e495d65f8c6c134b68c14c3c95bf4b0", size = 381058, upload-time = "2025-11-15T14:52:06.698Z" },
+]
+
[[package]]
name = "brotli"
version = "1.1.0"
@@ -170,16 +193,89 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
]
+[[package]]
+name = "charset-normalizer"
+version = "3.4.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" },
+ { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" },
+ { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" },
+ { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" },
+ { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" },
+ { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" },
+ { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" },
+ { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" },
+ { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" },
+ { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" },
+ { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" },
+ { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" },
+ { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" },
+ { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" },
+ { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" },
+ { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
+ { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
+ { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
+ { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
+ { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
+ { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
+ { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
+ { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
+ { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
+ { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
+ { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
+ { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
+ { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
+ { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
+ { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
+]
+
[[package]]
name = "click"
-version = "8.3.0"
+version = "8.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" },
+ { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
]
[[package]]
@@ -206,23 +302,25 @@ wheels = [
[[package]]
name = "curl-cffi"
-version = "0.13.0"
+version = "0.14.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "cffi" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/4e/3d/f39ca1f8fdf14408888e7c25e15eed63eac5f47926e206fb93300d28378c/curl_cffi-0.13.0.tar.gz", hash = "sha256:62ecd90a382bd5023750e3606e0aa7cb1a3a8ba41c14270b8e5e149ebf72c5ca", size = 151303, upload-time = "2025-08-06T13:05:42.988Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/9b/c9/0067d9a25ed4592b022d4558157fcdb6e123516083700786d38091688767/curl_cffi-0.14.0.tar.gz", hash = "sha256:5ffbc82e59f05008ec08ea432f0e535418823cda44178ee518906a54f27a5f0f", size = 162633, upload-time = "2025-12-16T03:25:07.931Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/19/d1/acabfd460f1de26cad882e5ef344d9adde1507034528cb6f5698a2e6a2f1/curl_cffi-0.13.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:434cadbe8df2f08b2fc2c16dff2779fb40b984af99c06aa700af898e185bb9db", size = 5686337, upload-time = "2025-08-06T13:05:28.985Z" },
- { url = "https://files.pythonhosted.org/packages/2c/1c/cdb4fb2d16a0e9de068e0e5bc02094e105ce58a687ff30b4c6f88e25a057/curl_cffi-0.13.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:59afa877a9ae09efa04646a7d068eeea48915a95d9add0a29854e7781679fcd7", size = 2994613, upload-time = "2025-08-06T13:05:31.027Z" },
- { url = "https://files.pythonhosted.org/packages/04/3e/fdf617c1ec18c3038b77065d484d7517bb30f8fb8847224eb1f601a4e8bc/curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d06ed389e45a7ca97b17c275dbedd3d6524560270e675c720e93a2018a766076", size = 7931353, upload-time = "2025-08-06T13:05:32.273Z" },
- { url = "https://files.pythonhosted.org/packages/3d/10/6f30c05d251cf03ddc2b9fd19880f3cab8c193255e733444a2df03b18944/curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4e0de45ab3b7a835c72bd53640c2347415111b43421b5c7a1a0b18deae2e541", size = 7486378, upload-time = "2025-08-06T13:05:33.672Z" },
- { url = "https://files.pythonhosted.org/packages/77/81/5bdb7dd0d669a817397b2e92193559bf66c3807f5848a48ad10cf02bf6c7/curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8eb4083371bbb94e9470d782de235fb5268bf43520de020c9e5e6be8f395443f", size = 8328585, upload-time = "2025-08-06T13:05:35.28Z" },
- { url = "https://files.pythonhosted.org/packages/ce/c1/df5c6b4cfad41c08442e0f727e449f4fb5a05f8aa564d1acac29062e9e8e/curl_cffi-0.13.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:28911b526e8cd4aa0e5e38401bfe6887e8093907272f1f67ca22e6beb2933a51", size = 8739831, upload-time = "2025-08-06T13:05:37.078Z" },
- { url = "https://files.pythonhosted.org/packages/1a/91/6dd1910a212f2e8eafe57877bcf97748eb24849e1511a266687546066b8a/curl_cffi-0.13.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d433ffcb455ab01dd0d7bde47109083aa38b59863aa183d29c668ae4c96bf8e", size = 8711908, upload-time = "2025-08-06T13:05:38.741Z" },
- { url = "https://files.pythonhosted.org/packages/6d/e4/15a253f9b4bf8d008c31e176c162d2704a7e0c5e24d35942f759df107b68/curl_cffi-0.13.0-cp39-abi3-win_amd64.whl", hash = "sha256:66a6b75ce971de9af64f1b6812e275f60b88880577bac47ef1fa19694fa21cd3", size = 1614510, upload-time = "2025-08-06T13:05:40.451Z" },
- { url = "https://files.pythonhosted.org/packages/f9/0f/9c5275f17ad6ff5be70edb8e0120fdc184a658c9577ca426d4230f654beb/curl_cffi-0.13.0-cp39-abi3-win_arm64.whl", hash = "sha256:d438a3b45244e874794bc4081dc1e356d2bb926dcc7021e5a8fef2e2105ef1d8", size = 1365753, upload-time = "2025-08-06T13:05:41.879Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/f0/0f21e9688eaac85e705537b3a87a5588d0cefb2f09d83e83e0e8be93aa99/curl_cffi-0.14.0-cp39-abi3-macosx_14_0_arm64.whl", hash = "sha256:e35e89c6a69872f9749d6d5fda642ed4fc159619329e99d577d0104c9aad5893", size = 3087277, upload-time = "2025-12-16T03:24:49.607Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/a3/0419bd48fce5b145cb6a2344c6ac17efa588f5b0061f212c88e0723da026/curl_cffi-0.14.0-cp39-abi3-macosx_15_0_x86_64.whl", hash = "sha256:5945478cd28ad7dfb5c54473bcfb6743ee1d66554d57951fdf8fc0e7d8cf4e45", size = 5804650, upload-time = "2025-12-16T03:24:51.518Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/07/a238dd062b7841b8caa2fa8a359eb997147ff3161288f0dd46654d898b4d/curl_cffi-0.14.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c42e8fa3c667db9ccd2e696ee47adcd3cd5b0838d7282f3fc45f6c0ef3cfdfa7", size = 8231918, upload-time = "2025-12-16T03:24:52.862Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/d2/ce907c9b37b5caf76ac08db40cc4ce3d9f94c5500db68a195af3513eacbc/curl_cffi-0.14.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:060fe2c99c41d3cb7f894de318ddf4b0301b08dca70453d769bd4e74b36b8483", size = 8654624, upload-time = "2025-12-16T03:24:54.579Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/ae/6256995b18c75e6ef76b30753a5109e786813aa79088b27c8eabb1ef85c9/curl_cffi-0.14.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b158c41a25388690dd0d40b5bc38d1e0f512135f17fdb8029868cbc1993d2e5b", size = 8010654, upload-time = "2025-12-16T03:24:56.507Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/10/ff64249e516b103cb762e0a9dca3ee0f04cf25e2a1d5d9838e0f1273d071/curl_cffi-0.14.0-cp39-abi3-manylinux_2_28_i686.whl", hash = "sha256:1439fbef3500fb723333c826adf0efb0e2e5065a703fb5eccce637a2250db34a", size = 7781969, upload-time = "2025-12-16T03:24:57.885Z" },
+ { url = "https://files.pythonhosted.org/packages/51/76/d6f7bb76c2d12811aa7ff16f5e17b678abdd1b357b9a8ac56310ceccabd5/curl_cffi-0.14.0-cp39-abi3-manylinux_2_34_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e7176f2c2d22b542e3cf261072a81deb018cfa7688930f95dddef215caddb469", size = 7969133, upload-time = "2025-12-16T03:24:59.261Z" },
+ { url = "https://files.pythonhosted.org/packages/23/7c/cca39c0ed4e1772613d3cba13091c0e9d3b89365e84b9bf9838259a3cd8f/curl_cffi-0.14.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:03f21ade2d72978c2bb8670e9b6de5260e2755092b02d94b70b906813662998d", size = 9080167, upload-time = "2025-12-16T03:25:00.946Z" },
+ { url = "https://files.pythonhosted.org/packages/75/03/a942d7119d3e8911094d157598ae0169b1c6ca1bd3f27d7991b279bcc45b/curl_cffi-0.14.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:58ebf02de64ee5c95613209ddacb014c2d2f86298d7080c0a1c12ed876ee0690", size = 9520464, upload-time = "2025-12-16T03:25:02.922Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/77/78900e9b0833066d2274bda75cba426fdb4cef7fbf6a4f6a6ca447607bec/curl_cffi-0.14.0-cp39-abi3-win_amd64.whl", hash = "sha256:6e503f9a103f6ae7acfb3890c843b53ec030785a22ae7682a22cc43afb94123e", size = 1677416, upload-time = "2025-12-16T03:25:04.902Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/7c/d2ba86b0b3e1e2830bd94163d047de122c69a8df03c5c7c36326c456ad82/curl_cffi-0.14.0-cp39-abi3-win_arm64.whl", hash = "sha256:2eed50a969201605c863c4c31269dfc3e0da52916086ac54553cfa353022425c", size = 1425067, upload-time = "2025-12-16T03:25:06.454Z" },
]
[[package]]
@@ -281,6 +379,39 @@ woff = [
{ name = "zopfli" },
]
+[[package]]
+name = "ghp-import"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "python-dateutil" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d9/29/d40217cbe2f6b1359e00c6c307bb3fc876ba74068cbab3dde77f03ca0dc4/ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343", size = 10943, upload-time = "2022-05-02T15:47:16.11Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034, upload-time = "2022-05-02T15:47:14.552Z" },
+]
+
+[[package]]
+name = "griffe"
+version = "1.15.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0d/0c/3a471b6e31951dce2360477420d0a8d1e00dea6cf33b70f3e8c3ab6e28e1/griffe-1.15.0.tar.gz", hash = "sha256:7726e3afd6f298fbc3696e67958803e7ac843c1cfe59734b6251a40cdbfb5eea", size = 424112, upload-time = "2025-11-10T15:03:15.52Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9c/83/3b1d03d36f224edded98e9affd0467630fc09d766c0e56fb1498cbb04a9b/griffe-1.15.0-py3-none-any.whl", hash = "sha256:6f6762661949411031f5fcda9593f586e6ce8340f0ba88921a0f2ef7a81eb9a3", size = 150705, upload-time = "2025-11-10T15:03:13.549Z" },
+]
+
+[[package]]
+name = "idna"
+version = "3.11"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
+]
+
[[package]]
name = "jinja2"
version = "3.1.6"
@@ -295,7 +426,7 @@ wheels = [
[[package]]
name = "jsonschema"
-version = "4.25.1"
+version = "4.26.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "attrs" },
@@ -303,9 +434,9 @@ dependencies = [
{ name = "referencing" },
{ name = "rpds-py" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" },
+ { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" },
]
[[package]]
@@ -320,6 +451,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" },
]
+[[package]]
+name = "markdown"
+version = "3.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/7dd27d9d863b3376fcf23a5a13cb5d024aed1db46f963f1b5735ae43b3be/markdown-3.10.tar.gz", hash = "sha256:37062d4f2aa4b2b6b32aefb80faa300f82cc790cb949a35b8caede34f2b68c0e", size = 364931, upload-time = "2025-11-03T19:51:15.007Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/70/81/54e3ce63502cd085a0c556652a4e1b919c45a446bd1e5300e10c44c8c521/markdown-3.10-py3-none-any.whl", hash = "sha256:b5b99d6951e2e4948d939255596523444c0e677c669700b1d17aa4a8a464cb7c", size = 107678, upload-time = "2025-11-03T19:51:13.887Z" },
+]
+
[[package]]
name = "markdown-it-py"
version = "4.0.0"
@@ -415,6 +555,134 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]
+[[package]]
+name = "mergedeep"
+version = "1.3.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3a/41/580bb4006e3ed0361b8151a01d324fb03f420815446c7def45d02f74c270/mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", size = 4661, upload-time = "2021-02-05T18:55:30.623Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354, upload-time = "2021-02-05T18:55:29.583Z" },
+]
+
+[[package]]
+name = "mkdocs"
+version = "1.6.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "ghp-import" },
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "markupsafe" },
+ { name = "mergedeep" },
+ { name = "mkdocs-get-deps" },
+ { name = "packaging" },
+ { name = "pathspec" },
+ { name = "pyyaml" },
+ { name = "pyyaml-env-tag" },
+ { name = "watchdog" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bc/c6/bbd4f061bd16b378247f12953ffcb04786a618ce5e904b8c5a01a0309061/mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2", size = 3889159, upload-time = "2024-08-30T12:24:06.899Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/5b/dbc6a8cddc9cfa9c4971d59fb12bb8d42e161b7e7f8cc89e49137c5b279c/mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e", size = 3864451, upload-time = "2024-08-30T12:24:05.054Z" },
+]
+
+[[package]]
+name = "mkdocs-autorefs"
+version = "1.4.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown" },
+ { name = "markupsafe" },
+ { name = "mkdocs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/51/fa/9124cd63d822e2bcbea1450ae68cdc3faf3655c69b455f3a7ed36ce6c628/mkdocs_autorefs-1.4.3.tar.gz", hash = "sha256:beee715b254455c4aa93b6ef3c67579c399ca092259cc41b7d9342573ff1fc75", size = 55425, upload-time = "2025-08-26T14:23:17.223Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9f/4d/7123b6fa2278000688ebd338e2a06d16870aaf9eceae6ba047ea05f92df1/mkdocs_autorefs-1.4.3-py3-none-any.whl", hash = "sha256:469d85eb3114801d08e9cc55d102b3ba65917a869b893403b8987b601cf55dc9", size = 25034, upload-time = "2025-08-26T14:23:15.906Z" },
+]
+
+[[package]]
+name = "mkdocs-get-deps"
+version = "0.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mergedeep" },
+ { name = "platformdirs" },
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/98/f5/ed29cd50067784976f25ed0ed6fcd3c2ce9eb90650aa3b2796ddf7b6870b/mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c", size = 10239, upload-time = "2023-11-20T17:51:09.981Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521, upload-time = "2023-11-20T17:51:08.587Z" },
+]
+
+[[package]]
+name = "mkdocs-material"
+version = "9.7.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "babel" },
+ { name = "backrefs" },
+ { name = "colorama" },
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "mkdocs" },
+ { name = "mkdocs-material-extensions" },
+ { name = "paginate" },
+ { name = "pygments" },
+ { name = "pymdown-extensions" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/27/e2/2ffc356cd72f1473d07c7719d82a8f2cbd261666828614ecb95b12169f41/mkdocs_material-9.7.1.tar.gz", hash = "sha256:89601b8f2c3e6c6ee0a918cc3566cb201d40bf37c3cd3c2067e26fadb8cce2b8", size = 4094392, upload-time = "2025-12-18T09:49:00.308Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3e/32/ed071cb721aca8c227718cffcf7bd539620e9799bbf2619e90c757bfd030/mkdocs_material-9.7.1-py3-none-any.whl", hash = "sha256:3f6100937d7d731f87f1e3e3b021c97f7239666b9ba1151ab476cabb96c60d5c", size = 9297166, upload-time = "2025-12-18T09:48:56.664Z" },
+]
+
+[[package]]
+name = "mkdocs-material-extensions"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/79/9b/9b4c96d6593b2a541e1cb8b34899a6d021d208bb357042823d4d2cabdbe7/mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443", size = 11847, upload-time = "2023-11-22T19:09:45.208Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5b/54/662a4743aa81d9582ee9339d4ffa3c8fd40a4965e033d77b9da9774d3960/mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31", size = 8728, upload-time = "2023-11-22T19:09:43.465Z" },
+]
+
+[[package]]
+name = "mkdocstrings"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "markupsafe" },
+ { name = "mkdocs" },
+ { name = "mkdocs-autorefs" },
+ { name = "pymdown-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e5/13/10bbf9d56565fd91b91e6f5a8cd9b9d8a2b101c4e8ad6eeafa35a706301d/mkdocstrings-1.0.0.tar.gz", hash = "sha256:351a006dbb27aefce241ade110d3cd040c1145b7a3eb5fd5ac23f03ed67f401a", size = 101086, upload-time = "2025-11-27T15:39:40.534Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/fc/80aa31b79133634721cf7855d37b76ea49773599214896f2ff10be03de2a/mkdocstrings-1.0.0-py3-none-any.whl", hash = "sha256:4c50eb960bff6e05dfc631f6bc00dfabffbcb29c5ff25f676d64daae05ed82fa", size = 35135, upload-time = "2025-11-27T15:39:39.301Z" },
+]
+
+[package.optional-dependencies]
+python = [
+ { name = "mkdocstrings-python" },
+]
+
+[[package]]
+name = "mkdocstrings-python"
+version = "2.0.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "griffe" },
+ { name = "mkdocs-autorefs" },
+ { name = "mkdocstrings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/24/75/d30af27a2906f00eb90143470272376d728521997800f5dce5b340ba35bc/mkdocstrings_python-2.0.1.tar.gz", hash = "sha256:843a562221e6a471fefdd4b45cc6c22d2607ccbad632879234fa9692e9cf7732", size = 199345, upload-time = "2025-12-03T14:26:11.755Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/06/c5f8deba7d2cbdfa7967a716ae801aa9ca5f734b8f54fd473ef77a088dbe/mkdocstrings_python-2.0.1-py3-none-any.whl", hash = "sha256:66ecff45c5f8b71bf174e11d49afc845c2dfc7fc0ab17a86b6b337e0f24d8d90", size = 105055, upload-time = "2025-12-03T14:26:10.184Z" },
+]
+
[[package]]
name = "naminter"
version = "1.0.7"
@@ -432,23 +700,56 @@ dependencies = [
[package.optional-dependencies]
dev = [
+ { name = "mkdocs" },
+ { name = "mkdocs-material" },
+ { name = "mkdocstrings", extra = ["python"] },
{ name = "ruff" },
]
[package.metadata]
requires-dist = [
{ name = "aiofiles", specifier = ">=25.1.0" },
- { name = "click", specifier = ">=8.3.0" },
- { name = "curl-cffi", specifier = ">=0.13.0" },
+ { name = "click", specifier = ">=8.3.1" },
+ { name = "curl-cffi", specifier = ">=0.14.0" },
{ name = "jinja2", specifier = ">=3.1.6" },
- { name = "jsonschema", specifier = ">=4.25.1" },
+ { name = "jsonschema", specifier = ">=4.26.0" },
+ { name = "mkdocs", marker = "extra == 'dev'", specifier = ">=1.6.1" },
+ { name = "mkdocs-material", marker = "extra == 'dev'", specifier = ">=9.7.1" },
+ { name = "mkdocstrings", extras = ["python"], marker = "extra == 'dev'", specifier = ">=1.0.0" },
{ name = "rich", specifier = ">=14.2.0" },
- { name = "rich-click", specifier = ">=1.9.4" },
- { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.14.4" },
- { name = "weasyprint", specifier = ">=66.0" },
+ { name = "rich-click", specifier = ">=1.9.5" },
+ { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.14.11" },
+ { name = "weasyprint", specifier = ">=67.0" },
]
provides-extras = ["dev"]
+[[package]]
+name = "packaging"
+version = "25.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
+]
+
+[[package]]
+name = "paginate"
+version = "0.5.7"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ec/46/68dde5b6bc00c1296ec6466ab27dddede6aec9af1b99090e1107091b3b84/paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945", size = 19252, upload-time = "2024-08-25T14:17:24.139Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/90/96/04b8e52da071d28f5e21a805b19cb9390aa17a47462ac87f5e2696b9566d/paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591", size = 13746, upload-time = "2024-08-25T14:17:22.55Z" },
+]
+
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
+]
+
[[package]]
name = "pillow"
version = "12.0.0"
@@ -536,6 +837,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/95/7e/f896623c3c635a90537ac093c6a618ebe1a90d87206e42309cb5d98a1b9e/pillow-12.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b290fd8aa38422444d4b50d579de197557f182ef1068b75f5aa8558638b8d0a5", size = 6997850, upload-time = "2025-10-15T18:24:11.495Z" },
]
+[[package]]
+name = "platformdirs"
+version = "4.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" },
+]
+
[[package]]
name = "pycparser"
version = "2.23"
@@ -563,6 +873,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
]
+[[package]]
+name = "pymdown-extensions"
+version = "10.17.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown" },
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/25/6d/af5378dbdb379fddd9a277f8b9888c027db480cde70028669ebd009d642a/pymdown_extensions-10.17.2.tar.gz", hash = "sha256:26bb3d7688e651606260c90fb46409fbda70bf9fdc3623c7868643a1aeee4713", size = 847344, upload-time = "2025-11-26T15:43:57.004Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/93/78/b93cb80bd673bdc9f6ede63d8eb5b4646366953df15667eb3603be57a2b1/pymdown_extensions-10.17.2-py3-none-any.whl", hash = "sha256:bffae79a2e8b9e44aef0d813583a8fea63457b7a23643a43988055b7b79b4992", size = 266556, upload-time = "2025-11-26T15:43:55.162Z" },
+]
+
[[package]]
name = "pyphen"
version = "0.17.2"
@@ -572,6 +895,85 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/7b/1f/c2142d2edf833a90728e5cdeb10bdbdc094dde8dbac078cee0cf33f5e11b/pyphen-0.17.2-py3-none-any.whl", hash = "sha256:3a07fb017cb2341e1d9ff31b8634efb1ae4dc4b130468c7c39dd3d32e7c3affd", size = 2079358, upload-time = "2025-01-20T13:18:29.629Z" },
]
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" },
+ { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" },
+ { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" },
+ { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" },
+ { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" },
+ { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" },
+ { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" },
+ { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" },
+ { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" },
+ { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" },
+ { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" },
+ { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" },
+ { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" },
+ { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" },
+ { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" },
+ { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" },
+ { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" },
+ { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" },
+ { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" },
+ { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" },
+ { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" },
+ { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" },
+ { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
+]
+
+[[package]]
+name = "pyyaml-env-tag"
+version = "1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/2e/79c822141bfd05a853236b504869ebc6b70159afc570e1d5a20641782eaa/pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff", size = 5737, upload-time = "2025-05-13T15:24:01.64Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/04/11/432f32f8097b03e3cd5fe57e88efb685d964e2e5178a48ed61e841f7fdce/pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04", size = 4722, upload-time = "2025-05-13T15:23:59.629Z" },
+]
+
[[package]]
name = "referencing"
version = "0.37.0"
@@ -586,6 +988,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" },
]
+[[package]]
+name = "requests"
+version = "2.32.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "charset-normalizer" },
+ { name = "idna" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
+]
+
[[package]]
name = "rich"
version = "14.2.0"
@@ -601,16 +1018,16 @@ wheels = [
[[package]]
name = "rich-click"
-version = "1.9.4"
+version = "1.9.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "rich" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/bf/d8/f2c1b7e9a645ba40f756d7a5b195fc104729bc6b19061ba3ab385f342931/rich_click-1.9.4.tar.gz", hash = "sha256:af73dc68e85f3bebb80ce302a642b9fe3b65f3df0ceb42eb9a27c467c1b678c8", size = 73632, upload-time = "2025-10-25T01:08:49.142Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/6b/d1/b60ca6a8745e76800b50c7ee246fd73f08a3be5d8e0b551fc93c19fa1203/rich_click-1.9.5.tar.gz", hash = "sha256:48120531493f1533828da80e13e839d471979ec8d7d0ca7b35f86a1379cc74b6", size = 73927, upload-time = "2025-12-21T14:49:44.167Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/5b/6a/1f03adcb3cc7beb6f63aecc21565e9d515ccee653187fc4619cd0b42713b/rich_click-1.9.4-py3-none-any.whl", hash = "sha256:d70f39938bcecaf5543e8750828cbea94ef51853f7d0e174cda1e10543767389", size = 70245, upload-time = "2025-10-25T01:08:47.939Z" },
+ { url = "https://files.pythonhosted.org/packages/25/0a/d865895e1e5d88a60baee0fc3703eb111c502ee10c8c107516bc7623abf8/rich_click-1.9.5-py3-none-any.whl", hash = "sha256:9b195721a773b1acf0e16ff9ec68cef1e7d237e53471e6e3f7ade462f86c403a", size = 70580, upload-time = "2025-12-21T14:49:42.905Z" },
]
[[package]]
@@ -723,40 +1140,49 @@ wheels = [
[[package]]
name = "ruff"
-version = "0.14.4"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/df/55/cccfca45157a2031dcbb5a462a67f7cf27f8b37d4b3b1cd7438f0f5c1df6/ruff-0.14.4.tar.gz", hash = "sha256:f459a49fe1085a749f15414ca76f61595f1a2cc8778ed7c279b6ca2e1fd19df3", size = 5587844, upload-time = "2025-11-06T22:07:45.033Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/17/b9/67240254166ae1eaa38dec32265e9153ac53645a6c6670ed36ad00722af8/ruff-0.14.4-py3-none-linux_armv6l.whl", hash = "sha256:e6604613ffbcf2297cd5dcba0e0ac9bd0c11dc026442dfbb614504e87c349518", size = 12606781, upload-time = "2025-11-06T22:07:01.841Z" },
- { url = "https://files.pythonhosted.org/packages/46/c8/09b3ab245d8652eafe5256ab59718641429f68681ee713ff06c5c549f156/ruff-0.14.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d99c0b52b6f0598acede45ee78288e5e9b4409d1ce7f661f0fa36d4cbeadf9a4", size = 12946765, upload-time = "2025-11-06T22:07:05.858Z" },
- { url = "https://files.pythonhosted.org/packages/14/bb/1564b000219144bf5eed2359edc94c3590dd49d510751dad26202c18a17d/ruff-0.14.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9358d490ec030f1b51d048a7fd6ead418ed0826daf6149e95e30aa67c168af33", size = 11928120, upload-time = "2025-11-06T22:07:08.023Z" },
- { url = "https://files.pythonhosted.org/packages/a3/92/d5f1770e9988cc0742fefaa351e840d9aef04ec24ae1be36f333f96d5704/ruff-0.14.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b40d27924f1f02dfa827b9c0712a13c0e4b108421665322218fc38caf615c2", size = 12370877, upload-time = "2025-11-06T22:07:10.015Z" },
- { url = "https://files.pythonhosted.org/packages/e2/29/e9282efa55f1973d109faf839a63235575519c8ad278cc87a182a366810e/ruff-0.14.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f5e649052a294fe00818650712083cddc6cc02744afaf37202c65df9ea52efa5", size = 12408538, upload-time = "2025-11-06T22:07:13.085Z" },
- { url = "https://files.pythonhosted.org/packages/8e/01/930ed6ecfce130144b32d77d8d69f5c610e6d23e6857927150adf5d7379a/ruff-0.14.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa082a8f878deeba955531f975881828fd6afd90dfa757c2b0808aadb437136e", size = 13141942, upload-time = "2025-11-06T22:07:15.386Z" },
- { url = "https://files.pythonhosted.org/packages/6a/46/a9c89b42b231a9f487233f17a89cbef9d5acd538d9488687a02ad288fa6b/ruff-0.14.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1043c6811c2419e39011890f14d0a30470f19d47d197c4858b2787dfa698f6c8", size = 14544306, upload-time = "2025-11-06T22:07:17.631Z" },
- { url = "https://files.pythonhosted.org/packages/78/96/9c6cf86491f2a6d52758b830b89b78c2ae61e8ca66b86bf5a20af73d20e6/ruff-0.14.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a9f3a936ac27fb7c2a93e4f4b943a662775879ac579a433291a6f69428722649", size = 14210427, upload-time = "2025-11-06T22:07:19.832Z" },
- { url = "https://files.pythonhosted.org/packages/71/f4/0666fe7769a54f63e66404e8ff698de1dcde733e12e2fd1c9c6efb689cb5/ruff-0.14.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95643ffd209ce78bc113266b88fba3d39e0461f0cbc8b55fb92505030fb4a850", size = 13658488, upload-time = "2025-11-06T22:07:22.32Z" },
- { url = "https://files.pythonhosted.org/packages/ee/79/6ad4dda2cfd55e41ac9ed6d73ef9ab9475b1eef69f3a85957210c74ba12c/ruff-0.14.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:456daa2fa1021bc86ca857f43fe29d5d8b3f0e55e9f90c58c317c1dcc2afc7b5", size = 13354908, upload-time = "2025-11-06T22:07:24.347Z" },
- { url = "https://files.pythonhosted.org/packages/b5/60/f0b6990f740bb15c1588601d19d21bcc1bd5de4330a07222041678a8e04f/ruff-0.14.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:f911bba769e4a9f51af6e70037bb72b70b45a16db5ce73e1f72aefe6f6d62132", size = 13587803, upload-time = "2025-11-06T22:07:26.327Z" },
- { url = "https://files.pythonhosted.org/packages/c9/da/eaaada586f80068728338e0ef7f29ab3e4a08a692f92eb901a4f06bbff24/ruff-0.14.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76158a7369b3979fa878612c623a7e5430c18b2fd1c73b214945c2d06337db67", size = 12279654, upload-time = "2025-11-06T22:07:28.46Z" },
- { url = "https://files.pythonhosted.org/packages/66/d4/b1d0e82cf9bf8aed10a6d45be47b3f402730aa2c438164424783ac88c0ed/ruff-0.14.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f3b8f3b442d2b14c246e7aeca2e75915159e06a3540e2f4bed9f50d062d24469", size = 12357520, upload-time = "2025-11-06T22:07:31.468Z" },
- { url = "https://files.pythonhosted.org/packages/04/f4/53e2b42cc82804617e5c7950b7079d79996c27e99c4652131c6a1100657f/ruff-0.14.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c62da9a06779deecf4d17ed04939ae8b31b517643b26370c3be1d26f3ef7dbde", size = 12719431, upload-time = "2025-11-06T22:07:33.831Z" },
- { url = "https://files.pythonhosted.org/packages/a2/94/80e3d74ed9a72d64e94a7b7706b1c1ebaa315ef2076fd33581f6a1cd2f95/ruff-0.14.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5a443a83a1506c684e98acb8cb55abaf3ef725078be40237463dae4463366349", size = 13464394, upload-time = "2025-11-06T22:07:35.905Z" },
- { url = "https://files.pythonhosted.org/packages/54/1a/a49f071f04c42345c793d22f6cf5e0920095e286119ee53a64a3a3004825/ruff-0.14.4-py3-none-win32.whl", hash = "sha256:643b69cb63cd996f1fc7229da726d07ac307eae442dd8974dbc7cf22c1e18fff", size = 12493429, upload-time = "2025-11-06T22:07:38.43Z" },
- { url = "https://files.pythonhosted.org/packages/bc/22/e58c43e641145a2b670328fb98bc384e20679b5774258b1e540207580266/ruff-0.14.4-py3-none-win_amd64.whl", hash = "sha256:26673da283b96fe35fa0c939bf8411abec47111644aa9f7cfbd3c573fb125d2c", size = 13635380, upload-time = "2025-11-06T22:07:40.496Z" },
- { url = "https://files.pythonhosted.org/packages/30/bd/4168a751ddbbf43e86544b4de8b5c3b7be8d7167a2a5cb977d274e04f0a1/ruff-0.14.4-py3-none-win_arm64.whl", hash = "sha256:dd09c292479596b0e6fec8cd95c65c3a6dc68e9ad17b8f2382130f87ff6a75bb", size = 12663065, upload-time = "2025-11-06T22:07:42.603Z" },
+version = "0.14.11"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d4/77/9a7fe084d268f8855d493e5031ea03fa0af8cc05887f638bf1c4e3363eb8/ruff-0.14.11.tar.gz", hash = "sha256:f6dc463bfa5c07a59b1ff2c3b9767373e541346ea105503b4c0369c520a66958", size = 5993417, upload-time = "2026-01-08T19:11:58.322Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f0/a6/a4c40a5aaa7e331f245d2dc1ac8ece306681f52b636b40ef87c88b9f7afd/ruff-0.14.11-py3-none-linux_armv6l.whl", hash = "sha256:f6ff2d95cbd335841a7217bdfd9c1d2e44eac2c584197ab1385579d55ff8830e", size = 12951208, upload-time = "2026-01-08T19:12:09.218Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/5c/360a35cb7204b328b685d3129c08aca24765ff92b5a7efedbdd6c150d555/ruff-0.14.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f6eb5c1c8033680f4172ea9c8d3706c156223010b8b97b05e82c59bdc774ee6", size = 13330075, upload-time = "2026-01-08T19:12:02.549Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/9e/0cc2f1be7a7d33cae541824cf3f95b4ff40d03557b575912b5b70273c9ec/ruff-0.14.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f2fc34cc896f90080fca01259f96c566f74069a04b25b6205d55379d12a6855e", size = 12257809, upload-time = "2026-01-08T19:12:00.366Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/e5/5faab97c15bb75228d9f74637e775d26ac703cc2b4898564c01ab3637c02/ruff-0.14.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53386375001773ae812b43205d6064dae49ff0968774e6befe16a994fc233caa", size = 12678447, upload-time = "2026-01-08T19:12:13.899Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/33/e9767f60a2bef779fb5855cab0af76c488e0ce90f7bb7b8a45c8a2ba4178/ruff-0.14.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a697737dce1ca97a0a55b5ff0434ee7205943d4874d638fe3ae66166ff46edbe", size = 12758560, upload-time = "2026-01-08T19:11:42.55Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/84/4c6cf627a21462bb5102f7be2a320b084228ff26e105510cd2255ea868e5/ruff-0.14.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6845ca1da8ab81ab1dce755a32ad13f1db72e7fba27c486d5d90d65e04d17b8f", size = 13599296, upload-time = "2026-01-08T19:11:30.371Z" },
+ { url = "https://files.pythonhosted.org/packages/88/e1/92b5ed7ea66d849f6157e695dc23d5d6d982bd6aa8d077895652c38a7cae/ruff-0.14.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e36ce2fd31b54065ec6f76cb08d60159e1b32bdf08507862e32f47e6dde8bcbf", size = 15048981, upload-time = "2026-01-08T19:12:04.742Z" },
+ { url = "https://files.pythonhosted.org/packages/61/df/c1bd30992615ac17c2fb64b8a7376ca22c04a70555b5d05b8f717163cf9f/ruff-0.14.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590bcc0e2097ecf74e62a5c10a6b71f008ad82eb97b0a0079e85defe19fe74d9", size = 14633183, upload-time = "2026-01-08T19:11:40.069Z" },
+ { url = "https://files.pythonhosted.org/packages/04/e9/fe552902f25013dd28a5428a42347d9ad20c4b534834a325a28305747d64/ruff-0.14.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53fe71125fc158210d57fe4da26e622c9c294022988d08d9347ec1cf782adafe", size = 14050453, upload-time = "2026-01-08T19:11:37.555Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/93/f36d89fa021543187f98991609ce6e47e24f35f008dfe1af01379d248a41/ruff-0.14.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a35c9da08562f1598ded8470fcfef2afb5cf881996e6c0a502ceb61f4bc9c8a3", size = 13757889, upload-time = "2026-01-08T19:12:07.094Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/9f/c7fb6ecf554f28709a6a1f2a7f74750d400979e8cd47ed29feeaa1bd4db8/ruff-0.14.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0f3727189a52179393ecf92ec7057c2210203e6af2676f08d92140d3e1ee72c1", size = 13955832, upload-time = "2026-01-08T19:11:55.064Z" },
+ { url = "https://files.pythonhosted.org/packages/db/a0/153315310f250f76900a98278cf878c64dfb6d044e184491dd3289796734/ruff-0.14.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:eb09f849bd37147a789b85995ff734a6c4a095bed5fd1608c4f56afc3634cde2", size = 12586522, upload-time = "2026-01-08T19:11:35.356Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/2b/a73a2b6e6d2df1d74bf2b78098be1572191e54bec0e59e29382d13c3adc5/ruff-0.14.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:c61782543c1231bf71041461c1f28c64b961d457d0f238ac388e2ab173d7ecb7", size = 12724637, upload-time = "2026-01-08T19:11:47.796Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/41/09100590320394401cd3c48fc718a8ba71c7ddb1ffd07e0ad6576b3a3df2/ruff-0.14.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:82ff352ea68fb6766140381748e1f67f83c39860b6446966cff48a315c3e2491", size = 13145837, upload-time = "2026-01-08T19:11:32.87Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/d8/e035db859d1d3edf909381eb8ff3e89a672d6572e9454093538fe6f164b0/ruff-0.14.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:728e56879df4ca5b62a9dde2dd0eb0edda2a55160c0ea28c4025f18c03f86984", size = 13850469, upload-time = "2026-01-08T19:12:11.694Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/02/bb3ff8b6e6d02ce9e3740f4c17dfbbfb55f34c789c139e9cd91985f356c7/ruff-0.14.11-py3-none-win32.whl", hash = "sha256:337c5dd11f16ee52ae217757d9b82a26400be7efac883e9e852646f1557ed841", size = 12851094, upload-time = "2026-01-08T19:11:45.163Z" },
+ { url = "https://files.pythonhosted.org/packages/58/f1/90ddc533918d3a2ad628bc3044cdfc094949e6d4b929220c3f0eb8a1c998/ruff-0.14.11-py3-none-win_amd64.whl", hash = "sha256:f981cea63d08456b2c070e64b79cb62f951aa1305282974d4d5216e6e0178ae6", size = 14001379, upload-time = "2026-01-08T19:11:52.591Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/1c/1dbe51782c0e1e9cfce1d1004752672d2d4629ea46945d19d731ad772b3b/ruff-0.14.11-py3-none-win_arm64.whl", hash = "sha256:649fb6c9edd7f751db276ef42df1f3df41c38d67d199570ae2a7bd6cbc3590f0", size = 12938644, upload-time = "2026-01-08T19:11:50.027Z" },
+]
+
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
]
[[package]]
name = "tinycss2"
-version = "1.4.0"
+version = "1.5.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "webencodings" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/a3/ae/2ca4913e5c0f09781d75482874c3a95db9105462a92ddd303c7d285d3df2/tinycss2-1.5.1.tar.gz", hash = "sha256:d339d2b616ba90ccce58da8495a78f46e55d4d25f9fd71dfd526f07e7d53f957", size = 88195, upload-time = "2025-11-23T10:29:10.082Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" },
+ { url = "https://files.pythonhosted.org/packages/60/45/c7b5c3168458db837e8ceab06dc77824e18202679d0463f0e8f002143a97/tinycss2-1.5.1-py3-none-any.whl", hash = "sha256:3415ba0f5839c062696996998176c4a3751d18b7edaaeeb658c9ce21ec150661", size = 28404, upload-time = "2025-11-23T10:29:08.676Z" },
]
[[package]]
@@ -780,9 +1206,45 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]
+[[package]]
+name = "urllib3"
+version = "2.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1c/43/554c2569b62f49350597348fc3ac70f786e3c32e7f19d266e19817812dd3/urllib3-2.6.0.tar.gz", hash = "sha256:cb9bcef5a4b345d5da5d145dc3e30834f58e8018828cbc724d30b4cb7d4d49f1", size = 432585, upload-time = "2025-12-05T15:08:47.885Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/56/1a/9ffe814d317c5224166b23e7c47f606d6e473712a2fad0f704ea9b99f246/urllib3-2.6.0-py3-none-any.whl", hash = "sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f", size = 131083, upload-time = "2025-12-05T15:08:45.983Z" },
+]
+
+[[package]]
+name = "watchdog"
+version = "6.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" },
+ { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" },
+ { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" },
+ { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" },
+ { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" },
+ { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" },
+ { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" },
+ { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" },
+ { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" },
+ { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" },
+ { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" },
+]
+
[[package]]
name = "weasyprint"
-version = "66.0"
+version = "67.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi" },
@@ -794,9 +1256,9 @@ dependencies = [
{ name = "tinycss2" },
{ name = "tinyhtml5" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/32/99/480b5430b7eb0916e7d5df1bee7d9508b28b48fee28da894d0a050e0e930/weasyprint-66.0.tar.gz", hash = "sha256:da71dc87dc129ac9cffdc65e5477e90365ab9dbae45c744014ec1d06303dde40", size = 504224, upload-time = "2025-07-24T11:44:42.771Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/fd/bc/79a65b3a406cb62a1982fec8b49134b25a3b31abb094ca493c9fddff5492/weasyprint-67.0.tar.gz", hash = "sha256:fdfbccf700e8086c8fd1607ec42e25d4b584512c29af2d9913587a4e448dead4", size = 1534152, upload-time = "2025-12-02T16:11:36.972Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/0f/d1/c5d9b341bf3d556c1e4c6566b3efdda0b1bb175510aa7b09dd3eee246923/weasyprint-66.0-py3-none-any.whl", hash = "sha256:82b0783b726fcd318e2c977dcdddca76515b30044bc7a830cc4fbe717582a6d0", size = 301965, upload-time = "2025-07-24T11:44:40.968Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/3a/a225e214ae2accd8781e4d22e9397bd51290c631ea0943d3a0a1840bc667/weasyprint-67.0-py3-none-any.whl", hash = "sha256:abc2f40872ea01c29c11f7799dafc4b23c078335bf7777f72a8affeb36e1d201", size = 316309, upload-time = "2025-12-02T16:11:35.402Z" },
]
[[package]]
From 2161153e075d883695a54bd4f6e4a1635e096f07 Mon Sep 17 00:00:00 2001
From: 3xp0rt <61662492+3xp0rt@users.noreply.github.com>
Date: Sun, 25 Jan 2026 02:47:15 +0200
Subject: [PATCH 19/19] refactor: update validation flow, cli outputs, and docs
---
README.md | 289 +---------------
docs/contributing.md | 71 +++-
docs/getting-started/installation.md | 55 +++-
mkdocs.yml | 5 +
naminter/cli/config.py | 270 ++++++++-------
naminter/cli/console.py | 16 +-
naminter/cli/constants.py | 43 +--
naminter/cli/exporters.py | 23 +-
naminter/cli/main.py | 344 ++++++++++----------
naminter/cli/progress.py | 26 +-
naminter/cli/utils.py | 147 +++------
naminter/core/constants.py | 48 ++-
naminter/core/exceptions.py | 13 +-
naminter/core/formatter.py | 47 +--
naminter/core/main.py | 124 ++++---
naminter/core/models.py | 87 ++---
naminter/core/network.py | 52 +--
naminter/core/utils.py | 16 +-
naminter/core/validator.py | 470 +++++++++++++++++++++------
pyproject.toml | 39 ++-
uv.lock | 215 ++++++++++--
21 files changed, 1336 insertions(+), 1064 deletions(-)
diff --git a/README.md b/README.md
index 8e70de6..53272ff 100644
--- a/README.md
+++ b/README.md
@@ -12,300 +12,27 @@ Naminter is a Python package and command-line interface (CLI) tool for asynchron
-## Table of Contents
-
-- [Installation](#installation)
- - [From PyPI](#from-pypi)
- - [From Source](#from-source)
- - [From Docker](#using-docker)
-- [Usage](#usage)
- - [Basic CLI Usage](#basic-cli-usage)
- - [Advanced CLI Options](#advanced-cli-options)
- - [Using as a Python Package](#using-as-a-python-package)
-- [Command Line Options](#command-line-options)
-- [Contributing](#contributing)
-- [License](#license)
-
## Installation
-### From PyPI
-
-Install Naminter with pip:
+Quick install from PyPI:
```bash
pip install naminter
```
-### From Source
-
-Clone the repository and install in editable mode:
-
-```bash
-git clone https://github.com/3xp0rt/naminter.git
-cd naminter
-pip install -e .
-```
-
-### Using Docker
-
-All needed folders are mounted on the first start of the docker compose run command.
-
-```bash
-# Using the prebuilt docker image from the GitHub registry
-docker run --rm -it ghcr.io/3xp0rt/naminter --username john_doe
-
-# Build the docker from the source yourself
-git clone https://github.com/3xp0rt/naminter.git && cd naminter
-docker build -t naminter .
-docker compose run --rm naminter --username john_doe
-```
-
-## Usage
-
-### Basic CLI Usage
-
-Enumerate a single username:
-
-```bash
-naminter --username john_doe
-```
-
-Enumerate multiple usernames:
-
-```bash
-naminter --username user1 --username user2 --username user3
-```
-
-### Advanced CLI Options
-
-Customize the enumerator with various command-line arguments:
-
-```bash
-# Basic username enumeration with custom settings
-naminter --username john_doe \
- --max-tasks 100 \
- --timeout 15 \
- --impersonate chrome \
- --include-categories social coding
-
-# Using proxy and saving responses
-naminter --username jane_smith \
- --proxy http://proxy:8080 \
- --save-response \
- --open-response
-
-# Using custom schema validation
-naminter --username alice_bob \
- --local-schema ./custom-schema.json \
- --local-list ./my-sites.json
-
-# Using remote schema with custom list
-naminter --username test_user \
- --remote-schema https://example.com/custom-schema.json \
- --remote-list https://example.com/my-sites.json
-
-# Export results in multiple formats
-naminter --username alice_bob \
- --csv \
- --json \
- --html \
- --filter-all
-
-# Export with custom paths using merged flags
-naminter --username alice_bob \
- --csv results.csv \
- --json results.json \
- --html report.html
-
-# Site validation with detailed output
-naminter --test \
- --show-details \
- --log-level DEBUG \
- --log-file debug.log
-```
-
-### Using as a Python Package
-
-Naminter can be used programmatically in Python projects to enumerate usernames across various platforms.
-
-#### Basic Example
-
-```python
-import asyncio
-from naminter import Naminter, CurlCFFISession, WMN_REMOTE_URL
-
-async def main():
- async with CurlCFFISession() as http_client:
- wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
-
- async with Naminter(http_client=http_client, wmn_data=wmn_data) as naminter:
- async for result in naminter.enumerate_usernames(["example_username"]):
- if result.status.value == "exists":
- print(f"✅ {result.username} found on {result.name}: {result.url}")
- elif result.status.value == "missing":
- print(f"❌ {result.username} not found on {result.name}")
- elif result.status.value == "error":
- print(f"⚠️ Error checking {result.username} on {result.name}: {result.error}")
-
-asyncio.run(main())
-```
-
-
-#### Advanced Configuration
-
-```python
-import asyncio
-from naminter import Naminter, CurlCFFISession, WMNMode, WMN_REMOTE_URL
-
-async def main():
- async with CurlCFFISession(
- timeout=15,
- impersonate="chrome",
- verify=True,
- proxies="http://proxy:8080"
- ) as http_client:
- wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
-
- async with Naminter(
- http_client=http_client,
- wmn_data=wmn_data,
- max_tasks=100
- ) as naminter:
- usernames = ["user1", "user2", "user3"]
- async for result in naminter.enumerate_usernames(usernames, mode=WMNMode.ANY):
- if result.status.value == "exists":
- print(f"✅ {result.username} on {result.name}: {result.url}")
-
-asyncio.run(main())
-```
-
-#### Site Validation
-
-```python
-import asyncio
-from naminter import Naminter, CurlCFFISession, WMN_REMOTE_URL
-
-async def main():
- async with CurlCFFISession() as http_client:
- wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
-
- async with Naminter(http_client=http_client, wmn_data=wmn_data) as naminter:
- async for site_result in naminter.enumerate_test():
- if site_result.error:
- print(f"❌ {site_result.name}: {site_result.error}")
- else:
- found = sum(1 for r in site_result.results if r.status.value == "exists")
- total = len(site_result.results)
- print(f"✅ {site_result.name}: {found}/{total} known accounts found")
-
-asyncio.run(main())
-```
-
-#### Getting WMN Summary
-
-```python
-import asyncio
-from naminter import Naminter, CurlCFFISession, WMN_REMOTE_URL, WMN_SCHEMA_URL
-
-async def main():
- async with CurlCFFISession() as http_client:
- # Load data and (optionally) schema using public constants
- wmn_data = (await http_client.get(WMN_REMOTE_URL)).json()
- wmn_schema = (await http_client.get(WMN_SCHEMA_URL)).json()
-
- async with Naminter(
- http_client=http_client,
- wmn_data=wmn_data,
- wmn_schema=wmn_schema,
- ) as naminter:
- summary = naminter.get_wmn_summary()
- print(f"Total sites: {summary.sites_count}")
- print(f"Total categories: {summary.categories_count}")
- print(f"Known accounts: {summary.known_count}")
-
-asyncio.run(main())
-```
-
-## Command Line Options
-
-### Basic Usage
-| Option | Description |
-|-----------------------------|------------------------------------------------------------|
-| `--username, -u` | Username(s) to search |
-| `--site, -s` | Specific site name(s) to enumerate |
-| `--version` | Show version information |
-| `--no-color` | Disable colored output |
-| `--no-progressbar` | Disable progress bar display |
-
-### Input Lists
-| Option | Description |
-|-----------------------------|------------------------------------------------------------|
-| `--local-list` | Path to a local file containing the list of sites to enumerate |
-| `--remote-list` | URL to fetch a remote list of sites to enumerate |
-| `--skip-validation` | Skip WhatsMyName schema validation for lists |
-| `--local-schema` | Path to local WhatsMyName schema file |
-| `--remote-schema` | URL to fetch custom WhatsMyName schema |
-
-### Site Validation
-| Option | Description |
-|-----------------------------|------------------------------------------------------------|
-| `--test` | Validate site detection methods by checking known usernames |
-
-### Category Filters
-| Option | Description |
-|-----------------------------|------------------------------------------------------------|
-| `--include-categories` | Categories of sites to include in the search |
-| `--exclude-categories` | Categories of sites to exclude from the search |
-
-### Network Options
-| Option | Description |
-|-----------------------------|------------------------------------------------------------|
-| `--proxy` | Proxy server to use for requests |
-| `--timeout` | Maximum time in seconds to wait for each request (default: 30) |
-| `--allow-redirects` | Whether to follow URL redirects |
-| `--verify-ssl` | Whether to verify SSL certificates |
-| `--impersonate, -i` | Browser to impersonate in requests (chrome, chrome_android, safari, safari_ios, edge, firefox) |
-
-### Concurrency & Debug
-| Option | Description |
-|-----------------------------|------------------------------------------------------------|
-| `--max-tasks` | Maximum number of concurrent tasks (default: 50) |
-| `--mode` | Validation mode: `all` for strict matching (all detection criteria must match) or `any` for permissive matching (at least one detection criterion must match) |
-| `--log-level` | Set logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL) |
-| `--log-file` | Path to log file for debug output |
-| `--show-details` | Show detailed information in console output |
-| `--browse` | Open found profiles in web browser |
-
-### Response Handling
-| Option | Description |
-|-----------------------------|------------------------------------------------------------|
-| `--save-response [DIR]` | Save HTTP response body; optionally specify directory |
-| `--open-response` | Open saved response file in browser |
+For detailed installation instructions including optional dependencies, Docker, and source installation, see the [Installation Guide](https://naminter.github.io/getting-started/installation/).
-### Export Options
-| Option | Description |
-|---------------|-------------|
-| `--csv [PATH]` | Export results to CSV; optionally specify output path |
-| `--pdf [PATH]` | Export results to PDF; optionally specify output path |
-| `--html [PATH]` | Export results to HTML; optionally specify output path |
-| `--json [PATH]` | Export results to JSON; optionally specify output path |
+## Documentation
-### Result Filters
-| Option | Description |
-|-----------------------------|------------------------------------------------------------|
-| `--filter-all` | Include all results in console and exports |
-| `--filter-exists` | Show only existing username results in console and exports |
-| `--filter-partial` | Show only partial match results in console and exports |
-| `--filter-conflicting` | Show only conflicting results in console and exports |
-| `--filter-unknown` | Show only unknown results in console and exports |
-| `--filter-missing` | Show only missing username results in console and exports |
-| `--filter-not-valid` | Show only not valid results in console and exports |
-| `--filter-errors` | Show only error results in console and exports |
+Full documentation is available at [https://naminter.github.io/](https://naminter.github.io/), including:
+- [Installation Guide](https://naminter.github.io/getting-started/installation/) - Detailed installation instructions
+- [Usage Examples](https://naminter.github.io/getting-started/usage/) - CLI and Python API examples
+- [API Reference](https://naminter.github.io/api/core/main/) - API documentation
## Contributing
-Contributions are always welcome! Please submit a pull request with your improvements or open an issue to discuss.
+Contributions are welcome! See the [Contributing Guide](https://naminter.github.io/contributing/) for guidelines.
## License
diff --git a/docs/contributing.md b/docs/contributing.md
index 763fd7c..0a2254f 100644
--- a/docs/contributing.md
+++ b/docs/contributing.md
@@ -2,7 +2,7 @@
Contributions are always welcome! Please submit a pull request with your improvements or open an issue to discuss.
-## Development Setup
+## Development
1. Clone the repository:
```bash
@@ -28,6 +28,36 @@ uv run ruff format
uv run ruff check
```
+## Documentation
+
+This project uses [MkDocs](https://www.mkdocs.org/) with [Material for MkDocs](https://squidfunk.github.io/mkdocs-material/) for documentation.
+
+### Serving docs locally
+
+To preview documentation changes locally:
+
+```bash
+mkdocs serve
+```
+
+This starts a local server at `http://127.0.0.1:8000/` with live reload.
+
+### Building docs
+
+To build the static documentation site:
+
+```bash
+mkdocs build
+```
+
+The built site will be in the `site/` directory.
+
+### Documentation structure
+
+- `docs/` - Documentation source files (Markdown)
+- `mkdocs.yml` - MkDocs configuration
+- API documentation is auto-generated from docstrings using `mkdocstrings`
+
## Code Style
This project uses:
@@ -42,10 +72,47 @@ This project uses:
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
3. Make your changes following the code style guidelines
4. Run linting and ensure all checks pass
-5. Commit your changes with clear, descriptive messages
+5. Commit your changes using [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) format
6. Push to your fork (`git push origin feature/amazing-feature`)
7. Open a pull request with a detailed description of your changes
+## Commit Message Guidelines
+
+This project follows the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) specification. Each commit message should be structured as follows:
+
+```
+<type>[optional scope]: <description>
+
+[optional body]
+
+[optional footer(s)]
+```
+
+### Commit Types
+
+| Type | Description |
+|------|-------------|
+| `feat` | A new feature |
+| `fix` | A bug fix |
+| `docs` | Documentation only changes |
+| `style` | Changes that do not affect the meaning of the code (formatting, etc.) |
+| `refactor` | A code change that neither fixes a bug nor adds a feature |
+| `perf` | A code change that improves performance |
+| `test` | Adding missing tests or correcting existing tests |
+| `build` | Changes that affect the build system or external dependencies |
+| `ci` | Changes to CI configuration files and scripts |
+| `chore` | Other changes that don't modify src or test files |
+
+### Examples
+
+```bash
+feat: add validation support
+fix: resolve timeout issue in network requests
+docs: update installation instructions
+refactor(core): simplify validation logic
+chore(release): bump version to 1.0.7
+```
+
## Pull Request Guidelines
- Provide a clear description of what the PR does
diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md
index f994c9c..4989cc6 100644
--- a/docs/getting-started/installation.md
+++ b/docs/getting-started/installation.md
@@ -5,11 +5,32 @@
Install Naminter with pip or uv:
```bash
-# Using pip
+# Default installation (includes both CLI and core)
pip install naminter
# Using uv
-uv tool install naminter
+uvx naminter
+```
+
+### Optional Dependencies
+
+Naminter supports optional dependency groups:
+
+```bash
+# Install core dependencies only (for library usage)
+pip install naminter[core]
+# or with uv
+uv pip install naminter[core]
+
+# Install with CLI dependencies (same as default)
+pip install naminter[cli]
+# or with uv
+uv pip install naminter[cli]
+
+# Install with development dependencies
+pip install naminter[dev]
+# or with uv
+uv pip install naminter[dev]
```
## From Source
@@ -25,12 +46,6 @@ pip install -e .
uv pip install -e .
```
-For development with dev dependencies:
-
-```bash
-uv sync --extra dev
-```
-
## Using Docker
All needed folders are mounted on the first start of the docker compose run command.
@@ -48,7 +63,29 @@ docker compose run --rm naminter --username john_doe
## Requirements
- Python 3.11 or higher
-- See `pyproject.toml` for full dependency list
+
+### Core Dependencies
+
+The core module requires:
+
+- `curl-cffi` - HTTP client with browser impersonation
+- `jsonschema` - JSON schema validation
+- `orjson` - Fast JSON parsing
+
+### CLI Dependencies
+
+The CLI module additionally requires:
+
+- `click` - Command-line interface framework
+- `rich` - Rich console output
+- `rich-click` - Rich click integration
+- `aiofiles` - Async file I/O
+- `jinja2` - Template engine (for HTML export)
+- `weasyprint` - PDF generation
+- `pathvalidate` - Path validation
+- `uvloop` - Fast event loop implementation
+
+See `pyproject.toml` for the full dependency list.
## Verification
diff --git a/mkdocs.yml b/mkdocs.yml
index 91ed3d0..f0112a4 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -132,3 +132,8 @@ plugins:
# extra_javascript:
# - javascripts/extra.js
+# Watch additional directories for live reload
+watch:
+ - docs
+ - naminter
+
diff --git a/naminter/cli/config.py b/naminter/cli/config.py
index af75dce..630efe0 100644
--- a/naminter/cli/config.py
+++ b/naminter/cli/config.py
@@ -1,10 +1,10 @@
+from dataclasses import dataclass, field
from functools import cached_property
-import json
+import orjson
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast
from naminter.cli.console import display_warning
-from naminter.cli.constants import OPTION_AUTO_VALUE
from naminter.cli.exceptions import ConfigurationError
from naminter.core.constants import (
BROWSER_IMPERSONATE_AGENT,
@@ -22,6 +22,7 @@
from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
+@dataclass(frozen=True)
class NaminterConfig:
"""Configuration for Naminter CLI tool.
@@ -30,145 +31,143 @@ class NaminterConfig:
parameters.
"""
- def __init__(self, /, **kwargs: object) -> None:
- """Initialize config from kwargs (CLI or direct parameters).
+ # Input/Output
+ usernames: list[str] = field(default_factory=lambda: list[str]())
+ sites: list[str] | None = None
+ local_list_path: Path | str | None = None
+ remote_list_url: str | None = None
+ local_schema_path: Path | str | None = None
+ remote_schema_url: str = WMN_SCHEMA_URL
+
+ # Validation & Filtering
+ skip_validation: bool = False
+ include_categories: list[str] = field(default_factory=lambda: list[str]())
+ exclude_categories: list[str] = field(default_factory=lambda: list[str]())
+ filter_all: bool = False
+ filter_exists: bool = False
+ filter_partial: bool = False
+ filter_conflicting: bool = False
+ filter_unknown: bool = False
+ filter_missing: bool = False
+ filter_not_valid: bool = False
+ filter_errors: bool = False
+
+ # Network/HTTP
+ max_tasks: int = MAX_CONCURRENT_TASKS
+ timeout: int = HTTP_REQUEST_TIMEOUT_SECONDS
+ proxy: str | None = None
+ allow_redirects: bool = HTTP_ALLOW_REDIRECTS
+ verify_ssl: bool = HTTP_SSL_VERIFY
+ impersonate: "BrowserTypeLiteral | str | None" = BROWSER_IMPERSONATE_AGENT
+ ja3: str | None = None
+ akamai: str | None = None
+ extra_fp: "ExtraFingerprints | dict[str, Any] | str | None" = None
+
+ # Behavior/Output
+ browse: bool = False
+ mode: WMNMode = field(default_factory=lambda: WMNMode.ALL)
+ test: bool = False
+ no_progressbar: bool = False
+ log_level: str | None = None
+ log_file: str | None = None
+ show_details: bool = False
+
+ # Response saving
+ save_response: bool = False
+ response_dir: Path | str | None = None
+ open_response: bool = False
+
+ # Export formats
+ csv_export: bool = False
+ csv_path: Path | str | None = None
+ pdf_export: bool = False
+ pdf_path: Path | str | None = None
+ html_export: bool = False
+ html_path: Path | str | None = None
+ json_export: bool = False
+ json_path: Path | str | None = None
- Args:
- **kwargs: Configuration parameters. Can be either:
- - Direct field names (usernames, sites, etc.)
- - CLI-specific names (username, site, csv_opt, etc.)
+ def __post_init__(self) -> None:
+ """Validate and normalize configuration after initialization."""
+ self._validate_usernames()
+ self._validate_mode()
+ self._validate_sources()
+ self._normalize_filters()
+ self._normalize_impersonate()
+ self._normalize_fingerprint()
- Note:
- At least one keyword argument must be provided. Positional arguments
- are not allowed (enforced by position-only `/` parameter).
- """
- if not kwargs:
- msg = "NaminterConfig requires at least one keyword argument"
- raise ConfigurationError(msg)
+ @classmethod
+ def from_click(cls, **kwargs: Any) -> "NaminterConfig":
+ """Create NaminterConfig from Click CLI arguments.
- # Parse CLI-specific kwargs if present
- parsed = self._parse_cli_kwargs(kwargs)
-
- # Set all fields with defaults
- self.usernames: list[str] = parsed.get("usernames", [])
- self.sites: list[str] | None = parsed.get("sites")
- self.local_list_path: Path | str | None = parsed.get("local_list")
- self.remote_list_url: str | None = parsed.get("remote_list")
- self.local_schema_path: Path | str | None = parsed.get("local_schema")
- self.remote_schema_url: str | None = parsed.get(
- "remote_schema",
- WMN_SCHEMA_URL,
- )
- self.skip_validation: bool = parsed.get("skip_validation", False)
- self.include_categories: list[str] = parsed.get("include_categories", [])
- self.exclude_categories: list[str] = parsed.get("exclude_categories", [])
- self.filter_all: bool = parsed.get("filter_all", False)
- self.filter_exists: bool = parsed.get("filter_exists", False)
- self.filter_partial: bool = parsed.get("filter_partial", False)
- self.filter_conflicting: bool = parsed.get("filter_conflicting", False)
- self.filter_unknown: bool = parsed.get("filter_unknown", False)
- self.filter_missing: bool = parsed.get("filter_missing", False)
- self.filter_not_valid: bool = parsed.get("filter_not_valid", False)
- self.filter_errors: bool = parsed.get("filter_errors", False)
- self.max_tasks: int = parsed.get("max_tasks", MAX_CONCURRENT_TASKS)
- self.timeout: int = parsed.get("timeout", HTTP_REQUEST_TIMEOUT_SECONDS)
- self.proxy: str | None = parsed.get("proxy")
- self.allow_redirects: bool = parsed.get("allow_redirects", HTTP_ALLOW_REDIRECTS)
- self.verify_ssl: bool = parsed.get("verify_ssl", HTTP_SSL_VERIFY)
- self.impersonate: BrowserTypeLiteral | str | None = parsed.get(
- "impersonate",
- BROWSER_IMPERSONATE_AGENT,
- )
- self.ja3: str | None = parsed.get("ja3")
- self.akamai: str | None = parsed.get("akamai")
- self.extra_fp: ExtraFingerprints | dict[str, Any] | str | None = parsed.get(
- "extra_fp",
- )
- self.browse: bool = parsed.get("browse", False)
- self.mode: WMNMode = parsed.get("mode", WMNMode.ALL)
- self.test: bool = parsed.get("test", False)
- self.no_progressbar: bool = parsed.get("no_progressbar", False)
- self.log_level: str | None = parsed.get("log_level")
- self.log_file: str | None = parsed.get("log_file")
- self.show_details: bool = parsed.get("show_details", False)
- self.save_response: bool = parsed.get("save_response", False)
- self.response_path: str | None = parsed.get("response_path")
- self.open_response: bool = parsed.get("open_response", False)
- self.csv_export: bool = parsed.get("csv_export", False)
- self.csv_path: str | None = parsed.get("csv_path")
- self.pdf_export: bool = parsed.get("pdf_export", False)
- self.pdf_path: str | None = parsed.get("pdf_path")
- self.html_export: bool = parsed.get("html_export", False)
- self.html_path: str | None = parsed.get("html_path")
- self.json_export: bool = parsed.get("json_export", False)
- self.json_path: str | None = parsed.get("json_path")
-
- self.__post_init__()
-
- @staticmethod
- def _parse_option_path(option_value: str | None) -> str | None:
- """Parse export/response option value, returning None for auto or unset.
+ This method handles the transformation of Click-specific kwargs
+ (with CLI naming conventions) into the internal config field names.
Args:
- option_value: The option value to parse. Can be None, OPTION_AUTO_VALUE,
- or a path string.
+ **kwargs: Raw kwargs from Click CLI.
Returns:
- None if the option is unset or set to auto mode, otherwise the path string.
- """
- if option_value in {None, OPTION_AUTO_VALUE}:
- return None
- return option_value
-
- @staticmethod
- def _parse_cli_kwargs(kwargs: dict[str, Any]) -> dict[str, Any]:
- """Parse CLI-specific kwargs into config field names.
+ Initialized NaminterConfig instance.
- Args:
- kwargs: Raw kwargs from CLI or direct parameters.
-
- Returns:
- Dictionary with parsed configuration values.
+ Raises:
+ ConfigurationError: If no kwargs are provided or validation fails.
"""
+ if not kwargs:
+ msg = "NaminterConfig requires at least one keyword argument"
+ raise ConfigurationError(msg)
+
parsed = kwargs.copy()
- # Handle CLI-specific username/site parameters (tuples from click)
+ # Remove CLI-only options that are not NaminterConfig fields
+ cli_only_keys = ["no_color"]
+ for key in cli_only_keys:
+ parsed.pop(key, None)
+
+ # Input/Output: Handle username/site and data source parameters
if "username" in parsed:
- parsed["usernames"] = list(parsed.pop("username") or [])
+ parsed["usernames"] = cast("list[str]", list(parsed.pop("username") or []))
if "site" in parsed:
- sites = list(parsed.pop("site") or [])
+ sites = cast("list[str]", list(parsed.pop("site") or []))
parsed["sites"] = sites if sites else None
- # Handle include/exclude categories
+ if "local_list" in parsed:
+ parsed["local_list_path"] = parsed.pop("local_list")
+ if "remote_list" in parsed:
+ parsed["remote_list_url"] = parsed.pop("remote_list")
+ if "local_schema" in parsed:
+ parsed["local_schema_path"] = parsed.pop("local_schema")
+ if "remote_schema" in parsed:
+ parsed["remote_schema_url"] = parsed.pop("remote_schema")
+
+ # Validation & Filtering: Handle categories (convert tuples to lists)
if "include_categories" in parsed and isinstance(
parsed["include_categories"],
tuple,
):
- parsed["include_categories"] = list(parsed["include_categories"])
+ parsed["include_categories"] = list(
+ cast("tuple[str, ...]", parsed["include_categories"]),
+ )
if "exclude_categories" in parsed and isinstance(
parsed["exclude_categories"],
tuple,
):
- parsed["exclude_categories"] = list(parsed["exclude_categories"])
+ parsed["exclude_categories"] = list(
+ cast("tuple[str, ...]", parsed["exclude_categories"]),
+ )
- # Parse export format options (csv_opt -> csv_export + csv_path)
- for fmt in ["csv", "pdf", "html", "json"]:
- opt_key = f"{fmt}_opt"
- if opt_key in parsed:
- opt_value = parsed.pop(opt_key)
- parsed[f"{fmt}_export"] = opt_value is not None
- parsed[f"{fmt}_path"] = NaminterConfig._parse_option_path(opt_value)
-
- # Parse response saving option
- if "save_response_opt" in parsed:
- opt_value = parsed.pop("save_response_opt")
- parsed["save_response"] = opt_value is not None
- parsed["response_path"] = NaminterConfig._parse_option_path(opt_value)
-
- # Convert mode string to WMNMode enum if needed
+ # Behavior/Output: Convert mode string to WMNMode enum if needed
if "mode" in parsed and isinstance(parsed["mode"], str):
parsed["mode"] = WMNMode(parsed["mode"])
+ # Export Formats: Parse export format options (separate boolean flags and paths)
+ for fmt in ["csv", "pdf", "html", "json"]:
+ flag_key = fmt
+ path_key = f"{fmt}_path"
+ if flag_key in parsed:
+ parsed[f"{fmt}_export"] = parsed.pop(flag_key)
+ if path_key in parsed:
+ parsed[f"{fmt}_path"] = parsed.pop(path_key)
+
# Convert boolean strings to actual booleans
bool_fields = [
"skip_validation",
@@ -192,16 +191,7 @@ def _parse_cli_kwargs(kwargs: dict[str, Any]) -> dict[str, Any]:
if field_name in parsed and not isinstance(parsed[field_name], bool):
parsed[field_name] = bool(parsed[field_name])
- return parsed
-
- def __post_init__(self) -> None:
- """Validate and normalize configuration after initialization."""
- self._validate_usernames()
- self._validate_mode()
- self._validate_sources()
- self._normalize_filters()
- self._normalize_impersonate()
- self._normalize_fingerprint()
+ return cls(**parsed)
def _validate_usernames(self) -> None:
"""Ensure usernames are provided when not running in test mode."""
@@ -231,7 +221,11 @@ def _validate_sources(self) -> None:
raise ConfigurationError(msg)
if not self.local_list_path and not self.remote_list_url:
- self.remote_list_url = WMN_REMOTE_URL
+ object.__setattr__(self, "remote_list_url", WMN_REMOTE_URL)
+
+ # Skip schema source validation if validation is disabled
+ if self.skip_validation:
+ return
# Validate schema sources
if self.local_schema_path and self.remote_schema_url != WMN_SCHEMA_URL:
@@ -242,7 +236,7 @@ def _validate_sources(self) -> None:
raise ConfigurationError(msg)
if not self.local_schema_path and not self.remote_schema_url:
- self.remote_schema_url = WMN_SCHEMA_URL
+ object.__setattr__(self, "remote_schema_url", WMN_SCHEMA_URL)
def _normalize_filters(self) -> None:
"""Normalize filter settings to ensure at least one filter is active."""
@@ -258,7 +252,7 @@ def _normalize_filters(self) -> None:
])
if not has_any_filter:
- self.filter_exists = True
+ object.__setattr__(self, "filter_exists", True)
def _normalize_impersonate(self) -> None:
"""Normalize impersonate setting to handle 'none' string value."""
@@ -266,7 +260,7 @@ def _normalize_impersonate(self) -> None:
isinstance(self.impersonate, str)
and self.impersonate.lower() == BROWSER_IMPERSONATE_NONE
):
- self.impersonate = None
+ object.__setattr__(self, "impersonate", None)
def _normalize_fingerprint(self) -> None:
"""Parse and normalize extra_fp from JSON string to dict if needed."""
@@ -275,35 +269,35 @@ def _normalize_fingerprint(self) -> None:
extra_fp_str = self.extra_fp.strip()
if not extra_fp_str:
- self.extra_fp = None
+ object.__setattr__(self, "extra_fp", None)
return
try:
- parsed = json.loads(extra_fp_str)
+ parsed = orjson.loads(extra_fp_str)
if not isinstance(parsed, dict):
msg = (
f"Invalid extra_fp format: expected JSON object, "
f"got {type(parsed).__name__}"
)
raise ConfigurationError(msg)
- self.extra_fp = parsed
- except json.JSONDecodeError as e:
+ object.__setattr__(self, "extra_fp", parsed)
+ except orjson.JSONDecodeError as e:
msg = f"Invalid JSON in extra_fp parameter: {e}"
raise ConfigurationError(msg) from e
@cached_property
- def response_dir(self) -> Path | None:
+ def response_dir_path(self) -> Path | None:
"""Return response directory Path if save_response is enabled."""
if not self.save_response:
return None
- if self.response_path:
- return Path(self.response_path)
+ if self.response_dir:
+ return Path(self.response_dir)
- return Path.cwd() / "responses"
+ return Path.cwd()
@cached_property
- def export_formats(self) -> dict[str, str | None]:
+ def export_formats(self) -> dict[str, Path | str | None]:
"""Return enabled export formats with their custom paths."""
export_configs = [
("csv", self.csv_export, self.csv_path),
diff --git a/naminter/cli/console.py b/naminter/cli/console.py
index c0c6ef3..ba48886 100644
--- a/naminter/cli/console.py
+++ b/naminter/cli/console.py
@@ -53,7 +53,7 @@ def _get_status_symbol(status: WMNStatus) -> str:
Returns:
Symbol character for the status.
"""
- return STATUS_SYMBOLS.get(status.value, "?")
+ return STATUS_SYMBOLS.get(status, "?")
def _get_status_style(status: WMNStatus) -> Style:
@@ -65,7 +65,7 @@ def _get_status_style(status: WMNStatus) -> Style:
Returns:
Rich Style object with appropriate color and formatting.
"""
- style_str = STATUS_STYLES.get(status.value, "white")
+ style_str = STATUS_STYLES.get(status, "white")
return Style.parse(style_str)
@@ -277,16 +277,24 @@ def display_success(message: str) -> None:
_display_message(message, THEME.success, "+", "SUCCESS")
-def display_validation_errors(errors: list[Any]) -> None:
+def display_errors(errors: list[Any], title: str | None = None) -> None:
"""Display validation errors in a formatted tree structure.
Args:
errors: List of validation errors to display.
+ title: Optional title to display above the errors.
"""
if not errors:
return
- root_label = Text()
+ if title:
+ root_label = Text()
+ root_label.append(f"{title} ", style=THEME.error)
+ root_label.append(f"({len(errors)})", style=THEME.muted)
+ else:
+ root_label = Text()
+ console.print()
+
tree = Tree(root_label, guide_style=THEME.muted, expanded=True)
for error in errors:
diff --git a/naminter/cli/constants.py b/naminter/cli/constants.py
index d3dc175..a938860 100644
--- a/naminter/cli/constants.py
+++ b/naminter/cli/constants.py
@@ -1,5 +1,7 @@
from typing import Final
+from naminter.core.models import WMNStatus
+
# Constants for file operations
RESPONSE_FILE_DATE_FORMAT: Final[str] = "%Y%m%d_%H%M%S"
RESPONSE_FILE_EXTENSION: Final[str] = ".html"
@@ -19,30 +21,29 @@
MAX_FILENAME_LENGTH: Final[int] = 200
# Status Display Configuration (for CLI/UI)
-# Symbol keys match WMNStatus enum values
-STATUS_SYMBOLS: Final[dict[str, str]] = {
- "exists": "+",
- "partial": "~",
- "conflicting": "*",
- "unknown": "?",
- "missing": "-",
- "not_valid": "X",
- "error": "!",
+# Symbol keys use WMNStatus enum members
+STATUS_SYMBOLS: Final[dict[WMNStatus, str]] = {
+ WMNStatus.EXISTS: "+",
+ WMNStatus.PARTIAL_EXISTS: "~",
+ WMNStatus.PARTIAL_MISSING: "~",
+ WMNStatus.CONFLICTING: "*",
+ WMNStatus.UNKNOWN: "?",
+ WMNStatus.MISSING: "-",
+ WMNStatus.NOT_VALID: "X",
+ WMNStatus.ERROR: "!",
}
-# Style keys match WMNStatus enum values
-STATUS_STYLES: Final[dict[str, str]] = {
- "exists": "bright_green bold",
- "partial": "bright_yellow",
- "conflicting": "bright_yellow bold",
- "unknown": "bright_yellow",
- "missing": "bright_red",
- "not_valid": "bright_red",
- "error": "bright_red bold",
+# Style keys use WMNStatus enum members
+STATUS_STYLES: Final[dict[WMNStatus, str]] = {
+ WMNStatus.EXISTS: "bright_green bold",
+ WMNStatus.PARTIAL_EXISTS: "bright_yellow",
+ WMNStatus.PARTIAL_MISSING: "bright_yellow",
+ WMNStatus.CONFLICTING: "bright_yellow bold",
+ WMNStatus.UNKNOWN: "bright_yellow",
+ WMNStatus.MISSING: "bright_red",
+ WMNStatus.NOT_VALID: "bright_red",
+ WMNStatus.ERROR: "bright_red bold",
}
# Export field ordering
HTML_FIELDS_ORDER: Final[list[str]] = ["name", "url", "elapsed"]
-
-# Option parsing
-OPTION_AUTO_VALUE: Final[str] = "__AUTO__"
diff --git a/naminter/cli/exporters.py b/naminter/cli/exporters.py
index 872707e..3a9a30f 100644
--- a/naminter/cli/exporters.py
+++ b/naminter/cli/exporters.py
@@ -2,22 +2,18 @@
from datetime import UTC, datetime
import importlib.resources
from io import StringIO
-import json
+import orjson
from pathlib import Path
from typing import Any, Literal, Protocol, get_args
import jinja2
-from weasyprint import HTML
+from weasyprint import HTML # type: ignore[import-untyped]
from naminter import __version__
from naminter.cli.constants import HTML_FIELDS_ORDER
from naminter.cli.exceptions import ExportError, FileError
from naminter.cli.utils import read_file, write_file
-from naminter.core.constants import (
- DEFAULT_JSON_ENSURE_ASCII,
- DEFAULT_JSON_INDENT,
- EMPTY_STRING,
-)
+from naminter.core.constants import EMPTY_STRING
from naminter.core.models import WMNResult, WMNTestResult
FormatName = Literal["json", "csv", "html", "pdf"]
@@ -130,16 +126,14 @@ async def _export_json(results: list[ResultDict], output_path: Path) -> None:
ExportError: If JSON serialization fails or unexpected error occurs.
"""
try:
- json_content = json.dumps(
- results,
- ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
- indent=DEFAULT_JSON_INDENT,
+ json_content = orjson.dumps(results, option=orjson.OPT_INDENT_2).decode(
+ "utf-8"
)
await write_file(output_path, json_content)
except FileError as e:
msg = f"File access error during JSON export: {e}"
raise ExportError(msg) from e
- except (TypeError, ValueError, RecursionError) as e:
+ except (TypeError, ValueError, RecursionError, orjson.JSONEncodeError) as e:
msg = f"JSON serialization error: {e}"
raise ExportError(msg) from e
except Exception as e:
@@ -233,6 +227,9 @@ async def _export_pdf(self, results: list[ResultDict], output_path: Path) -> Non
html = await self._generate_html(results)
weasyprint_html = HTML(string=html)
pdf_bytes = weasyprint_html.write_pdf()
+ if pdf_bytes is None:
+ msg = "PDF generation returned empty content"
+ raise ExportError(msg)
await write_file(output_path, pdf_bytes)
except FileError as e:
msg = f"File access error during PDF export: {e}"
@@ -256,5 +253,5 @@ def _resolve_path(format_name: FormatName, custom: str | Path | None) -> Path:
return Path(custom)
timestamp = datetime.now(UTC).strftime("%Y%m%d_%H%M%S")
- filename = f"results_{timestamp}.{format_name}"
+ filename = f"results_{timestamp}_UTC.{format_name}"
return Path.cwd() / filename
diff --git a/naminter/cli/main.py b/naminter/cli/main.py
index d254601..20e6da6 100644
--- a/naminter/cli/main.py
+++ b/naminter/cli/main.py
@@ -1,10 +1,15 @@
-import asyncio
+from functools import wraps
import logging
from pathlib import Path
-from typing import Any, Final, get_args
+from typing import TYPE_CHECKING, Any, Final, cast, get_args
+import uvloop
from curl_cffi import BrowserTypeLiteral
import rich_click as click
+from pathvalidate.click import validate_filepath_arg
+
+if TYPE_CHECKING:
+ from curl_cffi import ExtraFingerprints
from naminter.cli.config import NaminterConfig
from naminter.cli.console import (
@@ -12,14 +17,13 @@
console,
display_diff,
display_error,
- display_validation_errors,
+ display_errors,
display_version,
display_warning,
)
from naminter.cli.constants import (
EXIT_CODE_ERROR,
EXIT_CODE_INTERRUPTED,
- OPTION_AUTO_VALUE,
)
from naminter.cli.exceptions import (
BrowserError,
@@ -59,7 +63,13 @@
)
from naminter.core.formatter import WMNFormatter
from naminter.core.main import Naminter
-from naminter.core.models import WMNMode, WMNResult, WMNStatus, WMNTestResult
+from naminter.core.models import (
+ WMNDataset,
+ WMNMode,
+ WMNResult,
+ WMNStatus,
+ WMNTestResult,
+)
from naminter.core.network import CurlCFFISession
from naminter.core.validator import WMNValidator
@@ -93,7 +103,8 @@ def _create_status_filters(self) -> dict[WMNStatus, bool]:
"""Create status filter mapping from config."""
return {
WMNStatus.EXISTS: self._config.filter_exists,
- WMNStatus.PARTIAL: self._config.filter_partial,
+ WMNStatus.PARTIAL_EXISTS: self._config.filter_partial,
+ WMNStatus.PARTIAL_MISSING: self._config.filter_partial,
WMNStatus.CONFLICTING: self._config.filter_conflicting,
WMNStatus.UNKNOWN: self._config.filter_unknown,
WMNStatus.MISSING: self._config.filter_missing,
@@ -110,28 +121,24 @@ def _setup_response_dir(self) -> Path | None:
if not self._config.save_response:
return None
- dir_path = self._config.response_dir
- if not dir_path:
- display_warning("Response saving enabled but no directory configured")
- self._config.save_response = False
+ dir_path = self._config.response_dir_path
+ if dir_path is None:
return None
try:
dir_path.mkdir(parents=True, exist_ok=True)
except PermissionError as e:
display_warning(
- f"Permission denied creating response directory, disabling: {e}",
+ f"Permission denied creating response directory: {e}",
)
- self._config.save_response = False
return None
except OSError as e:
display_warning(
- f"OS error creating response directory, disabling: {e}",
+ f"OS error creating response directory: {e}",
)
- self._config.save_response = False
return None
- else:
- return dir_path
+
+ return dir_path
@staticmethod
def setup_logging(config: NaminterConfig) -> None:
@@ -182,30 +189,36 @@ async def run(self) -> None:
verify=self._config.verify_ssl,
timeout=self._config.timeout,
allow_redirects=self._config.allow_redirects,
- impersonate=self._config.impersonate,
+ impersonate=cast("BrowserTypeLiteral | None", self._config.impersonate),
ja3=self._config.ja3,
akamai=self._config.akamai,
- extra_fp=self._config.extra_fp,
+ extra_fp=cast("ExtraFingerprints | None", self._config.extra_fp),
) as http_client:
wmn_data: dict[str, Any] | None = None
if self._config.local_list_path:
wmn_data = await read_json(self._config.local_list_path)
elif self._config.remote_list_url:
- wmn_data = await fetch_json(http_client, self._config.remote_list_url)
+ wmn_data = cast(
+ "dict[str, Any]",
+ await fetch_json(http_client, self._config.remote_list_url),
+ )
wmn_schema: dict[str, Any] | None = None
if not self._config.skip_validation:
if self._config.local_schema_path:
wmn_schema = await read_json(self._config.local_schema_path)
elif self._config.remote_schema_url:
- wmn_schema = await fetch_json(
- http_client,
- self._config.remote_schema_url,
+ wmn_schema = cast(
+ "dict[str, Any]",
+ await fetch_json(
+ http_client,
+ self._config.remote_schema_url,
+ ),
)
async with Naminter(
http_client=http_client,
- wmn_data=wmn_data,
+ wmn_data=cast("WMNDataset | None", wmn_data),
wmn_schema=wmn_schema,
max_tasks=self._config.max_tasks,
) as naminter:
@@ -216,7 +229,12 @@ async def run(self) -> None:
if self._config.export_formats and results:
exporter = Exporter(self._config.usernames or [])
- await exporter.export(results, self._config.export_formats)
+ await exporter.export(
+ cast("list[WMNResult | WMNTestResult]", results),
+ cast(
+ "dict[Any, str | Path | None]", self._config.export_formats
+ ),
+ )
async def _run_check(self, naminter: Naminter) -> list[WMNResult]:
"""Run the username enumeration functionality."""
@@ -246,12 +264,13 @@ async def _run_check(self, naminter: Naminter) -> list[WMNResult]:
include_categories=self._config.include_categories,
exclude_categories=self._config.exclude_categories,
mode=self._config.mode,
+ exclude_text=not self._config.save_response,
):
progress_bar.add_result(result)
if self._filter_result(result):
try:
- file_path = await self._save_response_file(result)
+ file_path = await self._save_response(result)
await self._open_in_browser(result, file_path)
formatted_output = self._formatter.format_result(result, file_path)
console.print(formatted_output)
@@ -290,6 +309,7 @@ async def _run_validation(self, naminter: Naminter) -> list[WMNTestResult]:
include_categories=self._config.include_categories,
exclude_categories=self._config.exclude_categories,
mode=self._config.mode,
+ exclude_text=not self._config.save_response,
):
if result.results:
for site_result in result.results:
@@ -300,7 +320,7 @@ async def _run_validation(self, naminter: Naminter) -> list[WMNTestResult]:
response_files: list[Path | None] = []
if result.results:
for site_result in result.results:
- file_path = await self._save_response_file(site_result)
+ file_path = await self._save_response(site_result)
await self._open_in_browser(site_result, file_path)
response_files.append(file_path)
formatted_output = self._formatter.format_validation(
@@ -338,15 +358,12 @@ async def _open_in_browser(self, result: WMNResult, file_path: Path | None) -> N
display_error(f"Browser error opening {result.url}: {e}")
if self._config.open_response and file_path:
- file_uri = await asyncio.to_thread(
- lambda: file_path.resolve().as_uri(), # noqa: ASYNC240
- )
try:
- await open_url(file_uri)
+ await open_url(file_path)
except BrowserError as e:
- display_error(f"Browser error opening response file {file_uri}: {e}")
+ display_error(f"Browser error opening response file {file_path}: {e}")
- async def _save_response_file(self, result: WMNResult) -> Path | None:
+ async def _save_response(self, result: WMNResult) -> Path | None:
"""Save HTTP response to file if configured."""
if not self._config.save_response:
return None
@@ -374,17 +391,60 @@ def _handle_cli_error(ctx: click.Context, error: BaseException) -> None:
error: The exception that was raised.
"""
if isinstance(error, WMNValidationError):
- display_error(str(error), end="" if error.errors else "\n")
- if error.errors:
- display_validation_errors(error.errors)
+ display_error(str(error), end="")
+ if error.schema_errors:
+ display_errors(error.schema_errors, "Schema Errors")
+ if error.dataset_errors:
+ display_errors(error.dataset_errors, "Dataset Errors")
elif isinstance(error, CLIError):
display_error(str(error))
else:
- display_error(f"Unexpected error: {error}")
+ display_error(str(error))
ctx.exit(EXIT_CODE_ERROR)
+def handle_cli_errors(func: Any) -> Any:
+ """Decorator to centralize CLI error handling.
+
+ Handles KeyboardInterrupt and common CLI exceptions for Click commands.
+ The decorated function must accept `ctx` as its first parameter.
+
+ Args:
+ func: The Click command function to wrap.
+
+ Returns:
+ Wrapped function with error handling.
+ """
+
+ @wraps(func)
+ def wrapper(ctx: click.Context, *args: Any, **kwargs: Any) -> Any:
+ try:
+ return func(ctx, *args, **kwargs)
+ except KeyboardInterrupt:
+ display_warning("Operation interrupted")
+ ctx.exit(EXIT_CODE_INTERRUPTED)
+ except (
+ ConfigurationError,
+ ValidationError,
+ FileError,
+ NetworkError,
+ HttpError,
+ WMNFormatError,
+ WMNValidationError,
+ WMNDataError,
+ BrowserError,
+ ExportError,
+ CLIError,
+ ) as e:
+ _handle_cli_error(ctx, e)
+ except Exception as e:
+ display_error(f"Unexpected error: {type(e).__name__}: {e}")
+ ctx.exit(EXIT_CODE_ERROR)
+
+ return wrapper
+
+
@click.group(
invoke_without_command=True,
no_args_is_help=True,
@@ -478,7 +538,7 @@ def _handle_cli_error(ctx: click.Context, error: BaseException) -> None:
)
@click.option(
"--timeout",
- type=int,
+ type=click.IntRange(1, 300),
default=HTTP_REQUEST_TIMEOUT_SECONDS,
help="Maximum time in seconds to wait for each HTTP request",
)
@@ -490,7 +550,7 @@ def _handle_cli_error(ctx: click.Context, error: BaseException) -> None:
@click.option(
"--verify-ssl/--no-verify-ssl",
default=HTTP_SSL_VERIFY,
- help="Whether to verify SSL/TLS certificates for HTTPS requests",
+ help="Verify SSL certificates",
)
@click.option(
"--impersonate",
@@ -515,7 +575,7 @@ def _handle_cli_error(ctx: click.Context, error: BaseException) -> None:
# Concurrency & Debugging
@click.option(
"--max-tasks",
- type=int,
+ type=click.IntRange(1, 1000),
default=MAX_CONCURRENT_TASKS,
help="Maximum number of concurrent tasks",
)
@@ -523,61 +583,77 @@ def _handle_cli_error(ctx: click.Context, error: BaseException) -> None:
"--mode",
type=click.Choice([WMNMode.ANY.value, WMNMode.ALL.value]),
default=WMNMode.ALL.value,
- help="Validation mode: 'all' for strict (AND), 'any' for fuzzy (OR)",
+ help="Validation mode: all or any",
)
@click.option(
"--log-level",
type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]),
help="Set logging level",
)
-@click.option("--log-file", help="Path to log file for debug output")
+@click.option("--log-file", help="Path to log file")
# Response Handling
@click.option(
"--save-response",
- "save_response_opt",
- type=str,
- flag_value=OPTION_AUTO_VALUE,
+ is_flag=True,
+ help="Save HTTP responses",
+)
+@click.option(
+ "--response-dir",
+ callback=validate_filepath_arg,
+ type=click.Path(file_okay=False, dir_okay=True),
default=None,
- help="Save HTTP responses; optionally specify directory path",
+ help="Custom directory for responses",
)
@click.option(
"--open-response",
is_flag=True,
- help="Open saved response files in web browser",
+ help="Open response files in browser",
)
@click.option("--browse", is_flag=True, help="Open found profiles in web browser")
# Export Options
@click.option(
"--csv",
- "csv_opt",
- type=str,
- flag_value=OPTION_AUTO_VALUE,
+ is_flag=True,
+ help="Export results to CSV",
+)
+@click.option(
+ "--csv-path",
+ callback=validate_filepath_arg,
default=None,
- help="Export results to CSV; optionally specify a custom path",
+ help="CSV export file path",
)
@click.option(
"--json",
- "json_opt",
- type=str,
- flag_value=OPTION_AUTO_VALUE,
+ is_flag=True,
+ help="Export results to JSON",
+)
+@click.option(
+ "--json-path",
+ callback=validate_filepath_arg,
default=None,
- help="Export results to JSON; optionally specify a custom path",
+ help="JSON export file path",
)
@click.option(
"--html",
- "html_opt",
- type=str,
- flag_value=OPTION_AUTO_VALUE,
+ is_flag=True,
+ help="Export results to HTML",
+)
+@click.option(
+ "--html-path",
+ callback=validate_filepath_arg,
default=None,
- help="Export results to HTML; optionally specify a custom path",
+ help="HTML export file path",
)
@click.option(
"--pdf",
- "pdf_opt",
- type=str,
- flag_value=OPTION_AUTO_VALUE,
+ is_flag=True,
+ help="Export results to PDF",
+)
+@click.option(
+ "--pdf-path",
+ callback=validate_filepath_arg,
default=None,
- help="Export results to PDF; optionally specify a custom path",
+ help="PDF export file path",
)
# Result Filtering
@click.option(
@@ -621,6 +697,7 @@ def _handle_cli_error(ctx: click.Context, error: BaseException) -> None:
help="Show only error results in console output and exports",
)
@click.pass_context
+@handle_cli_errors
def main(ctx: click.Context, **kwargs: dict[str, Any]) -> None:
"""A Python package and CLI tool for asynchronous OSINT username enumeration.
@@ -633,27 +710,10 @@ def main(ctx: click.Context, **kwargs: dict[str, Any]) -> None:
if kwargs.get("no_color"):
console.no_color = True
- try:
- config = NaminterConfig(**kwargs)
- NaminterCLI.setup_logging(config)
- naminter_cli = NaminterCLI(config)
- asyncio.run(naminter_cli.run())
- except KeyboardInterrupt:
- display_warning("Operation interrupted")
- ctx.exit(EXIT_CODE_INTERRUPTED)
- except (
- ConfigurationError,
- ValidationError,
- FileError,
- NetworkError,
- HttpError,
- WMNValidationError,
- WMNDataError,
- BrowserError,
- ExportError,
- CLIError,
- ) as e:
- _handle_cli_error(ctx, e)
+ config = NaminterConfig.from_click(**kwargs)
+ NaminterCLI.setup_logging(config)
+ naminter_cli = NaminterCLI(config)
+ uvloop.run(naminter_cli.run())
@main.command(name="validate")
@@ -671,6 +731,7 @@ def main(ctx: click.Context, **kwargs: dict[str, Any]) -> None:
)
@click.option("--no-color", is_flag=True, help="Disable colored console output")
@click.pass_context
+@handle_cli_errors
def validator_command(
ctx: click.Context,
local_schema: Path,
@@ -684,43 +745,26 @@ def validator_command(
async def run_validator() -> None:
"""Run validation asynchronously."""
- try:
- schema = await read_json(local_schema)
- data = await read_json(local_data)
-
- validator = WMNValidator(schema)
- errors = validator.validate(data)
-
- if errors:
- display_validation_errors(errors)
- ctx.exit(EXIT_CODE_ERROR)
- else:
- console.print(
- "[green]+ [Validator] Validation passed: No errors found[/green]",
- )
- except (
- FileError,
- WMNValidationError,
- WMNDataError,
- ) as e:
- _handle_cli_error(ctx, e)
+ schema = await read_json(local_schema)
+ data = await read_json(local_data)
+ wmn_data = cast("WMNDataset", data)
+
+ validator = WMNValidator(schema)
+ schema_errors = validator.validate_schema(wmn_data)
+ dataset_errors = WMNValidator.validate_dataset(wmn_data)
+
+ if schema_errors or dataset_errors:
+ if schema_errors:
+ display_errors(schema_errors, "Schema Errors")
+ if dataset_errors:
+ display_errors(dataset_errors, "Dataset Errors")
+ ctx.exit(EXIT_CODE_ERROR)
+
+ console.print(
+ "[green]+ [Validator] Validation passed: No errors found[/green]",
+ )
- try:
- asyncio.run(run_validator())
- except KeyboardInterrupt:
- display_warning("Operation interrupted")
- ctx.exit(EXIT_CODE_INTERRUPTED)
- except (
- ConfigurationError,
- ValidationError,
- FileError,
- WMNValidationError,
- WMNDataError,
- BrowserError,
- ExportError,
- CLIError,
- ) as e:
- _handle_cli_error(ctx, e)
+ uvloop.run(run_validator())
@main.command(name="format")
@@ -739,11 +783,12 @@ async def run_validator() -> None:
@click.option(
"--output",
"-o",
- type=click.Path(path_type=Path),
+ callback=validate_filepath_arg,
help="Output file path (defaults to overwriting input file)",
)
@click.option("--no-color", is_flag=True, help="Disable colored console output")
@click.pass_context
+@handle_cli_errors
def format_command(
ctx: click.Context,
local_schema: Path,
@@ -758,54 +803,27 @@ def format_command(
async def run_formatter() -> None:
"""Run formatting asynchronously."""
- try:
- schema_data = await read_json(local_schema)
- data = await read_json(local_data)
+ schema_data = await read_json(local_schema)
+ data = await read_json(local_data)
- original_content = await read_file(local_data)
+ original_content = await read_file(local_data)
- formatter = WMNFormatter(schema_data)
- formatted_content = formatter.format_dataset(data)
+ formatter = WMNFormatter(schema_data)
+ formatted_content = formatter.format_dataset(cast("WMNDataset", data))
- output_path = output or local_data
+ output_path = output or local_data
- if original_content != formatted_content:
- await write_file(output_path, formatted_content)
- display_diff(original_content, formatted_content, output_path)
- msg = (
- f"[green]+ [Formatter] Formatted data written to: "
- f"{output_path}[/green]"
- )
- console.print(msg)
- else:
- console.print("[green]+ [Formatter] Data is already formatted[/green]")
- except (
- FileError,
- WMNFormatError,
- WMNValidationError,
- WMNDataError,
- ) as e:
- _handle_cli_error(ctx, e)
+ if original_content != formatted_content:
+ await write_file(output_path, formatted_content)
+ display_diff(original_content, formatted_content, output_path)
+ msg = (
+ f"[green]+ [Formatter] Formatted data written to: {output_path}[/green]"
+ )
+ console.print(msg)
+ else:
+ console.print("[green]+ [Formatter] Data is already formatted[/green]")
- try:
- asyncio.run(run_formatter())
- except KeyboardInterrupt:
- display_warning("Operation interrupted")
- ctx.exit(EXIT_CODE_INTERRUPTED)
- except (
- ConfigurationError,
- ValidationError,
- FileError,
- NetworkError,
- HttpError,
- WMNFormatError,
- WMNValidationError,
- WMNDataError,
- BrowserError,
- ExportError,
- CLIError,
- ) as e:
- _handle_cli_error(ctx, e)
+ uvloop.run(run_formatter())
def entry_point() -> None:
diff --git a/naminter/cli/progress.py b/naminter/cli/progress.py
index 3818378..a679b86 100644
--- a/naminter/cli/progress.py
+++ b/naminter/cli/progress.py
@@ -1,3 +1,4 @@
+from collections import defaultdict
import time
from types import TracebackType
@@ -36,7 +37,7 @@ def __init__(self, console: Console, *, disabled: bool = False) -> None:
self.total_sites: int = 0
self.results_count: int = 0
self.start_time: float | None = None
- self.status_counts: dict[WMNStatus, int] = dict.fromkeys(WMNStatus, 0)
+ self.status_counts: dict[WMNStatus, int] = defaultdict(int)
def add_result(self, result: WMNResult) -> None:
"""Update counters with a new result and refresh progress display."""
@@ -52,7 +53,8 @@ def _get_progress_text(self) -> str:
elapsed = time.time() - self.start_time if self.start_time else 0.0
exists = self.status_counts[WMNStatus.EXISTS]
- partial = self.status_counts[WMNStatus.PARTIAL]
+ partial_exists = self.status_counts[WMNStatus.PARTIAL_EXISTS]
+ partial_missing = self.status_counts[WMNStatus.PARTIAL_MISSING]
conflicting = self.status_counts[WMNStatus.CONFLICTING]
unknown = self.status_counts[WMNStatus.UNKNOWN]
missing = self.status_counts[WMNStatus.MISSING]
@@ -64,29 +66,33 @@ def _get_progress_text(self) -> str:
sections = [
f"[{THEME.primary}]{rate:.1f} req/s[/]",
- f"[{THEME.success}]{STATUS_SYMBOLS['exists']} {exists}[/]",
- f"[{THEME.error}]{STATUS_SYMBOLS['missing']} {missing}[/]",
+ f"[{THEME.success}]{STATUS_SYMBOLS[WMNStatus.EXISTS]} {exists}[/]",
+ f"[{THEME.error}]{STATUS_SYMBOLS[WMNStatus.MISSING]} {missing}[/]",
]
if unknown > 0:
sections.append(
- f"[{THEME.warning}]{STATUS_SYMBOLS['unknown']} {unknown}[/]",
+ f"[{THEME.warning}]{STATUS_SYMBOLS[WMNStatus.UNKNOWN]} {unknown}[/]",
)
- if partial > 0:
+ if partial_exists > 0:
sections.append(
- f"[{THEME.warning}]{STATUS_SYMBOLS['partial']} {partial}[/]",
+ f"[{THEME.warning}]{STATUS_SYMBOLS[WMNStatus.PARTIAL_EXISTS]} ~E {partial_exists}[/]",
+ )
+ if partial_missing > 0:
+ sections.append(
+ f"[{THEME.warning}]{STATUS_SYMBOLS[WMNStatus.PARTIAL_MISSING]} ~M {partial_missing}[/]",
)
if conflicting > 0:
sections.append(
- f"[{THEME.warning}]{STATUS_SYMBOLS['conflicting']} {conflicting}[/]",
+ f"[{THEME.warning}]{STATUS_SYMBOLS[WMNStatus.CONFLICTING]} {conflicting}[/]",
)
if errors > 0:
sections.append(
- f"[{THEME.error}]{STATUS_SYMBOLS['error']} {errors}[/]",
+ f"[{THEME.error}]{STATUS_SYMBOLS[WMNStatus.ERROR]} {errors}[/]",
)
if not_valid > 0:
sections.append(
- f"[{THEME.warning}]{STATUS_SYMBOLS['not_valid']} {not_valid}[/]",
+ f"[{THEME.warning}]{STATUS_SYMBOLS[WMNStatus.NOT_VALID]} {not_valid}[/]",
)
sections.append(f"[{THEME.primary}]{self.results_count}/{self.total_sites}[/]")
diff --git a/naminter/cli/utils.py b/naminter/cli/utils.py
index 44812c8..9ea5ee5 100644
--- a/naminter/cli/utils.py
+++ b/naminter/cli/utils.py
@@ -1,15 +1,15 @@
import asyncio
-import json
+import orjson
from pathlib import Path
from typing import Any
import webbrowser
import aiofiles
+from pathvalidate import sanitize_filename
from naminter.cli.constants import (
DEFAULT_UNNAMED_VALUE,
MAX_FILENAME_LENGTH,
- OPTION_AUTO_VALUE,
RESPONSE_FILE_DATE_FORMAT,
RESPONSE_FILE_EXTENSION,
)
@@ -19,75 +19,13 @@
NetworkError,
ValidationError,
)
-from naminter.core.constants import (
- ASCII_CONTROL_CHAR_THRESHOLD,
- DEFAULT_FILE_ENCODING,
- EMPTY_STRING,
- HTTP_STATUS_OK,
-)
+from naminter.core.constants import DEFAULT_FILE_ENCODING
from naminter.core.exceptions import HttpError
from naminter.core.models import WMNResult
from naminter.core.network import BaseSession
-# Option parsing utilities
-def parse_option_path(option_value: str | None) -> str | None:
- """Parse export/response option value, returning None for auto or unset.
-
- Args:
- option_value: The option value to parse. Can be None, OPTION_AUTO_VALUE, or
- a path string.
-
- Returns:
- None if the option is unset or set to auto mode, otherwise the path string.
- """
- if option_value in {None, OPTION_AUTO_VALUE}:
- return None
- return option_value
-
-
# Filename utilities
-def sanitize_filename(filename: str) -> str | None:
- """Sanitize filename for cross-platform compatibility.
-
- Removes or replaces invalid characters that are not allowed in filenames
- on various operating systems (Windows, macOS, Linux).
-
- Args:
- filename: The filename to sanitize.
-
- Returns:
- A sanitized filename safe for all platforms, or None if invalid.
-
- Raises:
- ValidationError: If filename cannot be converted to string.
- """
- if not filename:
- return None
-
- try:
- filename_str = str(filename).strip()
- except (TypeError, ValueError) as e:
- msg = f"Failed to convert filename to string: {e}"
- raise ValidationError(msg) from e
-
- if not filename_str:
- return None
-
- invalid_chars = '<>:"|?*\\/\0'
- translation_table = str.maketrans(invalid_chars, "_" * len(invalid_chars))
- sanitized = EMPTY_STRING.join(
- "_" if ord(c) < ASCII_CONTROL_CHAR_THRESHOLD else c
- for c in filename_str.translate(translation_table)
- )
- sanitized = sanitized.strip(" .")
-
- if len(sanitized) > MAX_FILENAME_LENGTH:
- sanitized = sanitized[:MAX_FILENAME_LENGTH].rstrip(" .")
-
- return sanitized or None
-
-
def get_response_filename(result: WMNResult) -> str:
"""Generate a sanitized filename for saving response data.
@@ -101,8 +39,18 @@ def get_response_filename(result: WMNResult) -> str:
ValidationError: If WMNResult is missing required attributes.
"""
try:
- safe_site_name = sanitize_filename(result.name) or DEFAULT_UNNAMED_VALUE
- safe_username = sanitize_filename(result.username) or DEFAULT_UNNAMED_VALUE
+ safe_site_name = (
+ sanitize_filename(
+ str(result.name or "").strip(), max_len=MAX_FILENAME_LENGTH
+ )
+ or DEFAULT_UNNAMED_VALUE
+ )
+ safe_username = (
+ sanitize_filename(
+ str(result.username or "").strip(), max_len=MAX_FILENAME_LENGTH
+ )
+ or DEFAULT_UNNAMED_VALUE
+ )
status_str = result.status.value
created_at_str = result.created_at.strftime(RESPONSE_FILE_DATE_FORMAT)
status_code = result.status_code
@@ -113,7 +61,10 @@ def get_response_filename(result: WMNResult) -> str:
base_name = (
f"{status_str}_{status_code}_{safe_site_name}_{safe_username}_{created_at_str}"
)
- safe_base_name = sanitize_filename(base_name) or DEFAULT_UNNAMED_VALUE
+ safe_base_name = (
+ sanitize_filename(base_name, max_len=MAX_FILENAME_LENGTH)
+ or DEFAULT_UNNAMED_VALUE
+ )
return f"{safe_base_name}{RESPONSE_FILE_EXTENSION}"
@@ -175,13 +126,10 @@ async def read_json(path: str | Path) -> dict[str, Any]:
"""
content = await read_file(path)
try:
- return json.loads(content)
- except json.JSONDecodeError as e:
+ return orjson.loads(content)
+ except orjson.JSONDecodeError as e:
path_obj = Path(path)
- msg = (
- f"Invalid JSON in file {path_obj} at line {e.lineno}, "
- f"column {e.colno}: {e.msg}"
- )
+ msg = f"Invalid JSON in file {path_obj} at position {e.pos}: {e.msg}"
raise FileError(msg) from e
@@ -204,6 +152,9 @@ async def write_file(file_path: str | Path, data: str | bytes) -> None:
try:
path_obj.parent.mkdir(parents=True, exist_ok=True)
+ except FileExistsError as e:
+ msg = f"Cannot create directory, file exists at path: {path_obj.parent}"
+ raise FileError(msg) from e
except PermissionError as e:
msg = f"Permission denied creating directory for {path_obj}"
raise FileError(msg) from e
@@ -212,10 +163,12 @@ async def write_file(file_path: str | Path, data: str | bytes) -> None:
raise FileError(msg) from e
try:
- mode = "wb" if isinstance(data, bytes) else "w"
- encoding = None if isinstance(data, bytes) else DEFAULT_FILE_ENCODING
- async with aiofiles.open(path_obj, mode=mode, encoding=encoding) as f:
- await f.write(data)
+ if isinstance(data, bytes):
+ async with aiofiles.open(path_obj, mode="wb") as f:
+ await f.write(data)
+ else:
+ async with aiofiles.open(path_obj, mode="w", encoding=DEFAULT_FILE_ENCODING) as f:
+ await f.write(data)
except PermissionError as e:
msg = f"Permission denied writing to {path_obj}"
raise FileError(msg) from e
@@ -228,7 +181,7 @@ async def write_file(file_path: str | Path, data: str | bytes) -> None:
# Network operations
-async def fetch_json(http_client: BaseSession, url: str) -> dict[str, Any]:
+async def fetch_json(http_client: BaseSession, url: str) -> dict[str, Any] | list[Any]:
"""Fetch and parse JSON from a URL.
Args:
@@ -236,7 +189,7 @@ async def fetch_json(http_client: BaseSession, url: str) -> dict[str, Any]:
url: URL to fetch JSON from.
Returns:
- Parsed JSON data as dictionary.
+ Parsed JSON data as dictionary or list.
Raises:
ValidationError: If http_client or url is missing or invalid.
@@ -253,45 +206,47 @@ async def fetch_json(http_client: BaseSession, url: str) -> dict[str, Any]:
msg = f"Network error fetching {url_stripped}: {e}"
raise NetworkError(msg) from e
- if response.status_code != HTTP_STATUS_OK:
- msg = f"Failed to fetch from {url_stripped}: HTTP {response.status_code}"
- raise NetworkError(msg)
-
if not response.text or not response.text.strip():
msg = f"Empty response from {url_stripped}"
raise NetworkError(msg)
try:
- return response.json()
- except (ValueError, json.JSONDecodeError) as e:
+ result = response.json()
+ if not isinstance(result, (dict, list)):
+ msg = f"Unexpected JSON type from {url_stripped}: expected dict or list"
+ raise NetworkError(msg)
+ return result
+ except (ValueError, orjson.JSONDecodeError) as e:
msg = f"Failed to parse JSON from {url_stripped}: {e}"
raise NetworkError(msg) from e
# Browser operations
-async def open_url(url: str) -> None:
+async def open_url(url: str | Path) -> None:
"""Open a URL in the browser with error handling.
Args:
- url: URL to open in the default browser.
+ url: URL string or Path to open in the default browser. Paths are converted
+ to file URIs automatically.
Raises:
ValidationError: If url is missing or invalid.
BrowserError: For any issue with the browser operation.
"""
- url_stripped = url.strip() if url else ""
- if not url_stripped:
+ if isinstance(url, Path):
+ url_str = url.resolve().as_uri()
+ else:
+ url_str = url.strip() if url else ""
+
+ if not url_str:
msg = "URL is required and cannot be empty"
raise ValidationError(msg)
try:
- await asyncio.to_thread(webbrowser.open, url_stripped)
+ await asyncio.to_thread(webbrowser.open, url_str)
except webbrowser.Error as e:
- msg = f"Browser error opening {url_stripped}: {e}"
+ msg = f"Browser error opening {url_str}: {e}"
raise BrowserError(msg) from e
except OSError as e:
- msg = f"OS error opening browser for {url_stripped}: {e}"
- raise BrowserError(msg) from e
- except Exception as e:
- msg = f"Unexpected error opening browser for {url_stripped}: {e}"
+ msg = f"OS error opening browser for {url_str}: {e}"
raise BrowserError(msg) from e
diff --git a/naminter/core/constants.py b/naminter/core/constants.py
index 8445830..faad76d 100644
--- a/naminter/core/constants.py
+++ b/naminter/core/constants.py
@@ -27,11 +27,11 @@
ACCOUNT_PLACEHOLDER: Final[str] = "{account}"
# WMN Dataset Structure Keys
-WMN_KEY_SITES: Final[str] = "sites"
-WMN_KEY_CATEGORIES: Final[str] = "categories"
-WMN_KEY_AUTHORS: Final[str] = "authors"
-WMN_KEY_LICENSE: Final[str] = "license"
-WMN_KEY_NAME: Final[str] = "name"
+WMN_KEY_SITES: Final[Literal["sites"]] = "sites"
+WMN_KEY_CATEGORIES: Final[Literal["categories"]] = "categories"
+WMN_KEY_AUTHORS: Final[Literal["authors"]] = "authors"
+WMN_KEY_LICENSE: Final[Literal["license"]] = "license"
+WMN_KEY_NAME: Final[Literal["name"]] = "name"
WMN_LIST_FIELDS: Final[tuple[str, ...]] = (
WMN_KEY_SITES,
@@ -41,18 +41,18 @@
)
# Site Object Structure Keys
-SITE_KEY_NAME: Final[str] = "name"
-SITE_KEY_CATEGORY: Final[str] = "cat"
-SITE_KEY_URI_CHECK: Final[str] = "uri_check"
-SITE_KEY_URI_PRETTY: Final[str] = "uri_pretty"
-SITE_KEY_HEADERS: Final[str] = "headers"
-SITE_KEY_POST_BODY: Final[str] = "post_body"
-SITE_KEY_STRIP_BAD_CHAR: Final[str] = "strip_bad_char"
-SITE_KEY_E_CODE: Final[str] = "e_code"
-SITE_KEY_E_STRING: Final[str] = "e_string"
-SITE_KEY_M_STRING: Final[str] = "m_string"
-SITE_KEY_M_CODE: Final[str] = "m_code"
-SITE_KEY_KNOWN: Final[str] = "known"
+SITE_KEY_NAME: Final[Literal["name"]] = "name"
+SITE_KEY_CATEGORY: Final[Literal["cat"]] = "cat"
+SITE_KEY_URI_CHECK: Final[Literal["uri_check"]] = "uri_check"
+SITE_KEY_URI_PRETTY: Final[Literal["uri_pretty"]] = "uri_pretty"
+SITE_KEY_HEADERS: Final[Literal["headers"]] = "headers"
+SITE_KEY_POST_BODY: Final[Literal["post_body"]] = "post_body"
+SITE_KEY_STRIP_BAD_CHAR: Final[Literal["strip_bad_char"]] = "strip_bad_char"
+SITE_KEY_E_CODE: Final[Literal["e_code"]] = "e_code"
+SITE_KEY_E_STRING: Final[Literal["e_string"]] = "e_string"
+SITE_KEY_M_STRING: Final[Literal["m_string"]] = "m_string"
+SITE_KEY_M_CODE: Final[Literal["m_code"]] = "m_code"
+SITE_KEY_KNOWN: Final[Literal["known"]] = "known"
# JSON Configuration
DEFAULT_JSON_INDENT: Final[int] = 2
@@ -69,13 +69,11 @@
DEFAULT_UNKNOWN_VALUE: Final[str] = "unknown"
EMPTY_STRING: Final[str] = ""
-# Character constants
-ASCII_CONTROL_CHAR_THRESHOLD: Final[int] = 32
-
-# HTTP Status codes
-HTTP_STATUS_OK: Final[int] = 200
-
# HTTP Methods
-HTTP_METHOD_GET: Final[str] = "GET"
-HTTP_METHOD_POST: Final[str] = "POST"
HttpMethod = Literal["GET", "POST"]
+HTTP_METHOD_GET: Final[HttpMethod] = "GET"
+HTTP_METHOD_POST: Final[HttpMethod] = "POST"
+
+# HTTP Status Code Ranges
+HTTP_STATUS_CODE_MIN: Final[int] = 100
+HTTP_STATUS_CODE_MAX: Final[int] = 599
diff --git a/naminter/core/exceptions.py b/naminter/core/exceptions.py
index 0116d5c..5cffd06 100644
--- a/naminter/core/exceptions.py
+++ b/naminter/core/exceptions.py
@@ -116,20 +116,25 @@ class WMNSchemaError(WMNDataError):
class WMNValidationError(WMNDataError):
- """Raised when WMN dataset does not conform to the provided JSON Schema.
+ """Raised when WMN dataset validation fails.
Attributes:
- errors: Structured list of validation errors to display/inspect.
+ schema_errors: List of JSON schema validation errors.
+ dataset_errors: List of custom dataset validation errors
+ (license, authors, categories, duplicates, and site configurations).
"""
def __init__(
self,
message: str,
- errors: list[Any] | None = None,
+ schema_errors: list[Any] | None = None,
+ dataset_errors: list[Any] | None = None,
cause: Exception | None = None,
) -> None:
super().__init__(message, cause)
- self.errors: list[Any] = errors or []
+ self.schema_errors: list[Any] = schema_errors or []
+ self.dataset_errors: list[Any] = dataset_errors or []
+
class WMNArgumentError(WMNDataError):
diff --git a/naminter/core/formatter.py b/naminter/core/formatter.py
index a6f952c..af61bd8 100644
--- a/naminter/core/formatter.py
+++ b/naminter/core/formatter.py
@@ -1,16 +1,15 @@
from collections.abc import Mapping, Sequence
-import json
-from typing import Any
+import orjson
+from typing import Any, cast
from naminter.core.constants import (
- DEFAULT_JSON_ENSURE_ASCII,
- DEFAULT_JSON_INDENT,
SCHEMA_KEY_ITEMS,
SCHEMA_KEY_PROPERTIES,
SITE_KEY_HEADERS,
SITE_KEY_NAME,
WMN_KEY_AUTHORS,
WMN_KEY_CATEGORIES,
+ WMN_KEY_LICENSE,
WMN_KEY_SITES,
)
from naminter.core.exceptions import WMNFormatError, WMNSchemaError
@@ -46,7 +45,8 @@ def _sort_sites_by_name(sites: Sequence[Any]) -> list[dict[str, Any]]:
f"got {type(site).__name__} at index {i}"
)
raise WMNFormatError(msg)
- site_dicts.append(dict(site))
+ site_cast: Mapping[str, Any] = cast("Mapping[str, Any]", site)
+ site_dicts.append(dict(site_cast))
return sorted(
site_dicts,
@@ -68,9 +68,10 @@ def _sort_site_headers(
f"got {type(headers).__name__}"
)
raise WMNFormatError(msg)
+ headers_cast: dict[str, Any] = cast("dict[str, Any]", headers)
result[SITE_KEY_HEADERS] = dict(
sorted(
- headers.items(),
+ headers_cast.items(),
key=lambda item: str(item[0]).casefold(),
),
)
@@ -97,12 +98,8 @@ def _reorder_site_keys(
def _dumps(obj: object, *, what: str) -> str:
"""Serialize object to JSON string with consistent error handling."""
try:
- return json.dumps(
- obj,
- indent=DEFAULT_JSON_INDENT,
- ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
- )
- except (TypeError, ValueError, RecursionError) as error:
+ return orjson.dumps(obj, option=orjson.OPT_INDENT_2).decode("utf-8")
+ except (TypeError, ValueError, RecursionError, orjson.JSONEncodeError) as error:
msg = f"{what} is not JSON-serializable: {error}"
raise WMNFormatError(msg) from error
@@ -126,16 +123,17 @@ def format_dataset(self, data: WMNDataset) -> str:
formatted_categories = self._format_string_array(data, WMN_KEY_CATEGORIES)
formatted_sites = self._format_sites(data)
- excluded_keys = {WMN_KEY_AUTHORS, WMN_KEY_CATEGORIES, WMN_KEY_SITES}
- other_keys = {
- key: value for key, value in data.items() if key not in excluded_keys
- }
+ allowed_keys = {WMN_KEY_AUTHORS, WMN_KEY_CATEGORIES, WMN_KEY_SITES, WMN_KEY_LICENSE}
+ unknown_keys = set(data.keys()) - allowed_keys
+ if unknown_keys:
+ msg = f"Unknown keys found in dataset: {sorted(unknown_keys)}"
+ raise WMNFormatError(msg)
- formatted_data = {
+ formatted_data: dict[str, Any] = {
+ WMN_KEY_LICENSE: data[WMN_KEY_LICENSE],
WMN_KEY_AUTHORS: formatted_authors,
WMN_KEY_CATEGORIES: formatted_categories,
WMN_KEY_SITES: formatted_sites,
- **other_keys,
}
return self._dumps(formatted_data, what="Data")
@@ -169,7 +167,8 @@ def _get_site_key_order(self) -> list[str]:
)
raise WMNSchemaError(msg)
- self._site_key_order = list(site_schema.keys())
+ site_schema_cast: dict[str, Any] = cast("dict[str, Any]", site_schema)
+ self._site_key_order = list(site_schema_cast.keys())
self._site_key_order_set = set(self._site_key_order)
return self._site_key_order
@@ -185,14 +184,17 @@ def _format_string_array(self, data: Mapping[str, Any], key: str) -> list[str]:
if not array_data:
msg = f"'{key}' must be a non-empty list"
raise WMNFormatError(msg)
- for item in array_data:
+
+ array_data_list: list[Any] = cast("list[Any]", array_data)
+ for item in array_data_list:
if not isinstance(item, str):
msg = f"'{key}' must contain only strings, got {type(item).__name__}"
raise WMNFormatError(msg)
if not item.strip():
msg = f"'{key}' must contain non-empty strings"
raise WMNFormatError(msg)
- return self._sort_array_alphabetically(array_data)
+ array_data_cast: list[str] = cast("list[str]", array_data)
+ return self._sort_array_alphabetically(array_data_cast)
def _format_site(
self,
@@ -210,6 +212,7 @@ def _format_sites(self, data: Mapping[str, Any]) -> list[dict[str, Any]]:
msg = f"'{WMN_KEY_SITES}' must be a list, got {type(sites).__name__}"
raise WMNFormatError(msg)
- sorted_sites = self._sort_sites_by_name(sites)
+ sites_cast: Sequence[Any] = cast("Sequence[Any]", sites)
+ sorted_sites = self._sort_sites_by_name(sites_cast)
key_order = self._get_site_key_order()
return [self._format_site(site_data, key_order) for site_data in sorted_sites]
diff --git a/naminter/core/main.py b/naminter/core/main.py
index e837eeb..a67580b 100644
--- a/naminter/core/main.py
+++ b/naminter/core/main.py
@@ -5,19 +5,13 @@
from naminter.core.constants import (
ACCOUNT_PLACEHOLDER,
- DEFAULT_JSON_ENSURE_ASCII,
- DEFAULT_JSON_INDENT,
EMPTY_STRING,
HTTP_METHOD_GET,
HTTP_METHOD_POST,
MAX_CONCURRENT_TASKS,
SITE_KEY_CATEGORY,
- SITE_KEY_E_CODE,
- SITE_KEY_E_STRING,
SITE_KEY_HEADERS,
SITE_KEY_KNOWN,
- SITE_KEY_M_CODE,
- SITE_KEY_M_STRING,
SITE_KEY_NAME,
SITE_KEY_POST_BODY,
SITE_KEY_STRIP_BAD_CHAR,
@@ -26,6 +20,7 @@
WMN_KEY_AUTHORS,
WMN_KEY_LICENSE,
WMN_KEY_SITES,
+ HttpMethod,
)
from naminter.core.exceptions import (
HttpError,
@@ -82,7 +77,7 @@ def __init__(
if self._wmn_schema:
try:
self._validator = WMNValidator(self._wmn_schema)
- except WMNSchemaError as e:
+ except WMNSchemaError:
self._logger.exception("WMN schema error during initialization")
raise
@@ -135,28 +130,27 @@ def _validate_dataset(self) -> None:
msg = "WMN data must be provided to Naminter constructor"
raise WMNUninitializedError(msg)
- validation_errors: list[WMNError] = []
+ schema_errors: list[WMNError] = []
+ dataset_errors: list[WMNError] = []
+
try:
if self._validator:
- validation_errors = self._validator.validate(self._wmn_data)
+ schema_errors = self._validator.validate_schema(self._wmn_data)
+ dataset_errors = WMNValidator.validate_dataset(self._wmn_data)
except (TypeError, ValueError, KeyError, AttributeError) as e:
self._logger.exception("Unexpected error loading WMN data")
msg = f"Unexpected error loading WMN data: {e}"
raise WMNDataError(msg) from e
- if validation_errors:
+ if schema_errors or dataset_errors:
msg = "WMN dataset validation failed"
- raise WMNValidationError(msg, errors=validation_errors)
+ raise WMNValidationError(
+ msg,
+ schema_errors=schema_errors,
+ dataset_errors=dataset_errors,
+ )
sites = self._wmn_data.get(WMN_KEY_SITES, [])
- site_errors: list[WMNError] = []
- if self._validator:
- site_errors = self._validator.validate_sites(sites)
-
- if site_errors:
- msg = f"Site validation failed for {len(site_errors)} site(s)"
- raise WMNValidationError(msg, errors=site_errors)
-
self._logger.info("Dataset loaded: %d sites", len(sites))
async def close(self) -> None:
@@ -225,19 +219,17 @@ def _filter_sites(
filtered_names: frozenset[str] | None = None
if site_names:
filtered_names = frozenset(site_names)
- available_names = frozenset(
- site.get(SITE_KEY_NAME)
- for site in sites
- if site.get(SITE_KEY_NAME) is not None
+ available_names: frozenset[str] = frozenset(
+ site[SITE_KEY_NAME] for site in sites
)
if missing_names := filtered_names - available_names:
msg = f"Unknown site names: {sorted(missing_names)}"
raise WMNUnknownSiteError(msg, site_names=sorted(missing_names))
- include_set = (
+ include_set: frozenset[str] = (
frozenset(include_categories) if include_categories else frozenset()
)
- exclude_set = (
+ exclude_set: frozenset[str] = (
frozenset(exclude_categories) if exclude_categories else frozenset()
)
@@ -248,12 +240,10 @@ def _filter_sites(
return []
if include_set or exclude_set:
- available_categories = frozenset(
- category
- for site in sites
- if (category := site.get(SITE_KEY_CATEGORY)) is not None
+ available_categories: frozenset[str] = frozenset(
+ site[SITE_KEY_CATEGORY] for site in sites
)
- requested_categories = include_set | exclude_set
+ requested_categories: frozenset[str] = include_set | exclude_set
if unknown_categories := requested_categories - available_categories:
msg = f"Unknown categories: {sorted(unknown_categories)}"
raise WMNUnknownCategoriesError(
@@ -261,7 +251,7 @@ def _filter_sites(
categories=sorted(unknown_categories),
)
- filtered_sites = [
+ filtered_sites: list[WMNSite] = [
site
for site in sites
if (filtered_names is None or site.get(SITE_KEY_NAME) in filtered_names)
@@ -295,13 +285,18 @@ def _prepare_request(
"""
clean_username = self._prepare_username(username, site)
- uri_check_template = site[SITE_KEY_URI_CHECK]
+ uri_check_template: str = site[SITE_KEY_URI_CHECK]
uri_check = uri_check_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
- uri_pretty_template = site.get(SITE_KEY_URI_PRETTY, uri_check_template)
+ uri_pretty_template_raw = site.get(SITE_KEY_URI_PRETTY)
+ uri_pretty_template: str = (
+ uri_pretty_template_raw
+ if uri_pretty_template_raw is not None
+ else uri_check_template
+ )
uri_pretty = uri_pretty_template.replace(ACCOUNT_PLACEHOLDER, clean_username)
- headers = site.get(SITE_KEY_HEADERS) or {}
+ headers: dict[str, Any] = site.get(SITE_KEY_HEADERS) or {}
post_body_template = site.get(SITE_KEY_POST_BODY)
post_body = (
@@ -369,7 +364,7 @@ async def _perform_request(
HttpError: If an HTTP error occurs.
"""
async with self._semaphore:
- method = HTTP_METHOD_POST if post_body else HTTP_METHOD_GET
+ method: HttpMethod = HTTP_METHOD_POST if post_body else HTTP_METHOD_GET
response = await self._http.request(
method=method,
url=uri_check,
@@ -439,23 +434,20 @@ def get_wmn_summary(
exclude_categories=exclude_categories,
)
- category_list = [
- category
- for site in sites
- if (category := site.get(SITE_KEY_CATEGORY)) is not None
- ]
- site_name_list = [
- name for site in sites if (name := site.get(SITE_KEY_NAME)) is not None
- ]
- known_count = sum(
- len(known)
- for site in sites
- if isinstance((known := site.get(SITE_KEY_KNOWN)), list)
+ category_list: list[str] = [site[SITE_KEY_CATEGORY] for site in sites]
+ site_name_list: list[str] = [site[SITE_KEY_NAME] for site in sites]
+ known_count: int = sum(len(site[SITE_KEY_KNOWN]) for site in sites)
+
+ license_list: list[str] = (
+ list(self._wmn_data[WMN_KEY_LICENSE]) if self._wmn_data else []
+ )
+ authors_list: list[str] = (
+ list(self._wmn_data[WMN_KEY_AUTHORS]) if self._wmn_data else []
)
summary = WMNSummary(
- license=tuple(self._wmn_data.get(WMN_KEY_LICENSE, [])),
- authors=tuple(self._wmn_data.get(WMN_KEY_AUTHORS, [])),
+ license=tuple(license_list),
+ authors=tuple(authors_list),
site_names=tuple(site_name_list),
sites_count=len(sites),
categories=tuple(category_list),
@@ -475,6 +467,8 @@ async def enumerate_site(
site: WMNSite,
username: str,
mode: WMNMode = WMNMode.ALL,
+ *,
+ exclude_text: bool = False,
) -> WMNResult:
"""Enumerate a single site for the given username.
@@ -509,14 +503,16 @@ async def enumerate_site(
WMNMode.ALL requires all configured conditions for a state to match
(strict AND logic), while WMNMode.ANY allows any matching condition
to be sufficient (looser OR logic).
+ exclude_text:
+ When True, omit response text from the returned result.
Returns:
WMNResult:
A single WMNResult instance that encapsulates the site name (from
"name"), category (from "cat"), the username that was tested, the
final URL used for reporting (may be "uri_pretty"), a high-level
- status classification (e.g. EXISTS, PARTIAL, CONFLICTING, MISSING,
- UNKNOWN, ERROR, or NOT_VALID), status_code, text, and elapsed time
+ status classification (e.g. EXISTS, PARTIAL_EXISTS, PARTIAL_MISSING,
+ CONFLICTING, MISSING, UNKNOWN, ERROR, or NOT_VALID), status_code, text, and elapsed time
(if the HTTP request completed successfully), and an error message
(if an error occurred).
@@ -591,6 +587,7 @@ async def enumerate_site(
response=response,
site=site,
mode=mode,
+ exclude_text=exclude_text,
)
self._logger.debug(
@@ -609,6 +606,8 @@ async def enumerate_usernames(
include_categories: list[str] | None = None,
exclude_categories: list[str] | None = None,
mode: WMNMode = WMNMode.ALL,
+ *,
+ exclude_text: bool = False,
) -> AsyncGenerator[WMNResult, None]:
"""Enumerate one or multiple usernames across one or multiple sites.
@@ -642,6 +641,8 @@ async def enumerate_usernames(
WMNMode.ALL uses strict evaluation where all "exists" indicators must
match, while WMNMode.ANY uses relaxed evaluation where any "exists"
indicator can match.
+ exclude_text:
+ When True, omit response text from each yielded result.
Returns:
AsyncGenerator[WMNResult, None]:
@@ -677,7 +678,12 @@ async def enumerate_usernames(
)
coroutines: list[Awaitable[WMNResult]] = [
- self.enumerate_site(site, username, mode)
+ self.enumerate_site(
+ site,
+ username,
+ mode,
+ exclude_text=exclude_text,
+ )
for site in sites
for username in usernames
]
@@ -695,6 +701,8 @@ async def enumerate_test(
include_categories: list[str] | None = None,
exclude_categories: list[str] | None = None,
mode: WMNMode = WMNMode.ALL,
+ *,
+ exclude_text: bool = False,
) -> AsyncGenerator[WMNTestResult, None]:
"""Test site detection rules using known usernames from the dataset.
@@ -715,6 +723,8 @@ async def enumerate_test(
mode:
Detection mode for each test. WMNMode.ALL uses strict evaluation,
WMNMode.ANY uses relaxed evaluation.
+ exclude_text:
+ When True, omit response text from results within each test.
Yields:
WMNTestResult for each site in completion order, containing the
@@ -743,7 +753,7 @@ async def enumerate_test(
async def test_site(site: WMNSite) -> WMNTestResult:
"""Test a single site using its known usernames."""
- known = site[SITE_KEY_KNOWN]
+ known: list[str] = site[SITE_KEY_KNOWN]
self._logger.debug(
"Testing site with %d known user(s): %s",
len(known),
@@ -751,7 +761,13 @@ async def test_site(site: WMNSite) -> WMNTestResult:
)
coroutines: list[Awaitable[WMNResult]] = [
- self.enumerate_site(site, username, mode) for username in known
+ self.enumerate_site(
+ site,
+ username,
+ mode,
+ exclude_text=exclude_text,
+ )
+ for username in known
]
try:
results: list[WMNResult] = [
diff --git a/naminter/core/models.py b/naminter/core/models.py
index 842ad21..2b99312 100644
--- a/naminter/core/models.py
+++ b/naminter/core/models.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass, field
from datetime import UTC, datetime, timedelta
from enum import StrEnum, auto
-import json
+import orjson
from typing import Any, NotRequired, TypedDict
from naminter.core.constants import (
@@ -26,13 +26,14 @@ class WMNMode(StrEnum):
class WMNStatus(StrEnum):
"""Status of username search results."""
- ERROR = auto()
- NOT_VALID = auto()
- CONFLICTING = auto()
- PARTIAL = auto()
EXISTS = auto()
MISSING = auto()
+ PARTIAL_EXISTS = auto()
+ PARTIAL_MISSING = auto()
+ CONFLICTING = auto()
UNKNOWN = auto()
+ NOT_VALID = auto()
+ ERROR = auto()
class WMNSite(TypedDict):
@@ -142,35 +143,14 @@ def from_error(
WMNResult with ERROR status.
"""
return cls(
- name=site.get("name", "unknown"),
- category=site.get("cat", "unknown"),
+ name=site.get("name", DEFAULT_UNKNOWN_VALUE),
+ category=site.get("cat", DEFAULT_UNKNOWN_VALUE),
username=username,
url=url,
status=WMNStatus.ERROR,
error=message,
)
- @staticmethod
- def _matches_any(
- status_code: int,
- text: str,
- check_code: int,
- check_string: str,
- ) -> bool:
- """Check if response matches criteria using OR logic (any match is
- sufficient)."""
- return status_code == check_code or check_string in text
-
- @staticmethod
- def _matches_all(
- status_code: int,
- text: str,
- check_code: int,
- check_string: str,
- ) -> bool:
- """Check if response matches criteria using AND logic (all must match)."""
- return status_code == check_code and check_string in text
-
@staticmethod
def _determine_status(
*,
@@ -183,19 +163,22 @@ def _determine_status(
Priority order:
1. CONFLICTING - if both exists and missing conditions are True
- 2. PARTIAL - if partial match detected (only code OR only text matched)
- 3. EXISTS - if only exists condition is True
- 4. MISSING - if only missing condition is True
- 5. UNKNOWN - if neither condition is True
+ 2. EXISTS - if exists condition is True
+ 3. MISSING - if missing condition is True
+ 4. PARTIAL_EXISTS - if only code OR text matched for exists
+ 5. PARTIAL_MISSING - if only code OR text matched for missing
+ 6. UNKNOWN - if no condition matched
"""
if condition_exists and condition_missing:
return WMNStatus.CONFLICTING
- if partial_exists or partial_missing:
- return WMNStatus.PARTIAL
if condition_exists:
return WMNStatus.EXISTS
if condition_missing:
return WMNStatus.MISSING
+ if partial_exists:
+ return WMNStatus.PARTIAL_EXISTS
+ if partial_missing:
+ return WMNStatus.PARTIAL_MISSING
return WMNStatus.UNKNOWN
@classmethod
@@ -207,6 +190,7 @@ def from_response(
response: WMNResponse,
site: WMNSite,
mode: WMNMode,
+ *, exclude_text: bool = False,
) -> WMNResult:
"""Create WMNResult from HTTP response by evaluating detection criteria.
@@ -216,6 +200,7 @@ def from_response(
response: HTTP response object.
site: Site configuration dictionary with detection criteria.
mode: Detection mode (ANY or ALL).
+ exclude_text: When True, omit response text from the result.
Returns:
WMNResult with determined status.
@@ -254,7 +239,7 @@ def from_response(
status=status,
status_code=response.status_code,
elapsed=response.elapsed,
- text=response.text,
+ text=None if exclude_text else response.text,
)
def to_dict(
@@ -330,8 +315,9 @@ def _get_result_status(self) -> WMNStatus:
2. UNKNOWN - if no results exist
3. Return the single status if all results have the same status
4. CONFLICTING - if both EXISTS and MISSING are present
- 5. PARTIAL - for other mixed statuses
- (e.g., EXISTS + UNKNOWN, PARTIAL + MISSING)
+ 5. PARTIAL_EXISTS - if PARTIAL_EXISTS is present in mixed statuses
+ 6. PARTIAL_MISSING - if PARTIAL_MISSING is present in mixed statuses
+ 7. UNKNOWN - for other mixed statuses
"""
if self.error:
return WMNStatus.ERROR
@@ -350,7 +336,13 @@ def _get_result_status(self) -> WMNStatus:
if WMNStatus.EXISTS in statuses and WMNStatus.MISSING in statuses:
return WMNStatus.CONFLICTING
- return WMNStatus.PARTIAL
+ if WMNStatus.PARTIAL_EXISTS in statuses:
+ return WMNStatus.PARTIAL_EXISTS
+
+ if WMNStatus.PARTIAL_MISSING in statuses:
+ return WMNStatus.PARTIAL_MISSING
+
+ return WMNStatus.UNKNOWN
def to_dict(
self,
@@ -386,14 +378,15 @@ class WMNResponse:
status_code: int
text: str
elapsed: timedelta
+ headers: dict[str, str] | None = None
def json(self) -> dict[str, Any] | list[Any] | str | int | float | bool | None:
"""Parse the response body as JSON and return the resulting object.
Raises:
- json.JSONDecodeError: If the response text is not valid JSON.
+ orjson.JSONDecodeError: If the response text is not valid JSON.
"""
- return json.loads(self.text)
+ return orjson.loads(self.text)
@dataclass(frozen=True, slots=True, kw_only=True)
@@ -403,3 +396,17 @@ class WMNError:
path: str
data: str | None
message: str
+
+
+__all__ = [
+ "WMN_REQUIRED_KEYS",
+ "WMNDataset",
+ "WMNError",
+ "WMNMode",
+ "WMNResponse",
+ "WMNResult",
+ "WMNSite",
+ "WMNStatus",
+ "WMNSummary",
+ "WMNTestResult",
+]
diff --git a/naminter/core/network.py b/naminter/core/network.py
index abf2282..6da4917 100644
--- a/naminter/core/network.py
+++ b/naminter/core/network.py
@@ -4,7 +4,7 @@
from typing import Any, Protocol, cast, runtime_checkable
from curl_cffi import BrowserTypeLiteral, ExtraFingerprints
-from curl_cffi.requests import AsyncSession, ProxySpec
+from curl_cffi.requests import AsyncSession, ProxySpec, Response
from curl_cffi.requests.exceptions import (
CertificateVerifyError as CurlCertificateVerifyError,
)
@@ -237,9 +237,6 @@ async def open(self) -> None:
Raises:
HttpSessionError: If session initialization fails.
"""
- if self._session is not None:
- return
-
async with self._lock:
if self._session is not None:
return
@@ -278,17 +275,21 @@ async def close(self) -> None:
Handles errors gracefully during cleanup and does not raise exceptions.
Catches session closure errors and logs them without propagating.
+ CancelledError is re-raised to allow proper cancellation handling.
"""
- if self._session is None:
- return
- try:
- await self._session.close()
- except CurlSessionClosed:
- self._logger.debug("HTTP session was already closed")
- except (OSError, RuntimeError, AttributeError) as e:
- self._logger.warning("Unexpected error closing HTTP session: %s", e)
- finally:
- self._session = None
+ async with self._lock:
+ if self._session is None:
+ return
+ try:
+ await self._session.close()
+ except asyncio.CancelledError:
+ raise
+ except CurlSessionClosed:
+ self._logger.debug("HTTP session was already closed")
+ except (OSError, RuntimeError, AttributeError) as e:
+ self._logger.warning("Unexpected error closing HTTP session: %s", e)
+ finally:
+ self._session = None
async def get(
self,
@@ -333,8 +334,12 @@ async def request(
msg = "HTTP session not initialized."
raise HttpSessionError(msg)
- method_upper = method.upper()
- if method_upper not in {HTTP_METHOD_GET, HTTP_METHOD_POST}:
+ method_upper: HttpMethod
+ if method.upper() == HTTP_METHOD_GET:
+ method_upper = HTTP_METHOD_GET
+ elif method.upper() == HTTP_METHOD_POST:
+ method_upper = HTTP_METHOD_POST
+ else:
msg = (
f"Unsupported HTTP method: {method!r}. "
f"Only {HTTP_METHOD_GET} and {HTTP_METHOD_POST} are supported."
@@ -343,20 +348,24 @@ async def request(
headers_dict: dict[str, str] | None = None
if headers is not None:
- headers_dict = dict(headers) if not isinstance(headers, dict) else headers
+ headers_dict = dict(headers)
try:
- response = await self._session.request( # type: ignore[reportUnknownMemberType]
- method=cast("HttpMethod", method.upper()),
+ response: Response = await self._session.request(
+ method=method_upper,
url=url,
headers=headers_dict,
data=data,
)
+ response_headers: dict[str, str] = {
+ key: value for key, value in response.headers.items() if value is not None
+ }
return WMNResponse(
status_code=response.status_code,
text=response.text,
elapsed=response.elapsed,
+ headers=response_headers,
)
except CurlTimeout as e:
msg = f"{method_upper} timeout for {url}"
@@ -366,8 +375,9 @@ async def request(
raise HttpSessionError(msg, cause=e) from e
except CurlHTTPError as e:
status_code: int | None = None
- if hasattr(e, "response") and e.response is not None:
- status_code = getattr(e.response, "status_code", None)
+ err_response: Response | None = getattr(e, "response", None)
+ if err_response is not None:
+ status_code = err_response.status_code
msg = f"{method_upper} returned error status for {url}"
raise HttpStatusError(msg, status_code=status_code, url=url, cause=e) from e
except (
diff --git a/naminter/core/utils.py b/naminter/core/utils.py
index f329430..e8beda9 100644
--- a/naminter/core/utils.py
+++ b/naminter/core/utils.py
@@ -1,24 +1,10 @@
import asyncio
from collections.abc import AsyncGenerator, Awaitable, Sequence
-from typing import Any, TypeVar
+from typing import TypeVar
T = TypeVar("T")
-def get_missing_keys(data: dict[str, Any], keys: Sequence[str]) -> list[str]:
- """Return a list of required keys missing from a dictionary.
-
- Args:
- data: Dictionary to check for missing keys.
- keys: Sequence of keys that should be present.
-
- Returns:
- List of keys that are missing from the dictionary. Empty list if
- all keys are present.
- """
- return [key for key in keys if key not in data]
-
-
async def execute_tasks(
awaitables: Sequence[Awaitable[T]],
) -> AsyncGenerator[T, None]:
diff --git a/naminter/core/validator.py b/naminter/core/validator.py
index 70dd21d..d05beae 100644
--- a/naminter/core/validator.py
+++ b/naminter/core/validator.py
@@ -1,16 +1,18 @@
-import contextlib
from collections import defaultdict
-from collections.abc import Mapping, Sequence
-import json
+from collections.abc import Mapping
+import orjson
import logging
-from typing import Any
+from typing import Any, cast
from jsonschema.exceptions import SchemaError as JsonSchemaError
+from jsonschema.exceptions import ValidationError
+from jsonschema.protocols import Validator
from jsonschema.validators import validator_for
from naminter.core.constants import (
- DEFAULT_JSON_ENSURE_ASCII,
- DEFAULT_JSON_INDENT,
+ ACCOUNT_PLACEHOLDER,
+ HTTP_STATUS_CODE_MAX,
+ HTTP_STATUS_CODE_MIN,
SITE_KEY_E_CODE,
SITE_KEY_E_STRING,
SITE_KEY_HEADERS,
@@ -18,11 +20,16 @@
SITE_KEY_M_CODE,
SITE_KEY_M_STRING,
SITE_KEY_NAME,
+ SITE_KEY_POST_BODY,
+ SITE_KEY_STRIP_BAD_CHAR,
SITE_KEY_URI_CHECK,
+ WMN_KEY_AUTHORS,
+ WMN_KEY_CATEGORIES,
+ WMN_KEY_LICENSE,
WMN_KEY_SITES,
)
from naminter.core.exceptions import WMNSchemaError
-from naminter.core.models import WMN_REQUIRED_KEYS, WMNDataset, WMNError, WMNSite
+from naminter.core.models import WMN_REQUIRED_KEYS, WMNDataset, WMNError
logger = logging.getLogger(__name__)
@@ -30,6 +37,8 @@
class WMNValidator:
"""Validates WMN dataset against JSON Schema."""
+ __slots__ = ("schema", "validator")
+
def __init__(self, schema: Mapping[str, Any]) -> None:
"""Initialize validator with schema.
@@ -46,7 +55,7 @@ def __init__(self, schema: Mapping[str, Any]) -> None:
try:
validator_cls = validator_for(self.schema)
validator_cls.check_schema(self.schema)
- self.validator = validator_cls(self.schema)
+ self.validator: Validator = validator_cls(self.schema)
except JsonSchemaError as e:
msg = f"Invalid JSON schema: {e}"
raise WMNSchemaError(msg) from e
@@ -54,21 +63,7 @@ def __init__(self, schema: Mapping[str, Any]) -> None:
msg = f"Failed to initialize JSON schema validator: {e}"
raise WMNSchemaError(msg) from e
- def validate(self, data: WMNDataset) -> list[WMNError]:
- """Validate dataset and return list of errors.
-
- Args:
- data: WMN dataset to validate. This will not be modified.
-
- Returns:
- List of validation errors. Empty list if validation passes.
- """
- errors: list[WMNError] = []
- errors.extend(self._validate_schema(data))
- errors.extend(WMNValidator._validate_duplicates(data))
- return errors
-
- def _validate_schema(self, data: WMNDataset) -> list[WMNError]:
+ def validate_schema(self, data: WMNDataset) -> list[WMNError]:
"""Validate dataset against JSON schema and return errors.
Args:
@@ -78,20 +73,42 @@ def _validate_schema(self, data: WMNDataset) -> list[WMNError]:
List of schema validation errors.
"""
errors: list[WMNError] = []
- for error in sorted(
- self.validator.iter_errors(data),
- key=lambda err: list(err.absolute_path),
- ):
- data_preview = WMNValidator._preview(error.instance)
+ data_dict = cast("dict[str, Any]", dict(data))
+ for error in self.validator.iter_errors(data_dict):
+ validation_error: ValidationError = error
+ data_preview = WMNValidator._preview(validation_error.instance)
errors.append(
WMNError(
- path=error.json_path,
+ path=str(validation_error.json_path),
data=data_preview,
- message=error.message,
+ message=str(validation_error.message),
),
)
return errors
+ @staticmethod
+ def validate_dataset(data: WMNDataset) -> list[WMNError]:
+ """Validate dataset fields with custom rules and return list of errors.
+
+ Performs code-based validation for license, authors, categories,
+ duplicate site names, and individual site configurations.
+ Does not use JSON schema because the external schema may not cover
+ all validation rules and does not guarantee reliable validation.
+
+ Args:
+ data: WMN dataset to validate. This will not be modified.
+
+ Returns:
+ List of validation errors. Empty list if validation passes.
+ """
+ errors: list[WMNError] = []
+ errors.extend(WMNValidator._validate_license(data))
+ errors.extend(WMNValidator._validate_authors(data))
+ errors.extend(WMNValidator._validate_categories(data))
+ errors.extend(WMNValidator._validate_duplicates(data))
+ errors.extend(WMNValidator._validate_sites(data))
+ return errors
+
@staticmethod
def _preview(value: object) -> str | None:
"""Generate JSON preview of a value.
@@ -103,12 +120,8 @@ def _preview(value: object) -> str | None:
JSON string preview or None if generation fails.
"""
try:
- return json.dumps(
- value,
- ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
- indent=DEFAULT_JSON_INDENT,
- )
- except (TypeError, ValueError) as e:
+ return orjson.dumps(value, option=orjson.OPT_INDENT_2).decode("utf-8")
+ except (TypeError, ValueError, orjson.JSONEncodeError) as e:
logger.debug(
"Failed to generate data preview: %s",
e,
@@ -116,20 +129,6 @@ def _preview(value: object) -> str | None:
)
return None
- @staticmethod
- def _get_missing_keys(data: dict[str, Any], keys: Sequence[str]) -> list[str]:
- """Return a list of required keys missing from a dictionary.
-
- Args:
- data: Dictionary to check for missing keys.
- keys: Sequence of keys that should be present.
-
- Returns:
- List of keys that are missing from the dictionary. Empty list if
- all keys are present.
- """
- return [key for key in keys if key not in data]
-
@staticmethod
def _validate_duplicates(data: WMNDataset) -> list[WMNError]:
"""Validate that site names are unique and return errors if duplicates found.
@@ -140,19 +139,18 @@ def _validate_duplicates(data: WMNDataset) -> list[WMNError]:
Returns:
List of duplicate site errors.
"""
- sites_data = data.get(WMN_KEY_SITES, [])
+ sites_data: Any = data.get(WMN_KEY_SITES, [])
if not isinstance(sites_data, list):
return []
+ sites_data_list: list[Any] = cast("list[Any]", sites_data)
name_indices: dict[str, list[int]] = defaultdict(list)
- for index, site in enumerate(sites_data):
+ for index, site in enumerate(sites_data_list):
if not isinstance(site, dict):
continue
- raw = site.get(SITE_KEY_NAME)
- if not isinstance(raw, str):
- continue
- name = raw.strip()
- if not name:
+ site_dict: dict[str, Any] = cast("dict[str, Any]", site)
+ name: Any = site_dict.get(SITE_KEY_NAME)
+ if not isinstance(name, str) or not name:
continue
name_indices[name].append(index)
@@ -161,7 +159,7 @@ def _validate_duplicates(data: WMNDataset) -> list[WMNError]:
if len(indices) > 1:
for index in indices:
path_string = f"$.{WMN_KEY_SITES}[{index}].{SITE_KEY_NAME}"
- data_preview = WMNValidator._preview(sites_data[index])
+ data_preview = WMNValidator._preview(sites_data_list[index])
errors.append(
WMNError(
@@ -175,59 +173,238 @@ def _validate_duplicates(data: WMNDataset) -> list[WMNError]:
)
return errors
- def validate_sites(self, sites: list[WMNSite]) -> list[WMNError]:
+ @staticmethod
+ def _validate_license(data: WMNDataset) -> list[WMNError]:
+ """Validate license field.
+
+ Args:
+ data: WMN dataset to check.
+
+ Returns:
+ List of license validation errors.
+ """
+ errors: list[WMNError] = []
+ license_data: Any = data.get(WMN_KEY_LICENSE)
+
+ if not isinstance(license_data, list):
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_LICENSE}",
+ data=WMNValidator._preview(license_data),
+ message=f"Invalid {WMN_KEY_LICENSE}: must be array, got {type(license_data).__name__}",
+ ),
+ )
+
+ return errors
+
+ @staticmethod
+ def _validate_authors(data: WMNDataset) -> list[WMNError]:
+ """Validate authors field.
+
+ Args:
+ data: WMN dataset to check.
+
+ Returns:
+ List of authors validation errors.
+ """
+ errors: list[WMNError] = []
+ authors_data: Any = data.get(WMN_KEY_AUTHORS)
+
+ if not isinstance(authors_data, list):
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_AUTHORS}",
+ data=WMNValidator._preview(authors_data),
+ message=f"Invalid {WMN_KEY_AUTHORS}: must be array, got {type(authors_data).__name__}",
+ ),
+ )
+ else:
+ authors_list: list[Any] = cast("list[Any]", authors_data)
+ if not authors_list:
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_AUTHORS}",
+ data=WMNValidator._preview(authors_list),
+ message=f"Invalid {WMN_KEY_AUTHORS}: must have at least 1 item",
+ ),
+ )
+
+ authors_set: set[str] = set()
+ for index, author in enumerate(authors_list):
+ if not isinstance(author, str):
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_AUTHORS}[{index}]",
+ data=WMNValidator._preview(author),
+ message=f"Invalid {WMN_KEY_AUTHORS} item at index {index}: must be string, got {type(author).__name__}",
+ ),
+ )
+ elif not author.strip():
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_AUTHORS}[{index}]",
+ data=WMNValidator._preview(author),
+ message=f"Invalid {WMN_KEY_AUTHORS} item at index {index}: must be non-empty string",
+ ),
+ )
+ else:
+ if author in authors_set:
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_AUTHORS}[{index}]",
+ data=WMNValidator._preview(author),
+ message=f"Duplicate {WMN_KEY_AUTHORS} item: '{author}'",
+ ),
+ )
+ authors_set.add(author)
+
+ return errors
+
+ @staticmethod
+ def _validate_categories(data: WMNDataset) -> list[WMNError]:
+ """Validate categories field.
+
+ Args:
+ data: WMN dataset to check.
+
+ Returns:
+ List of categories validation errors.
+ """
+ errors: list[WMNError] = []
+ categories_data: Any = data.get(WMN_KEY_CATEGORIES)
+
+ if not isinstance(categories_data, list):
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_CATEGORIES}",
+ data=WMNValidator._preview(categories_data),
+ message=f"Invalid {WMN_KEY_CATEGORIES}: must be array, got {type(categories_data).__name__}",
+ ),
+ )
+ else:
+ categories_list: list[Any] = cast("list[Any]", categories_data)
+ if not categories_list:
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_CATEGORIES}",
+ data=WMNValidator._preview(categories_list),
+ message=f"Invalid {WMN_KEY_CATEGORIES}: must have at least 1 item",
+ ),
+ )
+
+ categories_set: set[str] = set()
+ for index, category in enumerate(categories_list):
+ if not isinstance(category, str):
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_CATEGORIES}[{index}]",
+ data=WMNValidator._preview(category),
+ message=f"Invalid {WMN_KEY_CATEGORIES} item at index {index}: must be string, got {type(category).__name__}",
+ ),
+ )
+ elif not category.strip():
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_CATEGORIES}[{index}]",
+ data=WMNValidator._preview(category),
+ message=f"Invalid {WMN_KEY_CATEGORIES} item at index {index}: must be non-empty string",
+ ),
+ )
+ else:
+ if category in categories_set:
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_CATEGORIES}[{index}]",
+ data=WMNValidator._preview(category),
+ message=f"Duplicate {WMN_KEY_CATEGORIES} item: '{category}'",
+ ),
+ )
+ categories_set.add(category)
+
+ return errors
+
+ @staticmethod
+ def _validate_sites(data: WMNDataset) -> list[WMNError]:
"""Validate all site configurations.
Args:
- sites: List of site configurations to validate.
+ data: WMN dataset containing sites to validate.
Returns:
List of validation errors. Empty if all sites are valid.
"""
errors: list[WMNError] = []
+ sites_data_raw: Any = data.get(WMN_KEY_SITES, [])
+
+ if not isinstance(sites_data_raw, list):
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_SITES}",
+ data=WMNValidator._preview(sites_data_raw),
+ message=f"Invalid {WMN_KEY_SITES}: must be array, got {type(sites_data_raw).__name__}",
+ ),
+ )
+ return errors
+
+ sites_data: list[Any] = cast("list[Any]", sites_data_raw)
+ for index, site in enumerate(sites_data):
+ if not isinstance(site, dict):
+ errors.append(
+ WMNError(
+ path=f"$.{WMN_KEY_SITES}[{index}]",
+ data=WMNValidator._preview(site),
+ message=f"Invalid site at index {index}: must be object, got {type(site).__name__}",
+ ),
+ )
+ continue
- for index, site in enumerate(sites):
+ site_dict: dict[str, Any] = cast("dict[str, Any]", site)
base_path = f"$.{WMN_KEY_SITES}[{index}]"
- site_name = site.get(SITE_KEY_NAME, "unknown")
site_errors: list[WMNError] = []
def _create_error(
path_suffix: str,
message: str,
data: object | None = None,
- *,
- _base_path: str = base_path,
- _site_errors: list[WMNError] = site_errors,
) -> None:
"""Helper to create WMNError with path and data."""
- path = (
- f"{_base_path}.{path_suffix}" if path_suffix else _base_path
- )
- data_preview = None
- if data is not None:
- with contextlib.suppress(TypeError, ValueError):
- data_preview = json.dumps(
- data,
- ensure_ascii=DEFAULT_JSON_ENSURE_ASCII,
- indent=DEFAULT_JSON_INDENT,
- )
+ path = f"{base_path}.{path_suffix}" if path_suffix else base_path
+ data_preview = WMNValidator._preview(data)
error = WMNError(path=path, data=data_preview, message=message)
- _site_errors.append(error)
- errors.append(error)
+ site_errors.append(error)
+
+ # Validate site name first
+ site_name_raw: Any = site_dict.get(SITE_KEY_NAME)
+ if not isinstance(site_name_raw, str):
+ _create_error(
+ SITE_KEY_NAME,
+ f"Invalid {SITE_KEY_NAME}: must be string, got {type(site_name_raw).__name__}",
+ site_name_raw,
+ )
+ site_name = "unknown"
+ elif not site_name_raw.strip():
+ _create_error(
+ SITE_KEY_NAME,
+ f"Invalid {SITE_KEY_NAME}: must be non-empty string",
+ site_name_raw,
+ )
+ site_name = "unknown"
+ else:
+ site_name = site_name_raw
- missing_keys = WMNValidator._get_missing_keys(site, WMN_REQUIRED_KEYS)
+ missing_keys = [key for key in WMN_REQUIRED_KEYS if key not in site_dict]
if missing_keys:
- _create_error("", f"Missing required keys: {missing_keys}", site)
- if site_errors:
- error_messages = [error.message for error in site_errors]
- logger.warning(
- "Invalid site %s: %s",
- site_name,
- "; ".join(error_messages),
- )
+ _create_error("", f"Missing required keys: {missing_keys}", site_dict)
+ error_messages = [error.message for error in site_errors]
+ logger.warning(
+ "Invalid site %s: %s",
+ site_name,
+ "; ".join(error_messages),
+ )
+ errors.extend(site_errors)
continue
- uri_check = site[SITE_KEY_URI_CHECK]
+ uri_check: Any = site_dict.get(SITE_KEY_URI_CHECK)
if not isinstance(uri_check, str) or not uri_check:
_create_error(
SITE_KEY_URI_CHECK,
@@ -235,41 +412,123 @@ def _create_error(
uri_check,
)
- for key in (SITE_KEY_E_CODE, SITE_KEY_M_CODE):
- value = site[key]
- if not isinstance(value, int):
+ post_body: Any = site_dict.get(SITE_KEY_POST_BODY)
+ if post_body is not None:
+ if not isinstance(post_body, str):
_create_error(
- key,
- f"Invalid {key}: must be integer, got {type(value).__name__}",
- value,
+ SITE_KEY_POST_BODY,
+ f"Invalid {SITE_KEY_POST_BODY}: must be string or None, "
+ f"got {type(post_body).__name__}",
+ post_body,
)
-
- for key in (SITE_KEY_E_STRING, SITE_KEY_M_STRING):
- value = site[key]
- if not isinstance(value, str):
+ elif post_body:
+ if ACCOUNT_PLACEHOLDER not in post_body:
+ _create_error(
+ SITE_KEY_POST_BODY,
+ f"Invalid {SITE_KEY_POST_BODY}: must contain '{ACCOUNT_PLACEHOLDER}'",
+ post_body,
+ )
+ if site_dict.get(SITE_KEY_HEADERS) is None:
+ _create_error(
+ SITE_KEY_POST_BODY,
+ f"Invalid {SITE_KEY_POST_BODY}: when {SITE_KEY_POST_BODY} is provided, "
+ f"{SITE_KEY_HEADERS} is required",
+ post_body,
+ )
+ else:
+ if isinstance(uri_check, str) and ACCOUNT_PLACEHOLDER not in uri_check:
_create_error(
- key,
- f"Invalid {key}: must be string, got {type(value).__name__}",
- value,
+ SITE_KEY_URI_CHECK,
+ f"Invalid {SITE_KEY_URI_CHECK}: must contain '{ACCOUNT_PLACEHOLDER}' "
+ f"when {SITE_KEY_POST_BODY} is not provided",
+ uri_check,
)
- if SITE_KEY_HEADERS in site:
- headers = site[SITE_KEY_HEADERS]
- if headers is not None and not isinstance(headers, dict):
+ headers: Any = site_dict.get(SITE_KEY_HEADERS)
+ if headers is not None:
+ if not isinstance(headers, dict):
_create_error(
SITE_KEY_HEADERS,
f"Invalid {SITE_KEY_HEADERS}: must be dict or None, "
f"got {type(headers).__name__}",
headers,
)
+ else:
+ headers_dict: dict[Any, Any] = cast("dict[Any, Any]", headers)
+ for header_key, header_value in headers_dict.items():
+ if not isinstance(header_key, str):
+ _create_error(
+                            f"{SITE_KEY_HEADERS}[{header_key}]",
+ f"Invalid {SITE_KEY_HEADERS} key: must be string, "
+ f"got {type(header_key).__name__}",
+ header_key,
+ )
+ if not isinstance(header_value, str):
+ _create_error(
+ f"{SITE_KEY_HEADERS}[{header_key}]",
+ f"Invalid {SITE_KEY_HEADERS} value for key '{header_key}': "
+ f"must be string, got {type(header_value).__name__}",
+ header_value,
+ )
+
+ strip_bad_char: Any = site_dict.get(SITE_KEY_STRIP_BAD_CHAR)
+ if strip_bad_char is not None and not isinstance(strip_bad_char, str):
+ _create_error(
+ SITE_KEY_STRIP_BAD_CHAR,
+ f"Invalid {SITE_KEY_STRIP_BAD_CHAR}: must be string or None, "
+ f"got {type(strip_bad_char).__name__}",
+ strip_bad_char,
+ )
- known = site[SITE_KEY_KNOWN]
+ for key in (SITE_KEY_E_CODE, SITE_KEY_M_CODE):
+ code_value: Any = site_dict.get(key)
+ if not isinstance(code_value, int):
+ _create_error(
+ key,
+ f"Invalid {key}: must be integer, got {type(code_value).__name__}",
+ code_value,
+ )
+ elif not (HTTP_STATUS_CODE_MIN <= code_value <= HTTP_STATUS_CODE_MAX):
+ _create_error(
+ key,
+ f"Invalid {key}: must be valid HTTP status code "
+ f"({HTTP_STATUS_CODE_MIN}-{HTTP_STATUS_CODE_MAX}), got {code_value}",
+ code_value,
+ )
+
+ for key in (SITE_KEY_E_STRING, SITE_KEY_M_STRING):
+ string_value: Any = site_dict.get(key)
+ if not isinstance(string_value, str):
+ _create_error(
+ key,
+ f"Invalid {key}: must be string, got {type(string_value).__name__}",
+ string_value,
+ )
+
+ known: Any = site_dict.get(SITE_KEY_KNOWN)
if not isinstance(known, list):
msg = (
f"Invalid {SITE_KEY_KNOWN}: must be list, "
f"got {type(known).__name__}"
)
_create_error(SITE_KEY_KNOWN, msg, known)
+ else:
+ known_list: list[Any] = cast("list[Any]", known)
+ for known_index, item in enumerate(known_list):
+ if not isinstance(item, str):
+ _create_error(
+ f"{SITE_KEY_KNOWN}[{known_index}]",
+ f"Invalid {SITE_KEY_KNOWN} item at index {known_index}: "
+ f"must be string, got {type(item).__name__}",
+ item,
+ )
+ elif not item.strip():
+ _create_error(
+ f"{SITE_KEY_KNOWN}[{known_index}]",
+ f"Invalid {SITE_KEY_KNOWN} item at index {known_index}: "
+ f"must be non-empty string",
+ item,
+ )
if site_errors:
error_messages = [error.message for error in site_errors]
@@ -278,5 +537,6 @@ def _create_error(
site_name,
"; ".join(error_messages),
)
+ errors.extend(site_errors)
return errors
diff --git a/pyproject.toml b/pyproject.toml
index b8ba17a..75a148e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -33,22 +33,40 @@ keywords = [
"naminter"
]
dependencies = [
- "click>=8.3.1",
"curl-cffi>=0.14.0",
- "aiofiles>=25.1.0",
- "jinja2>=3.1.6",
"jsonschema>=4.26.0",
+ "orjson>=3.11.5",
+ "click>=8.3.1",
"rich>=14.2.0",
- "rich-click>=1.9.5",
- "weasyprint>=67.0",
+ "rich-click>=1.9.6",
+ "aiofiles>=25.1.0",
+ "jinja2>=3.1.6",
+ "weasyprint>=68.0",
+ "pathvalidate>=3.3.1",
+ "uvloop>=0.22.1",
]
[project.optional-dependencies]
+core = [
+ "curl-cffi>=0.14.0",
+ "jsonschema>=4.26.0",
+ "orjson>=3.11.5",
+]
+cli = [
+ "click>=8.3.1",
+ "rich>=14.2.0",
+ "rich-click>=1.9.6",
+ "aiofiles>=25.1.0",
+ "jinja2>=3.1.6",
+ "weasyprint>=68.0",
+ "pathvalidate>=3.3.1",
+ "uvloop>=0.22.1",
+]
dev = [
- "ruff>=0.14.11",
+ "ruff>=0.14.14",
"mkdocs>=1.6.1",
"mkdocs-material>=9.7.1",
- "mkdocstrings[python]>=1.0.0",
+ "mkdocstrings[python]>=1.0.1",
]
[project.urls]
@@ -146,6 +164,9 @@ select = [
'PERF', # perflint
'NPY', # numpy
]
+ignore = [
+ 'COM812', # conflicts with formatter
+]
[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"] # unused imports
@@ -177,3 +198,7 @@ skip-magic-trailing-comma = false
line-ending = "auto"
docstring-code-format = true
docstring-code-line-length = 79
+
+[tool.pyright]
+typeCheckingMode = "strict"
+reportUnknownMemberType = "none"
diff --git a/uv.lock b/uv.lock
index 06ad3a6..41e40f3 100644
--- a/uv.lock
+++ b/uv.lock
@@ -649,7 +649,7 @@ wheels = [
[[package]]
name = "mkdocstrings"
-version = "1.0.0"
+version = "1.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jinja2" },
@@ -659,9 +659,9 @@ dependencies = [
{ name = "mkdocs-autorefs" },
{ name = "pymdown-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/e5/13/10bbf9d56565fd91b91e6f5a8cd9b9d8a2b101c4e8ad6eeafa35a706301d/mkdocstrings-1.0.0.tar.gz", hash = "sha256:351a006dbb27aefce241ade110d3cd040c1145b7a3eb5fd5ac23f03ed67f401a", size = 101086, upload-time = "2025-11-27T15:39:40.534Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/bd/ec/680e3bc7c88704d3fb9c658a517ec10f2f2aed3b9340136978675e581688/mkdocstrings-1.0.1.tar.gz", hash = "sha256:caa7d311c85ac0a0674831725ecfdeee4348e3b8a2c91ab193ee319a41dbeb3d", size = 100794, upload-time = "2026-01-19T11:36:24.429Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ec/fc/80aa31b79133634721cf7855d37b76ea49773599214896f2ff10be03de2a/mkdocstrings-1.0.0-py3-none-any.whl", hash = "sha256:4c50eb960bff6e05dfc631f6bc00dfabffbcb29c5ff25f676d64daae05ed82fa", size = 35135, upload-time = "2025-11-27T15:39:39.301Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/f9/ecd3e5cf258d63eddc13e354bd090df3aa458b64be50d737d52a8ad9df22/mkdocstrings-1.0.1-py3-none-any.whl", hash = "sha256:10deb908e310e6d427a5b8f69026361dac06b77de860f46043043e26f121db02", size = 35245, upload-time = "2026-01-19T11:36:23.067Z" },
]
[package.optional-dependencies]
@@ -693,12 +693,30 @@ dependencies = [
{ name = "curl-cffi" },
{ name = "jinja2" },
{ name = "jsonschema" },
+ { name = "orjson" },
+ { name = "pathvalidate" },
{ name = "rich" },
{ name = "rich-click" },
+ { name = "uvloop" },
{ name = "weasyprint" },
]
[package.optional-dependencies]
+cli = [
+ { name = "aiofiles" },
+ { name = "click" },
+ { name = "jinja2" },
+ { name = "pathvalidate" },
+ { name = "rich" },
+ { name = "rich-click" },
+ { name = "uvloop" },
+ { name = "weasyprint" },
+]
+core = [
+ { name = "curl-cffi" },
+ { name = "jsonschema" },
+ { name = "orjson" },
+]
dev = [
{ name = "mkdocs" },
{ name = "mkdocs-material" },
@@ -709,19 +727,101 @@ dev = [
[package.metadata]
requires-dist = [
{ name = "aiofiles", specifier = ">=25.1.0" },
+ { name = "aiofiles", marker = "extra == 'cli'", specifier = ">=25.1.0" },
{ name = "click", specifier = ">=8.3.1" },
+ { name = "click", marker = "extra == 'cli'", specifier = ">=8.3.1" },
{ name = "curl-cffi", specifier = ">=0.14.0" },
+ { name = "curl-cffi", marker = "extra == 'core'", specifier = ">=0.14.0" },
{ name = "jinja2", specifier = ">=3.1.6" },
+ { name = "jinja2", marker = "extra == 'cli'", specifier = ">=3.1.6" },
{ name = "jsonschema", specifier = ">=4.26.0" },
+ { name = "jsonschema", marker = "extra == 'core'", specifier = ">=4.26.0" },
{ name = "mkdocs", marker = "extra == 'dev'", specifier = ">=1.6.1" },
{ name = "mkdocs-material", marker = "extra == 'dev'", specifier = ">=9.7.1" },
- { name = "mkdocstrings", extras = ["python"], marker = "extra == 'dev'", specifier = ">=1.0.0" },
+ { name = "mkdocstrings", extras = ["python"], marker = "extra == 'dev'", specifier = ">=1.0.1" },
+ { name = "orjson", specifier = ">=3.11.5" },
+ { name = "orjson", marker = "extra == 'core'", specifier = ">=3.11.5" },
+ { name = "pathvalidate", specifier = ">=3.3.1" },
+ { name = "pathvalidate", marker = "extra == 'cli'", specifier = ">=3.3.1" },
{ name = "rich", specifier = ">=14.2.0" },
- { name = "rich-click", specifier = ">=1.9.5" },
- { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.14.11" },
- { name = "weasyprint", specifier = ">=67.0" },
+ { name = "rich", marker = "extra == 'cli'", specifier = ">=14.2.0" },
+ { name = "rich-click", specifier = ">=1.9.6" },
+ { name = "rich-click", marker = "extra == 'cli'", specifier = ">=1.9.6" },
+ { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.14.14" },
+ { name = "uvloop", specifier = ">=0.22.1" },
+ { name = "uvloop", marker = "extra == 'cli'", specifier = ">=0.22.1" },
+ { name = "weasyprint", specifier = ">=68.0" },
+ { name = "weasyprint", marker = "extra == 'cli'", specifier = ">=68.0" },
+]
+provides-extras = ["core", "cli", "dev"]
+
+[[package]]
+name = "orjson"
+version = "3.11.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/04/b8/333fdb27840f3bf04022d21b654a35f58e15407183aeb16f3b41aa053446/orjson-3.11.5.tar.gz", hash = "sha256:82393ab47b4fe44ffd0a7659fa9cfaacc717eb617c93cde83795f14af5c2e9d5", size = 5972347, upload-time = "2025-12-06T15:55:39.458Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fd/68/6b3659daec3a81aed5ab47700adb1a577c76a5452d35b91c88efee89987f/orjson-3.11.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9c8494625ad60a923af6b2b0bd74107146efe9b55099e20d7740d995f338fcd8", size = 245318, upload-time = "2025-12-06T15:54:02.355Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/00/92db122261425f61803ccf0830699ea5567439d966cbc35856fe711bfe6b/orjson-3.11.5-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:7bb2ce0b82bc9fd1168a513ddae7a857994b780b2945a8c51db4ab1c4b751ebc", size = 129491, upload-time = "2025-12-06T15:54:03.877Z" },
+ { url = "https://files.pythonhosted.org/packages/94/4f/ffdcb18356518809d944e1e1f77589845c278a1ebbb5a8297dfefcc4b4cb/orjson-3.11.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67394d3becd50b954c4ecd24ac90b5051ee7c903d167459f93e77fc6f5b4c968", size = 132167, upload-time = "2025-12-06T15:54:04.944Z" },
+ { url = "https://files.pythonhosted.org/packages/97/c6/0a8caff96f4503f4f7dd44e40e90f4d14acf80d3b7a97cb88747bb712d3e/orjson-3.11.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:298d2451f375e5f17b897794bcc3e7b821c0f32b4788b9bcae47ada24d7f3cf7", size = 130516, upload-time = "2025-12-06T15:54:06.274Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/63/43d4dc9bd9954bff7052f700fdb501067f6fb134a003ddcea2a0bb3854ed/orjson-3.11.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa5e4244063db8e1d87e0f54c3f7522f14b2dc937e65d5241ef0076a096409fd", size = 135695, upload-time = "2025-12-06T15:54:07.702Z" },
+ { url = "https://files.pythonhosted.org/packages/87/6f/27e2e76d110919cb7fcb72b26166ee676480a701bcf8fc53ac5d0edce32f/orjson-3.11.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1db2088b490761976c1b2e956d5d4e6409f3732e9d79cfa69f876c5248d1baf9", size = 139664, upload-time = "2025-12-06T15:54:08.828Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/f8/5966153a5f1be49b5fbb8ca619a529fde7bc71aa0a376f2bb83fed248bcd/orjson-3.11.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2ed66358f32c24e10ceea518e16eb3549e34f33a9d51f99ce23b0251776a1ef", size = 137289, upload-time = "2025-12-06T15:54:09.898Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/34/8acb12ff0299385c8bbcbb19fbe40030f23f15a6de57a9c587ebf71483fb/orjson-3.11.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2021afda46c1ed64d74b555065dbd4c2558d510d8cec5ea6a53001b3e5e82a9", size = 138784, upload-time = "2025-12-06T15:54:11.022Z" },
+ { url = "https://files.pythonhosted.org/packages/ee/27/910421ea6e34a527f73d8f4ee7bdffa48357ff79c7b8d6eb6f7b82dd1176/orjson-3.11.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b42ffbed9128e547a1647a3e50bc88ab28ae9daa61713962e0d3dd35e820c125", size = 141322, upload-time = "2025-12-06T15:54:12.427Z" },
+ { url = "https://files.pythonhosted.org/packages/87/a3/4b703edd1a05555d4bb1753d6ce44e1a05b7a6d7c164d5b332c795c63d70/orjson-3.11.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8d5f16195bb671a5dd3d1dbea758918bada8f6cc27de72bd64adfbd748770814", size = 413612, upload-time = "2025-12-06T15:54:13.858Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/36/034177f11d7eeea16d3d2c42a1883b0373978e08bc9dad387f5074c786d8/orjson-3.11.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c0e5d9f7a0227df2927d343a6e3859bebf9208b427c79bd31949abcc2fa32fa5", size = 150993, upload-time = "2025-12-06T15:54:15.189Z" },
+ { url = "https://files.pythonhosted.org/packages/44/2f/ea8b24ee046a50a7d141c0227c4496b1180b215e728e3b640684f0ea448d/orjson-3.11.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23d04c4543e78f724c4dfe656b3791b5f98e4c9253e13b2636f1af5d90e4a880", size = 141774, upload-time = "2025-12-06T15:54:16.451Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/12/cc440554bf8200eb23348a5744a575a342497b65261cd65ef3b28332510a/orjson-3.11.5-cp311-cp311-win32.whl", hash = "sha256:c404603df4865f8e0afe981aa3c4b62b406e6d06049564d58934860b62b7f91d", size = 135109, upload-time = "2025-12-06T15:54:17.73Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/83/e0c5aa06ba73a6760134b169f11fb970caa1525fa4461f94d76e692299d9/orjson-3.11.5-cp311-cp311-win_amd64.whl", hash = "sha256:9645ef655735a74da4990c24ffbd6894828fbfa117bc97c1edd98c282ecb52e1", size = 133193, upload-time = "2025-12-06T15:54:19.426Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/35/5b77eaebc60d735e832c5b1a20b155667645d123f09d471db0a78280fb49/orjson-3.11.5-cp311-cp311-win_arm64.whl", hash = "sha256:1cbf2735722623fcdee8e712cbaaab9e372bbcb0c7924ad711b261c2eccf4a5c", size = 126830, upload-time = "2025-12-06T15:54:20.836Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/a4/8052a029029b096a78955eadd68ab594ce2197e24ec50e6b6d2ab3f4e33b/orjson-3.11.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:334e5b4bff9ad101237c2d799d9fd45737752929753bf4faf4b207335a416b7d", size = 245347, upload-time = "2025-12-06T15:54:22.061Z" },
+ { url = "https://files.pythonhosted.org/packages/64/67/574a7732bd9d9d79ac620c8790b4cfe0717a3d5a6eb2b539e6e8995e24a0/orjson-3.11.5-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:ff770589960a86eae279f5d8aa536196ebda8273a2a07db2a54e82b93bc86626", size = 129435, upload-time = "2025-12-06T15:54:23.615Z" },
+ { url = "https://files.pythonhosted.org/packages/52/8d/544e77d7a29d90cf4d9eecd0ae801c688e7f3d1adfa2ebae5e1e94d38ab9/orjson-3.11.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed24250e55efbcb0b35bed7caaec8cedf858ab2f9f2201f17b8938c618c8ca6f", size = 132074, upload-time = "2025-12-06T15:54:24.694Z" },
+ { url = "https://files.pythonhosted.org/packages/6e/57/b9f5b5b6fbff9c26f77e785baf56ae8460ef74acdb3eae4931c25b8f5ba9/orjson-3.11.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a66d7769e98a08a12a139049aac2f0ca3adae989817f8c43337455fbc7669b85", size = 130520, upload-time = "2025-12-06T15:54:26.185Z" },
+ { url = "https://files.pythonhosted.org/packages/f6/6d/d34970bf9eb33f9ec7c979a262cad86076814859e54eb9a059a52f6dc13d/orjson-3.11.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86cfc555bfd5794d24c6a1903e558b50644e5e68e6471d66502ce5cb5fdef3f9", size = 136209, upload-time = "2025-12-06T15:54:27.264Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/39/bc373b63cc0e117a105ea12e57280f83ae52fdee426890d57412432d63b3/orjson-3.11.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a230065027bc2a025e944f9d4714976a81e7ecfa940923283bca7bbc1f10f626", size = 139837, upload-time = "2025-12-06T15:54:28.75Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/aa/7c4818c8d7d324da220f4f1af55c343956003aa4d1ce1857bdc1d396ba69/orjson-3.11.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b29d36b60e606df01959c4b982729c8845c69d1963f88686608be9ced96dbfaa", size = 137307, upload-time = "2025-12-06T15:54:29.856Z" },
+ { url = "https://files.pythonhosted.org/packages/46/bf/0993b5a056759ba65145effe3a79dd5a939d4a070eaa5da2ee3180fbb13f/orjson-3.11.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c74099c6b230d4261fdc3169d50efc09abf38ace1a42ea2f9994b1d79153d477", size = 139020, upload-time = "2025-12-06T15:54:31.024Z" },
+ { url = "https://files.pythonhosted.org/packages/65/e8/83a6c95db3039e504eda60fc388f9faedbb4f6472f5aba7084e06552d9aa/orjson-3.11.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e697d06ad57dd0c7a737771d470eedc18e68dfdefcdd3b7de7f33dfda5b6212e", size = 141099, upload-time = "2025-12-06T15:54:32.196Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/b4/24fdc024abfce31c2f6812973b0a693688037ece5dc64b7a60c1ce69e2f2/orjson-3.11.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e08ca8a6c851e95aaecc32bc44a5aa75d0ad26af8cdac7c77e4ed93acf3d5b69", size = 413540, upload-time = "2025-12-06T15:54:33.361Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/37/01c0ec95d55ed0c11e4cae3e10427e479bba40c77312b63e1f9665e0737d/orjson-3.11.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e8b5f96c05fce7d0218df3fdfeb962d6b8cfff7e3e20264306b46dd8b217c0f3", size = 151530, upload-time = "2025-12-06T15:54:34.6Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/d4/f9ebc57182705bb4bbe63f5bbe14af43722a2533135e1d2fb7affa0c355d/orjson-3.11.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ddbfdb5099b3e6ba6d6ea818f61997bb66de14b411357d24c4612cf1ebad08ca", size = 141863, upload-time = "2025-12-06T15:54:35.801Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/04/02102b8d19fdcb009d72d622bb5781e8f3fae1646bf3e18c53d1bc8115b5/orjson-3.11.5-cp312-cp312-win32.whl", hash = "sha256:9172578c4eb09dbfcf1657d43198de59b6cef4054de385365060ed50c458ac98", size = 135255, upload-time = "2025-12-06T15:54:37.209Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/fb/f05646c43d5450492cb387de5549f6de90a71001682c17882d9f66476af5/orjson-3.11.5-cp312-cp312-win_amd64.whl", hash = "sha256:2b91126e7b470ff2e75746f6f6ee32b9ab67b7a93c8ba1d15d3a0caaf16ec875", size = 133252, upload-time = "2025-12-06T15:54:38.401Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/a6/7b8c0b26ba18c793533ac1cd145e131e46fcf43952aa94c109b5b913c1f0/orjson-3.11.5-cp312-cp312-win_arm64.whl", hash = "sha256:acbc5fac7e06777555b0722b8ad5f574739e99ffe99467ed63da98f97f9ca0fe", size = 126777, upload-time = "2025-12-06T15:54:39.515Z" },
+ { url = "https://files.pythonhosted.org/packages/10/43/61a77040ce59f1569edf38f0b9faadc90c8cf7e9bec2e0df51d0132c6bb7/orjson-3.11.5-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:3b01799262081a4c47c035dd77c1301d40f568f77cc7ec1bb7db5d63b0a01629", size = 245271, upload-time = "2025-12-06T15:54:40.878Z" },
+ { url = "https://files.pythonhosted.org/packages/55/f9/0f79be617388227866d50edd2fd320cb8fb94dc1501184bb1620981a0aba/orjson-3.11.5-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:61de247948108484779f57a9f406e4c84d636fa5a59e411e6352484985e8a7c3", size = 129422, upload-time = "2025-12-06T15:54:42.403Z" },
+ { url = "https://files.pythonhosted.org/packages/77/42/f1bf1549b432d4a78bfa95735b79b5dac75b65b5bb815bba86ad406ead0a/orjson-3.11.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:894aea2e63d4f24a7f04a1908307c738d0dce992e9249e744b8f4e8dd9197f39", size = 132060, upload-time = "2025-12-06T15:54:43.531Z" },
+ { url = "https://files.pythonhosted.org/packages/25/49/825aa6b929f1a6ed244c78acd7b22c1481fd7e5fda047dc8bf4c1a807eb6/orjson-3.11.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ddc21521598dbe369d83d4d40338e23d4101dad21dae0e79fa20465dbace019f", size = 130391, upload-time = "2025-12-06T15:54:45.059Z" },
+ { url = "https://files.pythonhosted.org/packages/42/ec/de55391858b49e16e1aa8f0bbbb7e5997b7345d8e984a2dec3746d13065b/orjson-3.11.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cce16ae2f5fb2c53c3eafdd1706cb7b6530a67cc1c17abe8ec747f5cd7c0c51", size = 135964, upload-time = "2025-12-06T15:54:46.576Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/40/820bc63121d2d28818556a2d0a09384a9f0262407cf9fa305e091a8048df/orjson-3.11.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e46c762d9f0e1cfb4ccc8515de7f349abbc95b59cb5a2bd68df5973fdef913f8", size = 139817, upload-time = "2025-12-06T15:54:48.084Z" },
+ { url = "https://files.pythonhosted.org/packages/09/c7/3a445ca9a84a0d59d26365fd8898ff52bdfcdcb825bcc6519830371d2364/orjson-3.11.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7345c759276b798ccd6d77a87136029e71e66a8bbf2d2755cbdde1d82e78706", size = 137336, upload-time = "2025-12-06T15:54:49.426Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/b3/dc0d3771f2e5d1f13368f56b339c6782f955c6a20b50465a91acb79fe961/orjson-3.11.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75bc2e59e6a2ac1dd28901d07115abdebc4563b5b07dd612bf64260a201b1c7f", size = 138993, upload-time = "2025-12-06T15:54:50.939Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/a2/65267e959de6abe23444659b6e19c888f242bf7725ff927e2292776f6b89/orjson-3.11.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:54aae9b654554c3b4edd61896b978568c6daa16af96fa4681c9b5babd469f863", size = 141070, upload-time = "2025-12-06T15:54:52.414Z" },
+ { url = "https://files.pythonhosted.org/packages/63/c9/da44a321b288727a322c6ab17e1754195708786a04f4f9d2220a5076a649/orjson-3.11.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4bdd8d164a871c4ec773f9de0f6fe8769c2d6727879c37a9666ba4183b7f8228", size = 413505, upload-time = "2025-12-06T15:54:53.67Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/17/68dc14fa7000eefb3d4d6d7326a190c99bb65e319f02747ef3ebf2452f12/orjson-3.11.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a261fef929bcf98a60713bf5e95ad067cea16ae345d9a35034e73c3990e927d2", size = 151342, upload-time = "2025-12-06T15:54:55.113Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/c5/ccee774b67225bed630a57478529fc026eda33d94fe4c0eac8fe58d4aa52/orjson-3.11.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c028a394c766693c5c9909dec76b24f37e6a1b91999e8d0c0d5feecbe93c3e05", size = 141823, upload-time = "2025-12-06T15:54:56.331Z" },
+ { url = "https://files.pythonhosted.org/packages/67/80/5d00e4155d0cd7390ae2087130637671da713959bb558db9bac5e6f6b042/orjson-3.11.5-cp313-cp313-win32.whl", hash = "sha256:2cc79aaad1dfabe1bd2d50ee09814a1253164b3da4c00a78c458d82d04b3bdef", size = 135236, upload-time = "2025-12-06T15:54:57.507Z" },
+ { url = "https://files.pythonhosted.org/packages/95/fe/792cc06a84808dbdc20ac6eab6811c53091b42f8e51ecebf14b540e9cfe4/orjson-3.11.5-cp313-cp313-win_amd64.whl", hash = "sha256:ff7877d376add4e16b274e35a3f58b7f37b362abf4aa31863dadacdd20e3a583", size = 133167, upload-time = "2025-12-06T15:54:58.71Z" },
+ { url = "https://files.pythonhosted.org/packages/46/2c/d158bd8b50e3b1cfdcf406a7e463f6ffe3f0d167b99634717acdaf5e299f/orjson-3.11.5-cp313-cp313-win_arm64.whl", hash = "sha256:59ac72ea775c88b163ba8d21b0177628bd015c5dd060647bbab6e22da3aad287", size = 126712, upload-time = "2025-12-06T15:54:59.892Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/60/77d7b839e317ead7bb225d55bb50f7ea75f47afc489c81199befc5435b50/orjson-3.11.5-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e446a8ea0a4c366ceafc7d97067bfd55292969143b57e3c846d87fc701e797a0", size = 245252, upload-time = "2025-12-06T15:55:01.127Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/aa/d4639163b400f8044cef0fb9aa51b0337be0da3a27187a20d1166e742370/orjson-3.11.5-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:53deb5addae9c22bbe3739298f5f2196afa881ea75944e7720681c7080909a81", size = 129419, upload-time = "2025-12-06T15:55:02.723Z" },
+ { url = "https://files.pythonhosted.org/packages/30/94/9eabf94f2e11c671111139edf5ec410d2f21e6feee717804f7e8872d883f/orjson-3.11.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cd00d49d6063d2b8791da5d4f9d20539c5951f965e45ccf4e96d33505ce68f", size = 132050, upload-time = "2025-12-06T15:55:03.918Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/c8/ca10f5c5322f341ea9a9f1097e140be17a88f88d1cfdd29df522970d9744/orjson-3.11.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3fd15f9fc8c203aeceff4fda211157fad114dde66e92e24097b3647a08f4ee9e", size = 130370, upload-time = "2025-12-06T15:55:05.173Z" },
+ { url = "https://files.pythonhosted.org/packages/25/d4/e96824476d361ee2edd5c6290ceb8d7edf88d81148a6ce172fc00278ca7f/orjson-3.11.5-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9df95000fbe6777bf9820ae82ab7578e8662051bb5f83d71a28992f539d2cda7", size = 136012, upload-time = "2025-12-06T15:55:06.402Z" },
+ { url = "https://files.pythonhosted.org/packages/85/8e/9bc3423308c425c588903f2d103cfcfe2539e07a25d6522900645a6f257f/orjson-3.11.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a8d676748fca47ade5bc3da7430ed7767afe51b2f8100e3cd65e151c0eaceb", size = 139809, upload-time = "2025-12-06T15:55:07.656Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/3c/b404e94e0b02a232b957c54643ce68d0268dacb67ac33ffdee24008c8b27/orjson-3.11.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa0f513be38b40234c77975e68805506cad5d57b3dfd8fe3baa7f4f4051e15b4", size = 137332, upload-time = "2025-12-06T15:55:08.961Z" },
+ { url = "https://files.pythonhosted.org/packages/51/30/cc2d69d5ce0ad9b84811cdf4a0cd5362ac27205a921da524ff42f26d65e0/orjson-3.11.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1863e75b92891f553b7922ce4ee10ed06db061e104f2b7815de80cdcb135ad", size = 138983, upload-time = "2025-12-06T15:55:10.595Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/87/de3223944a3e297d4707d2fe3b1ffb71437550e165eaf0ca8bbe43ccbcb1/orjson-3.11.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d4be86b58e9ea262617b8ca6251a2f0d63cc132a6da4b5fcc8e0a4128782c829", size = 141069, upload-time = "2025-12-06T15:55:11.832Z" },
+ { url = "https://files.pythonhosted.org/packages/65/30/81d5087ae74be33bcae3ff2d80f5ccaa4a8fedc6d39bf65a427a95b8977f/orjson-3.11.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:b923c1c13fa02084eb38c9c065afd860a5cff58026813319a06949c3af5732ac", size = 413491, upload-time = "2025-12-06T15:55:13.314Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/6f/f6058c21e2fc1efaf918986dbc2da5cd38044f1a2d4b7b91ad17c4acf786/orjson-3.11.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:1b6bd351202b2cd987f35a13b5e16471cf4d952b42a73c391cc537974c43ef6d", size = 151375, upload-time = "2025-12-06T15:55:14.715Z" },
+ { url = "https://files.pythonhosted.org/packages/54/92/c6921f17d45e110892899a7a563a925b2273d929959ce2ad89e2525b885b/orjson-3.11.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bb150d529637d541e6af06bbe3d02f5498d628b7f98267ff87647584293ab439", size = 141850, upload-time = "2025-12-06T15:55:15.94Z" },
+ { url = "https://files.pythonhosted.org/packages/88/86/cdecb0140a05e1a477b81f24739da93b25070ee01ce7f7242f44a6437594/orjson-3.11.5-cp314-cp314-win32.whl", hash = "sha256:9cc1e55c884921434a84a0c3dd2699eb9f92e7b441d7f53f3941079ec6ce7499", size = 135278, upload-time = "2025-12-06T15:55:17.202Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/97/b638d69b1e947d24f6109216997e38922d54dcdcdb1b11c18d7efd2d3c59/orjson-3.11.5-cp314-cp314-win_amd64.whl", hash = "sha256:a4f3cb2d874e03bc7767c8f88adaa1a9a05cecea3712649c3b58589ec7317310", size = 133170, upload-time = "2025-12-06T15:55:18.468Z" },
+ { url = "https://files.pythonhosted.org/packages/8f/dd/f4fff4a6fe601b4f8f3ba3aa6da8ac33d17d124491a3b804c662a70e1636/orjson-3.11.5-cp314-cp314-win_arm64.whl", hash = "sha256:38b22f476c351f9a1c43e5b07d8b5a02eb24a6ab8e75f700f7d479d4568346a5", size = 126713, upload-time = "2025-12-06T15:55:19.738Z" },
]
-provides-extras = ["dev"]
[[package]]
name = "packaging"
@@ -750,6 +850,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
]
+[[package]]
+name = "pathvalidate"
+version = "3.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fa/2a/52a8da6fe965dea6192eb716b357558e103aea0a1e9a8352ad575a8406ca/pathvalidate-3.3.1.tar.gz", hash = "sha256:b18c07212bfead624345bb8e1d6141cdcf15a39736994ea0b94035ad2b1ba177", size = 63262, upload-time = "2025-06-15T09:07:20.736Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9a/70/875f4a23bfc4731703a5835487d0d2fb999031bd415e7d17c0ae615c18b7/pathvalidate-3.3.1-py3-none-any.whl", hash = "sha256:5263baab691f8e1af96092fa5137ee17df5bdfbd6cff1fcac4d6ef4bc2e1735f", size = 24305, upload-time = "2025-06-15T09:07:19.117Z" },
+]
+
[[package]]
name = "pillow"
version = "12.0.0"
@@ -1018,16 +1127,16 @@ wheels = [
[[package]]
name = "rich-click"
-version = "1.9.5"
+version = "1.9.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "rich" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/6b/d1/b60ca6a8745e76800b50c7ee246fd73f08a3be5d8e0b551fc93c19fa1203/rich_click-1.9.5.tar.gz", hash = "sha256:48120531493f1533828da80e13e839d471979ec8d7d0ca7b35f86a1379cc74b6", size = 73927, upload-time = "2025-12-21T14:49:44.167Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/4b/50/1497dbc52297d6759451bf5a991e9b2d0a122a5d33ac8cd057f81cb9910a/rich_click-1.9.6.tar.gz", hash = "sha256:463bd3dbef54a812282bfa93dde80c471bce359823fc1301be368eab63391cb2", size = 74777, upload-time = "2026-01-22T02:43:58.374Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/25/0a/d865895e1e5d88a60baee0fc3703eb111c502ee10c8c107516bc7623abf8/rich_click-1.9.5-py3-none-any.whl", hash = "sha256:9b195721a773b1acf0e16ff9ec68cef1e7d237e53471e6e3f7ade462f86c403a", size = 70580, upload-time = "2025-12-21T14:49:42.905Z" },
+ { url = "https://files.pythonhosted.org/packages/47/87/508930def644be9fb86fec63520151921061c152289b98798017a498d678/rich_click-1.9.6-py3-none-any.whl", hash = "sha256:e78d71e3f73a55548e573ccfd964e18503936e2e736a4a1f74c6c29479a2a054", size = 71430, upload-time = "2026-01-22T02:43:56.939Z" },
]
[[package]]
@@ -1140,28 +1249,28 @@ wheels = [
[[package]]
name = "ruff"
-version = "0.14.11"
+version = "0.14.14"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d4/77/9a7fe084d268f8855d493e5031ea03fa0af8cc05887f638bf1c4e3363eb8/ruff-0.14.11.tar.gz", hash = "sha256:f6dc463bfa5c07a59b1ff2c3b9767373e541346ea105503b4c0369c520a66958", size = 5993417, upload-time = "2026-01-08T19:11:58.322Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/2e/06/f71e3a86b2df0dfa2d2f72195941cd09b44f87711cb7fa5193732cb9a5fc/ruff-0.14.14.tar.gz", hash = "sha256:2d0f819c9a90205f3a867dbbd0be083bee9912e170fd7d9704cc8ae45824896b", size = 4515732, upload-time = "2026-01-22T22:30:17.527Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/f0/a6/a4c40a5aaa7e331f245d2dc1ac8ece306681f52b636b40ef87c88b9f7afd/ruff-0.14.11-py3-none-linux_armv6l.whl", hash = "sha256:f6ff2d95cbd335841a7217bdfd9c1d2e44eac2c584197ab1385579d55ff8830e", size = 12951208, upload-time = "2026-01-08T19:12:09.218Z" },
- { url = "https://files.pythonhosted.org/packages/5c/5c/360a35cb7204b328b685d3129c08aca24765ff92b5a7efedbdd6c150d555/ruff-0.14.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f6eb5c1c8033680f4172ea9c8d3706c156223010b8b97b05e82c59bdc774ee6", size = 13330075, upload-time = "2026-01-08T19:12:02.549Z" },
- { url = "https://files.pythonhosted.org/packages/1b/9e/0cc2f1be7a7d33cae541824cf3f95b4ff40d03557b575912b5b70273c9ec/ruff-0.14.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f2fc34cc896f90080fca01259f96c566f74069a04b25b6205d55379d12a6855e", size = 12257809, upload-time = "2026-01-08T19:12:00.366Z" },
- { url = "https://files.pythonhosted.org/packages/a7/e5/5faab97c15bb75228d9f74637e775d26ac703cc2b4898564c01ab3637c02/ruff-0.14.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53386375001773ae812b43205d6064dae49ff0968774e6befe16a994fc233caa", size = 12678447, upload-time = "2026-01-08T19:12:13.899Z" },
- { url = "https://files.pythonhosted.org/packages/1b/33/e9767f60a2bef779fb5855cab0af76c488e0ce90f7bb7b8a45c8a2ba4178/ruff-0.14.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a697737dce1ca97a0a55b5ff0434ee7205943d4874d638fe3ae66166ff46edbe", size = 12758560, upload-time = "2026-01-08T19:11:42.55Z" },
- { url = "https://files.pythonhosted.org/packages/eb/84/4c6cf627a21462bb5102f7be2a320b084228ff26e105510cd2255ea868e5/ruff-0.14.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6845ca1da8ab81ab1dce755a32ad13f1db72e7fba27c486d5d90d65e04d17b8f", size = 13599296, upload-time = "2026-01-08T19:11:30.371Z" },
- { url = "https://files.pythonhosted.org/packages/88/e1/92b5ed7ea66d849f6157e695dc23d5d6d982bd6aa8d077895652c38a7cae/ruff-0.14.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e36ce2fd31b54065ec6f76cb08d60159e1b32bdf08507862e32f47e6dde8bcbf", size = 15048981, upload-time = "2026-01-08T19:12:04.742Z" },
- { url = "https://files.pythonhosted.org/packages/61/df/c1bd30992615ac17c2fb64b8a7376ca22c04a70555b5d05b8f717163cf9f/ruff-0.14.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590bcc0e2097ecf74e62a5c10a6b71f008ad82eb97b0a0079e85defe19fe74d9", size = 14633183, upload-time = "2026-01-08T19:11:40.069Z" },
- { url = "https://files.pythonhosted.org/packages/04/e9/fe552902f25013dd28a5428a42347d9ad20c4b534834a325a28305747d64/ruff-0.14.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53fe71125fc158210d57fe4da26e622c9c294022988d08d9347ec1cf782adafe", size = 14050453, upload-time = "2026-01-08T19:11:37.555Z" },
- { url = "https://files.pythonhosted.org/packages/ae/93/f36d89fa021543187f98991609ce6e47e24f35f008dfe1af01379d248a41/ruff-0.14.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a35c9da08562f1598ded8470fcfef2afb5cf881996e6c0a502ceb61f4bc9c8a3", size = 13757889, upload-time = "2026-01-08T19:12:07.094Z" },
- { url = "https://files.pythonhosted.org/packages/b7/9f/c7fb6ecf554f28709a6a1f2a7f74750d400979e8cd47ed29feeaa1bd4db8/ruff-0.14.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0f3727189a52179393ecf92ec7057c2210203e6af2676f08d92140d3e1ee72c1", size = 13955832, upload-time = "2026-01-08T19:11:55.064Z" },
- { url = "https://files.pythonhosted.org/packages/db/a0/153315310f250f76900a98278cf878c64dfb6d044e184491dd3289796734/ruff-0.14.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:eb09f849bd37147a789b85995ff734a6c4a095bed5fd1608c4f56afc3634cde2", size = 12586522, upload-time = "2026-01-08T19:11:35.356Z" },
- { url = "https://files.pythonhosted.org/packages/2f/2b/a73a2b6e6d2df1d74bf2b78098be1572191e54bec0e59e29382d13c3adc5/ruff-0.14.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:c61782543c1231bf71041461c1f28c64b961d457d0f238ac388e2ab173d7ecb7", size = 12724637, upload-time = "2026-01-08T19:11:47.796Z" },
- { url = "https://files.pythonhosted.org/packages/f0/41/09100590320394401cd3c48fc718a8ba71c7ddb1ffd07e0ad6576b3a3df2/ruff-0.14.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:82ff352ea68fb6766140381748e1f67f83c39860b6446966cff48a315c3e2491", size = 13145837, upload-time = "2026-01-08T19:11:32.87Z" },
- { url = "https://files.pythonhosted.org/packages/3b/d8/e035db859d1d3edf909381eb8ff3e89a672d6572e9454093538fe6f164b0/ruff-0.14.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:728e56879df4ca5b62a9dde2dd0eb0edda2a55160c0ea28c4025f18c03f86984", size = 13850469, upload-time = "2026-01-08T19:12:11.694Z" },
- { url = "https://files.pythonhosted.org/packages/4e/02/bb3ff8b6e6d02ce9e3740f4c17dfbbfb55f34c789c139e9cd91985f356c7/ruff-0.14.11-py3-none-win32.whl", hash = "sha256:337c5dd11f16ee52ae217757d9b82a26400be7efac883e9e852646f1557ed841", size = 12851094, upload-time = "2026-01-08T19:11:45.163Z" },
- { url = "https://files.pythonhosted.org/packages/58/f1/90ddc533918d3a2ad628bc3044cdfc094949e6d4b929220c3f0eb8a1c998/ruff-0.14.11-py3-none-win_amd64.whl", hash = "sha256:f981cea63d08456b2c070e64b79cb62f951aa1305282974d4d5216e6e0178ae6", size = 14001379, upload-time = "2026-01-08T19:11:52.591Z" },
- { url = "https://files.pythonhosted.org/packages/c4/1c/1dbe51782c0e1e9cfce1d1004752672d2d4629ea46945d19d731ad772b3b/ruff-0.14.11-py3-none-win_arm64.whl", hash = "sha256:649fb6c9edd7f751db276ef42df1f3df41c38d67d199570ae2a7bd6cbc3590f0", size = 12938644, upload-time = "2026-01-08T19:11:50.027Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/89/20a12e97bc6b9f9f68343952da08a8099c57237aef953a56b82711d55edd/ruff-0.14.14-py3-none-linux_armv6l.whl", hash = "sha256:7cfe36b56e8489dee8fbc777c61959f60ec0f1f11817e8f2415f429552846aed", size = 10467650, upload-time = "2026-01-22T22:30:08.578Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/b1/c5de3fd2d5a831fcae21beda5e3589c0ba67eec8202e992388e4b17a6040/ruff-0.14.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6006a0082336e7920b9573ef8a7f52eec837add1265cc74e04ea8a4368cd704c", size = 10883245, upload-time = "2026-01-22T22:30:04.155Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/7c/3c1db59a10e7490f8f6f8559d1db8636cbb13dccebf18686f4e3c9d7c772/ruff-0.14.14-py3-none-macosx_11_0_arm64.whl", hash = "sha256:026c1d25996818f0bf498636686199d9bd0d9d6341c9c2c3b62e2a0198b758de", size = 10231273, upload-time = "2026-01-22T22:30:34.642Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/6e/5e0e0d9674be0f8581d1f5e0f0a04761203affce3232c1a1189d0e3b4dad/ruff-0.14.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f666445819d31210b71e0a6d1c01e24447a20b85458eea25a25fe8142210ae0e", size = 10585753, upload-time = "2026-01-22T22:30:31.781Z" },
+ { url = "https://files.pythonhosted.org/packages/23/09/754ab09f46ff1884d422dc26d59ba18b4e5d355be147721bb2518aa2a014/ruff-0.14.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c0f18b922c6d2ff9a5e6c3ee16259adc513ca775bcf82c67ebab7cbd9da5bc8", size = 10286052, upload-time = "2026-01-22T22:30:24.827Z" },
+ { url = "https://files.pythonhosted.org/packages/c8/cc/e71f88dd2a12afb5f50733851729d6b571a7c3a35bfdb16c3035132675a0/ruff-0.14.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1629e67489c2dea43e8658c3dba659edbfd87361624b4040d1df04c9740ae906", size = 11043637, upload-time = "2026-01-22T22:30:13.239Z" },
+ { url = "https://files.pythonhosted.org/packages/67/b2/397245026352494497dac935d7f00f1468c03a23a0c5db6ad8fc49ca3fb2/ruff-0.14.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:27493a2131ea0f899057d49d303e4292b2cae2bb57253c1ed1f256fbcd1da480", size = 12194761, upload-time = "2026-01-22T22:30:22.542Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/06/06ef271459f778323112c51b7587ce85230785cd64e91772034ddb88f200/ruff-0.14.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ff589aab3f5b539e35db38425da31a57521efd1e4ad1ae08fc34dbe30bd7df", size = 12005701, upload-time = "2026-01-22T22:30:20.499Z" },
+ { url = "https://files.pythonhosted.org/packages/41/d6/99364514541cf811ccc5ac44362f88df66373e9fec1b9d1c4cc830593fe7/ruff-0.14.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc12d74eef0f29f51775f5b755913eb523546b88e2d733e1d701fe65144e89b", size = 11282455, upload-time = "2026-01-22T22:29:59.679Z" },
+ { url = "https://files.pythonhosted.org/packages/ca/71/37daa46f89475f8582b7762ecd2722492df26421714a33e72ccc9a84d7a5/ruff-0.14.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb8481604b7a9e75eff53772496201690ce2687067e038b3cc31aaf16aa0b974", size = 11215882, upload-time = "2026-01-22T22:29:57.032Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/10/a31f86169ec91c0705e618443ee74ede0bdd94da0a57b28e72db68b2dbac/ruff-0.14.14-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:14649acb1cf7b5d2d283ebd2f58d56b75836ed8c6f329664fa91cdea19e76e66", size = 11180549, upload-time = "2026-01-22T22:30:27.175Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/1e/c723f20536b5163adf79bdd10c5f093414293cdf567eed9bdb7b83940f3f/ruff-0.14.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8058d2145566510790eab4e2fad186002e288dec5e0d343a92fe7b0bc1b3e13", size = 10543416, upload-time = "2026-01-22T22:30:01.964Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/34/8a84cea7e42c2d94ba5bde1d7a4fae164d6318f13f933d92da6d7c2041ff/ruff-0.14.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e651e977a79e4c758eb807f0481d673a67ffe53cfa92209781dfa3a996cf8412", size = 10285491, upload-time = "2026-01-22T22:30:29.51Z" },
+ { url = "https://files.pythonhosted.org/packages/55/ef/b7c5ea0be82518906c978e365e56a77f8de7678c8bb6651ccfbdc178c29f/ruff-0.14.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cc8b22da8d9d6fdd844a68ae937e2a0adf9b16514e9a97cc60355e2d4b219fc3", size = 10733525, upload-time = "2026-01-22T22:30:06.499Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/5b/aaf1dfbcc53a2811f6cc0a1759de24e4b03e02ba8762daabd9b6bd8c59e3/ruff-0.14.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:16bc890fb4cc9781bb05beb5ab4cd51be9e7cb376bf1dd3580512b24eb3fda2b", size = 11315626, upload-time = "2026-01-22T22:30:36.848Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/aa/9f89c719c467dfaf8ad799b9bae0df494513fb21d31a6059cb5870e57e74/ruff-0.14.14-py3-none-win32.whl", hash = "sha256:b530c191970b143375b6a68e6f743800b2b786bbcf03a7965b06c4bf04568167", size = 10502442, upload-time = "2026-01-22T22:30:38.93Z" },
+ { url = "https://files.pythonhosted.org/packages/87/44/90fa543014c45560cae1fffc63ea059fb3575ee6e1cb654562197e5d16fb/ruff-0.14.14-py3-none-win_amd64.whl", hash = "sha256:3dde1435e6b6fe5b66506c1dff67a421d0b7f6488d466f651c07f4cab3bf20fd", size = 11630486, upload-time = "2026-01-22T22:30:10.852Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/6a/40fee331a52339926a92e17ae748827270b288a35ef4a15c9c8f2ec54715/ruff-0.14.14-py3-none-win_arm64.whl", hash = "sha256:56e6981a98b13a32236a72a8da421d7839221fa308b223b9283312312e5ac76c", size = 10920448, upload-time = "2026-01-22T22:30:15.417Z" },
]
[[package]]
@@ -1215,6 +1324,44 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/56/1a/9ffe814d317c5224166b23e7c47f606d6e473712a2fad0f704ea9b99f246/urllib3-2.6.0-py3-none-any.whl", hash = "sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f", size = 131083, upload-time = "2025-12-05T15:08:45.983Z" },
]
+[[package]]
+name = "uvloop"
+version = "0.22.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" },
+ { url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" },
+ { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" },
+ { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" },
+ { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" },
+ { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" },
+ { url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" },
+ { url = "https://files.pythonhosted.org/packages/89/8c/182a2a593195bfd39842ea68ebc084e20c850806117213f5a299dfc513d9/uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705", size = 1358611, upload-time = "2025-10-16T22:16:36.833Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/14/e301ee96a6dc95224b6f1162cd3312f6d1217be3907b79173b06785f2fe7/uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8", size = 751811, upload-time = "2025-10-16T22:16:38.275Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/02/654426ce265ac19e2980bfd9ea6590ca96a56f10c76e63801a2df01c0486/uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d", size = 4288562, upload-time = "2025-10-16T22:16:39.375Z" },
+ { url = "https://files.pythonhosted.org/packages/15/c0/0be24758891ef825f2065cd5db8741aaddabe3e248ee6acc5e8a80f04005/uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e", size = 4366890, upload-time = "2025-10-16T22:16:40.547Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/53/8369e5219a5855869bcee5f4d317f6da0e2c669aecf0ef7d371e3d084449/uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e", size = 4119472, upload-time = "2025-10-16T22:16:41.694Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/ba/d69adbe699b768f6b29a5eec7b47dd610bd17a69de51b251126a801369ea/uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad", size = 4239051, upload-time = "2025-10-16T22:16:43.224Z" },
+ { url = "https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142", size = 1362067, upload-time = "2025-10-16T22:16:44.503Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74", size = 752423, upload-time = "2025-10-16T22:16:45.968Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/94/94af78c156f88da4b3a733773ad5ba0b164393e357cc4bd0ab2e2677a7d6/uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35", size = 4272437, upload-time = "2025-10-16T22:16:47.451Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25", size = 4292101, upload-time = "2025-10-16T22:16:49.318Z" },
+ { url = "https://files.pythonhosted.org/packages/02/62/67d382dfcb25d0a98ce73c11ed1a6fba5037a1a1d533dcbb7cab033a2636/uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6", size = 4114158, upload-time = "2025-10-16T22:16:50.517Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/7a/f1171b4a882a5d13c8b7576f348acfe6074d72eaf52cccef752f748d4a9f/uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079", size = 4177360, upload-time = "2025-10-16T22:16:52.646Z" },
+ { url = "https://files.pythonhosted.org/packages/79/7b/b01414f31546caf0919da80ad57cbfe24c56b151d12af68cee1b04922ca8/uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289", size = 1454790, upload-time = "2025-10-16T22:16:54.355Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/31/0bb232318dd838cad3fa8fb0c68c8b40e1145b32025581975e18b11fab40/uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3", size = 796783, upload-time = "2025-10-16T22:16:55.906Z" },
+ { url = "https://files.pythonhosted.org/packages/42/38/c9b09f3271a7a723a5de69f8e237ab8e7803183131bc57c890db0b6bb872/uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c", size = 4647548, upload-time = "2025-10-16T22:16:57.008Z" },
+ { url = "https://files.pythonhosted.org/packages/c1/37/945b4ca0ac27e3dc4952642d4c900edd030b3da6c9634875af6e13ae80e5/uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21", size = 4467065, upload-time = "2025-10-16T22:16:58.206Z" },
+ { url = "https://files.pythonhosted.org/packages/97/cc/48d232f33d60e2e2e0b42f4e73455b146b76ebe216487e862700457fbf3c/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88", size = 4328384, upload-time = "2025-10-16T22:16:59.36Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" },
+]
+
[[package]]
name = "watchdog"
version = "6.0.0"
@@ -1244,7 +1391,7 @@ wheels = [
[[package]]
name = "weasyprint"
-version = "67.0"
+version = "68.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi" },
@@ -1256,9 +1403,9 @@ dependencies = [
{ name = "tinycss2" },
{ name = "tinyhtml5" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/fd/bc/79a65b3a406cb62a1982fec8b49134b25a3b31abb094ca493c9fddff5492/weasyprint-67.0.tar.gz", hash = "sha256:fdfbccf700e8086c8fd1607ec42e25d4b584512c29af2d9913587a4e448dead4", size = 1534152, upload-time = "2025-12-02T16:11:36.972Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/6f/c8/269c96363db39e34cdb99c7afecaaf8130b7e4c176bff28c74877308e0f3/weasyprint-68.0.tar.gz", hash = "sha256:447f40898b747cb44ac31a5d493d512e7441fd56e13f63744c099383bbf9cda9", size = 1541418, upload-time = "2026-01-19T14:54:45.596Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/1e/3a/a225e214ae2accd8781e4d22e9397bd51290c631ea0943d3a0a1840bc667/weasyprint-67.0-py3-none-any.whl", hash = "sha256:abc2f40872ea01c29c11f7799dafc4b23c078335bf7777f72a8affeb36e1d201", size = 316309, upload-time = "2025-12-02T16:11:35.402Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/5a/c7954167c05ee882a4640b6da0a343c37e3b9de352619c86f8c4efefbb00/weasyprint-68.0-py3-none-any.whl", hash = "sha256:c2cb40c71b50837c5971f00171c9e4078e8c9912dd7c217f3e90e068f11e8aa1", size = 319688, upload-time = "2026-01-19T14:54:44.242Z" },
]
[[package]]