From c559af26095f55cb328111f4e673ca5765d92cdd Mon Sep 17 00:00:00 2001
From: eavanvalkenburg
Date: Fri, 27 Feb 2026 11:51:40 +0100
Subject: [PATCH 1/8] chore(python): improve dependency range automation

- tighten dependency bounds and coding standards guidance
- add dependency range validation workflow, reporting, and issue automation
- update related tests and dependency pins for compatibility

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
 .../python-dependency-range-validation.yml | 208 ++++
 .gitignore | 1 +
 python/CODING_STANDARD.md | 22 +-
 python/packages/a2a/pyproject.toml | 4 +-
 python/packages/ag-ui/pyproject.toml | 12 +-
 python/packages/anthropic/pyproject.toml | 4 +-
 .../packages/azure-ai-search/pyproject.toml | 4 +-
 .../tests/test_aisearch_context_provider.py | 12 +
 python/packages/azure-ai/pyproject.toml | 8 +-
 python/packages/azurefunctions/pyproject.toml | 6 +-
 python/packages/bedrock/pyproject.toml | 2 +-
 python/packages/chatkit/pyproject.toml | 2 +-
 python/packages/claude/pyproject.toml | 4 +-
 python/packages/copilotstudio/pyproject.toml | 4 +-
 .../agent_framework/_workflows/_workflow.py | 101 +-
 python/packages/core/pyproject.toml | 16 +-
 .../azure/test_azure_embedding_client.py | 159 +++
 .../openai/test_openai_embedding_client.py | 10 +-
 python/packages/declarative/pyproject.toml | 4 +-
 python/packages/devui/pyproject.toml | 12 +-
 python/packages/durabletask/pyproject.toml | 8 +-
 python/packages/foundry_local/pyproject.toml | 4 +-
 python/packages/github_copilot/pyproject.toml | 4 +-
 python/packages/lab/pyproject.toml | 32 +-
 python/packages/mem0/pyproject.toml | 4 +-
 python/packages/ollama/pyproject.toml | 4 +-
 python/packages/orchestrations/pyproject.toml | 2 +-
 python/packages/purview/pyproject.toml | 6 +-
 python/packages/redis/pyproject.toml | 8 +-
 python/pyproject.toml | 1 +
 python/scripts/validate_dependency_ranges.py | 1036 +++++++++++++++++
 python/uv.lock | 279 ++---
 32 files changed, 1704 insertions(+), 279 deletions(-)
 create mode 100644 .github/workflows/python-dependency-range-validation.yml
 create mode 100644 python/packages/core/tests/azure/test_azure_embedding_client.py
 create mode 100644 python/scripts/validate_dependency_ranges.py

diff --git a/.github/workflows/python-dependency-range-validation.yml b/.github/workflows/python-dependency-range-validation.yml
new file mode 100644
index 0000000000..78c4fce11a
--- /dev/null
+++ b/.github/workflows/python-dependency-range-validation.yml
@@ -0,0 +1,208 @@
+name: Python - Dependency Range Validation
+
+on:
+  workflow_dispatch:
+
+permissions:
+  contents: write
+  issues: write
+  pull-requests: write
+
+env:
+  UV_CACHE_DIR: /tmp/.uv-cache
+
+jobs:
+  dependency-range-validation:
+    name: Dependency Range Validation
+    runs-on: ubuntu-latest
+    env:
+      UV_PYTHON: "3.13"
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    steps:
+      - uses: actions/checkout@v6
+        with:
+          fetch-depth: 0
+
+      - name: Set up python and install the project
+        uses: ./.github/actions/python-setup
+        with:
+          python-version: ${{ env.UV_PYTHON }}
+          os: ${{ runner.os }}
+        env:
+          UV_CACHE_DIR: /tmp/.uv-cache
+
+      - name: Run dependency range validation
+        id: validate_ranges
+        continue-on-error: true
+        run: uv run poe validate-dependency-ranges
+        working-directory: ./python
+
+      - name: Upload dependency range report
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: dependency-range-results
+          path: python/scripts/dependency-range-results.json
+          if-no-files-found: warn
+
+      - name: Create issues for failed dependency
candidates + if: always() + uses: actions/github-script@v8 + with: + script: | + const fs = require("fs") + const reportPath = "python/scripts/dependency-range-results.json" + + if (!fs.existsSync(reportPath)) { + core.warning(`No dependency range report found at ${reportPath}`) + return + } + + const report = JSON.parse(fs.readFileSync(reportPath, "utf8")) + const dependencyFailures = [] + + for (const packageResult of report.packages ?? []) { + for (const dependency of packageResult.dependencies ?? []) { + const candidateVersions = new Set(dependency.candidate_versions ?? []) + const failedAttempts = (dependency.attempts ?? []).filter( + (attempt) => attempt.status === "failed" && candidateVersions.has(attempt.trial_upper) + ) + if (!failedAttempts.length) { + continue + } + + const failuresByVersion = new Map() + for (const attempt of failedAttempts) { + const version = attempt.trial_upper || "unknown" + if (!failuresByVersion.has(version)) { + failuresByVersion.set(version, attempt.error || "No error output captured.") + } + } + + dependencyFailures.push({ + packageName: packageResult.package_name, + projectPath: packageResult.project_path, + dependencyName: dependency.name, + originalRequirements: dependency.original_requirements ?? [], + finalRequirements: dependency.final_requirements ?? [], + failedVersions: [...failuresByVersion.entries()].map(([version, error]) => ({ version, error })), + }) + } + } + + if (!dependencyFailures.length) { + core.info("No failing dependency candidates found.") + return + } + + const owner = context.repo.owner + const repo = context.repo.repo + const openIssues = await github.paginate(github.rest.issues.listForRepo, { + owner, + repo, + state: "open", + per_page: 100, + }) + const openIssueTitles = new Set( + openIssues.filter((issue) => !issue.pull_request).map((issue) => issue.title) + ) + + const formatError = (message) => String(message || "No error output captured.").replace(/```/g, "'''") + + for (const failure of dependencyFailures) { + const title = `Dependency validation failed: ${failure.dependencyName} (${failure.packageName})` + if (openIssueTitles.has(title)) { + core.info(`Issue already exists: ${title}`) + continue + } + + const visibleFailures = failure.failedVersions.slice(0, 5) + const omittedCount = failure.failedVersions.length - visibleFailures.length + const failureDetails = visibleFailures + .map( + (entry) => + `- \`${entry.version}\`\n\n\`\`\`\n${formatError(entry.error).slice(0, 3500)}\n\`\`\`` + ) + .join("\n\n") + + const body = [ + "Automated dependency range validation found candidate versions that failed checks.", + "", + `- Package: \`${failure.packageName}\``, + `- Project path: \`${failure.projectPath}\``, + `- Dependency: \`${failure.dependencyName}\``, + `- Original requirements: ${ + failure.originalRequirements.length + ? failure.originalRequirements.map((value) => `\`${value}\``).join(", ") + : "_none_" + }`, + `- Final requirements after run: ${ + failure.finalRequirements.length + ? failure.finalRequirements.map((value) => `\`${value}\``).join(", ") + : "_none_" + }`, + "", + "### Failed versions and errors", + failureDetails, + omittedCount > 0 ? 
`\n_Additional failed versions omitted: ${omittedCount}_` : "", + "", + `Workflow run: ${context.serverUrl}/${owner}/${repo}/actions/runs/${context.runId}`, + ].join("\n") + + await github.rest.issues.create({ + owner, + repo, + title, + body, + }) + openIssueTitles.add(title) + core.info(`Created issue: ${title}`) + } + + - name: Refresh lockfile + if: steps.validate_ranges.outcome == 'success' + run: uv lock + working-directory: ./python + + - name: Commit and push dependency updates + id: commit_updates + if: steps.validate_ranges.outcome == 'success' + run: | + BRANCH="automation/python-dependency-range-updates" + + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + git checkout -B "${BRANCH}" + + git add python/packages/*/pyproject.toml python/uv.lock + if git diff --cached --quiet; then + echo "has_changes=false" >> "$GITHUB_OUTPUT" + echo "No dependency updates to commit." + exit 0 + fi + + git commit -m "chore(python): update dependency ranges" + git push --force-with-lease --set-upstream origin "${BRANCH}" + echo "has_changes=true" >> "$GITHUB_OUTPUT" + + - name: Create or update pull request with GitHub CLI + if: steps.validate_ranges.outcome == 'success' && steps.commit_updates.outputs.has_changes == 'true' + run: | + BRANCH="automation/python-dependency-range-updates" + PR_TITLE="Python: chore: update dependency ranges" + PR_BODY_FILE="$(mktemp)" + + cat > "${PR_BODY_FILE}" <<'EOF' + This PR was generated by the dependency range validation workflow. + + - Ran `uv run poe validate-dependency-ranges` + - Updated package dependency bounds + - Refreshed `python/uv.lock` + EOF + + PR_NUMBER="$(gh pr list --head "${BRANCH}" --base main --state open --json number --jq '.[0].number')" + if [ -n "${PR_NUMBER}" ]; then + gh pr edit "${PR_NUMBER}" --title "${PR_TITLE}" --body-file "${PR_BODY_FILE}" + else + gh pr create --base main --head "${BRANCH}" --title "${PR_TITLE}" --body-file "${PR_BODY_FILE}" + fi diff --git a/.gitignore b/.gitignore index 09b8dfa453..b66a5bb802 100644 --- a/.gitignore +++ b/.gitignore @@ -205,6 +205,7 @@ WARP.md **/memory-bank/ **/projectBrief.md **/tmpclaude* +python/scripts/dependency-range-results.json # Azurite storage emulator files */__azurite_db_blob__.json* diff --git a/python/CODING_STANDARD.md b/python/CODING_STANDARD.md index ccb8e058e3..06610056dc 100644 --- a/python/CODING_STANDARD.md +++ b/python/CODING_STANDARD.md @@ -160,10 +160,14 @@ user_msg = Message("user", ["Hello, world!"]) asst_msg = Message("assistant", ["Hello, world!"]) # ❌ Not preferred - unnecessary inheritance -from agent_framework import UserMessage, AssistantMessage +class UserMessage(Message): + pass -user_msg = UserMessage(content="Hello, world!") -asst_msg = AssistantMessage(content="Hello, world!") +class AssistantMessage(Message): + pass + +user_msg = UserMessage("user", ["Hello, world!"]) +asst_msg = AssistantMessage("assistant", ["Hello, world!"]) ``` ### Import Structure @@ -383,6 +387,18 @@ All non-core packages declare a lower bound on `agent-framework-core` (e.g., `"a - **Core version changes**: When `agent-framework-core` is updated with breaking or significant changes and its version is bumped, update the `agent-framework-core>=...` lower bound in every other package's `pyproject.toml` to match the new core version. - **Non-core version changes**: Non-core packages (connectors, extensions) can have their own versions incremented independently while keeping the existing core lower bound pinned. 
Only raise the core lower bound if the non-core package actually depends on new core APIs.
 
+### External Dependency Version Bounds
+
+The guiding principle for external dependencies is to keep the range of allowed versions as broad as possible, even if that means using conditional imports or other tricks to accommodate small differences between versions.
+So we use bounded ranges for external package dependencies in `pyproject.toml`:
+
+- For stable dependencies (`>=1.0.0`), use `>=lower,<next-major` (for example: `openai>=1.99.0,<2`).
+- For prerelease (`dev`/`a`/`b`/`rc`) dependencies, use a known-good lower bound with a hard upper boundary in the same prerelease line (for example: `azure-ai-projects>=2.0.0b3,<2.0.0b4`).
+- For `<1.0.0` dependencies, use patch-level caps (`>=lower,<next-patch`), not minor-level caps (for example: `a2a-sdk>=0.3.5,<0.3.6`).
+- Prefer keeping support for multiple major versions when practical. If APIs differ between supported majors, version-conditional imports/branches are acceptable to preserve compatibility.
+- Validate dependency bounds project by project with the dependency-range validation script (`uv run poe validate-dependency-ranges`), which gates each candidate version on both typing and tests; see the example below.
+
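+Putting these rules together, the dependency block of a typical connector package might look like this (illustrative only; the bounds are borrowed from packages in this repository):
+
+```toml
+dependencies = [
+    "agent-framework-core>=1.0.0rc3",
+    # stable (>=1.0.0): capped at the next major
+    "openai>=1.99.0,<2",
+    # prerelease: capped within the same prerelease line
+    "azure-ai-projects>=2.0.0b3,<2.0.0b4",
+    # <1.0.0: capped at the next patch
+    "ollama>=0.5.3,<0.5.4",
+]
+```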
"anthropic>=0.80.0,<0.80.1", ] [tool.uv] @@ -87,7 +87,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_anthropic" -test = "pytest -m \"not integration\" --cov=agent_framework_anthropic --cov-report=term-missing:skip-covered -n auto --dist worksteal tests" +test = 'pytest -m "not integration" --cov=agent_framework_anthropic --cov-report=term-missing:skip-covered -n auto --dist worksteal tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/azure-ai-search/pyproject.toml b/python/packages/azure-ai-search/pyproject.toml index 0827c2d816..996a39a914 100644 --- a/python/packages/azure-ai-search/pyproject.toml +++ b/python/packages/azure-ai-search/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "azure-search-documents==11.7.0b2", + "azure-search-documents>=11.7.0b2,<11.7.0b3", ] [tool.uv] @@ -89,7 +89,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_azure_ai_search" -test = "pytest -m \"not integration\" --cov=agent_framework_azure_ai_search --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_azure_ai_search --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/azure-ai-search/tests/test_aisearch_context_provider.py b/python/packages/azure-ai-search/tests/test_aisearch_context_provider.py index 3c4fb68fe8..d4040af61b 100644 --- a/python/packages/azure-ai-search/tests/test_aisearch_context_provider.py +++ b/python/packages/azure-ai-search/tests/test_aisearch_context_provider.py @@ -70,6 +70,18 @@ async def _search(**kwargs): return client +@pytest.fixture(autouse=True) +def clear_azure_search_env(monkeypatch: pytest.MonkeyPatch) -> None: + """Isolate tests from ambient AZURE_SEARCH_* environment variables.""" + for key in ( + "AZURE_SEARCH_ENDPOINT", + "AZURE_SEARCH_INDEX_NAME", + "AZURE_SEARCH_KNOWLEDGE_BASE_NAME", + "AZURE_SEARCH_API_KEY", + ): + monkeypatch.delenv(key, raising=False) + + def _make_provider(**overrides) -> AzureAISearchContextProvider: """Create a semantic-mode provider with mocked internals (skips auto-discovery).""" defaults = { diff --git a/python/packages/azure-ai/pyproject.toml b/python/packages/azure-ai/pyproject.toml index 2bd51729c2..60c270dafd 100644 --- a/python/packages/azure-ai/pyproject.toml +++ b/python/packages/azure-ai/pyproject.toml @@ -24,9 +24,9 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "azure-ai-agents == 1.2.0b5", - "azure-ai-inference>=1.0.0b9", - "aiohttp", + "azure-ai-agents>=1.2.0b5,<1.2.0b6", + "azure-ai-inference>=1.0.0b9,<1.0.0b10", + "aiohttp>=3.13.3,<4", ] [tool.uv] @@ -87,7 +87,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_azure_ai" -test = "pytest -m \"not integration\" --cov=agent_framework_azure_ai --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_azure_ai --cov-report=term-missing:skip-covered tests' [tool.poe.tasks.integration-tests] cmd = """ diff --git a/python/packages/azurefunctions/pyproject.toml b/python/packages/azurefunctions/pyproject.toml index 0bb2ec9612..c3789da68a 100644 --- a/python/packages/azurefunctions/pyproject.toml +++ b/python/packages/azurefunctions/pyproject.toml @@ -24,8 +24,8 @@ 
classifiers = [ dependencies = [ "agent-framework-core>=1.0.0rc3", "agent-framework-durabletask", - "azure-functions", - "azure-functions-durable", + "azure-functions>=1.24.0,<2", + "azure-functions-durable>=1.5.0,<2", ] [dependency-groups] @@ -93,7 +93,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_azurefunctions" -test = "pytest -m \"not integration\" --cov=agent_framework_azurefunctions --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_azurefunctions --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/bedrock/pyproject.toml b/python/packages/bedrock/pyproject.toml index b99ecb91ff..b13093b399 100644 --- a/python/packages/bedrock/pyproject.toml +++ b/python/packages/bedrock/pyproject.toml @@ -86,7 +86,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_bedrock" -test = "pytest -m \"not integration\" --cov=agent_framework_bedrock --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_bedrock --cov-report=term-missing:skip-covered tests' [build-system] requires = ["hatchling"] diff --git a/python/packages/chatkit/pyproject.toml b/python/packages/chatkit/pyproject.toml index 74d7216da6..ece58d277d 100644 --- a/python/packages/chatkit/pyproject.toml +++ b/python/packages/chatkit/pyproject.toml @@ -88,7 +88,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_chatkit" -test = "pytest -m \"not integration\" --cov=agent_framework_chatkit --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_chatkit --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/claude/pyproject.toml b/python/packages/claude/pyproject.toml index f1891586f8..3c1813fcab 100644 --- a/python/packages/claude/pyproject.toml +++ b/python/packages/claude/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "claude-agent-sdk>=0.1.25", + "claude-agent-sdk>=0.1.25,<0.1.26", ] [tool.uv] @@ -88,7 +88,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_claude" -test = "pytest -m \"not integration\" --cov=agent_framework_claude --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_claude --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/copilotstudio/pyproject.toml b/python/packages/copilotstudio/pyproject.toml index c37fa71ecf..d3dd05e253 100644 --- a/python/packages/copilotstudio/pyproject.toml +++ b/python/packages/copilotstudio/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "microsoft-agents-copilotstudio-client>=0.3.1", + "microsoft-agents-copilotstudio-client>=0.3.1,<0.3.2", ] [tool.uv] @@ -87,7 +87,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_copilotstudio" -test = "pytest -m \"not integration\" --cov=agent_framework_copilotstudio --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" 
--cov=agent_framework_copilotstudio --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/core/agent_framework/_workflows/_workflow.py b/python/packages/core/agent_framework/_workflows/_workflow.py index 8c6b5fe1fb..680f7b9380 100644 --- a/python/packages/core/agent_framework/_workflows/_workflow.py +++ b/python/packages/core/agent_framework/_workflows/_workflow.py @@ -57,7 +57,11 @@ class WorkflowRunResult(list[WorkflowEvent]): - status_timeline(): Access the complete status event history """ - def __init__(self, events: list[WorkflowEvent[Any]], status_events: list[WorkflowEvent[Any]] | None = None) -> None: + def __init__( + self, + events: list[WorkflowEvent[Any]], + status_events: list[WorkflowEvent[Any]] | None = None, + ) -> None: super().__init__(events) self._status_events: list[WorkflowEvent[Any]] = status_events or [] @@ -215,7 +219,9 @@ def __init__( # Output events (WorkflowEvent with type='output') from these executors are treated as workflow outputs. # If None or empty, all executor outputs are considered workflow outputs. - self._output_executors = list(output_executors) if output_executors else list(self.executors.keys()) + self._output_executors = ( + list(output_executors) if output_executors else list(self.executors.keys()) + ) # Store non-serializable runtime objects as private attributes self._runner_context = runner_context @@ -236,7 +242,9 @@ def __init__( def _ensure_not_running(self) -> None: """Ensure the workflow is not already running.""" if self._is_running: - raise RuntimeError("Workflow is already running. Concurrent executions are not allowed.") + raise RuntimeError( + "Workflow is already running. Concurrent executions are not allowed." + ) self._is_running = True def _reset_running_flag(self) -> None: @@ -251,7 +259,10 @@ def to_dict(self) -> dict[str, Any]: "start_executor_id": self.start_executor_id, "max_iterations": self.max_iterations, "edge_groups": [group.to_dict() for group in self.edge_groups], - "executors": {executor_id: executor.to_dict() for executor_id, executor in self.executors.items()}, + "executors": { + executor_id: executor.to_dict() + for executor_id, executor in self.executors.items() + }, "output_executors": self._output_executors, } @@ -270,7 +281,9 @@ def to_dict(self) -> dict[str, Any]: from ._workflow_executor import WorkflowExecutor if isinstance(original_executor, WorkflowExecutor): - executor_payload["workflow"] = original_executor.workflow.to_dict() + executor_payload["workflow"] = ( + original_executor.workflow.to_dict() + ) return data @@ -333,11 +346,9 @@ async def _run_workflow_with_tracing( span.add_event(OtelAttr.WORKFLOW_STARTED) # Emit explicit start/status events to the stream with _framework_event_origin(): - started = WorkflowEvent.started() - yield started + yield WorkflowEvent.started() with _framework_event_origin(): - in_progress = WorkflowEvent.status(WorkflowRunState.IN_PROGRESS) - yield in_progress + yield WorkflowEvent.status(WorkflowRunState.IN_PROGRESS) # Reset context for a new run if supported if reset_context: @@ -372,12 +383,16 @@ async def _run_workflow_with_tracing( if event.type == "request_info" and not emitted_in_progress_pending: emitted_in_progress_pending = True with _framework_event_origin(): - pending_status = WorkflowEvent.status(WorkflowRunState.IN_PROGRESS_PENDING_REQUESTS) - yield pending_status + yield WorkflowEvent.status( + WorkflowRunState.IN_PROGRESS_PENDING_REQUESTS + ) + # Workflow runs until idle - emit 
final status based on whether requests are pending if saw_request: with _framework_event_origin(): - terminal_status = WorkflowEvent.status(WorkflowRunState.IDLE_WITH_PENDING_REQUESTS) + terminal_status = WorkflowEvent.status( + WorkflowRunState.IDLE_WITH_PENDING_REQUESTS + ) yield terminal_status else: with _framework_event_origin(): @@ -393,11 +408,9 @@ async def _run_workflow_with_tracing( # Surface structured failure details before propagating exception details = WorkflowErrorDetails.from_exception(exc) with _framework_event_origin(): - failed_event = WorkflowEvent.failed(details) - yield failed_event + yield WorkflowEvent.failed(details) with _framework_event_origin(): - failed_status = WorkflowEvent.status(WorkflowRunState.FAILED) - yield failed_status + yield WorkflowEvent.status(WorkflowRunState.FAILED) span.add_event( name=OtelAttr.WORKFLOW_ERROR, attributes={ @@ -438,7 +451,9 @@ async def _execute_with_message_or_checkpoint( "or build workflow with WorkflowBuilder(checkpoint_storage=checkpoint_storage)." ) - await self._runner.restore_from_checkpoint(checkpoint_id, checkpoint_storage) + await self._runner.restore_from_checkpoint( + checkpoint_id, checkpoint_storage + ) # Handle initial message elif message is not None: @@ -487,7 +502,9 @@ def run( checkpoint_storage: CheckpointStorage | None = None, include_status_events: bool = False, **kwargs: Any, - ) -> ResponseStream[WorkflowEvent, WorkflowRunResult] | Awaitable[WorkflowRunResult]: + ) -> ( + ResponseStream[WorkflowEvent, WorkflowRunResult] | Awaitable[WorkflowRunResult] + ): """Run the workflow, optionally streaming events. Unified interface supporting initial runs, checkpoint restoration, and @@ -531,7 +548,9 @@ def run( streaming=stream, **kwargs, ), - finalizer=functools.partial(self._finalize_events, include_status_events=include_status_events), + finalizer=functools.partial( + self._finalize_events, include_status_events=include_status_events + ), cleanup_hooks=[ functools.partial(self._run_cleanup, checkpoint_storage), ], @@ -635,10 +654,14 @@ def _validate_run_params( - responses + checkpoint_id is allowed (restore then send) """ if message is not None and responses is not None: - raise ValueError("Cannot provide both 'message' and 'responses'. Use one or the other.") + raise ValueError( + "Cannot provide both 'message' and 'responses'. Use one or the other." + ) if message is not None and checkpoint_id is not None: - raise ValueError("Cannot provide both 'message' and 'checkpoint_id'. Use one or the other.") + raise ValueError( + "Cannot provide both 'message' and 'checkpoint_id'. Use one or the other." 
+ ) if message is None and responses is None and checkpoint_id is None: raise ValueError( @@ -662,15 +685,23 @@ def _resolve_execution_mode( if checkpoint_id is not None: # Combined: restore checkpoint then send responses initial_executor_fn = functools.partial( - self._restore_and_send_responses, checkpoint_id, checkpoint_storage, responses + self._restore_and_send_responses, + checkpoint_id, + checkpoint_storage, + responses, ) else: # Send responses only (requires pending requests in workflow state) - initial_executor_fn = functools.partial(self._send_responses_internal, responses) + initial_executor_fn = functools.partial( + self._send_responses_internal, responses + ) return initial_executor_fn, False # Regular run or checkpoint restoration initial_executor_fn = functools.partial( - self._execute_with_message_or_checkpoint, message, checkpoint_id, checkpoint_storage + self._execute_with_message_or_checkpoint, + message, + checkpoint_id, + checkpoint_storage, ) reset_context = message is not None and checkpoint_id is None return initial_executor_fn, reset_context @@ -709,7 +740,9 @@ async def _send_responses_internal(self, responses: dict[str, Any]) -> None: coerced_responses: dict[str, Any] = {} for request_id, response in responses.items(): if request_id not in pending_requests: - raise ValueError(f"Response provided for unknown request ID: {request_id}") + raise ValueError( + f"Response provided for unknown request ID: {request_id}" + ) pending_request = pending_requests[request_id] # Try to coerce raw values (e.g., dicts from JSON) to the expected type response = try_coerce_to_type(response, pending_request.response_type) @@ -720,10 +753,12 @@ async def _send_responses_internal(self, responses: dict[str, Any]) -> None: ) coerced_responses[request_id] = response - await asyncio.gather(*[ - self._runner_context.send_request_info_response(request_id, response) - for request_id, response in coerced_responses.items() - ]) + await asyncio.gather( + *[ + self._runner_context.send_request_info_response(request_id, response) + for request_id, response in coerced_responses.items() + ] + ) def _get_executor_by_id(self, executor_id: str) -> Executor: """Get an executor by its ID. 
@@ -768,7 +803,9 @@ def _compute_graph_signature(self) -> dict[str, Any]: executors_signature = {} for executor_id, executor in self.executors.items(): - executor_sig: Any = f"{executor.__class__.__module__}.{executor.__class__.__name__}" + executor_sig: Any = ( + f"{executor.__class__.__module__}.{executor.__class__.__name__}" + ) if isinstance(executor, WorkflowExecutor): executor_sig = { @@ -798,7 +835,9 @@ def _compute_graph_signature(self) -> dict[str, Any]: } if isinstance(group, FanOutEdgeGroup): - group_info["selection_func"] = getattr(group, "selection_func_name", None) + group_info["selection_func"] = getattr( + group, "selection_func_name", None + ) edge_groups_signature.append(group_info) diff --git a/python/packages/core/pyproject.toml b/python/packages/core/pyproject.toml index 9d002453df..e41839cf12 100644 --- a/python/packages/core/pyproject.toml +++ b/python/packages/core/pyproject.toml @@ -24,19 +24,19 @@ classifiers = [ ] dependencies = [ # utilities - "typing-extensions", + "typing-extensions>=4.15.0,<5", "pydantic>=2,<3", "python-dotenv>=1,<2", # telemetry - "opentelemetry-api>=1.39.0", - "opentelemetry-sdk>=1.39.0", - "opentelemetry-semantic-conventions-ai>=0.4.13", + "opentelemetry-api>=1.39.0,<2", + "opentelemetry-sdk>=1.39.0,<2", + "opentelemetry-semantic-conventions-ai>=0.4.13,<0.4.14", # connectors and functions - "openai>=1.99.0", + "openai>=1.99.0,<3", "azure-identity>=1,<2", - "azure-ai-projects == 2.0.0b4", + "azure-ai-projects>=2.0.0b3,<2.0.0b4", "mcp[ws]>=1.24.0,<2", - "packaging>=24.1", + "packaging>=24.1,<26.0", ] [project.optional-dependencies] @@ -131,7 +131,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework" -test = "pytest -m \"not integration\" --cov=agent_framework --cov-report=term-missing:skip-covered -n auto --dist worksteal tests" +test = 'pytest -m "not integration" --cov=agent_framework --cov-report=term-missing:skip-covered -n auto --dist worksteal tests' [tool.flit.module] name = "agent_framework" diff --git a/python/packages/core/tests/azure/test_azure_embedding_client.py b/python/packages/core/tests/azure/test_azure_embedding_client.py new file mode 100644 index 0000000000..45922d5ff7 --- /dev/null +++ b/python/packages/core/tests/azure/test_azure_embedding_client.py @@ -0,0 +1,159 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from __future__ import annotations + +import os +from unittest.mock import AsyncMock, MagicMock + +import pytest +from openai.types import CreateEmbeddingResponse +from openai.types import Embedding as OpenAIEmbedding +from openai.types.create_embedding_response import Usage + +from agent_framework.azure import AzureOpenAIEmbeddingClient +from agent_framework.openai import OpenAIEmbeddingOptions + + +def _make_openai_response( + embeddings: list[list[float]], + model: str = "text-embedding-3-small", + prompt_tokens: int = 5, + total_tokens: int = 5, +) -> CreateEmbeddingResponse: + """Helper to create a mock OpenAI embeddings response.""" + data = [OpenAIEmbedding(embedding=emb, index=i, object="embedding") for i, emb in enumerate(embeddings)] + return CreateEmbeddingResponse( + data=data, + model=model, + object="list", + usage=Usage(prompt_tokens=prompt_tokens, total_tokens=total_tokens), + ) + + +@pytest.fixture +def azure_embedding_unit_test_env(monkeypatch: pytest.MonkeyPatch) -> None: + """Clear ambient Azure OpenAI embedding env vars for deterministic unit tests.""" + for key in ( + "AZURE_OPENAI_ENDPOINT", + "AZURE_OPENAI_API_KEY", + "AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME", + "AZURE_OPENAI_BASE_URL", + "AZURE_OPENAI_TOKEN_ENDPOINT", + ): + monkeypatch.delenv(key, raising=False) + + +def test_azure_construction_with_deployment_name(azure_embedding_unit_test_env: None) -> None: + client = AzureOpenAIEmbeddingClient( + deployment_name="text-embedding-3-small", + api_key="test-key", + endpoint="https://test.openai.azure.com/", + ) + assert client.model_id == "text-embedding-3-small" + + +def test_azure_construction_with_existing_client(azure_embedding_unit_test_env: None) -> None: + mock_client = MagicMock() + client = AzureOpenAIEmbeddingClient( + deployment_name="my-deployment", + async_client=mock_client, + ) + assert client.model_id == "my-deployment" + assert client.client is mock_client + + +def test_azure_construction_missing_deployment_name_raises(azure_embedding_unit_test_env: None) -> None: + with pytest.raises(ValueError, match="deployment name is required"): + AzureOpenAIEmbeddingClient( + api_key="test-key", + endpoint="https://test.openai.azure.com/", + ) + + +def test_azure_construction_missing_credentials_raises(azure_embedding_unit_test_env: None) -> None: + with pytest.raises(ValueError, match="api_key, credential, or a client"): + AzureOpenAIEmbeddingClient( + deployment_name="test", + endpoint="https://test.openai.azure.com/", + ) + + +async def test_azure_get_embeddings(azure_embedding_unit_test_env: None) -> None: + mock_response = _make_openai_response( + embeddings=[[0.1, 0.2]], + ) + mock_async_client = MagicMock() + mock_async_client.embeddings = MagicMock() + mock_async_client.embeddings.create = AsyncMock(return_value=mock_response) + + client = AzureOpenAIEmbeddingClient( + deployment_name="text-embedding-3-small", + async_client=mock_async_client, + ) + + result = await client.get_embeddings(["hello"]) + + assert len(result) == 1 + assert result[0].vector == [0.1, 0.2] + + +def test_azure_otel_provider_name(azure_embedding_unit_test_env: None) -> None: + mock_client = MagicMock() + client = AzureOpenAIEmbeddingClient( + deployment_name="test", + async_client=mock_client, + ) + assert client.OTEL_PROVIDER_NAME == "azure.ai.openai" + + +skip_if_azure_openai_integration_tests_disabled = pytest.mark.skipif( + not os.getenv("AZURE_OPENAI_ENDPOINT") + or (not os.getenv("AZURE_OPENAI_API_KEY") and not os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME")), + 
reason="No Azure OpenAI credentials provided; skipping integration tests.", +) + + +@skip_if_azure_openai_integration_tests_disabled +@pytest.mark.integration +@pytest.mark.flaky +async def test_integration_azure_openai_get_embeddings() -> None: + """End-to-end test of Azure OpenAI embedding generation.""" + client = AzureOpenAIEmbeddingClient() + + result = await client.get_embeddings(["hello world"]) + + assert len(result) == 1 + assert isinstance(result[0].vector, list) + assert len(result[0].vector) > 0 + assert all(isinstance(v, float) for v in result[0].vector) + assert result[0].model_id is not None + assert result.usage is not None + assert result.usage["input_token_count"] > 0 + + +@skip_if_azure_openai_integration_tests_disabled +@pytest.mark.integration +@pytest.mark.flaky +async def test_integration_azure_openai_get_embeddings_multiple() -> None: + """Test Azure OpenAI embedding generation for multiple inputs.""" + client = AzureOpenAIEmbeddingClient() + + result = await client.get_embeddings(["hello", "world", "test"]) + + assert len(result) == 3 + dims = [len(e.vector) for e in result] + assert all(d == dims[0] for d in dims) + + +@skip_if_azure_openai_integration_tests_disabled +@pytest.mark.integration +@pytest.mark.flaky +async def test_integration_azure_openai_get_embeddings_with_dimensions() -> None: + """Test Azure OpenAI embedding generation with custom dimensions.""" + client = AzureOpenAIEmbeddingClient() + + options: OpenAIEmbeddingOptions = {"dimensions": 256} + result = await client.get_embeddings(["hello world"], options=options) + + assert len(result) == 1 + assert len(result[0].vector) == 256 diff --git a/python/packages/core/tests/openai/test_openai_embedding_client.py b/python/packages/core/tests/openai/test_openai_embedding_client.py index 3ddb7538a6..01657414db 100644 --- a/python/packages/core/tests/openai/test_openai_embedding_client.py +++ b/python/packages/core/tests/openai/test_openai_embedding_client.py @@ -10,7 +10,6 @@ from openai.types import Embedding as OpenAIEmbedding from openai.types.create_embedding_response import Usage -from agent_framework.azure import AzureOpenAIEmbeddingClient from agent_framework.openai import ( OpenAIEmbeddingClient, OpenAIEmbeddingOptions, @@ -264,14 +263,9 @@ def test_azure_otel_provider_name() -> None: reason="No real OPENAI_API_KEY provided; skipping integration tests.", ) -skip_if_azure_openai_integration_tests_disabled = pytest.mark.skipif( - not os.getenv("AZURE_OPENAI_ENDPOINT") - or (not os.getenv("AZURE_OPENAI_API_KEY") and not os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME")), - reason="No Azure OpenAI credentials provided; skipping integration tests.", -) - @skip_if_openai_integration_tests_disabled +@pytest.mark.integration @pytest.mark.flaky @pytest.mark.integration async def test_integration_openai_get_embeddings() -> None: @@ -290,6 +284,7 @@ async def test_integration_openai_get_embeddings() -> None: @skip_if_openai_integration_tests_disabled +@pytest.mark.integration @pytest.mark.flaky @pytest.mark.integration async def test_integration_openai_get_embeddings_multiple() -> None: @@ -304,6 +299,7 @@ async def test_integration_openai_get_embeddings_multiple() -> None: @skip_if_openai_integration_tests_disabled +@pytest.mark.integration @pytest.mark.flaky @pytest.mark.integration async def test_integration_openai_get_embeddings_with_dimensions() -> None: diff --git a/python/packages/declarative/pyproject.toml b/python/packages/declarative/pyproject.toml index 2534339ad7..04ecd06786 100644 --- 
a/python/packages/declarative/pyproject.toml +++ b/python/packages/declarative/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "powerfx>=0.0.31; python_version < '3.14'", + "powerfx>=0.0.31,<0.0.32; python_version < '3.14'", "pyyaml>=6.0,<7.0", ] [dependency-groups] @@ -94,7 +94,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_declarative" -test = "pytest -m \"not integration\" --cov=agent_framework_declarative --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_declarative --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/devui/pyproject.toml b/python/packages/devui/pyproject.toml index a56cf1ab4f..f049b9cbb4 100644 --- a/python/packages/devui/pyproject.toml +++ b/python/packages/devui/pyproject.toml @@ -24,14 +24,14 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "fastapi>=0.104.0", - "uvicorn[standard]>=0.24.0", - "python-dotenv>=1.0.0", + "fastapi>=0.104.0,<0.133.1", + "uvicorn[standard]>=0.24.0,<0.24.1", + "python-dotenv>=1.0.0,<2", ] [project.optional-dependencies] -dev = ["pytest>=7.0.0", "watchdog>=3.0.0", "agent-framework-orchestrations"] -all = ["pytest>=7.0.0", "watchdog>=3.0.0"] +dev = ["pytest>=7.0.0,<9", "pytest-cov>=6.2.1,<7", "watchdog>=3.0.0,<7", "agent-framework-orchestrations"] +all = ["pytest>=7.0.0,<9", "pytest-cov>=6.2.1,<7", "watchdog>=3.0.0,<7"] [project.scripts] devui = "agent_framework_devui:main" @@ -94,7 +94,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_devui" -test = "pytest -m \"not integration\" --cov=agent_framework_devui --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_devui --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/durabletask/pyproject.toml b/python/packages/durabletask/pyproject.toml index 56493f3126..f63c7ce319 100644 --- a/python/packages/durabletask/pyproject.toml +++ b/python/packages/durabletask/pyproject.toml @@ -23,9 +23,9 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "durabletask>=1.3.0", - "durabletask-azuremanaged>=1.3.0", - "python-dateutil>=2.8.0", + "durabletask>=1.3.0,<2", + "durabletask-azuremanaged>=1.3.0,<2", + "python-dateutil>=2.8.0,<3", ] [dependency-groups] @@ -99,7 +99,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_durabletask" -test = "pytest -m \"not integration\" --cov=agent_framework_durabletask --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_durabletask --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/foundry_local/pyproject.toml b/python/packages/foundry_local/pyproject.toml index 97dd99f1ca..9cddbbd0d8 100644 --- a/python/packages/foundry_local/pyproject.toml +++ b/python/packages/foundry_local/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "foundry-local-sdk>=0.5.1,<1", + "foundry-local-sdk>=0.5.1,<0.5.2", ] [tool.uv] @@ -86,7 +86,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file 
$POE_ROOT/pyproject.toml agent_framework_foundry_local" -test = "pytest -m \"not integration\" --cov=agent_framework_foundry_local --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_foundry_local --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/github_copilot/pyproject.toml b/python/packages/github_copilot/pyproject.toml index 47069e34fa..50827ed94a 100644 --- a/python/packages/github_copilot/pyproject.toml +++ b/python/packages/github_copilot/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "github-copilot-sdk>=0.1.0", + "github-copilot-sdk>=0.1.10,<0.1.11", ] [tool.uv] @@ -87,7 +87,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_github_copilot" -test = "pytest -m \"not integration\" --cov=agent_framework_github_copilot --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_github_copilot --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/lab/pyproject.toml b/python/packages/lab/pyproject.toml index 17650293ac..ad6991d6de 100644 --- a/python/packages/lab/pyproject.toml +++ b/python/packages/lab/pyproject.toml @@ -28,30 +28,29 @@ dependencies = [ [project.optional-dependencies] # GAIA benchmark module dependencies gaia = [ - "pydantic>=2.0.0", - "opentelemetry-api>=1.39.0", - "tqdm>=4.60.0", - "huggingface-hub>=0.20.0", - "orjson>=3.8.0", - "pyarrow>=10.0.0", # For reading parquet files + "pydantic>=2,<4", + "tqdm>=4.60.0,<5", + "huggingface-hub>=0.20.0,<0.20.1", + "orjson>=3.8.0,<4", + "pyarrow", # For reading parquet files ] # Lightning RL training module dependencies lightning = [ - "agentlightning>=0.2.0,<0.3.0", + "agentlightning>=0.2.0,<0.2.1", ] # TAU2 benchmark module dependencies tau2 = [ - "pydantic>=2.0.0", - "tiktoken>=0.11.0", - "loguru>=0.7.3", - "numpy", + "pydantic>=2,<4", + "tiktoken>=0.11.0,<0.11.1", + "loguru>=0.7.3,<0.7.4", + "numpy>=2.2.6,<3", ] # Dependencies for math-related training math = [ - "sympy>=1.13.0", + "sympy>=1.13.0,<2", ] [dependency-groups] @@ -59,6 +58,7 @@ dev = [ "uv", "ruff>=0.11.8", "pytest>=8.4.1", + "pytest-cov>=6.2.1,<7", "mypy>=1.16.1", "pyright>=1.1.402", #tasks @@ -152,10 +152,10 @@ mypy-gaia = "mypy --config-file $POE_ROOT/pyproject.toml gaia/agent_framework_la mypy-lightning = "mypy --config-file $POE_ROOT/pyproject.toml lightning/agent_framework_lab_lightning" mypy-tau2 = "mypy --config-file $POE_ROOT/pyproject.toml tau2/agent_framework_lab_tau2" mypy = ["mypy-gaia", "mypy-lightning", "mypy-tau2"] -test = "pytest -m \"not integration\" --cov-report=term-missing:skip-covered --junitxml=test-results.xml" -test-gaia = "pytest -m \"not integration\" gaia/tests --cov=agent_framework_lab_gaia --cov-report=term-missing:skip-covered" -test-lightning = "pytest -m \"not integration\" lightning/tests --cov=agent_framework_lab_lightning --cov-report=term-missing:skip-covered" -test-tau2 = "pytest -m \"not integration\" tau2/tests --cov=agent_framework_lab_tau2 --cov-report=term-missing:skip-covered" +test = 'pytest -m "not integration" --cov-report=term-missing:skip-covered --junitxml=test-results.xml' +test-gaia = "pytest gaia/tests --cov=agent_framework_lab_gaia --cov-report=term-missing:skip-covered" +test-lightning = "pytest lightning/tests 
--cov=agent_framework_lab_lightning --cov-report=term-missing:skip-covered" +test-tau2 = "pytest tau2/tests --cov=agent_framework_lab_tau2 --cov-report=term-missing:skip-covered" build = "echo 'Skipping build'" publish = "echo 'Skipping publish'" diff --git a/python/packages/mem0/pyproject.toml b/python/packages/mem0/pyproject.toml index 506c4d75b1..048e7e6269 100644 --- a/python/packages/mem0/pyproject.toml +++ b/python/packages/mem0/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "mem0ai>=1.0.0", + "mem0ai>=1.0.0,<2", ] [tool.uv] @@ -87,7 +87,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_mem0" -test = "pytest -m \"not integration\" --cov=agent_framework_mem0 --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_mem0 --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/ollama/pyproject.toml b/python/packages/ollama/pyproject.toml index dd9ecaf46b..2671d0fa8c 100644 --- a/python/packages/ollama/pyproject.toml +++ b/python/packages/ollama/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "ollama >= 0.5.3", + "ollama>=0.5.3,<0.5.4", ] [tool.uv] @@ -90,7 +90,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_ollama" -test = "pytest -m \"not integration\" --cov=agent_framework_ollama --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_ollama --cov-report=term-missing:skip-covered tests' [tool.uv.build-backend] module-name = "agent_framework_ollama" diff --git a/python/packages/orchestrations/pyproject.toml b/python/packages/orchestrations/pyproject.toml index e15e02f3e3..d5af12b566 100644 --- a/python/packages/orchestrations/pyproject.toml +++ b/python/packages/orchestrations/pyproject.toml @@ -85,7 +85,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_orchestrations" -test = "pytest -m \"not integration\" --cov=agent_framework_orchestrations --cov-report=term-missing:skip-covered -n auto --dist worksteal tests" +test = 'pytest -m "not integration" --cov=agent_framework_orchestrations --cov-report=term-missing:skip-covered -n auto --dist worksteal tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/packages/purview/pyproject.toml b/python/packages/purview/pyproject.toml index f30b749435..e6eb512805 100644 --- a/python/packages/purview/pyproject.toml +++ b/python/packages/purview/pyproject.toml @@ -25,8 +25,8 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "azure-core>=1.30.0", - "httpx>=0.27.0", + "azure-core>=1.30.0,<2", + "httpx>=0.27.0,<0.29", ] [tool.uv] @@ -86,7 +86,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_purview" -test = "pytest -m \"not integration\" --cov=agent_framework_purview --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_purview --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.9,<4.0"] diff --git a/python/packages/redis/pyproject.toml b/python/packages/redis/pyproject.toml index 21aaf47865..42c7e88568 100644 --- 
a/python/packages/redis/pyproject.toml +++ b/python/packages/redis/pyproject.toml @@ -24,9 +24,9 @@ classifiers = [ ] dependencies = [ "agent-framework-core>=1.0.0rc3", - "redis>=6.4.0", - "redisvl>=0.8.2", - "numpy>=2.2.6" + "redis>=6.4.0,<7.2.1", + "redisvl>=0.8.2,<0.8.3", + "numpy>=2.2.6,<3" ] [tool.uv] @@ -89,7 +89,7 @@ include = "../../shared_tasks.toml" [tool.poe.tasks] mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_redis" -test = "pytest -m \"not integration\" --cov=agent_framework_redis --cov-report=term-missing:skip-covered tests" +test = 'pytest -m "not integration" --cov=agent_framework_redis --cov-report=term-missing:skip-covered tests' [build-system] requires = ["flit-core >= 3.11,<4.0"] diff --git a/python/pyproject.toml b/python/pyproject.toml index 9f4ca3c08c..da2d3f34b7 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -223,6 +223,7 @@ pyright = "python scripts/run_tasks_in_packages_if_exists.py pyright" mypy = "python scripts/run_tasks_in_packages_if_exists.py mypy" samples-syntax = "pyright -p pyrightconfig.samples.json --warnings" typing = ["pyright", "mypy"] +validate-dependency-ranges = "python scripts/validate_dependency_ranges.py" # cleaning clean-dist-packages = "python scripts/run_tasks_in_packages_if_exists.py clean-dist" clean-dist-meta = "rm -rf dist" diff --git a/python/scripts/validate_dependency_ranges.py b/python/scripts/validate_dependency_ranges.py new file mode 100644 index 0000000000..b93c3e426b --- /dev/null +++ b/python/scripts/validate_dependency_ranges.py @@ -0,0 +1,1036 @@ +# Copyright (c) Microsoft. All rights reserved. +# ruff: noqa: INP001, S404, S603 + +"""Raise dependency upper bounds, validate, and persist the latest passing set.""" + +from __future__ import annotations + +import argparse +import concurrent.futures +import json +import os +import shutil +import subprocess +import tempfile +import threading +from dataclasses import dataclass +from datetime import UTC, datetime +from pathlib import Path +from urllib import error as urllib_error +from urllib import request as urllib_request + +import tomli +from packaging.version import InvalidVersion, Version +from rich import print +from task_runner import discover_projects, extract_poe_tasks + +CHECK_TASK_PRIORITY = ("check", "typing", "pyright", "mypy", "lint") +REQ_PATTERN = r"^\s*([A-Za-z0-9_.-]+(?:\[[^\]]+\])?)\s*(.*?)\s*$" + + +@dataclass +class RequirementEntry: + """A parsed requirement entry from pyproject dependencies.""" + + raw: str + name: str + name_extras: str + marker: str | None + spec_parts: list[str] + lower_version: Version | None + upper_index: int | None + upper_version: Version | None + exact_index: int | None = None + exact_version: Version | None = None + + def with_upper(self, upper: Version) -> str: + """Return a new requirement with the given exclusive upper bound.""" + updated_parts = list(self.spec_parts) + if self.exact_index is not None and self.exact_version is not None: + updated_parts[self.exact_index] = f">={self.exact_version}" + if self.upper_index is not None: + updated_parts[self.upper_index] = f"<{upper}" + else: + updated_parts.append(f"<{upper}") + elif self.upper_index is not None: + updated_parts[self.upper_index] = f"<{upper}" + else: + raise ValueError(f"Requirement has no mutable bound information: {self.raw}") + spec = ",".join(updated_parts) + requirement = f"{self.name_extras}{spec}" + if self.marker: + requirement += f"; {self.marker}" + return requirement + + +@dataclass +class DependencyTarget: + """A 
dependency to optimize within one package.""" + + name: str + entries: list[RequirementEntry] + lower_version: Version | None + upper_version: Version + allow_prerelease_candidates: bool + + @property + def original_requirements(self) -> list[str]: + """Return original requirement strings for this dependency group.""" + return [entry.raw for entry in self.entries] + + +@dataclass +class DependencyAttempt: + """A single upper-bound trial for one dependency.""" + + trial_upper: str + status: str + error: str | None = None + + +@dataclass +class DependencyOutcome: + """Final outcome for one dependency optimization.""" + + name: str + changed: bool + original_requirements: list[str] + final_requirements: list[str] + candidate_versions: list[str] + attempted_versions: list[str] + attempts: list[DependencyAttempt] + skipped_reason: str | None = None + + +@dataclass +class PackagePlan: + """Execution plan for a package.""" + + project_path: Path + package_name: str + pyproject_path: Path + internal_editables: list[Path] + include_dev_group: bool + include_dev_extra: bool + + +@dataclass +class PackageOutcome: + """Execution outcome for a package.""" + + project_path: str + package_name: str + tasks: list[str] + changed: bool + dependencies: list[DependencyOutcome] + replacements: dict[str, str] + skipped: list[str] + error: str | None = None + + +def _utc_now() -> str: + return datetime.now(UTC).isoformat() + + +def _truncate_error(stdout: str, stderr: str, *, max_chars: int = 2000) -> str: + combined = "\n".join(part for part in [stderr.strip(), stdout.strip()] if part) + if len(combined) <= max_chars: + return combined + return f"...\n{combined[-max_chars:]}" + + +def _parse_requirement(requirement: str) -> RequirementEntry | None: + import re + + match = re.match(REQ_PATTERN, requirement) + if not match: + return None + name_extras = match.group(1) + rest = match.group(2).strip() + marker = None + if ";" in rest: + spec_part, marker_part = rest.split(";", 1) + spec = spec_part.strip() + marker = marker_part.strip() + else: + spec = rest + if not spec: + return None + + spec_parts = [part.strip() for part in spec.split(",") if part.strip()] + if not spec_parts: + return None + + lower_version: Version | None = None + upper_version: Version | None = None + upper_index: int | None = None + exact_version: Version | None = None + exact_index: int | None = None + + for index, part in enumerate(spec_parts): + if part.startswith((">=", ">")): + raw_version = part[2:].strip() if part.startswith(">=") else part[1:].strip() + try: + parsed = Version(raw_version) + except InvalidVersion: + continue + if lower_version is None or parsed > lower_version: + lower_version = parsed + elif part.startswith(("==", "===")): + raw_version = part[3:].strip() if part.startswith("===") else part[2:].strip() + try: + parsed = Version(raw_version) + except InvalidVersion: + continue + exact_version = parsed + exact_index = index + if lower_version is None or parsed > lower_version: + lower_version = parsed + if part.startswith(("<", "<=")): + raw_version = part[2:].strip() if part.startswith("<=") else part[1:].strip() + try: + parsed = Version(raw_version) + except InvalidVersion: + continue + if upper_version is None or parsed < upper_version: + upper_version = parsed + upper_index = index + + if upper_version is None and exact_version is None: + return None + name = name_extras.split("[", 1)[0].lower() + return RequirementEntry( + raw=requirement, + name=name, + name_extras=name_extras, + marker=marker, + 
spec_parts=spec_parts, + lower_version=lower_version, + upper_index=upper_index, + upper_version=upper_version, + exact_index=exact_index, + exact_version=exact_version, + ) + + +def _replace_requirements(path: Path, replacements: list[tuple[str, str]]) -> None: + text = path.read_text() + updated_text = text + for old, new in replacements: + replaced = False + old_double = f'"{old}"' + old_single = f"'{old}'" + new_double = f'"{new}"' + new_single = f"'{new}'" + if old_double in updated_text: + updated_text = updated_text.replace(old_double, new_double) + replaced = True + if old_single in updated_text: + updated_text = updated_text.replace(old_single, new_single) + replaced = True + if not replaced: + raise ValueError(f"Could not find dependency string in {path}: {old}") + if updated_text != text: + path.write_text(updated_text) + + +def _load_lock_versions(workspace_root: Path) -> dict[str, list[Version]]: + lock_file = workspace_root / "uv.lock" + if not lock_file.exists(): + return {} + with lock_file.open("rb") as f: + lock_data = tomli.load(f) + versions_by_name: dict[str, set[Version]] = {} + for package_data in lock_data.get("package", []): + package_name = str(package_data.get("name", "")).lower() + package_version = package_data.get("version") + if not package_name or not package_version: + continue + try: + parsed = Version(str(package_version)) + except InvalidVersion: + continue + versions_by_name.setdefault(package_name, set()).add(parsed) + return {name: sorted(values) for name, values in versions_by_name.items()} + + +class VersionCatalog: + """Cache and fetch available dependency versions.""" + + def __init__(self, lock_versions: dict[str, list[Version]], source: str) -> None: + """Initialize the catalog with lock-based fallback and fetch source.""" + self._lock_versions = lock_versions + self._source = source + self._cache: dict[str, list[Version]] = {} + self._lock = threading.Lock() + + def get(self, package_name: str) -> list[Version]: + """Return cached or fetched versions for a package name.""" + with self._lock: + cached = self._cache.get(package_name) + if cached is not None: + return cached + versions = self._fetch(package_name) + with self._lock: + self._cache[package_name] = versions + return versions + + def _fetch(self, package_name: str) -> list[Version]: + if self._source == "lock": + return self._lock_versions.get(package_name, []) + + try: + url = f"https://pypi.org/pypi/{package_name}/json" + with urllib_request.urlopen(url, timeout=20) as response: + payload = json.load(response) + except (urllib_error.URLError, TimeoutError, json.JSONDecodeError): + return self._lock_versions.get(package_name, []) + + versions: set[Version] = set() + for raw_version, files in payload.get("releases", {}).items(): + if not files: + continue + non_yanked = any(not bool(file_info.get("yanked", False)) for file_info in files) + if not non_yanked: + continue + try: + versions.add(Version(raw_version)) + except InvalidVersion: + continue + if versions: + return sorted(versions) + return self._lock_versions.get(package_name, []) + + +def _load_package_name(pyproject_file: Path) -> str: + with pyproject_file.open("rb") as f: + data = tomli.load(f) + return str(data["project"]["name"]) + + +def _select_validation_tasks(available_tasks: set[str]) -> list[str]: + check_task = next((task for task in CHECK_TASK_PRIORITY if task in available_tasks), None) + tasks: list[str] = [] + if check_task: + tasks.append(check_task) + if "test" in available_tasks and "test" not in tasks: + 
tasks.append("test") + return tasks + + +def _build_workspace_package_map(workspace_root: Path) -> dict[str, Path]: + package_map: dict[str, Path] = {} + for pyproject_file in sorted((workspace_root / "packages").glob("*/pyproject.toml")): + with pyproject_file.open("rb") as f: + data = tomli.load(f) + package_name = str(data.get("project", {}).get("name", "")).strip() + if package_name: + package_map[package_name] = pyproject_file.parent + return package_map + + +def _build_internal_graph(workspace_root: Path, package_map: dict[str, Path]) -> dict[str, set[str]]: + graph: dict[str, set[str]] = {} + for package_name, package_path in package_map.items(): + pyproject_file = package_path / "pyproject.toml" + with pyproject_file.open("rb") as f: + data = tomli.load(f) + dependencies = data.get("project", {}).get("dependencies", []) or [] + internal = set() + for dependency in dependencies: + parsed = _parse_requirement(dependency) + if not parsed: + continue + if parsed.name.startswith("agent-framework"): + for candidate_name in package_map: + if candidate_name.lower() == parsed.name: + internal.add(candidate_name) + break + graph[package_name] = internal + return graph + + +def _resolve_internal_editables( + package_name: str, package_map: dict[str, Path], graph: dict[str, set[str]] +) -> list[Path]: + visited: set[str] = set() + stack = [package_name] + results: set[Path] = set() + while stack: + current = stack.pop() + if current in visited: + continue + visited.add(current) + for dependency_name in graph.get(current, set()): + dependency_path = package_map.get(dependency_name) + if dependency_path and dependency_name != package_name: + results.add(dependency_path.resolve()) + stack.append(dependency_name) + return sorted(results) + + +def _collect_targets( + pyproject_file: Path, + *, + dependency_filters: set[str] | None, +) -> tuple[list[DependencyTarget], list[str]]: + with pyproject_file.open("rb") as f: + data = tomli.load(f) + project = data.get("project", {}) + dependencies: list[str] = list(project.get("dependencies", []) or []) + for values in (project.get("optional-dependencies", {}) or {}).values(): + dependencies.extend(values or []) + + grouped: dict[str, list[RequirementEntry]] = {} + skipped: list[str] = [] + + for dependency in dependencies: + parsed = _parse_requirement(dependency) + if not parsed: + continue + if parsed.name.startswith("agent-framework"): + continue + if dependency_filters and parsed.name not in dependency_filters: + continue + grouped.setdefault(parsed.name, []).append(parsed) + + targets: list[DependencyTarget] = [] + for dependency_name, entries in sorted(grouped.items()): + if not entries: + continue + allow_prerelease_candidates = any( + ( + (entry.lower_version is not None and entry.lower_version.is_prerelease) + or (entry.upper_version is not None and entry.upper_version.is_prerelease) + or (entry.exact_version is not None and entry.exact_version.is_prerelease) + ) + for entry in entries + ) + upper_entries = [entry for entry in entries if entry.upper_version is not None] + exact_entries = [entry for entry in entries if entry.exact_version is not None] + + if upper_entries: + if len(upper_entries) != len(entries): + skipped.append(f"{dependency_name}: mixed bounded and unbounded/exact requirements in package") + continue + first_upper = upper_entries[0].upper_version + if first_upper is None: + skipped.append(f"{dependency_name}: missing upper bound value") + continue + if any(entry.upper_version != first_upper for entry in upper_entries[1:]): + 
skipped.append(f"{dependency_name}: conflicting upper bounds in package") + continue + lower_versions = [entry.lower_version for entry in entries if entry.lower_version is not None] + lower = max(lower_versions) if lower_versions else None + targets.append( + DependencyTarget( + name=dependency_name, + entries=entries, + lower_version=lower, + upper_version=first_upper, + allow_prerelease_candidates=allow_prerelease_candidates, + ) + ) + continue + + if exact_entries and len(exact_entries) == len(entries): + first_exact = exact_entries[0].exact_version + if first_exact is None: + skipped.append(f"{dependency_name}: missing exact version value") + continue + if any(entry.exact_version != first_exact for entry in exact_entries[1:]): + skipped.append(f"{dependency_name}: conflicting exact pins in package") + continue + targets.append( + DependencyTarget( + name=dependency_name, + entries=entries, + lower_version=first_exact, + upper_version=first_exact, + allow_prerelease_candidates=allow_prerelease_candidates, + ) + ) + continue + + skipped.append(f"{dependency_name}: no usable upper or exact bound to optimize") + return targets, skipped + + +def _build_trial_bounds( + versions: list[Version], + *, + lower: Version | None, + current_upper: Version, + allow_prerelease: bool, + max_candidates: int, +) -> list[Version]: + if lower is not None and lower.is_prerelease: + if lower.pre is not None: + pre_tag, pre_num = lower.pre + next_prerelease = Version(f"{lower.base_version}{pre_tag}{pre_num + 1}") + elif lower.dev is not None: + next_prerelease = Version(f"{lower.base_version}.dev{lower.dev + 1}") + else: + next_prerelease = None + if next_prerelease is None: + return [] + return [version for version in versions if version == next_prerelease and version > current_upper] + + if lower is not None and lower.major == 0: + candidates = [ + version + for version in versions + if version > current_upper and version.major == 0 and version.minor == lower.minor and version > lower + ] + if not allow_prerelease: + candidates = [version for version in candidates if not version.is_prerelease] + candidates.sort(reverse=True) + if max_candidates > 0: + return candidates[:max_candidates] + return candidates + + candidates = [version for version in versions if version > current_upper and (lower is None or version > lower)] + # `packaging` treats .dev/.a/.b/.rc as prereleases; only probe them when current spec already uses them. 
+    if not allow_prerelease:
+        candidates = [version for version in candidates if not version.is_prerelease]
+    candidates.sort(reverse=True)
+    if max_candidates > 0:
+        return candidates[:max_candidates]
+    return candidates
+
+
+def _run_tasks(
+    project_dir: Path,
+    *,
+    tasks: list[str],
+    internal_editables: list[Path],
+    resolution: str,
+    dependency_pin: tuple[str, Version] | None,
+    include_dev_group: bool,
+    include_dev_extra: bool,
+    timeout_seconds: int,
+) -> tuple[bool, str | None]:
+    env = dict(os.environ)
+    env["UV_PRERELEASE"] = "allow"
+    for task_name in tasks:
+        # Each task runs through `uv run --isolated` so the trial bounds are resolved
+        # from scratch instead of reusing the existing workspace environment.
+        command = [
+            "uv",
+            "--no-progress",
+            "--directory",
+            str(project_dir),
+            "run",
+            "--isolated",
+            "--resolution",
+            resolution,
+            "--prerelease",
+            "allow",
+            "--quiet",
+        ]
+        if include_dev_group:
+            command.extend(["--group", "dev"])
+        if include_dev_extra:
+            command.extend(["--extra", "dev"])
+        for editable_path in internal_editables:
+            command.extend(["--with-editable", str(editable_path)])
+        if dependency_pin is not None:
+            dependency_name, dependency_version = dependency_pin
+            command.extend(["--with", f"{dependency_name}=={dependency_version}"])
+        command.extend(["poe", task_name])
+        try:
+            result = subprocess.run(
+                command,
+                capture_output=True,
+                text=True,
+                timeout=timeout_seconds,
+                check=False,
+                env=env,
+            )
+        except subprocess.TimeoutExpired:
+            return False, f"Timeout while running task '{task_name}'."
+        if result.returncode != 0:
+            return (
+                False,
+                f"Task '{task_name}' failed.\n{_truncate_error(result.stdout, result.stderr)}",
+            )
+    # All selected tasks passed.
+    return True, None
+
+
+def _optimize_dependency(
+    *,
+    temp_pyproject: Path,
+    dependency: DependencyTarget,
+    available_versions: list[Version],
+    tasks: list[str],
+    internal_editables: list[Path],
+    dry_run: bool,
+    max_candidates: int,
+    timeout_seconds: int,
+    package_label: str,
+    include_dev_group: bool,
+    include_dev_extra: bool,
+) -> DependencyOutcome:
+    candidates = _build_trial_bounds(
+        available_versions,
+        lower=dependency.lower_version,
+        current_upper=dependency.upper_version,
+        allow_prerelease=dependency.allow_prerelease_candidates,
+        max_candidates=max_candidates,
+    )
+    candidate_versions = [str(candidate) for candidate in candidates]
+    current_requirements = list(dependency.original_requirements)
+    attempted_versions: list[str] = []
+    attempts: list[DependencyAttempt] = []
+
+    in_range_versions = [
+        version
+        for version in available_versions
+        if (dependency.lower_version is None or version >= dependency.lower_version)
+        and (dependency.upper_version is None or version < dependency.upper_version)
+    ]
+    if not dependency.allow_prerelease_candidates:
+        in_range_versions = [version for version in in_range_versions if not version.is_prerelease]
+    # Baseline probes: confirm the currently declared range passes at its low and
+    # high ends before widening it; if the baseline fails, wider bounds prove nothing.
+    baseline_trials: list[tuple[str, Version, str]] = []
+    if dependency.upper_version is not None and dependency.lower_version == dependency.upper_version:
+        baseline_trials.append(("current_fixed", dependency.upper_version, "highest"))
+    else:
+        if dependency.lower_version is not None:
+            lower_probe = next(
+                (version for version in in_range_versions if version >= dependency.lower_version),
+                dependency.lower_version,
+            )
+            baseline_trials.append(("current_lower", lower_probe, "lowest-direct"))
+        if dependency.upper_version is not None:
+            upper_probe = in_range_versions[-1] if in_range_versions else dependency.upper_version
+            baseline_trials.append(("current_upper", upper_probe, "highest"))
+
+    for baseline_name, baseline_version, baseline_resolution in baseline_trials:
attempted_versions.append(str(baseline_version)) + print( + f"[cyan]{package_label} :: {dependency.name} :: baseline {baseline_name} " + f"({baseline_resolution}) [{baseline_version}] [/cyan]" + ) + if dry_run: + attempts.append( + DependencyAttempt( + trial_upper=str(baseline_version), + status=f"{baseline_name}_dry_run_pass", + ) + ) + continue + + success, error = _run_tasks( + temp_pyproject.parent, + tasks=tasks, + internal_editables=internal_editables, + resolution=baseline_resolution, + dependency_pin=(dependency.name, baseline_version), + include_dev_group=include_dev_group, + include_dev_extra=include_dev_extra, + timeout_seconds=timeout_seconds, + ) + if success: + attempts.append( + DependencyAttempt( + trial_upper=str(baseline_version), + status=f"{baseline_name}_passed", + ) + ) + continue + + attempts.append( + DependencyAttempt( + trial_upper=str(baseline_version), + status="failed", + error=error, + ) + ) + return DependencyOutcome( + name=dependency.name, + changed=False, + original_requirements=dependency.original_requirements, + final_requirements=dependency.original_requirements, + candidate_versions=candidate_versions, + attempted_versions=attempted_versions, + attempts=attempts, + skipped_reason=f"Baseline validation failed at {baseline_name}.", + ) + + if not candidates: + return DependencyOutcome( + name=dependency.name, + changed=False, + original_requirements=dependency.original_requirements, + final_requirements=dependency.original_requirements, + candidate_versions=[], + attempted_versions=attempted_versions, + attempts=attempts, + skipped_reason="No higher candidate bounds found.", + ) + + for candidate in candidates: + attempted_versions.append(str(candidate)) + trial_requirements = [entry.with_upper(candidate) for entry in dependency.entries] + replacements = list(zip(current_requirements, trial_requirements, strict=True)) + _replace_requirements(temp_pyproject, [(old, new) for old, new in replacements]) + + print(f"[cyan]{package_label} :: {dependency.name} -> <{candidate}[/cyan]") + if dry_run: + attempts.append(DependencyAttempt(trial_upper=str(candidate), status="dry_run_pass")) + current_requirements = trial_requirements + break + + success, error = _run_tasks( + temp_pyproject.parent, + tasks=tasks, + internal_editables=internal_editables, + resolution="highest", + dependency_pin=None, + include_dev_group=include_dev_group, + include_dev_extra=include_dev_extra, + timeout_seconds=timeout_seconds, + ) + if success: + attempts.append(DependencyAttempt(trial_upper=str(candidate), status="passed")) + current_requirements = trial_requirements + break + + attempts.append(DependencyAttempt(trial_upper=str(candidate), status="failed", error=error)) + _replace_requirements(temp_pyproject, [(new, old) for old, new in replacements]) + continue + + changed = current_requirements != dependency.original_requirements + return DependencyOutcome( + name=dependency.name, + changed=changed, + original_requirements=dependency.original_requirements, + final_requirements=current_requirements, + candidate_versions=candidate_versions, + attempted_versions=attempted_versions, + attempts=attempts, + ) + + +def _process_package( + plan: PackagePlan, + *, + catalog: VersionCatalog, + dependency_filters: set[str] | None, + dry_run: bool, + max_candidates: int, + timeout_seconds: int, +) -> PackageOutcome: + pyproject_file = plan.pyproject_path + source_workspace_root = pyproject_file.parent.parent.parent.resolve() + available_tasks = extract_poe_tasks(pyproject_file) + tasks = 
_select_validation_tasks(available_tasks) + if not tasks: + return PackageOutcome( + project_path=str(plan.project_path), + package_name=plan.package_name, + tasks=[], + changed=False, + dependencies=[], + replacements={}, + skipped=["No check/test task combination found."], + ) + + targets, skipped = _collect_targets(pyproject_file, dependency_filters=dependency_filters) + if not targets: + return PackageOutcome( + project_path=str(plan.project_path), + package_name=plan.package_name, + tasks=tasks, + changed=False, + dependencies=[], + replacements={}, + skipped=[*skipped, "No eligible dependencies with upper bounds."], + ) + + with tempfile.TemporaryDirectory(prefix=f"dep-range-{plan.project_path.name}-") as temp_dir: + temp_root = Path(temp_dir) + temp_workspace_root = temp_root / source_workspace_root.name + shutil.copytree( + source_workspace_root, + temp_workspace_root, + ignore=shutil.ignore_patterns( + ".git", + ".venv", + "__pycache__", + ".pytest_cache", + ".mypy_cache", + ".ruff_cache", + "node_modules", + "dist", + ), + ) + + temp_packages_dir = temp_workspace_root / "packages" + if temp_packages_dir.exists(): + for package_dir in temp_packages_dir.iterdir(): + if package_dir.is_dir() and not (package_dir / "pyproject.toml").exists(): + shutil.rmtree(package_dir) + + temp_project_dir = temp_workspace_root / plan.project_path + temp_pyproject = temp_project_dir / "pyproject.toml" + temp_internal_editables: list[Path] = [] + for editable in plan.internal_editables: + try: + relative_editable = editable.resolve().relative_to(source_workspace_root) + except ValueError: + continue + candidate = temp_workspace_root / relative_editable + if candidate.exists(): + temp_internal_editables.append(candidate) + + dependency_results: list[DependencyOutcome] = [] + replacements: dict[str, str] = {} + package_label = f"{plan.project_path} ({plan.package_name})" + + for target in targets: + versions = catalog.get(target.name) + outcome = _optimize_dependency( + temp_pyproject=temp_pyproject, + dependency=target, + available_versions=versions, + tasks=tasks, + internal_editables=temp_internal_editables, + dry_run=dry_run, + max_candidates=max_candidates, + timeout_seconds=timeout_seconds, + package_label=package_label, + include_dev_group=plan.include_dev_group, + include_dev_extra=plan.include_dev_extra, + ) + dependency_results.append(outcome) + if outcome.changed: + for old, new in zip(outcome.original_requirements, outcome.final_requirements, strict=True): + replacements[old] = new + + return PackageOutcome( + project_path=str(plan.project_path), + package_name=plan.package_name, + tasks=tasks, + changed=bool(replacements), + dependencies=dependency_results, + replacements=replacements, + skipped=skipped, + ) + + +def _write_json(path: Path, payload: dict) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(json.dumps(payload, indent=2, sort_keys=False)) + + +def _to_json(package_outcome: PackageOutcome) -> dict: + return { + "project_path": package_outcome.project_path, + "package_name": package_outcome.package_name, + "tasks": package_outcome.tasks, + "changed": package_outcome.changed, + "skipped": package_outcome.skipped, + "error": package_outcome.error, + "dependencies": [ + { + "name": dependency.name, + "changed": dependency.changed, + "original_requirements": dependency.original_requirements, + "final_requirements": dependency.final_requirements, + "candidate_versions": dependency.candidate_versions, + "attempted_versions": dependency.attempted_versions, + 
"skipped_reason": dependency.skipped_reason, + "attempts": [ + { + "trial_upper": attempt.trial_upper, + "status": attempt.status, + "error": attempt.error, + } + for attempt in dependency.attempts + ], + } + for dependency in package_outcome.dependencies + ], + } + + +def _apply_package_replacements(path: Path, replacements: dict[str, str]) -> None: + if not replacements: + return + _replace_requirements(path, list(replacements.items())) + + +def main() -> None: + """Run package-by-package dependency upper-bound discovery and updates.""" + parser = argparse.ArgumentParser( + description=( + "Raise dependency upper bounds per package, run check+test in isolated uv envs, " + "and write a JSON report while updating pyproject files." + ) + ) + parser.add_argument( + "--packages", + nargs="*", + default=None, + help="Optional package filters by workspace path (e.g., packages/core) or package name.", + ) + parser.add_argument( + "--dependencies", + nargs="*", + default=None, + help="Optional dependency-name filters (normalized to lowercase).", + ) + parser.add_argument( + "--parallelism", + type=int, + default=max(1, min(os.cpu_count() or 4, 8)), + help="Number of packages to process concurrently.", + ) + parser.add_argument( + "--max-candidates", + type=int, + default=0, + help="Maximum candidate upper bounds per dependency (0 = no limit).", + ) + parser.add_argument( + "--output-json", + default="scripts/dependency-range-results.json", + help="Path to incremental JSON output report.", + ) + parser.add_argument( + "--version-source", + choices=("pypi", "lock"), + default="pypi", + help="Version source for candidate upper bounds.", + ) + parser.add_argument( + "--timeout-seconds", + type=int, + default=1200, + help="Timeout per task command execution.", + ) + parser.add_argument("--dry-run", action="store_true", help="Do not execute uv commands or update pyprojects.") + args = parser.parse_args() + + workspace_pyproject = Path(__file__).parent.parent / "pyproject.toml" + workspace_root = workspace_pyproject.parent + package_filters = set(args.packages) if args.packages else None + dependency_filters = {name.lower() for name in args.dependencies} if args.dependencies else None + output_json_path = (workspace_root / args.output_json).resolve() + + package_map = _build_workspace_package_map(workspace_root) + internal_graph = _build_internal_graph(workspace_root, package_map) + lock_versions = _load_lock_versions(workspace_root) + catalog = VersionCatalog(lock_versions=lock_versions, source=args.version_source) + + plans: list[PackagePlan] = [] + for project_path in sorted(set(discover_projects(workspace_pyproject))): + pyproject_file = workspace_root / project_path / "pyproject.toml" + if not pyproject_file.exists(): + print(f"[yellow]Skipping {project_path}: missing pyproject.toml[/yellow]") + continue + package_name = _load_package_name(pyproject_file) + with pyproject_file.open("rb") as f: + package_config = tomli.load(f) + project_section = package_config.get("project", {}) + optional_dependencies = project_section.get("optional-dependencies", {}) or {} + dependency_groups = package_config.get("dependency-groups", {}) or {} + if package_filters and str(project_path) not in package_filters and package_name not in package_filters: + continue + plans.append( + PackagePlan( + project_path=project_path, + package_name=package_name, + pyproject_path=pyproject_file, + internal_editables=_resolve_internal_editables(package_name, package_map, internal_graph), + include_dev_group="dev" in 
dependency_groups, + include_dev_extra="dev" in optional_dependencies, + ) + ) + + if not plans: + print("[yellow]No packages matched the selection.[/yellow]") + return + + report: dict = { + "started_at": _utc_now(), + "workspace_root": str(workspace_root), + "version_source": args.version_source, + "dry_run": args.dry_run, + "packages": [], + "summary": { + "packages_total": len(plans), + "packages_changed": 0, + "dependencies_changed": 0, + "dependencies_failed": 0, + }, + } + _write_json(output_json_path, report) + print(f"[cyan]Writing dependency-range report to {output_json_path}[/cyan]") + + package_outcomes: list[PackageOutcome] = [] + with concurrent.futures.ThreadPoolExecutor(max_workers=max(1, args.parallelism)) as executor: + future_to_plan = { + executor.submit( + _process_package, + plan, + catalog=catalog, + dependency_filters=dependency_filters, + dry_run=args.dry_run, + max_candidates=args.max_candidates, + timeout_seconds=args.timeout_seconds, + ): plan + for plan in plans + } + + for future in concurrent.futures.as_completed(future_to_plan): + plan = future_to_plan[future] + try: + outcome = future.result() + except Exception as exc: + outcome = PackageOutcome( + project_path=str(plan.project_path), + package_name=plan.package_name, + tasks=[], + changed=False, + dependencies=[], + replacements={}, + skipped=[], + error=str(exc), + ) + package_outcomes.append(outcome) + + if outcome.changed and not args.dry_run: + _apply_package_replacements(plan.pyproject_path, outcome.replacements) + + report["packages"].append(_to_json(outcome)) + report["summary"]["packages_changed"] = sum(1 for value in package_outcomes if value.changed) + report["summary"]["dependencies_changed"] = sum( + 1 for value in package_outcomes for dependency in value.dependencies if dependency.changed + ) + report["summary"]["dependencies_failed"] = sum( + 1 + for value in package_outcomes + for dependency in value.dependencies + for attempt in dependency.attempts + if attempt.status == "failed" + ) + report["updated_at"] = _utc_now() + _write_json(output_json_path, report) + + if outcome.error: + print(f"[red]{plan.project_path}: package execution error[/red]") + elif outcome.changed: + print(f"[green]{plan.project_path}: updated dependency bounds[/green]") + else: + print(f"[yellow]{plan.project_path}: no changes[/yellow]") + + print( + "[bold]Done.[/bold] " + f"packages_changed={report['summary']['packages_changed']}, " + f"dependencies_changed={report['summary']['dependencies_changed']}, " + f"failed_attempts={report['summary']['dependencies_failed']}" + ) + + +if __name__ == "__main__": + main() diff --git a/python/uv.lock b/python/uv.lock index 7233077c30..5691e953e4 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -60,7 +60,7 @@ overrides = [ [[package]] name = "a2a-sdk" -version = "0.3.24" +version = "0.3.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -69,9 +69,9 @@ dependencies = [ { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/76/cefa956fb2d3911cb91552a1da8ce2dbb339f1759cb475e2982f0ae2332b/a2a_sdk-0.3.24.tar.gz", hash = "sha256:3581e6e8a854cd725808f5732f90b7978e661b6d4e227a4755a8f063a3c1599d", size = 255550, 
upload-time = "2026-02-20T10:05:43.423Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/0d/12ebef081b096ca5fafd1ec8cc589739abba07b46ae7899c7420e599f2a6/a2a_sdk-0.3.5.tar.gz", hash = "sha256:48cf37dedeb63cf0a072512221a12ed4b3950df695c9d65eadb839a99392c3e5", size = 222064, upload-time = "2025-09-08T17:30:35.826Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/6e/cae5f0caea527b39c0abd7204d9416768764573c76649ca03cc345a372be/a2a_sdk-0.3.24-py3-none-any.whl", hash = "sha256:7b248767096bb55311f57deebf6b767349388d94c1b376c60cb8f6b715e053f6", size = 145752, upload-time = "2026-02-20T10:05:41.729Z" }, + { url = "https://files.pythonhosted.org/packages/4c/96/c33802d929b0f884cb6e509195d69914632536256d273bd7127e900d79ea/a2a_sdk-0.3.5-py3-none-any.whl", hash = "sha256:fd85b1e4e7be18a89b5d723e4013171510150a235275876f98de9e1ba869457e", size = 136911, upload-time = "2025-09-08T17:30:34.091Z" }, ] [[package]] @@ -155,7 +155,7 @@ dependencies = [ [package.metadata] requires-dist = [ - { name = "a2a-sdk", specifier = ">=0.3.5" }, + { name = "a2a-sdk", specifier = ">=0.3.5,<0.3.24" }, { name = "agent-framework-core", editable = "packages/core" }, ] @@ -178,12 +178,12 @@ dev = [ [package.metadata] requires-dist = [ - { name = "ag-ui-protocol", specifier = ">=0.1.9" }, + { name = "ag-ui-protocol", specifier = "==0.1.13" }, { name = "agent-framework-core", editable = "packages/core" }, - { name = "fastapi", specifier = ">=0.115.0" }, - { name = "httpx", marker = "extra == 'dev'", specifier = ">=0.27.0" }, - { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" }, - { name = "uvicorn", specifier = ">=0.30.0" }, + { name = "fastapi", specifier = ">=0.104.0,<0.133.1" }, + { name = "httpx", marker = "extra == 'dev'", specifier = ">=0.27.0,<0.29" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0,<9" }, + { name = "uvicorn", specifier = ">=0.30.0,<0.30.1" }, ] provides-extras = ["dev"] @@ -199,7 +199,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "anthropic", specifier = ">=0.70.0,<1" }, + { name = "anthropic", specifier = ">=0.80.0,<0.80.1" }, ] [[package]] @@ -216,9 +216,9 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "aiohttp" }, - { name = "azure-ai-agents", specifier = "==1.2.0b5" }, - { name = "azure-ai-inference", specifier = ">=1.0.0b9" }, + { name = "aiohttp", specifier = ">=3.13.3,<4" }, + { name = "azure-ai-agents", specifier = ">=1.2.0b5,<1.2.0b6" }, + { name = "azure-ai-inference", specifier = ">=1.0.0b9,<1.0.0b10" }, ] [[package]] @@ -233,7 +233,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "azure-search-documents", specifier = "==11.7.0b2" }, + { name = "azure-search-documents", specifier = ">=11.7.0b2,<11.7.0b3" }, ] [[package]] @@ -266,8 +266,8 @@ dependencies = [ requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, { name = "agent-framework-durabletask", editable = "packages/durabletask" }, - { name = "azure-functions" }, - { name = "azure-functions-durable" }, + { name = "azure-functions", specifier = ">=1.24.0,<2" }, + { name = "azure-functions-durable", specifier = ">=1.5.0,<2" }, ] [package.metadata.requires-dev] @@ -317,7 +317,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" 
}, - { name = "claude-agent-sdk", specifier = ">=0.1.25" }, + { name = "claude-agent-sdk", specifier = ">=0.1.25,<0.1.26" }, ] [[package]] @@ -332,7 +332,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "microsoft-agents-copilotstudio-client", specifier = ">=0.3.1" }, + { name = "microsoft-agents-copilotstudio-client", specifier = ">=0.3.1,<0.3.2" }, ] [[package]] @@ -404,14 +404,14 @@ requires-dist = [ { name = "azure-ai-projects", specifier = "==2.0.0b4" }, { name = "azure-identity", specifier = ">=1,<2" }, { name = "mcp", extras = ["ws"], specifier = ">=1.24.0,<2" }, - { name = "openai", specifier = ">=1.99.0" }, - { name = "opentelemetry-api", specifier = ">=1.39.0" }, - { name = "opentelemetry-sdk", specifier = ">=1.39.0" }, - { name = "opentelemetry-semantic-conventions-ai", specifier = ">=0.4.13" }, - { name = "packaging", specifier = ">=24.1" }, + { name = "openai", specifier = ">=1.99.0,<3" }, + { name = "opentelemetry-api", specifier = ">=1.39.0,<2" }, + { name = "opentelemetry-sdk", specifier = ">=1.39.0,<2" }, + { name = "opentelemetry-semantic-conventions-ai", specifier = ">=0.4.13,<0.4.14" }, + { name = "packaging", specifier = ">=24.1,<26.0" }, { name = "pydantic", specifier = ">=2,<3" }, { name = "python-dotenv", specifier = ">=1,<2" }, - { name = "typing-extensions" }, + { name = "typing-extensions", specifier = ">=4.15.0,<5" }, ] provides-extras = ["all"] @@ -433,7 +433,7 @@ dev = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "powerfx", marker = "python_full_version < '3.14'", specifier = ">=0.0.31" }, + { name = "powerfx", marker = "python_full_version < '3.14'", specifier = ">=0.0.31,<0.0.32" }, { name = "pyyaml", specifier = ">=6.0,<7.0" }, ] @@ -454,11 +454,13 @@ dependencies = [ [package.optional-dependencies] all = [ { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-cov", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "watchdog", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] dev = [ { name = "agent-framework-orchestrations", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-cov", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "watchdog", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] @@ -466,13 +468,15 @@ dev = [ requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, { name = "agent-framework-orchestrations", marker = "extra == 'dev'", editable = "packages/orchestrations" }, - { name = "fastapi", specifier = ">=0.104.0" }, - { name = "pytest", marker = "extra == 'all'", specifier = ">=7.0.0" }, - { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0.0" }, - { name = "python-dotenv", specifier = ">=1.0.0" }, - { name = "uvicorn", extras = ["standard"], specifier = ">=0.24.0" }, - { name = "watchdog", marker = "extra == 'all'", specifier = ">=3.0.0" }, - { name = "watchdog", marker = "extra == 'dev'", specifier = ">=3.0.0" }, + { name = "fastapi", specifier = ">=0.104.0,<0.133.1" }, + { name = "pytest", marker = "extra == 'all'", 
specifier = ">=7.0.0,<9" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0.0,<9" }, + { name = "pytest-cov", marker = "extra == 'all'", specifier = ">=6.2.1,<7" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=6.2.1,<7" }, + { name = "python-dotenv", specifier = ">=1.0.0,<2" }, + { name = "uvicorn", extras = ["standard"], specifier = ">=0.24.0,<0.24.1" }, + { name = "watchdog", marker = "extra == 'all'", specifier = ">=3.0.0,<7" }, + { name = "watchdog", marker = "extra == 'dev'", specifier = ">=3.0.0,<7" }, ] provides-extras = ["dev", "all"] @@ -495,9 +499,9 @@ dev = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "durabletask", specifier = ">=1.3.0" }, - { name = "durabletask-azuremanaged", specifier = ">=1.3.0" }, - { name = "python-dateutil", specifier = ">=2.8.0" }, + { name = "durabletask", specifier = ">=1.3.0,<2" }, + { name = "durabletask-azuremanaged", specifier = ">=1.3.0,<2" }, + { name = "python-dateutil", specifier = ">=2.8.0,<3" }, ] [package.metadata.requires-dev] @@ -515,7 +519,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "foundry-local-sdk", specifier = ">=0.5.1,<1" }, + { name = "foundry-local-sdk", specifier = ">=0.5.1,<0.5.2" }, ] [[package]] @@ -531,7 +535,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "github-copilot-sdk", specifier = ">=0.1.0" }, + { name = "github-copilot-sdk", specifier = ">=0.1.10,<0.1.11" }, ] [[package]] @@ -545,7 +549,6 @@ dependencies = [ [package.optional-dependencies] gaia = [ { name = "huggingface-hub", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "orjson", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyarrow", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -572,6 +575,7 @@ dev = [ { name = "prek", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyright", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-cov", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "ruff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tau2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -583,18 +587,17 @@ dev = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "agentlightning", marker = "extra == 'lightning'", specifier = ">=0.2.0,<0.3.0" }, - { name = "huggingface-hub", marker = "extra == 'gaia'", specifier = ">=0.20.0" }, - { name = "loguru", marker = "extra == 'tau2'", specifier = ">=0.7.3" }, - { name = "numpy", marker = "extra == 'tau2'" 
}, - { name = "opentelemetry-api", marker = "extra == 'gaia'", specifier = ">=1.39.0" }, - { name = "orjson", marker = "extra == 'gaia'", specifier = ">=3.8.0" }, - { name = "pyarrow", marker = "extra == 'gaia'", specifier = ">=10.0.0" }, - { name = "pydantic", marker = "extra == 'gaia'", specifier = ">=2.0.0" }, - { name = "pydantic", marker = "extra == 'tau2'", specifier = ">=2.0.0" }, - { name = "sympy", marker = "extra == 'math'", specifier = ">=1.13.0" }, - { name = "tiktoken", marker = "extra == 'tau2'", specifier = ">=0.11.0" }, - { name = "tqdm", marker = "extra == 'gaia'", specifier = ">=4.60.0" }, + { name = "agentlightning", marker = "extra == 'lightning'", specifier = ">=0.2.0,<0.2.1" }, + { name = "huggingface-hub", marker = "extra == 'gaia'", specifier = ">=0.20.0,<0.20.1" }, + { name = "loguru", marker = "extra == 'tau2'", specifier = ">=0.7.3,<0.7.4" }, + { name = "numpy", marker = "extra == 'tau2'", specifier = ">=2.2.6,<3" }, + { name = "orjson", marker = "extra == 'gaia'", specifier = ">=3.8.0,<4" }, + { name = "pyarrow", marker = "extra == 'gaia'" }, + { name = "pydantic", marker = "extra == 'gaia'", specifier = ">=2,<4" }, + { name = "pydantic", marker = "extra == 'tau2'", specifier = ">=2,<4" }, + { name = "sympy", marker = "extra == 'math'", specifier = ">=1.13.0,<2" }, + { name = "tiktoken", marker = "extra == 'tau2'", specifier = ">=0.11.0,<0.11.1" }, + { name = "tqdm", marker = "extra == 'gaia'", specifier = ">=4.60.0,<5" }, ] provides-extras = ["gaia", "lightning", "tau2", "math"] @@ -605,6 +608,7 @@ dev = [ { name = "prek", specifier = ">=0.3.2" }, { name = "pyright", specifier = ">=1.1.402" }, { name = "pytest", specifier = ">=8.4.1" }, + { name = "pytest-cov", specifier = ">=6.2.1,<7" }, { name = "rich" }, { name = "ruff", specifier = ">=0.11.8" }, { name = "tau2", git = "https://github.com/sierra-research/tau2-bench?rev=5ba9e3e56db57c5e4114bf7f901291f09b2c5619" }, @@ -625,7 +629,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "mem0ai", specifier = ">=1.0.0" }, + { name = "mem0ai", specifier = ">=1.0.0,<2" }, ] [[package]] @@ -640,7 +644,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "ollama", specifier = ">=0.5.3" }, + { name = "ollama", specifier = ">=0.5.3,<0.5.4" }, ] [[package]] @@ -667,8 +671,8 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "azure-core", specifier = ">=1.30.0" }, - { name = "httpx", specifier = ">=0.27.0" }, + { name = "azure-core", specifier = ">=1.30.0,<2" }, + { name = "httpx", specifier = ">=0.27.0,<0.29" }, ] [[package]] @@ -686,14 +690,14 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "numpy", specifier = ">=2.2.6" }, - { name = "redis", specifier = ">=6.4.0" }, - { name = "redisvl", specifier = ">=0.8.2" }, + { name = "numpy", specifier = ">=2.2.6,<3" }, + { name = "redis", specifier = ">=6.4.0,<7.2.1" }, + { name = "redisvl", specifier = ">=0.8.2,<0.8.3" }, ] [[package]] name = "agentlightning" -version = "0.2.2" +version = "0.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "agentops", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -713,9 +717,9 @@ dependencies = [ { name = "setproctitle", marker = "sys_platform == 
'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "uvicorn", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/40/28/834cbf3e708069d4c7e8a56d8f80268abccc30ba5b536b019175eac2a2b4/agentlightning-0.2.2.tar.gz", hash = "sha256:5bcde5edc1808abda94cc3f6c54523fa4ab11f7aeb9814d51b792455766499bf", size = 810460, upload-time = "2025-11-12T16:06:15.541Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/51/8e1c4586cf7397e2c203099102880fc2f3d53d2290e4a3c2c640a07b5149/agentlightning-0.2.0.tar.gz", hash = "sha256:34b16894579e155cb1817f54bd835b335c3cae1977fa1d04ef587c9d12c2fb5a", size = 710474, upload-time = "2025-10-22T06:06:14.308Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/40/8bde88541f6583731489a436e480ea86a8cf902de69fa281ea000e276069/agentlightning-0.2.2-py3-none-any.whl", hash = "sha256:80a5701c868ae040523a1bc14c58028f2ec9d85e3cc1422c8b3c5ce69499ab23", size = 198080, upload-time = "2025-11-12T16:06:14.36Z" }, + { url = "https://files.pythonhosted.org/packages/94/52/086781c293541ea2dbc301d2f0b30354e9fc158f30956cea50f8d28743bc/agentlightning-0.2.0-py3-none-any.whl", hash = "sha256:9ae6d0dd05e27eee9a509b9c09eabf1b695bd26bbd804c8c879c81207407ccc2", size = 194156, upload-time = "2025-10-22T06:06:12.546Z" }, ] [[package]] @@ -2278,7 +2282,7 @@ wheels = [ [[package]] name = "github-copilot-sdk" -version = "0.1.25" +version = "0.1.10" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.11' and sys_platform == 'darwin'", @@ -2290,13 +2294,9 @@ dependencies = [ { name = "python-dateutil", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, ] +sdist = { url = "https://files.pythonhosted.org/packages/b0/a5/ace80f47a5913e478a1f29127089ce4e5dd26ce0fa9bddf3e790d321b401/github_copilot_sdk-0.1.10.tar.gz", hash = "sha256:31f71532c8b32c6972a57e334fb44a9db91e99bbec655e58f457f953ed5ebbfa", size = 82585, upload-time = "2026-01-14T00:50:25.605Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/06/1dec504b54c724d69283969d4ed004225ec8bbb1c0a5e9e0c3b6b048099a/github_copilot_sdk-0.1.25-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:d32c3fc2c393f70923a645a133607da2e562d078b87437f499100d5bb8c1902f", size = 58097936, upload-time = "2026-02-18T00:07:20.672Z" }, - { url = "https://files.pythonhosted.org/packages/9f/a3/a6ad1ca47af561069d6d8d0a4b074b000b0be1dfa9e66215b264ee31650c/github_copilot_sdk-0.1.25-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7af33d3afbe09a78dfc9d65a843526e47aba15631e90926c42a21a200fab12da", size = 54867128, upload-time = "2026-02-18T00:07:25.228Z" }, - { url = "https://files.pythonhosted.org/packages/8c/08/74fd9be0ed292d524a15fa4db950f43f4afefb77514f856e36fd1203bf13/github_copilot_sdk-0.1.25-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:bc74a3d08ee45313ac02a3f7159c583ec41fc16090ec5f27f88c4b737f03139e", size = 60999905, upload-time = "2026-02-18T00:07:29.462Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/01/daae53c8586c0cadae9a2a146d1da9bd6dbd7e89b7dcd72643b453267345/github_copilot_sdk-0.1.25-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:13ef99fa8c709c5f80d820672bf36ee9176bc33f0efce6a2b5cbf6d1bb2369e8", size = 59183062, upload-time = "2026-02-18T00:07:34.059Z" }, - { url = "https://files.pythonhosted.org/packages/81/a8/2ec7d47a18b042cca2c140cabb5fe6621697c1b43b8721637061122c51ed/github_copilot_sdk-0.1.25-py3-none-win_amd64.whl", hash = "sha256:1a90ee583309ff308fea42f9edec61203645a33ca1d3dc42953628fb8c3eda07", size = 53624148, upload-time = "2026-02-18T00:07:38.558Z" }, - { url = "https://files.pythonhosted.org/packages/6b/2e/4cffd33552ede91de7517641835a3365571abd3f436c9d76a4f50793033c/github_copilot_sdk-0.1.25-py3-none-win_arm64.whl", hash = "sha256:5249a63d1ac1e4d325c70c9902e81327b0baca53afa46010f52ac3fd3b5a111b", size = 51623455, upload-time = "2026-02-18T00:07:42.156Z" }, + { url = "https://files.pythonhosted.org/packages/c1/23/1a07d21025dbf731568058551bd6a39a3791b3a6fdcee2920ead2fe4fd89/github_copilot_sdk-0.1.10-py3-none-any.whl", hash = "sha256:0e6c2c66671d67d4f44c4cafb284044df77e705422ea4bd571f0ac8163690a72", size = 28946, upload-time = "2026-01-14T00:50:24.017Z" }, ] [[package]] @@ -2390,7 +2390,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/38/3f/9859f655d11901e7b2996c6e3d33e0caa9a1d4572c3bc61ed0faa64b2f4c/greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9bc885b89709d901859cf95179ec9f6bb67a3d2bb1f0e88456461bd4b7f8fd0d", size = 277747, upload-time = "2026-02-20T20:16:21.325Z" }, { url = "https://files.pythonhosted.org/packages/fb/07/cb284a8b5c6498dbd7cba35d31380bb123d7dceaa7907f606c8ff5993cbf/greenlet-3.3.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b568183cf65b94919be4438dc28416b234b678c608cafac8874dfeeb2a9bbe13", size = 579202, upload-time = "2026-02-20T20:47:28.955Z" }, { url = "https://files.pythonhosted.org/packages/ed/45/67922992b3a152f726163b19f890a85129a992f39607a2a53155de3448b8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:527fec58dc9f90efd594b9b700662ed3fb2493c2122067ac9c740d98080a620e", size = 590620, upload-time = "2026-02-20T20:55:55.581Z" }, - { url = "https://files.pythonhosted.org/packages/03/5f/6e2a7d80c353587751ef3d44bb947f0565ec008a2e0927821c007e96d3a7/greenlet-3.3.2-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:508c7f01f1791fbc8e011bd508f6794cb95397fdb198a46cb6635eb5b78d85a7", size = 602132, upload-time = "2026-02-20T21:02:43.261Z" }, { url = "https://files.pythonhosted.org/packages/ad/55/9f1ebb5a825215fadcc0f7d5073f6e79e3007e3282b14b22d6aba7ca6cb8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad0c8917dd42a819fe77e6bdfcb84e3379c0de956469301d9fd36427a1ca501f", size = 591729, upload-time = "2026-02-20T20:20:58.395Z" }, { url = "https://files.pythonhosted.org/packages/24/b4/21f5455773d37f94b866eb3cf5caed88d6cea6dd2c6e1f9c34f463cba3ec/greenlet-3.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:97245cc10e5515dbc8c3104b2928f7f02b6813002770cfaffaf9a6e0fc2b94ef", size = 1551946, upload-time = "2026-02-20T20:49:31.102Z" }, { url = "https://files.pythonhosted.org/packages/00/68/91f061a926abead128fe1a87f0b453ccf07368666bd59ffa46016627a930/greenlet-3.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8c1fdd7d1b309ff0da81d60a9688a8bd044ac4e18b250320a96fc68d31c209ca", size = 1618494, upload-time = 
"2026-02-20T20:21:06.541Z" }, @@ -2398,7 +2397,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f3/47/16400cb42d18d7a6bb46f0626852c1718612e35dcb0dffa16bbaffdf5dd2/greenlet-3.3.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c56692189a7d1c7606cb794be0a8381470d95c57ce5be03fb3d0ef57c7853b86", size = 278890, upload-time = "2026-02-20T20:19:39.263Z" }, { url = "https://files.pythonhosted.org/packages/a3/90/42762b77a5b6aa96cd8c0e80612663d39211e8ae8a6cd47c7f1249a66262/greenlet-3.3.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ebd458fa8285960f382841da585e02201b53a5ec2bac6b156fc623b5ce4499f", size = 581120, upload-time = "2026-02-20T20:47:30.161Z" }, { url = "https://files.pythonhosted.org/packages/bf/6f/f3d64f4fa0a9c7b5c5b3c810ff1df614540d5aa7d519261b53fba55d4df9/greenlet-3.3.2-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a443358b33c4ec7b05b79a7c8b466f5d275025e750298be7340f8fc63dff2a55", size = 594363, upload-time = "2026-02-20T20:55:56.965Z" }, - { url = "https://files.pythonhosted.org/packages/9c/8b/1430a04657735a3f23116c2e0d5eb10220928846e4537a938a41b350bed6/greenlet-3.3.2-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4375a58e49522698d3e70cc0b801c19433021b5c37686f7ce9c65b0d5c8677d2", size = 605046, upload-time = "2026-02-20T21:02:45.234Z" }, { url = "https://files.pythonhosted.org/packages/72/83/3e06a52aca8128bdd4dcd67e932b809e76a96ab8c232a8b025b2850264c5/greenlet-3.3.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e2cd90d413acbf5e77ae41e5d3c9b3ac1d011a756d7284d7f3f2b806bbd6358", size = 594156, upload-time = "2026-02-20T20:20:59.955Z" }, { url = "https://files.pythonhosted.org/packages/70/79/0de5e62b873e08fe3cef7dbe84e5c4bc0e8ed0c7ff131bccb8405cd107c8/greenlet-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:442b6057453c8cb29b4fb36a2ac689382fc71112273726e2423f7f17dc73bf99", size = 1554649, upload-time = "2026-02-20T20:49:32.293Z" }, { url = "https://files.pythonhosted.org/packages/5a/00/32d30dee8389dc36d42170a9c66217757289e2afb0de59a3565260f38373/greenlet-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45abe8eb6339518180d5a7fa47fa01945414d7cca5ecb745346fc6a87d2750be", size = 1619472, upload-time = "2026-02-20T20:21:07.966Z" }, @@ -2407,7 +2405,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = "2026-02-20T20:17:43.971Z" }, { url = "https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" }, { url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" }, - { url = "https://files.pythonhosted.org/packages/5c/c5/cc09412a29e43406eba18d61c70baa936e299bc27e074e2be3806ed29098/greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", 
hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb", size = 626250, upload-time = "2026-02-20T21:02:46.596Z" }, { url = "https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" }, { url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" }, { url = "https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = "2026-02-20T20:21:09.154Z" }, @@ -2416,7 +2413,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" }, { url = "https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" }, { url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" }, - { url = "https://files.pythonhosted.org/packages/94/2b/4d012a69759ac9d77210b8bfb128bc621125f5b20fc398bce3940d036b1c/greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd", size = 628268, upload-time = "2026-02-20T21:02:48.024Z" }, { url = "https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" }, { url = "https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" }, { url = "https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" }, @@ -2425,7 +2421,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" }, { url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" }, { url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" }, - { url = "https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" }, { url = "https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" }, { url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" }, { url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" }, @@ -2434,7 +2429,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" }, { url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" }, { url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" }, - { url = "https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" }, { url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" }, { url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" }, { url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" }, @@ -2710,8 +2704,6 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "fsspec", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "hf-xet", marker = "(platform_machine == 'AMD64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'darwin') or (platform_machine == 'amd64' and sys_platform == 'darwin') or (platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and sys_platform == 'darwin') or (platform_machine == 'AMD64' and sys_platform == 'linux') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'amd64' and sys_platform == 'linux') or (platform_machine == 'arm64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine == 'AMD64' and sys_platform == 'win32') or (platform_machine == 'aarch64' and sys_platform == 'win32') or (platform_machine == 'amd64' and sys_platform == 'win32') or (platform_machine == 'arm64' and sys_platform == 'win32') or (platform_machine == 'x86_64' and sys_platform == 'win32')" }, - { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3471,31 +3463,31 @@ wheels = [ [[package]] name = "microsoft-agents-activity" -version = "0.8.0" +version = "0.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/8a/3dbdf47f3ddabf646987ddf6f5260e77865c6812177b8759f1c7fc395ac8/microsoft_agents_activity-0.8.0.tar.gz", hash = "sha256:f9e7d92db119cf93dd0642a5e698732c40a450c064306ad076b0d83d95eae114", size = 61226, upload-time = "2026-02-24T18:28:49.283Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/5a/6a/dfc2fc0316b7dc4f6d24792b4a31a873b026be76792af1e0c3e65f843ef0/microsoft_agents_activity-0.3.1.tar.gz", hash = "sha256:c7567fc30f8e6f2a2d74cd65a1f7f31ade0d7ec9dd94531677d0d7b0648c77ee", size = 44886, upload-time = "2025-09-09T23:19:43.044Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/10/18b87c552112917496256d4e9e50a49bd712015d285f01a3c6e18cdfdd74/microsoft_agents_activity-0.8.0-py3-none-any.whl", hash = "sha256:16f0e7fd5ba8f64c43ceac514b7b22734e97b4478b7e97963232ca893cfe336d", size = 132917, upload-time = "2026-02-24T18:28:59.002Z" }, + { url = "https://files.pythonhosted.org/packages/25/8b/50ce2243e2900e94358f37009121145bb8224a388d95d704856aa2686667/microsoft_agents_activity-0.3.1-py3-none-any.whl", hash = "sha256:d7fc2e9cf2843ec8d6d42608b808b159a12cbb61e1fc7d7b1aaf29899f20746a", size = 111904, upload-time = "2025-09-09T23:19:50.722Z" }, ] [[package]] name = "microsoft-agents-copilotstudio-client" -version = "0.8.0" +version = "0.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "microsoft-agents-hosting-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d0/5d/a8567b03ff7d29d575aa8c4ebfb53d3f6ee8765cedd8550fae68e9df917d/microsoft_agents_copilotstudio_client-0.8.0.tar.gz", hash = "sha256:7416b2e7906977bd55b66f0b23853fb0c55d4a367cc8bf30cc8aba63d0949514", size = 27196, upload-time = "2026-02-24T18:28:52.033Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/a5/2381ffd14d6a584f9f7ab80c7b6c634f658ea651b38702eb403c930d8396/microsoft_agents_copilotstudio_client-0.3.1.tar.gz", hash = "sha256:c529209241c9d11b7a6e8696f96a3d43121c10b49e44f00e5066f9cf5256f4f3", size = 5024, upload-time = "2025-09-09T23:19:44.833Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/6b/999ab044edfe924f0330bd2ce200f3fa9c2a84550212587781c68d617679/microsoft_agents_copilotstudio_client-0.8.0-py3-none-any.whl", hash = "sha256:d00936e2a0b48482380d81695f00af86d71c82c0b464947cc723834b63c91553", size = 23715, upload-time = "2026-02-24T18:29:01.3Z" }, + { url = "https://files.pythonhosted.org/packages/97/35/8b4e9c691f2ce89653007f358519bbadff1fe0d495c3723c9dbbfa962a33/microsoft_agents_copilotstudio_client-0.3.1-py3-none-any.whl", hash = "sha256:cac7485405325b990202452c9c14848cbdb25d13e6cdaf7bd3eca3a5c1fb3989", size = 7420, upload-time = "2025-09-09T23:19:52.287Z" }, ] [[package]] name = "microsoft-agents-hosting-core" -version = "0.8.0" +version = "0.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3504,9 +3496,9 @@ dependencies = [ { name = "pyjwt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/8a/5ab47498bbc74989c30dbfbcb7862211117bdbeba4e3d844bb281c0e05bf/microsoft_agents_hosting_core-0.8.0.tar.gz", hash = "sha256:d3b34803f73d7f677b797733dfe5c561af876e8721c426d6379a762fe6e86fa4", size = 94079, upload-time = "2026-02-24T18:28:54.156Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/14/a1365e0bab1486c2d16aabeb192ca90715794edf4e68be4815c245884420/microsoft_agents_hosting_core-0.3.1.tar.gz", hash = 
"sha256:0b76bda10e7a54ff3c86e56cbabaad5ac7a4c2a076c9833af3b2f4c86fa85e89", size = 81137, upload-time = "2025-09-09T23:19:46.73Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/ff/a1497b3ea63ab0658518fc18532179e5696c5d8d7b28683ec82c34323e54/microsoft_agents_hosting_core-0.8.0-py3-none-any.whl", hash = "sha256:603f53f14bebc7888b5664718bbd24038dafffdd282c81d0e635fca7acfc6aef", size = 139555, upload-time = "2026-02-24T18:29:03.479Z" }, + { url = "https://files.pythonhosted.org/packages/f0/1b/543ddaa2daf8593911a02a07a6a78366d4a6a0053ec86a557c19fa97b60e/microsoft_agents_hosting_core-0.3.1-py3-none-any.whl", hash = "sha256:a4b41556b15321b74f539c5a0a89f70955459b7ec57e9e4b24e61bba27f1cbbc", size = 94573, upload-time = "2025-09-09T23:19:53.855Z" }, ] [[package]] @@ -3972,15 +3964,15 @@ wheels = [ [[package]] name = "ollama" -version = "0.6.1" +version = "0.5.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9d/5a/652dac4b7affc2b37b95386f8ae78f22808af09d720689e3d7a86b6ed98e/ollama-0.6.1.tar.gz", hash = "sha256:478c67546836430034b415ed64fa890fd3d1ff91781a9d548b3325274e69d7c6", size = 51620, upload-time = "2025-11-13T23:02:17.416Z" } +sdist = { url = "https://files.pythonhosted.org/packages/91/6d/ae96027416dcc2e98c944c050c492789502d7d7c0b95a740f0bb39268632/ollama-0.5.3.tar.gz", hash = "sha256:40b6dff729df3b24e56d4042fd9d37e231cee8e528677e0d085413a1d6692394", size = 43331, upload-time = "2025-08-07T21:44:10.422Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/47/4f/4a617ee93d8208d2bcf26b2d8b9402ceaed03e3853c754940e2290fed063/ollama-0.6.1-py3-none-any.whl", hash = "sha256:fc4c984b345735c5486faeee67d8a265214a31cbb828167782dc642ce0a2bf8c", size = 14354, upload-time = "2025-11-13T23:02:16.292Z" }, + { url = "https://files.pythonhosted.org/packages/be/f6/2091e50b8b6c3e6901f6eab283d5efd66fb71c86ddb1b4d68766c3eeba0f/ollama-0.5.3-py3-none-any.whl", hash = "sha256:a8303b413d99a9043dbf77ebf11ced672396b59bec27e6d5db67c88f01b279d2", size = 13490, upload-time = "2025-08-07T21:44:09.353Z" }, ] [[package]] @@ -4658,15 +4650,14 @@ wheels = [ [[package]] name = "powerfx" -version = "0.0.34" +version = "0.0.31" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cffi", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or (python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, { name = "pythonnet", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or (python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/fb/6c4bf87e0c74ca1c563921ce89ca1c5785b7576bca932f7255cdf81082a7/powerfx-0.0.34.tar.gz", hash = "sha256:956992e7afd272657ed16d80f4cad24ec95d9e4a79fb9dfa4a068a09e136af32", size = 3237555, upload-time = "2025-12-22T15:50:59.682Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/1d/40228886242df10c10ed69faf27e973d020c586aa723a51afbe48542d535/powerfx-0.0.31.tar.gz", hash = "sha256:fa9637f315d71163dd900d16f97fce562d550049713d2fc358f8d446bb23906f", size = 3235618, upload-time = "2025-09-16T15:10:13.159Z" } wheels = 
[ - { url = "https://files.pythonhosted.org/packages/6f/96/0f8a1f86485b3ec0315e3e8403326884a0334b3dcd699df2482669cca4be/powerfx-0.0.34-py3-none-any.whl", hash = "sha256:f2dc1c42ba8bfa4c72a7fcff2a00755b95394547388ca0b3e36579c49ee7ed75", size = 3483089, upload-time = "2025-12-22T15:50:57.536Z" }, + { url = "https://files.pythonhosted.org/packages/25/45/fdc98dc8a3e38a3cde464e18624a4851785bf7cc63f207d04279e0a1db4f/powerfx-0.0.31-py3-none-any.whl", hash = "sha256:616dcff4950624d3c63dd72c01daea60b0838e217b0c5533dd2c40677444a340", size = 3481524, upload-time = "2025-09-16T15:10:10.393Z" }, ] [[package]] @@ -5195,7 +5186,7 @@ wheels = [ [[package]] name = "pytest" -version = "9.0.2" +version = "8.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -5206,9 +5197,9 @@ dependencies = [ { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, ] [[package]] @@ -5227,16 +5218,16 @@ wheels = [ [[package]] name = "pytest-cov" -version = "7.0.0" +version = "6.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage", extra = ["toml"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pluggy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +sdist = { url = "https://files.pythonhosted.org/packages/30/4c/f883ab8f0daad69f47efdf95f55a66b51a8b939c430dadce0611508d9e99/pytest_cov-6.3.0.tar.gz", hash = "sha256:35c580e7800f87ce892e687461166e1ac2bcb8fb9e13aea79032518d6e503ff2", size = 70398, upload-time = "2025-09-06T15:40:14.361Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, + { url = "https://files.pythonhosted.org/packages/80/b4/bb7263e12aade3842b938bc5c6958cae79c5ee18992f9b9349019579da0f/pytest_cov-6.3.0-py3-none-any.whl", hash = "sha256:440db28156d2468cafc0415b4f8e50856a0d11faefa38f30906048fe490f1749", size = 25115, upload-time = "2025-09-06T15:40:12.44Z" }, ] [[package]] @@ -5449,14 +5440,14 @@ wheels = [ [[package]] name = "redis" -version = "7.1.1" +version = "6.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-timeout", marker = "(python_full_version < '3.11.3' and sys_platform == 'darwin') or (python_full_version < '3.11.3' and sys_platform == 'linux') or (python_full_version < '3.11.3' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/80/2971931d27651affa88a44c0ad7b8c4a19dc29c998abb20b23868d319b59/redis-7.1.1.tar.gz", hash = "sha256:a2814b2bda15b39dad11391cc48edac4697214a8a5a4bd10abe936ab4892eb43", size = 4800064, upload-time = "2026-02-09T18:39:40.292Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/d6/e8b92798a5bd67d659d51a18170e91c16ac3b59738d91894651ee255ed49/redis-6.4.0.tar.gz", hash = "sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010", size = 4647399, upload-time = "2025-08-07T08:10:11.441Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/55/1de1d812ba1481fa4b37fb03b4eec0fcb71b6a0d44c04ea3482eb017600f/redis-7.1.1-py3-none-any.whl", hash = "sha256:f77817f16071c2950492c67d40b771fa493eb3fccc630a424a10976dbb794b7a", size = 356057, upload-time = "2026-02-09T18:39:38.602Z" }, + { url = "https://files.pythonhosted.org/packages/e8/02/89e2ed7e85db6c93dfa9e8f691c5087df4e3551ab39081a4d7c6d1f90e05/redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f", size = 279847, upload-time = "2025-08-07T08:10:09.84Z" }, ] [[package]] @@ -6195,15 +6186,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e1/c6/76dc613121b793286a3f91621d7b75a2b493e0390ddca50f11993eadf192/setuptools-82.0.0-py3-none-any.whl", hash = "sha256:70b18734b607bd1da571d097d236cfcfacaf01de45717d59e6e04b96877532e0", size = 1003468, upload-time = "2026-02-08T15:08:38.723Z" }, ] -[[package]] -name = "shellingham" -version = "1.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, -] - [[package]] name = "six" version = "1.17.0" @@ -6410,63 +6392,38 @@ wheels = [ [[package]] name = "tiktoken" -version = "0.12.0" +version = "0.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = 
"requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/89/b3/2cb7c17b6c4cf8ca983204255d3f1d95eda7213e247e6947a0ee2c747a2c/tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3de02f5a491cfd179aec916eddb70331814bd6bf764075d39e21d5862e533970", size = 1051991, upload-time = "2025-10-06T20:21:34.098Z" }, - { url = "https://files.pythonhosted.org/packages/27/0f/df139f1df5f6167194ee5ab24634582ba9a1b62c6b996472b0277ec80f66/tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6cfb6d9b7b54d20af21a912bfe63a2727d9cfa8fbda642fd8322c70340aad16", size = 995798, upload-time = "2025-10-06T20:21:35.579Z" }, - { url = "https://files.pythonhosted.org/packages/ef/5d/26a691f28ab220d5edc09b9b787399b130f24327ef824de15e5d85ef21aa/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:cde24cdb1b8a08368f709124f15b36ab5524aac5fa830cc3fdce9c03d4fb8030", size = 1129865, upload-time = "2025-10-06T20:21:36.675Z" }, - { url = "https://files.pythonhosted.org/packages/b2/94/443fab3d4e5ebecac895712abd3849b8da93b7b7dec61c7db5c9c7ebe40c/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6de0da39f605992649b9cfa6f84071e3f9ef2cec458d08c5feb1b6f0ff62e134", size = 1152856, upload-time = "2025-10-06T20:21:37.873Z" }, - { url = "https://files.pythonhosted.org/packages/54/35/388f941251b2521c70dd4c5958e598ea6d2c88e28445d2fb8189eecc1dfc/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6faa0534e0eefbcafaccb75927a4a380463a2eaa7e26000f0173b920e98b720a", size = 1195308, upload-time = "2025-10-06T20:21:39.577Z" }, - { url = "https://files.pythonhosted.org/packages/f8/00/c6681c7f833dd410576183715a530437a9873fa910265817081f65f9105f/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:82991e04fc860afb933efb63957affc7ad54f83e2216fe7d319007dab1ba5892", size = 1255697, upload-time = "2025-10-06T20:21:41.154Z" }, - { url = "https://files.pythonhosted.org/packages/5f/d2/82e795a6a9bafa034bf26a58e68fe9a89eeaaa610d51dbeb22106ba04f0a/tiktoken-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:6fb2995b487c2e31acf0a9e17647e3b242235a20832642bb7a9d1a181c0c1bb1", size = 879375, upload-time = "2025-10-06T20:21:43.201Z" }, - { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, - { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, - { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, - { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, - { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, - { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, - { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, - { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, - { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, - { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, - { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, - { url = 
"https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, - { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, - { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, - { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, - { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, - { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, - { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, - { url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, - { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, - { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, - { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, - { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, - { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, - { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, - { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, - { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, - { url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, - { url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, - { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, - { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, - { url = 
"https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, - { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, - { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, - { url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, - { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, - { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/a7/86/ad0155a37c4f310935d5ac0b1ccf9bdb635dcb906e0a9a26b616dd55825a/tiktoken-0.11.0.tar.gz", hash = "sha256:3c518641aee1c52247c2b97e74d8d07d780092af79d5911a6ab5e79359d9b06a", size = 37648, upload-time = "2025-08-08T23:58:08.495Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/4d/c6a2e7dca2b4f2e9e0bfd62b3fe4f114322e2c028cfba905a72bc76ce479/tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8a9b517d6331d7103f8bef29ef93b3cca95fa766e293147fe7bacddf310d5917", size = 1059937, upload-time = "2025-08-08T23:57:28.57Z" }, + { url = "https://files.pythonhosted.org/packages/41/54/3739d35b9f94cb8dc7b0db2edca7192d5571606aa2369a664fa27e811804/tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b4ddb1849e6bf0afa6cc1c5d809fb980ca240a5fffe585a04e119519758788c0", size = 999230, upload-time = "2025-08-08T23:57:30.241Z" }, + { url = "https://files.pythonhosted.org/packages/dd/f4/ec8d43338d28d53513004ebf4cd83732a135d11011433c58bf045890cc10/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10331d08b5ecf7a780b4fe4d0281328b23ab22cdb4ff65e68d56caeda9940ecc", size = 1130076, upload-time = "2025-08-08T23:57:31.706Z" }, + { url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882", size = 1183942, upload-time = "2025-08-08T23:57:33.142Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c", size = 1244705, upload-time = "2025-08-08T23:57:34.594Z" }, + { url = "https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1", size = 884152, upload-time = "2025-08-08T23:57:36.18Z" }, + { url = "https://files.pythonhosted.org/packages/8a/91/912b459799a025d2842566fe1e902f7f50d54a1ce8a0f236ab36b5bd5846/tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ae374c46afadad0f501046db3da1b36cd4dfbfa52af23c998773682446097cf", size = 1059743, upload-time = "2025-08-08T23:57:37.516Z" }, + { url = "https://files.pythonhosted.org/packages/8c/e9/6faa6870489ce64f5f75dcf91512bf35af5864583aee8fcb0dcb593121f5/tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25a512ff25dc6c85b58f5dd4f3d8c674dc05f96b02d66cdacf628d26a4e4866b", size = 999334, upload-time = "2025-08-08T23:57:38.595Z" }, + { url = "https://files.pythonhosted.org/packages/a1/3e/a05d1547cf7db9dc75d1461cfa7b556a3b48e0516ec29dfc81d984a145f6/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2130127471e293d385179c1f3f9cd445070c0772be73cdafb7cec9a3684c0458", size = 1129402, upload-time = "2025-08-08T23:57:39.627Z" }, + { url = "https://files.pythonhosted.org/packages/34/9a/db7a86b829e05a01fd4daa492086f708e0a8b53952e1dbc9d380d2b03677/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e43022bf2c33f733ea9b54f6a3f6b4354b909f5a73388fb1b9347ca54a069c", size = 1184046, upload-time = "2025-08-08T23:57:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/9d/bb/52edc8e078cf062ed749248f1454e9e5cfd09979baadb830b3940e522015/tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:adb4e308eb64380dc70fa30493e21c93475eaa11669dea313b6bbf8210bfd013", size = 1244691, upload-time = "2025-08-08T23:57:42.251Z" }, + { url = "https://files.pythonhosted.org/packages/60/d9/884b6cd7ae2570ecdcaffa02b528522b18fef1cbbfdbcaa73799807d0d3b/tiktoken-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:ece6b76bfeeb61a125c44bbefdfccc279b5288e6007fbedc0d32bfec602df2f2", size = 884392, upload-time = "2025-08-08T23:57:43.628Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9e/eceddeffc169fc75fe0fd4f38471309f11cb1906f9b8aa39be4f5817df65/tiktoken-0.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fd9e6b23e860973cf9526544e220b223c60badf5b62e80a33509d6d40e6c8f5d", size = 1055199, upload-time = "2025-08-08T23:57:45.076Z" }, + { url = "https://files.pythonhosted.org/packages/4f/cf/5f02bfefffdc6b54e5094d2897bc80efd43050e5b09b576fd85936ee54bf/tiktoken-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a76d53cee2da71ee2731c9caa747398762bda19d7f92665e882fef229cb0b5b", size = 996655, upload-time = "2025-08-08T23:57:46.304Z" }, + { url = "https://files.pythonhosted.org/packages/65/8e/c769b45ef379bc360c9978c4f6914c79fd432400a6733a8afc7ed7b0726a/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef72aab3ea240646e642413cb363b73869fed4e604dcfd69eec63dc54d603e8", size = 1128867, upload-time = "2025-08-08T23:57:47.438Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd", size = 1183308, upload-time = "2025-08-08T23:57:48.566Z" }, + { url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e", size = 1244301, upload-time = "2025-08-08T23:57:49.642Z" }, + { url = "https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f", size = 884282, upload-time = "2025-08-08T23:57:50.759Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cd/a9034bcee638716d9310443818d73c6387a6a96db93cbcb0819b77f5b206/tiktoken-0.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a5f3f25ffb152ee7fec78e90a5e5ea5b03b4ea240beed03305615847f7a6ace2", size = 1055339, upload-time = "2025-08-08T23:57:51.802Z" }, + { url = "https://files.pythonhosted.org/packages/f1/91/9922b345f611b4e92581f234e64e9661e1c524875c8eadd513c4b2088472/tiktoken-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dc6e9ad16a2a75b4c4be7208055a1f707c9510541d94d9cc31f7fbdc8db41d8", size = 997080, upload-time = "2025-08-08T23:57:53.442Z" }, + { url = "https://files.pythonhosted.org/packages/d0/9d/49cd047c71336bc4b4af460ac213ec1c457da67712bde59b892e84f1859f/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a0517634d67a8a48fd4a4ad73930c3022629a85a217d256a6e9b8b47439d1e4", size = 1128501, upload-time = "2025-08-08T23:57:54.808Z" }, + { url = "https://files.pythonhosted.org/packages/52/d5/a0dcdb40dd2ea357e83cb36258967f0ae96f5dd40c722d6e382ceee6bba9/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fb4effe60574675118b73c6fbfd3b5868e5d7a1f570d6cc0d18724b09ecf318", size = 1182743, upload-time = "2025-08-08T23:57:56.307Z" }, + { url = "https://files.pythonhosted.org/packages/3b/17/a0fc51aefb66b7b5261ca1314afa83df0106b033f783f9a7bcbe8e741494/tiktoken-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94f984c9831fd32688aef4348803b0905d4ae9c432303087bae370dc1381a2b8", size = 1244057, upload-time = "2025-08-08T23:57:57.628Z" }, + { url = "https://files.pythonhosted.org/packages/50/79/bcf350609f3a10f09fe4fc207f132085e497fdd3612f3925ab24d86a0ca0/tiktoken-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2177ffda31dec4023356a441793fed82f7af5291120751dee4d696414f54db0c", size = 883901, upload-time = "2025-08-08T23:57:59.359Z" }, ] [[package]] From 5edef16468cd945b14430acb2cdf514bf2f639bf Mon Sep 17 00:00:00 2001 From: eavanvalkenburg Date: Fri, 27 Feb 2026 12:11:25 +0100 Subject: [PATCH 2/8] updated text and pyarrow --- python/CODING_STANDARD.md | 7 +++---- python/packages/lab/pyproject.toml | 2 +- python/uv.lock | 2 +- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/python/CODING_STANDARD.md b/python/CODING_STANDARD.md index 06610056dc..e654504f27 100644 --- a/python/CODING_STANDARD.md +++ b/python/CODING_STANDARD.md @@ -392,12 +392,11 @@ All non-core packages declare a lower bound on `agent-framework-core` (e.g., `"a The guiding principle for external dependencies is to make the range of 
allowed versions as broad as possible, even if that means we have to do some conditional imports and other tricks to allow for small changes in versions. So we use bounded ranges for external package dependencies in `pyproject.toml`:
-- For stable dependencies (`>=1.0.0`), use `>=x.y.z,<N` with the cap at the next major (for example: `openai>=1.99.0,<2`).
+
+- For stable dependencies (`>=1.0.0`), use a lower bound at a known-good version and an explicit upper bound that reflects the maximum major version we currently support (for example: `openai>=1.99.0,<3`).
 - For prerelease (`dev`/`a`/`b`/`rc`) dependencies, use a known-good lower bound with a hard upper boundary in the same prerelease line (for example: `azure-ai-projects>=2.0.0b3,<2.0.0b4`).
 - For `<1.0.0` dependencies, use patch-bounded caps (`>=x.y.z,<x.y.z+1`), not minor-bounded caps (for example: `a2a-sdk>=0.3.5,<0.3.6`).
-- Prefer keeping support for multiple major versions when practical. If APIs differ between supported majors, version-conditional imports/branches are acceptable to preserve compatibility.
-- Validate dependency bounds project by project using the dependency-range validation script (`uv run poe validate-dependency-ranges`), and include both typing and tests for the final gate.
-
+- Prefer keeping support for multiple major versions when practical. This may mean that the upper bound spans multiple major versions when the dependency maintains backward compatibility; if APIs differ between supported majors, version-conditional imports/branches are acceptable to preserve compatibility. For `<1.0.0` and prerelease dependencies, also make the bounds as broad as possible, but only for known packages, not for new ones, since the odds of breaking changes are higher.

 ### Installation Options

diff --git a/python/packages/lab/pyproject.toml b/python/packages/lab/pyproject.toml
index ad6991d6de..1db05d0123 100644
--- a/python/packages/lab/pyproject.toml
+++ b/python/packages/lab/pyproject.toml
@@ -32,7 +32,7 @@ gaia = [
     "tqdm>=4.60.0,<5",
     "huggingface-hub>=0.20.0,<0.20.1",
     "orjson>=3.8.0,<4",
-    "pyarrow", # For reading parquet files
+    "pyarrow>=10,<24", # For reading parquet files
 ]

 # Lightning RL training module dependencies
diff --git a/python/uv.lock b/python/uv.lock
index 5691e953e4..6b4a71af5e 100644
--- a/python/uv.lock
+++ b/python/uv.lock
@@ -592,7 +592,7 @@ requires-dist = [
     { name = "loguru", marker = "extra == 'tau2'", specifier = ">=0.7.3,<0.7.4" },
     { name = "numpy", marker = "extra == 'tau2'", specifier = ">=2.2.6,<3" },
     { name = "orjson", marker = "extra == 'gaia'", specifier = ">=3.8.0,<4" },
-    { name = "pyarrow", marker = "extra == 'gaia'" },
+    { name = "pyarrow", marker = "extra == 'gaia'", specifier = ">=10,<24" },
     { name = "pydantic", marker = "extra == 'gaia'", specifier = ">=2,<4" },
     { name = "pydantic", marker = "extra == 'tau2'", specifier = ">=2,<4" },
     { name = "sympy", marker = "extra == 'math'", specifier = ">=1.13.0,<2" },

From af541f6200f1d114f4e52b4b26240404c979a060 Mon Sep 17 00:00:00 2001
From: eavanvalkenburg
Date: Fri, 27 Feb 2026 12:32:44 +0100
Subject: [PATCH 3/8] new lock

---
 python/uv.lock | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/python/uv.lock b/python/uv.lock
index 6b4a71af5e..6c1e730353 100644
--- a/python/uv.lock
+++ b/python/uv.lock
@@ -60,7 +60,7 @@ overrides = [

 [[package]]
 name = "a2a-sdk"
-version = "0.3.5"
+version = "0.3.23"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "google-api-core", marker = "sys_platform == 'darwin' or sys_platform ==
'linux' or sys_platform == 'win32'" }, @@ -69,9 +69,9 @@ dependencies = [ { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/0d/12ebef081b096ca5fafd1ec8cc589739abba07b46ae7899c7420e599f2a6/a2a_sdk-0.3.5.tar.gz", hash = "sha256:48cf37dedeb63cf0a072512221a12ed4b3950df695c9d65eadb839a99392c3e5", size = 222064, upload-time = "2025-09-08T17:30:35.826Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/6a/2fe24e0a85240a651006c12f79bdb37156adc760a96c44bc002ebda77916/a2a_sdk-0.3.23.tar.gz", hash = "sha256:7c46b8572c4633a2b41fced2833e11e62871e8539a5b3c782ba2ba1e33d213c2", size = 255265, upload-time = "2026-02-17T08:34:34.648Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/96/c33802d929b0f884cb6e509195d69914632536256d273bd7127e900d79ea/a2a_sdk-0.3.5-py3-none-any.whl", hash = "sha256:fd85b1e4e7be18a89b5d723e4013171510150a235275876f98de9e1ba869457e", size = 136911, upload-time = "2025-09-08T17:30:34.091Z" }, + { url = "https://files.pythonhosted.org/packages/d4/20/77d119f19ab03449d3e6bc0b1f11296d593dae99775c1d891ab1e290e416/a2a_sdk-0.3.23-py3-none-any.whl", hash = "sha256:8c2f01dffbfdd3509eafc15c4684743e6ae75e69a5df5d6f87be214c948e7530", size = 145689, upload-time = "2026-02-17T08:34:33.263Z" }, ] [[package]] @@ -1413,7 +1413,7 @@ name = "clr-loader" version = "0.2.10" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cffi", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or (python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, + { name = "cffi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/18/24/c12faf3f61614b3131b5c98d3bf0d376b49c7feaa73edca559aeb2aee080/clr_loader-0.2.10.tar.gz", hash = "sha256:81f114afbc5005bafc5efe5af1341d400e22137e275b042a8979f3feb9fc9446", size = 83605, upload-time = "2026-01-03T23:13:06.984Z" } wheels = [ @@ -1892,7 +1892,7 @@ name = "exceptiongroup" version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, + { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ @@ -4653,7 +4653,7 @@ name = "powerfx" version = "0.0.31" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pythonnet", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or (python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, + { name = "pythonnet", marker = "sys_platform == 'darwin' 
or sys_platform == 'linux' or sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/56/1d/40228886242df10c10ed69faf27e973d020c586aa723a51afbe48542d535/powerfx-0.0.31.tar.gz", hash = "sha256:fa9637f315d71163dd900d16f97fce562d550049713d2fc358f8d446bb23906f", size = 3235618, upload-time = "2025-09-16T15:10:13.159Z" } wheels = [ @@ -5316,7 +5316,7 @@ name = "pythonnet" version = "3.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "clr-loader", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or (python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, + { name = "clr-loader", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/9a/d6/1afd75edd932306ae9bd2c2d961d603dc2b52fcec51b04afea464f1f6646/pythonnet-3.0.5.tar.gz", hash = "sha256:48e43ca463941b3608b32b4e236db92d8d40db4c58a75ace902985f76dac21cf", size = 239212, upload-time = "2024-12-13T08:30:44.393Z" } wheels = [ From 2847eda15bf66ff9b6c03efa00d3cd98a1065d18 Mon Sep 17 00:00:00 2001 From: eavanvalkenburg Date: Fri, 27 Feb 2026 12:42:23 +0100 Subject: [PATCH 4/8] fixed workflow --- .../agent_framework/_workflows/_workflow.py | 102 +- python/pyproject.toml | 2 - .../validate_dependency_lower_bounds.py | 988 ++++++++++++++++++ 3 files changed, 1020 insertions(+), 72 deletions(-) create mode 100644 python/scripts/validate_dependency_lower_bounds.py diff --git a/python/packages/core/agent_framework/_workflows/_workflow.py b/python/packages/core/agent_framework/_workflows/_workflow.py index 680f7b9380..cf030bf7b0 100644 --- a/python/packages/core/agent_framework/_workflows/_workflow.py +++ b/python/packages/core/agent_framework/_workflows/_workflow.py @@ -1,5 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. +# ruff: noqa: RUF070, RUF100 from __future__ import annotations import asyncio @@ -57,11 +58,7 @@ class WorkflowRunResult(list[WorkflowEvent]): - status_timeline(): Access the complete status event history """ - def __init__( - self, - events: list[WorkflowEvent[Any]], - status_events: list[WorkflowEvent[Any]] | None = None, - ) -> None: + def __init__(self, events: list[WorkflowEvent[Any]], status_events: list[WorkflowEvent[Any]] | None = None) -> None: super().__init__(events) self._status_events: list[WorkflowEvent[Any]] = status_events or [] @@ -219,9 +216,7 @@ def __init__( # Output events (WorkflowEvent with type='output') from these executors are treated as workflow outputs. # If None or empty, all executor outputs are considered workflow outputs. - self._output_executors = ( - list(output_executors) if output_executors else list(self.executors.keys()) - ) + self._output_executors = list(output_executors) if output_executors else list(self.executors.keys()) # Store non-serializable runtime objects as private attributes self._runner_context = runner_context @@ -242,9 +237,7 @@ def __init__( def _ensure_not_running(self) -> None: """Ensure the workflow is not already running.""" if self._is_running: - raise RuntimeError( - "Workflow is already running. Concurrent executions are not allowed." - ) + raise RuntimeError("Workflow is already running. 
Concurrent executions are not allowed.") self._is_running = True def _reset_running_flag(self) -> None: @@ -259,10 +252,7 @@ def to_dict(self) -> dict[str, Any]: "start_executor_id": self.start_executor_id, "max_iterations": self.max_iterations, "edge_groups": [group.to_dict() for group in self.edge_groups], - "executors": { - executor_id: executor.to_dict() - for executor_id, executor in self.executors.items() - }, + "executors": {executor_id: executor.to_dict() for executor_id, executor in self.executors.items()}, "output_executors": self._output_executors, } @@ -281,9 +271,7 @@ def to_dict(self) -> dict[str, Any]: from ._workflow_executor import WorkflowExecutor if isinstance(original_executor, WorkflowExecutor): - executor_payload["workflow"] = ( - original_executor.workflow.to_dict() - ) + executor_payload["workflow"] = original_executor.workflow.to_dict() return data @@ -346,9 +334,11 @@ async def _run_workflow_with_tracing( span.add_event(OtelAttr.WORKFLOW_STARTED) # Emit explicit start/status events to the stream with _framework_event_origin(): - yield WorkflowEvent.started() + started = WorkflowEvent.started() + yield started with _framework_event_origin(): - yield WorkflowEvent.status(WorkflowRunState.IN_PROGRESS) + in_progress = WorkflowEvent.status(WorkflowRunState.IN_PROGRESS) + yield in_progress # Reset context for a new run if supported if reset_context: @@ -383,16 +373,12 @@ async def _run_workflow_with_tracing( if event.type == "request_info" and not emitted_in_progress_pending: emitted_in_progress_pending = True with _framework_event_origin(): - yield WorkflowEvent.status( - WorkflowRunState.IN_PROGRESS_PENDING_REQUESTS - ) - + pending_status = WorkflowEvent.status(WorkflowRunState.IN_PROGRESS_PENDING_REQUESTS) + yield pending_status # Workflow runs until idle - emit final status based on whether requests are pending if saw_request: with _framework_event_origin(): - terminal_status = WorkflowEvent.status( - WorkflowRunState.IDLE_WITH_PENDING_REQUESTS - ) + terminal_status = WorkflowEvent.status(WorkflowRunState.IDLE_WITH_PENDING_REQUESTS) yield terminal_status else: with _framework_event_origin(): @@ -408,9 +394,11 @@ async def _run_workflow_with_tracing( # Surface structured failure details before propagating exception details = WorkflowErrorDetails.from_exception(exc) with _framework_event_origin(): - yield WorkflowEvent.failed(details) + failed_event = WorkflowEvent.failed(details) + yield failed_event with _framework_event_origin(): - yield WorkflowEvent.status(WorkflowRunState.FAILED) + failed_status = WorkflowEvent.status(WorkflowRunState.FAILED) + yield failed_status span.add_event( name=OtelAttr.WORKFLOW_ERROR, attributes={ @@ -451,9 +439,7 @@ async def _execute_with_message_or_checkpoint( "or build workflow with WorkflowBuilder(checkpoint_storage=checkpoint_storage)." ) - await self._runner.restore_from_checkpoint( - checkpoint_id, checkpoint_storage - ) + await self._runner.restore_from_checkpoint(checkpoint_id, checkpoint_storage) # Handle initial message elif message is not None: @@ -502,9 +488,7 @@ def run( checkpoint_storage: CheckpointStorage | None = None, include_status_events: bool = False, **kwargs: Any, - ) -> ( - ResponseStream[WorkflowEvent, WorkflowRunResult] | Awaitable[WorkflowRunResult] - ): + ) -> ResponseStream[WorkflowEvent, WorkflowRunResult] | Awaitable[WorkflowRunResult]: """Run the workflow, optionally streaming events. 
Unified interface supporting initial runs, checkpoint restoration, and @@ -548,9 +532,7 @@ def run( streaming=stream, **kwargs, ), - finalizer=functools.partial( - self._finalize_events, include_status_events=include_status_events - ), + finalizer=functools.partial(self._finalize_events, include_status_events=include_status_events), cleanup_hooks=[ functools.partial(self._run_cleanup, checkpoint_storage), ], @@ -654,14 +636,10 @@ def _validate_run_params( - responses + checkpoint_id is allowed (restore then send) """ if message is not None and responses is not None: - raise ValueError( - "Cannot provide both 'message' and 'responses'. Use one or the other." - ) + raise ValueError("Cannot provide both 'message' and 'responses'. Use one or the other.") if message is not None and checkpoint_id is not None: - raise ValueError( - "Cannot provide both 'message' and 'checkpoint_id'. Use one or the other." - ) + raise ValueError("Cannot provide both 'message' and 'checkpoint_id'. Use one or the other.") if message is None and responses is None and checkpoint_id is None: raise ValueError( @@ -685,23 +663,15 @@ def _resolve_execution_mode( if checkpoint_id is not None: # Combined: restore checkpoint then send responses initial_executor_fn = functools.partial( - self._restore_and_send_responses, - checkpoint_id, - checkpoint_storage, - responses, + self._restore_and_send_responses, checkpoint_id, checkpoint_storage, responses ) else: # Send responses only (requires pending requests in workflow state) - initial_executor_fn = functools.partial( - self._send_responses_internal, responses - ) + initial_executor_fn = functools.partial(self._send_responses_internal, responses) return initial_executor_fn, False # Regular run or checkpoint restoration initial_executor_fn = functools.partial( - self._execute_with_message_or_checkpoint, - message, - checkpoint_id, - checkpoint_storage, + self._execute_with_message_or_checkpoint, message, checkpoint_id, checkpoint_storage ) reset_context = message is not None and checkpoint_id is None return initial_executor_fn, reset_context @@ -740,9 +710,7 @@ async def _send_responses_internal(self, responses: dict[str, Any]) -> None: coerced_responses: dict[str, Any] = {} for request_id, response in responses.items(): if request_id not in pending_requests: - raise ValueError( - f"Response provided for unknown request ID: {request_id}" - ) + raise ValueError(f"Response provided for unknown request ID: {request_id}") pending_request = pending_requests[request_id] # Try to coerce raw values (e.g., dicts from JSON) to the expected type response = try_coerce_to_type(response, pending_request.response_type) @@ -753,12 +721,10 @@ async def _send_responses_internal(self, responses: dict[str, Any]) -> None: ) coerced_responses[request_id] = response - await asyncio.gather( - *[ - self._runner_context.send_request_info_response(request_id, response) - for request_id, response in coerced_responses.items() - ] - ) + await asyncio.gather(*[ + self._runner_context.send_request_info_response(request_id, response) + for request_id, response in coerced_responses.items() + ]) def _get_executor_by_id(self, executor_id: str) -> Executor: """Get an executor by its ID. 
@@ -803,9 +769,7 @@ def _compute_graph_signature(self) -> dict[str, Any]: executors_signature = {} for executor_id, executor in self.executors.items(): - executor_sig: Any = ( - f"{executor.__class__.__module__}.{executor.__class__.__name__}" - ) + executor_sig: Any = f"{executor.__class__.__module__}.{executor.__class__.__name__}" if isinstance(executor, WorkflowExecutor): executor_sig = { @@ -835,9 +799,7 @@ def _compute_graph_signature(self) -> dict[str, Any]: } if isinstance(group, FanOutEdgeGroup): - group_info["selection_func"] = getattr( - group, "selection_func_name", None - ) + group_info["selection_func"] = getattr(group, "selection_func_name", None) edge_groups_signature.append(group_info) diff --git a/python/pyproject.toml b/python/pyproject.toml index da2d3f34b7..1f8707dfcc 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -149,8 +149,6 @@ ignore = [ "**/tests/**" = ["D", "INP", "TD", "ERA001", "RUF", "S"] "samples/**" = ["D", "INP", "ERA001", "RUF", "S", "T201", "CPY"] "*.ipynb" = ["CPY", "E501"] -# RUF070: Assignment before yield is intentional - context manager must exit before yielding -"**/agent_framework/_workflows/_workflow.py" = ["RUF070"] [tool.ruff.format] docstring-code-format = true diff --git a/python/scripts/validate_dependency_lower_bounds.py b/python/scripts/validate_dependency_lower_bounds.py new file mode 100644 index 0000000000..d042459394 --- /dev/null +++ b/python/scripts/validate_dependency_lower_bounds.py @@ -0,0 +1,988 @@ +# Copyright (c) Microsoft. All rights reserved. +# ruff: noqa: INP001, S404, S603 + +"""Lower dependency bounds, validate, and persist the oldest passing set.""" + +from __future__ import annotations + +import argparse +import concurrent.futures +import json +import os +import shutil +import subprocess +import tempfile +import threading +from dataclasses import dataclass +from datetime import UTC, datetime +from pathlib import Path +from urllib import error as urllib_error +from urllib import request as urllib_request + +import tomli +from packaging.version import InvalidVersion, Version +from rich import print +from task_runner import discover_projects, extract_poe_tasks + +CHECK_TASK_PRIORITY = ("check", "typing", "pyright", "mypy", "lint") +REQ_PATTERN = r"^\s*([A-Za-z0-9_.-]+(?:\[[^\]]+\])?)\s*(.*?)\s*$" + + +@dataclass +class RequirementEntry: + """A parsed requirement entry from pyproject dependencies.""" + + raw: str + name: str + name_extras: str + marker: str | None + spec_parts: list[str] + lower_version: Version | None + lower_index: int | None + upper_index: int | None + upper_version: Version | None + exact_index: int | None = None + exact_version: Version | None = None + + def with_lower(self, lower: Version) -> str: + """Return a new requirement with the given inclusive lower bound.""" + updated_parts = list(self.spec_parts) + if self.exact_index is not None: + raise ValueError(f"Exact pin cannot be lowered in-place: {self.raw}") + if self.lower_index is not None: + updated_parts[self.lower_index] = f">={lower}" + else: + updated_parts.insert(0, f">={lower}") + spec = ",".join(updated_parts) + requirement = f"{self.name_extras}{spec}" + if self.marker: + requirement += f"; {self.marker}" + return requirement + + +@dataclass +class DependencyTarget: + """A dependency to optimize within one package.""" + + name: str + entries: list[RequirementEntry] + lower_version: Version | None + upper_version: Version + allow_prerelease_candidates: bool + + @property + def original_requirements(self) -> list[str]: + 
"""Return original requirement strings for this dependency group.""" + return [entry.raw for entry in self.entries] + + +@dataclass +class DependencyAttempt: + """A single lower-bound trial for one dependency.""" + + trial_lower: str + status: str + error: str | None = None + + +@dataclass +class DependencyOutcome: + """Final outcome for one dependency optimization.""" + + name: str + changed: bool + original_requirements: list[str] + final_requirements: list[str] + candidate_versions: list[str] + attempted_versions: list[str] + attempts: list[DependencyAttempt] + skipped_reason: str | None = None + + +@dataclass +class PackagePlan: + """Execution plan for a package.""" + + project_path: Path + package_name: str + pyproject_path: Path + internal_editables: list[Path] + include_dev_group: bool + include_dev_extra: bool + + +@dataclass +class PackageOutcome: + """Execution outcome for a package.""" + + project_path: str + package_name: str + tasks: list[str] + changed: bool + dependencies: list[DependencyOutcome] + replacements: dict[str, str] + skipped: list[str] + error: str | None = None + + +def _utc_now() -> str: + return datetime.now(UTC).isoformat() + + +def _truncate_error(stdout: str, stderr: str, *, max_chars: int = 2000) -> str: + combined = "\n".join(part for part in [stderr.strip(), stdout.strip()] if part) + if len(combined) <= max_chars: + return combined + return f"...\n{combined[-max_chars:]}" + + +def _parse_requirement(requirement: str) -> RequirementEntry | None: + import re + + match = re.match(REQ_PATTERN, requirement) + if not match: + return None + name_extras = match.group(1) + rest = match.group(2).strip() + marker = None + if ";" in rest: + spec_part, marker_part = rest.split(";", 1) + spec = spec_part.strip() + marker = marker_part.strip() + else: + spec = rest + if not spec: + return None + + spec_parts = [part.strip() for part in spec.split(",") if part.strip()] + if not spec_parts: + return None + + lower_version: Version | None = None + lower_index: int | None = None + upper_version: Version | None = None + upper_index: int | None = None + exact_version: Version | None = None + exact_index: int | None = None + + for index, part in enumerate(spec_parts): + if part.startswith((">=", ">")): + raw_version = part[2:].strip() if part.startswith(">=") else part[1:].strip() + try: + parsed = Version(raw_version) + except InvalidVersion: + continue + if lower_version is None or parsed > lower_version: + lower_version = parsed + lower_index = index + elif part.startswith(("==", "===")): + raw_version = part[3:].strip() if part.startswith("===") else part[2:].strip() + try: + parsed = Version(raw_version) + except InvalidVersion: + continue + exact_version = parsed + exact_index = index + if lower_version is None or parsed > lower_version: + lower_version = parsed + lower_index = None + if part.startswith(("<", "<=")): + raw_version = part[2:].strip() if part.startswith("<=") else part[1:].strip() + try: + parsed = Version(raw_version) + except InvalidVersion: + continue + if upper_version is None or parsed < upper_version: + upper_version = parsed + upper_index = index + + if upper_version is None and exact_version is None: + return None + name = name_extras.split("[", 1)[0].lower() + return RequirementEntry( + raw=requirement, + name=name, + name_extras=name_extras, + marker=marker, + spec_parts=spec_parts, + lower_version=lower_version, + lower_index=lower_index, + upper_index=upper_index, + upper_version=upper_version, + exact_index=exact_index, + 
exact_version=exact_version, + ) + + +def _replace_requirements(path: Path, replacements: list[tuple[str, str]]) -> None: + text = path.read_text() + updated_text = text + for old, new in replacements: + replaced = False + old_double = f'"{old}"' + old_single = f"'{old}'" + new_double = f'"{new}"' + new_single = f"'{new}'" + if old_double in updated_text: + updated_text = updated_text.replace(old_double, new_double) + replaced = True + if old_single in updated_text: + updated_text = updated_text.replace(old_single, new_single) + replaced = True + if not replaced: + raise ValueError(f"Could not find dependency string in {path}: {old}") + if updated_text != text: + path.write_text(updated_text) + + +def _load_lock_versions(workspace_root: Path) -> dict[str, list[Version]]: + lock_file = workspace_root / "uv.lock" + if not lock_file.exists(): + return {} + with lock_file.open("rb") as f: + lock_data = tomli.load(f) + versions_by_name: dict[str, set[Version]] = {} + for package_data in lock_data.get("package", []): + package_name = str(package_data.get("name", "")).lower() + package_version = package_data.get("version") + if not package_name or not package_version: + continue + try: + parsed = Version(str(package_version)) + except InvalidVersion: + continue + versions_by_name.setdefault(package_name, set()).add(parsed) + return {name: sorted(values) for name, values in versions_by_name.items()} + + +class VersionCatalog: + """Cache and fetch available dependency versions.""" + + def __init__(self, lock_versions: dict[str, list[Version]], source: str) -> None: + """Initialize the catalog with lock-based fallback and fetch source.""" + self._lock_versions = lock_versions + self._source = source + self._cache: dict[str, list[Version]] = {} + self._lock = threading.Lock() + + def get(self, package_name: str) -> list[Version]: + """Return cached or fetched versions for a package name.""" + with self._lock: + cached = self._cache.get(package_name) + if cached is not None: + return cached + versions = self._fetch(package_name) + with self._lock: + self._cache[package_name] = versions + return versions + + def _fetch(self, package_name: str) -> list[Version]: + if self._source == "lock": + return self._lock_versions.get(package_name, []) + + try: + url = f"https://pypi.org/pypi/{package_name}/json" + with urllib_request.urlopen(url, timeout=20) as response: + payload = json.load(response) + except (urllib_error.URLError, TimeoutError, json.JSONDecodeError): + return self._lock_versions.get(package_name, []) + + versions: set[Version] = set() + for raw_version, files in payload.get("releases", {}).items(): + if not files: + continue + non_yanked = any(not bool(file_info.get("yanked", False)) for file_info in files) + if not non_yanked: + continue + try: + versions.add(Version(raw_version)) + except InvalidVersion: + continue + if versions: + return sorted(versions) + return self._lock_versions.get(package_name, []) + + +def _load_package_name(pyproject_file: Path) -> str: + with pyproject_file.open("rb") as f: + data = tomli.load(f) + return str(data["project"]["name"]) + + +def _select_validation_tasks(available_tasks: set[str]) -> list[str]: + tasks: list[str] = [] + if "lint" in available_tasks: + tasks.append("lint") + else: + check_task = next((task for task in CHECK_TASK_PRIORITY if task in available_tasks), None) + if check_task: + tasks.append(check_task) + if "test" in available_tasks and "test" not in tasks: + tasks.append("test") + return tasks + + +def 
_build_workspace_package_map(workspace_root: Path) -> dict[str, Path]: + package_map: dict[str, Path] = {} + for pyproject_file in sorted((workspace_root / "packages").glob("*/pyproject.toml")): + with pyproject_file.open("rb") as f: + data = tomli.load(f) + package_name = str(data.get("project", {}).get("name", "")).strip() + if package_name: + package_map[package_name] = pyproject_file.parent + return package_map + + +def _build_internal_graph(workspace_root: Path, package_map: dict[str, Path]) -> dict[str, set[str]]: + graph: dict[str, set[str]] = {} + for package_name, package_path in package_map.items(): + pyproject_file = package_path / "pyproject.toml" + with pyproject_file.open("rb") as f: + data = tomli.load(f) + dependencies = data.get("project", {}).get("dependencies", []) or [] + internal = set() + for dependency in dependencies: + parsed = _parse_requirement(dependency) + if not parsed: + continue + if parsed.name.startswith("agent-framework"): + for candidate_name in package_map: + if candidate_name.lower() == parsed.name: + internal.add(candidate_name) + break + graph[package_name] = internal + return graph + + +def _resolve_internal_editables( + package_name: str, package_map: dict[str, Path], graph: dict[str, set[str]] +) -> list[Path]: + visited: set[str] = set() + stack = [package_name] + results: set[Path] = set() + while stack: + current = stack.pop() + if current in visited: + continue + visited.add(current) + for dependency_name in graph.get(current, set()): + dependency_path = package_map.get(dependency_name) + if dependency_path and dependency_name != package_name: + results.add(dependency_path.resolve()) + stack.append(dependency_name) + return sorted(results) + + +def _collect_targets( + pyproject_file: Path, + *, + dependency_filters: set[str] | None, +) -> tuple[list[DependencyTarget], list[str]]: + with pyproject_file.open("rb") as f: + data = tomli.load(f) + project = data.get("project", {}) + dependencies: list[str] = list(project.get("dependencies", []) or []) + for values in (project.get("optional-dependencies", {}) or {}).values(): + dependencies.extend(values or []) + + grouped: dict[str, list[RequirementEntry]] = {} + skipped: list[str] = [] + + for dependency in dependencies: + parsed = _parse_requirement(dependency) + if not parsed: + continue + if parsed.name.startswith("agent-framework"): + continue + if dependency_filters and parsed.name not in dependency_filters: + continue + grouped.setdefault(parsed.name, []).append(parsed) + + targets: list[DependencyTarget] = [] + for dependency_name, entries in sorted(grouped.items()): + if not entries: + continue + allow_prerelease_candidates = any( + ( + (entry.lower_version is not None and entry.lower_version.is_prerelease) + or (entry.upper_version is not None and entry.upper_version.is_prerelease) + or (entry.exact_version is not None and entry.exact_version.is_prerelease) + ) + for entry in entries + ) + upper_entries = [entry for entry in entries if entry.upper_version is not None] + exact_entries = [entry for entry in entries if entry.exact_version is not None] + + if upper_entries: + if len(upper_entries) != len(entries): + skipped.append(f"{dependency_name}: mixed bounded and unbounded/exact requirements in package") + continue + first_upper = upper_entries[0].upper_version + if first_upper is None: + skipped.append(f"{dependency_name}: missing upper bound value") + continue + if any(entry.upper_version != first_upper for entry in upper_entries[1:]): + skipped.append(f"{dependency_name}: conflicting 
upper bounds in package") + continue + lower_versions = [entry.lower_version for entry in entries if entry.lower_version is not None] + if not lower_versions: + skipped.append(f"{dependency_name}: missing lower bound value") + continue + lower = max(lower_versions) + targets.append( + DependencyTarget( + name=dependency_name, + entries=entries, + lower_version=lower, + upper_version=first_upper, + allow_prerelease_candidates=allow_prerelease_candidates, + ) + ) + continue + + if exact_entries and len(exact_entries) == len(entries): + skipped.append(f"{dependency_name}: exact pins are skipped for lower-bound optimization") + continue + + skipped.append(f"{dependency_name}: no usable bounded range to optimize") + return targets, skipped + + +def _build_trial_lower_bounds( + versions: list[Version], + *, + lower: Version, + current_upper: Version, + allow_prerelease: bool, + max_candidates: int, +) -> list[Version]: + candidates = [version for version in versions if version < lower and version < current_upper] + # `packaging` treats .dev/.a/.b/.rc as prereleases; only probe them when current spec already uses them. + if not allow_prerelease: + candidates = [version for version in candidates if not version.is_prerelease] + candidates.sort(reverse=True) + if max_candidates > 0: + return candidates[:max_candidates] + return candidates + + +def _run_tasks( + project_dir: Path, + *, + tasks: list[str], + internal_editables: list[Path], + resolution: str, + dependency_pin: tuple[str, Version] | None, + include_dev_group: bool, + include_dev_extra: bool, + timeout_seconds: int, +) -> tuple[bool, str | None]: + env = dict(os.environ) + env["UV_PRERELEASE"] = "allow" + for task_name in tasks: + command = [ + "uv", + "--no-progress", + "--directory", + str(project_dir), + "run", + "--isolated", + "--resolution", + resolution, + "--prerelease", + "allow", + "--quiet", + ] + if include_dev_group: + command.extend(["--group", "dev"]) + if include_dev_extra: + command.extend(["--extra", "dev"]) + for editable_path in internal_editables: + command.extend(["--with-editable", str(editable_path)]) + if dependency_pin is not None: + dependency_name, dependency_version = dependency_pin + command.extend(["--with", f"{dependency_name}=={dependency_version}"]) + command.extend(["poe", task_name]) + try: + result = subprocess.run( + command, + capture_output=True, + text=True, + timeout=timeout_seconds, + check=False, + env=env, + ) + except subprocess.TimeoutExpired: + return False, f"Timeout while running task '{task_name}'." 
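+        # Any non-zero exit fails the whole trial; only a truncated tail of the
+        # combined stderr/stdout is kept so the JSON report stays readable.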
+ if result.returncode != 0: + return ( + False, + f"Task '{task_name}' failed.\n{_truncate_error(result.stdout, result.stderr)}", + ) + return True, None + + +def _optimize_dependency( + *, + temp_pyproject: Path, + dependency: DependencyTarget, + available_versions: list[Version], + tasks: list[str], + internal_editables: list[Path], + dry_run: bool, + max_candidates: int, + timeout_seconds: int, + package_label: str, + include_dev_group: bool, + include_dev_extra: bool, +) -> DependencyOutcome: + if dependency.lower_version is None: + return DependencyOutcome( + name=dependency.name, + changed=False, + original_requirements=dependency.original_requirements, + final_requirements=dependency.original_requirements, + candidate_versions=[], + attempted_versions=[], + attempts=[], + skipped_reason="No lower bound available for optimization.", + ) + + candidates = _build_trial_lower_bounds( + available_versions, + lower=dependency.lower_version, + current_upper=dependency.upper_version, + allow_prerelease=dependency.allow_prerelease_candidates, + max_candidates=max_candidates, + ) + candidate_versions = [str(candidate) for candidate in candidates] + current_requirements = list(dependency.original_requirements) + attempted_versions: list[str] = [] + attempts: list[DependencyAttempt] = [] + + baseline_version = dependency.lower_version + attempted_versions.append(str(baseline_version)) + print(f"[cyan]{package_label} :: {dependency.name} :: baseline current_lower [{baseline_version}] [/cyan]") + if dry_run: + attempts.append( + DependencyAttempt( + trial_lower=str(baseline_version), + status="current_lower_dry_run_pass", + ) + ) + else: + success, error = _run_tasks( + temp_pyproject.parent, + tasks=tasks, + internal_editables=internal_editables, + resolution="highest", + dependency_pin=(dependency.name, baseline_version), + include_dev_group=include_dev_group, + include_dev_extra=include_dev_extra, + timeout_seconds=timeout_seconds, + ) + if not success: + attempts.append( + DependencyAttempt( + trial_lower=str(baseline_version), + status="failed", + error=error, + ) + ) + return DependencyOutcome( + name=dependency.name, + changed=False, + original_requirements=dependency.original_requirements, + final_requirements=dependency.original_requirements, + candidate_versions=candidate_versions, + attempted_versions=attempted_versions, + attempts=attempts, + skipped_reason="Baseline validation failed at current_lower.", + ) + + attempts.append( + DependencyAttempt( + trial_lower=str(baseline_version), + status="current_lower_passed", + ) + ) + + if not candidates: + return DependencyOutcome( + name=dependency.name, + changed=False, + original_requirements=dependency.original_requirements, + final_requirements=dependency.original_requirements, + candidate_versions=[], + attempted_versions=attempted_versions, + attempts=attempts, + skipped_reason="No lower candidate bounds found.", + ) + + for candidate in candidates: + attempted_versions.append(str(candidate)) + trial_requirements = [entry.with_lower(candidate) for entry in dependency.entries] + replacements = list(zip(current_requirements, trial_requirements, strict=True)) + _replace_requirements(temp_pyproject, [(old, new) for old, new in replacements]) + + print(f"[cyan]{package_label} :: {dependency.name} -> >={candidate}[/cyan]") + if dry_run: + attempts.append(DependencyAttempt(trial_lower=str(candidate), status="dry_run_pass")) + current_requirements = trial_requirements + continue + + success, error = _run_tasks( + temp_pyproject.parent, + 
tasks=tasks, + internal_editables=internal_editables, + resolution="highest", + dependency_pin=(dependency.name, candidate), + include_dev_group=include_dev_group, + include_dev_extra=include_dev_extra, + timeout_seconds=timeout_seconds, + ) + if success: + attempts.append(DependencyAttempt(trial_lower=str(candidate), status="passed")) + current_requirements = trial_requirements + continue + + attempts.append(DependencyAttempt(trial_lower=str(candidate), status="failed", error=error)) + _replace_requirements(temp_pyproject, [(new, old) for old, new in replacements]) + continue + + changed = current_requirements != dependency.original_requirements + return DependencyOutcome( + name=dependency.name, + changed=changed, + original_requirements=dependency.original_requirements, + final_requirements=current_requirements, + candidate_versions=candidate_versions, + attempted_versions=attempted_versions, + attempts=attempts, + ) + + +def _process_package( + plan: PackagePlan, + *, + catalog: VersionCatalog, + dependency_filters: set[str] | None, + dry_run: bool, + max_candidates: int, + timeout_seconds: int, +) -> PackageOutcome: + pyproject_file = plan.pyproject_path + source_workspace_root = pyproject_file.parent.parent.parent.resolve() + available_tasks = extract_poe_tasks(pyproject_file) + tasks = _select_validation_tasks(available_tasks) + if not tasks: + return PackageOutcome( + project_path=str(plan.project_path), + package_name=plan.package_name, + tasks=[], + changed=False, + dependencies=[], + replacements={}, + skipped=["No check/test task combination found."], + ) + + targets, skipped = _collect_targets(pyproject_file, dependency_filters=dependency_filters) + if not targets: + return PackageOutcome( + project_path=str(plan.project_path), + package_name=plan.package_name, + tasks=tasks, + changed=False, + dependencies=[], + replacements={}, + skipped=[*skipped, "No eligible dependencies with lower+upper bounds."], + ) + + with tempfile.TemporaryDirectory(prefix=f"dep-lower-{plan.project_path.name}-") as temp_dir: + temp_root = Path(temp_dir) + temp_workspace_root = temp_root / source_workspace_root.name + shutil.copytree( + source_workspace_root, + temp_workspace_root, + ignore=shutil.ignore_patterns( + ".git", + ".venv", + "__pycache__", + ".pytest_cache", + ".mypy_cache", + ".ruff_cache", + "node_modules", + "dist", + ), + ) + + temp_packages_dir = temp_workspace_root / "packages" + if temp_packages_dir.exists(): + for package_dir in temp_packages_dir.iterdir(): + if package_dir.is_dir() and not (package_dir / "pyproject.toml").exists(): + shutil.rmtree(package_dir) + + temp_project_dir = temp_workspace_root / plan.project_path + temp_pyproject = temp_project_dir / "pyproject.toml" + temp_internal_editables: list[Path] = [] + for editable in plan.internal_editables: + try: + relative_editable = editable.resolve().relative_to(source_workspace_root) + except ValueError: + continue + candidate = temp_workspace_root / relative_editable + if candidate.exists(): + temp_internal_editables.append(candidate) + + dependency_results: list[DependencyOutcome] = [] + replacements: dict[str, str] = {} + package_label = f"{plan.project_path} ({plan.package_name})" + + for target in targets: + versions = catalog.get(target.name) + outcome = _optimize_dependency( + temp_pyproject=temp_pyproject, + dependency=target, + available_versions=versions, + tasks=tasks, + internal_editables=temp_internal_editables, + dry_run=dry_run, + max_candidates=max_candidates, + timeout_seconds=timeout_seconds, + 
package_label=package_label, + include_dev_group=plan.include_dev_group, + include_dev_extra=plan.include_dev_extra, + ) + dependency_results.append(outcome) + if outcome.changed: + for old, new in zip(outcome.original_requirements, outcome.final_requirements, strict=True): + replacements[old] = new + + return PackageOutcome( + project_path=str(plan.project_path), + package_name=plan.package_name, + tasks=tasks, + changed=bool(replacements), + dependencies=dependency_results, + replacements=replacements, + skipped=skipped, + ) + + +def _write_json(path: Path, payload: dict) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(json.dumps(payload, indent=2, sort_keys=False)) + + +def _to_json(package_outcome: PackageOutcome) -> dict: + return { + "project_path": package_outcome.project_path, + "package_name": package_outcome.package_name, + "tasks": package_outcome.tasks, + "changed": package_outcome.changed, + "skipped": package_outcome.skipped, + "error": package_outcome.error, + "dependencies": [ + { + "name": dependency.name, + "changed": dependency.changed, + "original_requirements": dependency.original_requirements, + "final_requirements": dependency.final_requirements, + "candidate_versions": dependency.candidate_versions, + "attempted_versions": dependency.attempted_versions, + "skipped_reason": dependency.skipped_reason, + "attempts": [ + { + "trial_lower": attempt.trial_lower, + "status": attempt.status, + "error": attempt.error, + } + for attempt in dependency.attempts + ], + } + for dependency in package_outcome.dependencies + ], + } + + +def _apply_package_replacements(path: Path, replacements: dict[str, str]) -> None: + if not replacements: + return + _replace_requirements(path, list(replacements.items())) + + +def main() -> None: + """Run package-by-package dependency lower-bound discovery and updates.""" + parser = argparse.ArgumentParser( + description=( + "Lower dependency bounds per package, run lint+test in isolated uv envs, " + "and write a JSON report while updating pyproject files." 
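+            # Each trial is executed by _run_tasks via `uv run --isolated`,
+            # with the candidate version pinned through `--with`.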
+ ) + ) + parser.add_argument( + "--packages", + nargs="*", + default=None, + help="Optional package filters by workspace path (e.g., packages/core) or package name.", + ) + parser.add_argument( + "--dependencies", + nargs="*", + default=None, + help="Optional dependency-name filters (normalized to lowercase).", + ) + parser.add_argument( + "--parallelism", + type=int, + default=max(1, min(os.cpu_count() or 4, 8)), + help="Number of packages to process concurrently.", + ) + parser.add_argument( + "--max-candidates", + type=int, + default=0, + help="Maximum candidate lower bounds per dependency (0 = no limit).", + ) + parser.add_argument( + "--output-json", + default="scripts/dependency-lower-bound-results.json", + help="Path to incremental JSON output report.", + ) + parser.add_argument( + "--version-source", + choices=("pypi", "lock"), + default="pypi", + help="Version source for candidate lower bounds.", + ) + parser.add_argument( + "--timeout-seconds", + type=int, + default=1200, + help="Timeout per task command execution.", + ) + parser.add_argument("--dry-run", action="store_true", help="Do not execute uv commands or update pyprojects.") + args = parser.parse_args() + + workspace_pyproject = Path(__file__).parent.parent / "pyproject.toml" + workspace_root = workspace_pyproject.parent + package_filters = set(args.packages) if args.packages else None + dependency_filters = {name.lower() for name in args.dependencies} if args.dependencies else None + output_json_path = (workspace_root / args.output_json).resolve() + + package_map = _build_workspace_package_map(workspace_root) + internal_graph = _build_internal_graph(workspace_root, package_map) + lock_versions = _load_lock_versions(workspace_root) + catalog = VersionCatalog(lock_versions=lock_versions, source=args.version_source) + + plans: list[PackagePlan] = [] + for project_path in sorted(set(discover_projects(workspace_pyproject))): + pyproject_file = workspace_root / project_path / "pyproject.toml" + if not pyproject_file.exists(): + print(f"[yellow]Skipping {project_path}: missing pyproject.toml[/yellow]") + continue + package_name = _load_package_name(pyproject_file) + with pyproject_file.open("rb") as f: + package_config = tomli.load(f) + project_section = package_config.get("project", {}) + optional_dependencies = project_section.get("optional-dependencies", {}) or {} + dependency_groups = package_config.get("dependency-groups", {}) or {} + if package_filters and str(project_path) not in package_filters and package_name not in package_filters: + continue + plans.append( + PackagePlan( + project_path=project_path, + package_name=package_name, + pyproject_path=pyproject_file, + internal_editables=_resolve_internal_editables(package_name, package_map, internal_graph), + include_dev_group="dev" in dependency_groups, + include_dev_extra="dev" in optional_dependencies, + ) + ) + + if not plans: + print("[yellow]No packages matched the selection.[/yellow]") + return + + report: dict = { + "started_at": _utc_now(), + "workspace_root": str(workspace_root), + "version_source": args.version_source, + "dry_run": args.dry_run, + "packages": [], + "summary": { + "packages_total": len(plans), + "packages_changed": 0, + "dependencies_changed": 0, + "dependencies_failed": 0, + }, + } + _write_json(output_json_path, report) + print(f"[cyan]Writing dependency-lower-bound report to {output_json_path}[/cyan]") + + package_outcomes: list[PackageOutcome] = [] + with concurrent.futures.ThreadPoolExecutor(max_workers=max(1, args.parallelism)) as 
executor: + future_to_plan = { + executor.submit( + _process_package, + plan, + catalog=catalog, + dependency_filters=dependency_filters, + dry_run=args.dry_run, + max_candidates=args.max_candidates, + timeout_seconds=args.timeout_seconds, + ): plan + for plan in plans + } + + for future in concurrent.futures.as_completed(future_to_plan): + plan = future_to_plan[future] + try: + outcome = future.result() + except Exception as exc: + outcome = PackageOutcome( + project_path=str(plan.project_path), + package_name=plan.package_name, + tasks=[], + changed=False, + dependencies=[], + replacements={}, + skipped=[], + error=str(exc), + ) + package_outcomes.append(outcome) + + if outcome.changed and not args.dry_run: + _apply_package_replacements(plan.pyproject_path, outcome.replacements) + + report["packages"].append(_to_json(outcome)) + report["summary"]["packages_changed"] = sum(1 for value in package_outcomes if value.changed) + report["summary"]["dependencies_changed"] = sum( + 1 for value in package_outcomes for dependency in value.dependencies if dependency.changed + ) + report["summary"]["dependencies_failed"] = sum( + 1 + for value in package_outcomes + for dependency in value.dependencies + for attempt in dependency.attempts + if attempt.status == "failed" + ) + report["updated_at"] = _utc_now() + _write_json(output_json_path, report) + + if outcome.error: + print(f"[red]{plan.project_path}: package execution error[/red]") + elif outcome.changed: + print(f"[green]{plan.project_path}: updated dependency lower bounds[/green]") + else: + print(f"[yellow]{plan.project_path}: no changes[/yellow]") + + print( + "[bold]Done.[/bold] " + f"packages_changed={report['summary']['packages_changed']}, " + f"dependencies_changed={report['summary']['dependencies_changed']}, " + f"failed_attempts={report['summary']['dependencies_failed']}" + ) + + +if __name__ == "__main__": + main() From cdf38a5b93616b332d21934b03a360f3a13ed866 Mon Sep 17 00:00:00 2001 From: eavanvalkenburg Date: Fri, 27 Feb 2026 16:03:23 +0100 Subject: [PATCH 5/8] updated deps --- .gitignore | 1 + .../server/main.py | 16 +++- python/packages/ag-ui/pyproject.toml | 10 ++- python/packages/azure-ai/pyproject.toml | 2 +- python/packages/azurefunctions/pyproject.toml | 4 +- python/packages/core/pyproject.toml | 14 ++-- .../validate_dependency_lower_bounds.py | 34 +++++++-- python/scripts/validate_dependency_ranges.py | 9 ++- python/uv.lock | 76 ++++++++++--------- 9 files changed, 110 insertions(+), 56 deletions(-) diff --git a/.gitignore b/.gitignore index b66a5bb802..d82e2c4a54 100644 --- a/.gitignore +++ b/.gitignore @@ -206,6 +206,7 @@ WARP.md **/projectBrief.md **/tmpclaude* python/scripts/dependency-range-results.json +python/scripts/dependency-lower-bound-results.json # Azurite storage emulator files */__azurite_db_blob__.json* diff --git a/python/packages/ag-ui/agent_framework_ag_ui_examples/server/main.py b/python/packages/ag-ui/agent_framework_ag_ui_examples/server/main.py index 5ea275b5fd..b422d70c8e 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui_examples/server/main.py +++ b/python/packages/ag-ui/agent_framework_ag_ui_examples/server/main.py @@ -6,13 +6,12 @@ import logging import os -from typing import cast +from typing import Any, cast import uvicorn from agent_framework import ChatOptions from agent_framework._clients import SupportsChatGetResponse from agent_framework.ag_ui import add_agent_framework_fastapi_endpoint -from agent_framework.anthropic import AnthropicClient from agent_framework.azure import 
AzureOpenAIChatClient from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware @@ -26,6 +25,15 @@ from ..agents.ui_generator_agent import ui_generator_agent from ..agents.weather_agent import weather_agent +AnthropicClient: type[Any] | None +try: + import agent_framework.anthropic as _anthropic_namespace +except ImportError: + # If the Anthropic client isn't installed, we can still run the server with Azure OpenAI as the default chat client + AnthropicClient = None +else: + AnthropicClient = cast(type[Any] | None, getattr(_anthropic_namespace, "AnthropicClient", None)) + # Configure logging to file and console (disabled by default - set ENABLE_DEBUG_LOGGING=1 to enable) if os.getenv("ENABLE_DEBUG_LOGGING"): log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "ag_ui_server.log") @@ -70,7 +78,9 @@ # Set CHAT_CLIENT=anthropic to use Anthropic, defaults to Azure OpenAI client: SupportsChatGetResponse[ChatOptions] = cast( SupportsChatGetResponse[ChatOptions], - AnthropicClient() if os.getenv("CHAT_CLIENT", "").lower() == "anthropic" else AzureOpenAIChatClient(), + AnthropicClient() + if AnthropicClient is not None and os.getenv("CHAT_CLIENT", "").lower() == "anthropic" + else AzureOpenAIChatClient(), ) # Agentic Chat - basic chat agent diff --git a/python/packages/ag-ui/pyproject.toml b/python/packages/ag-ui/pyproject.toml index f3f861cc2e..665b186e95 100644 --- a/python/packages/ag-ui/pyproject.toml +++ b/python/packages/ag-ui/pyproject.toml @@ -24,13 +24,17 @@ classifiers = [ dependencies = [ "agent-framework-core>=1.0.0rc3", "ag-ui-protocol==0.1.13", - "fastapi>=0.104.0,<0.133.1", - "uvicorn>=0.30.0,<0.30.1" + "fastapi>=0.115.0,<0.133.1", + "uvicorn>=0.30.0,<0.30.6" ] [project.optional-dependencies] dev = [ - "pytest>=8.0.0,<9", + "pytest>=8.0.0,<9.0.2", + "pytest-asyncio>=1.0.0,<2", + "pytest-cov>=6.0.0,<7", + "pytest-xdist[psutil]>=3.2.0,<4", + "agent-framework-orchestrations>=1.0.0b260225", "httpx>=0.27.0,<0.29", ] diff --git a/python/packages/azure-ai/pyproject.toml b/python/packages/azure-ai/pyproject.toml index 60c270dafd..e61d6b818e 100644 --- a/python/packages/azure-ai/pyproject.toml +++ b/python/packages/azure-ai/pyproject.toml @@ -26,7 +26,7 @@ dependencies = [ "agent-framework-core>=1.0.0rc3", "azure-ai-agents>=1.2.0b5,<1.2.0b6", "azure-ai-inference>=1.0.0b9,<1.0.0b10", - "aiohttp>=3.13.3,<4", + "aiohttp>=3.7.0,<4", ] [tool.uv] diff --git a/python/packages/azurefunctions/pyproject.toml b/python/packages/azurefunctions/pyproject.toml index c3789da68a..f83a8296f3 100644 --- a/python/packages/azurefunctions/pyproject.toml +++ b/python/packages/azurefunctions/pyproject.toml @@ -24,8 +24,8 @@ classifiers = [ dependencies = [ "agent-framework-core>=1.0.0rc3", "agent-framework-durabletask", - "azure-functions>=1.24.0,<2", - "azure-functions-durable>=1.5.0,<2", + "azure-functions>=1.0.3,<2", + "azure-functions-durable>=1.0.0,<2", ] [dependency-groups] diff --git a/python/packages/core/pyproject.toml b/python/packages/core/pyproject.toml index e41839cf12..fe69b5cc51 100644 --- a/python/packages/core/pyproject.toml +++ b/python/packages/core/pyproject.toml @@ -24,19 +24,19 @@ classifiers = [ ] dependencies = [ # utilities - "typing-extensions>=4.15.0,<5", + "typing-extensions>=4.0.0,<5", "pydantic>=2,<3", "python-dotenv>=1,<2", # telemetry - "opentelemetry-api>=1.39.0,<2", - "opentelemetry-sdk>=1.39.0,<2", - "opentelemetry-semantic-conventions-ai>=0.4.13,<0.4.14", + "opentelemetry-api>=1.0.0,<2", + "opentelemetry-sdk>=1.0.0,<2", + 
"opentelemetry-semantic-conventions-ai>=0.4.0,<0.4.14", # connectors and functions - "openai>=1.99.0,<3", + "openai>=1.0.0,<3", "azure-identity>=1,<2", "azure-ai-projects>=2.0.0b3,<2.0.0b4", - "mcp[ws]>=1.24.0,<2", - "packaging>=24.1,<26.0", + "mcp[ws]>=1.0.0,<2", + "packaging>=24.0,<26.0", ] [project.optional-dependencies] diff --git a/python/scripts/validate_dependency_lower_bounds.py b/python/scripts/validate_dependency_lower_bounds.py index d042459394..54c19024bb 100644 --- a/python/scripts/validate_dependency_lower_bounds.py +++ b/python/scripts/validate_dependency_lower_bounds.py @@ -339,7 +339,12 @@ def _build_internal_graph(workspace_root: Path, package_map: dict[str, Path]) -> pyproject_file = package_path / "pyproject.toml" with pyproject_file.open("rb") as f: data = tomli.load(f) - dependencies = data.get("project", {}).get("dependencies", []) or [] + project = data.get("project", {}) or {} + dependencies: list[str] = list(project.get("dependencies", []) or []) + for values in (project.get("optional-dependencies", {}) or {}).values(): + dependencies.extend([value for value in (values or []) if isinstance(value, str)]) + for values in (data.get("dependency-groups", {}) or {}).values(): + dependencies.extend([value for value in (values or []) if isinstance(value, str)]) internal = set() for dependency in dependencies: parsed = _parse_requirement(dependency) @@ -460,7 +465,18 @@ def _build_trial_lower_bounds( # `packaging` treats .dev/.a/.b/.rc as prereleases; only probe them when current spec already uses them. if not allow_prerelease: candidates = [version for version in candidates if not version.is_prerelease] - candidates.sort(reverse=True) + if lower.major >= 1: + major_floor = Version(f"{lower.major}.0.0") + candidates = [version for version in candidates if version.major == lower.major and version >= major_floor] + else: + minor_floor = Version(f"0.{lower.minor}.0") + candidates = [ + version + for version in candidates + if version.major == 0 and version.minor == lower.minor and version >= minor_floor + ] + + candidates.sort() if max_candidates > 0: return candidates[:max_candidates] return candidates @@ -479,6 +495,7 @@ def _run_tasks( ) -> tuple[bool, str | None]: env = dict(os.environ) env["UV_PRERELEASE"] = "allow" + env.pop("VIRTUAL_ENV", None) for task_name in tasks: command = [ "uv", @@ -486,6 +503,7 @@ def _run_tasks( "--directory", str(project_dir), "run", + "--active", "--isolated", "--resolution", resolution, @@ -616,10 +634,14 @@ def _optimize_dependency( candidate_versions=[], attempted_versions=attempted_versions, attempts=attempts, - skipped_reason="No lower candidate bounds found.", + skipped_reason="No lower candidate bounds found within allowed boundary.", ) - for candidate in candidates: + low = 0 + high = len(candidates) - 1 + while low <= high: + midpoint = (low + high) // 2 + candidate = candidates[midpoint] attempted_versions.append(str(candidate)) trial_requirements = [entry.with_lower(candidate) for entry in dependency.entries] replacements = list(zip(current_requirements, trial_requirements, strict=True)) @@ -629,6 +651,7 @@ def _optimize_dependency( if dry_run: attempts.append(DependencyAttempt(trial_lower=str(candidate), status="dry_run_pass")) current_requirements = trial_requirements + high = midpoint - 1 continue success, error = _run_tasks( @@ -644,11 +667,12 @@ def _optimize_dependency( if success: attempts.append(DependencyAttempt(trial_lower=str(candidate), status="passed")) current_requirements = trial_requirements + high = midpoint - 1 
continue attempts.append(DependencyAttempt(trial_lower=str(candidate), status="failed", error=error)) _replace_requirements(temp_pyproject, [(new, old) for old, new in replacements]) - continue + low = midpoint + 1 changed = current_requirements != dependency.original_requirements return DependencyOutcome( diff --git a/python/scripts/validate_dependency_ranges.py b/python/scripts/validate_dependency_ranges.py index b93c3e426b..6e12e220cf 100644 --- a/python/scripts/validate_dependency_ranges.py +++ b/python/scripts/validate_dependency_ranges.py @@ -335,7 +335,12 @@ def _build_internal_graph(workspace_root: Path, package_map: dict[str, Path]) -> pyproject_file = package_path / "pyproject.toml" with pyproject_file.open("rb") as f: data = tomli.load(f) - dependencies = data.get("project", {}).get("dependencies", []) or [] + project = data.get("project", {}) or {} + dependencies: list[str] = list(project.get("dependencies", []) or []) + for values in (project.get("optional-dependencies", {}) or {}).values(): + dependencies.extend([value for value in (values or []) if isinstance(value, str)]) + for values in (data.get("dependency-groups", {}) or {}).values(): + dependencies.extend([value for value in (values or []) if isinstance(value, str)]) internal = set() for dependency in dependencies: parsed = _parse_requirement(dependency) @@ -512,6 +517,7 @@ def _run_tasks( ) -> tuple[bool, str | None]: env = dict(os.environ) env["UV_PRERELEASE"] = "allow" + env.pop("VIRTUAL_ENV", None) for task_name in tasks: command = [ "uv", @@ -519,6 +525,7 @@ def _run_tasks( "--directory", str(project_dir), "run", + "--active", "--isolated", "--resolution", resolution, diff --git a/python/uv.lock b/python/uv.lock index 6c1e730353..6346479fd9 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -155,7 +155,7 @@ dependencies = [ [package.metadata] requires-dist = [ - { name = "a2a-sdk", specifier = ">=0.3.5,<0.3.24" }, + { name = "a2a-sdk", specifier = ">=0.2.1,<0.3.24" }, { name = "agent-framework-core", editable = "packages/core" }, ] @@ -172,18 +172,26 @@ dependencies = [ [package.optional-dependencies] dev = [ + { name = "agent-framework-orchestrations", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-asyncio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-cov", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-xdist", extra = ["psutil"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] [package.metadata] requires-dist = [ { name = "ag-ui-protocol", specifier = "==0.1.13" }, { name = "agent-framework-core", editable = "packages/core" }, - { name = "fastapi", specifier = ">=0.104.0,<0.133.1" }, + { name = "agent-framework-orchestrations", marker = "extra == 'dev'", editable = "packages/orchestrations" }, + { name = "fastapi", specifier = ">=0.115.0,<0.133.1" }, { name = "httpx", marker = "extra == 'dev'", specifier = ">=0.27.0,<0.29" }, - { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0,<9" }, - { name = "uvicorn", specifier = ">=0.30.0,<0.30.1" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0,<9.0.2" }, + { name = 
"pytest-asyncio", marker = "extra == 'dev'", specifier = ">=1.0.0,<2" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=6.0.0,<7" }, + { name = "pytest-xdist", extras = ["psutil"], marker = "extra == 'dev'", specifier = ">=3.2.0,<4" }, + { name = "uvicorn", specifier = ">=0.30.0,<0.30.6" }, ] provides-extras = ["dev"] @@ -216,8 +224,8 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "aiohttp", specifier = ">=3.13.3,<4" }, - { name = "azure-ai-agents", specifier = ">=1.2.0b5,<1.2.0b6" }, + { name = "aiohttp", specifier = ">=3.7.0,<4" }, + { name = "azure-ai-agents", specifier = ">=1.0.0,<1.2.0b6" }, { name = "azure-ai-inference", specifier = ">=1.0.0b9,<1.0.0b10" }, ] @@ -233,7 +241,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "azure-search-documents", specifier = ">=11.7.0b2,<11.7.0b3" }, + { name = "azure-search-documents", specifier = ">=11.0.0,<11.7.0b3" }, ] [[package]] @@ -266,8 +274,8 @@ dependencies = [ requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, { name = "agent-framework-durabletask", editable = "packages/durabletask" }, - { name = "azure-functions", specifier = ">=1.24.0,<2" }, - { name = "azure-functions-durable", specifier = ">=1.5.0,<2" }, + { name = "azure-functions", specifier = ">=1.0.3,<2" }, + { name = "azure-functions-durable", specifier = ">=1.0.0,<2" }, ] [package.metadata.requires-dev] @@ -286,8 +294,8 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "boto3", specifier = ">=1.35.0,<2.0.0" }, - { name = "botocore", specifier = ">=1.35.0,<2.0.0" }, + { name = "boto3", specifier = ">=1.0.0,<2.0.0" }, + { name = "botocore", specifier = ">=1.0.0,<2.0.0" }, ] [[package]] @@ -302,7 +310,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "openai-chatkit", specifier = ">=1.4.0,<2.0.0" }, + { name = "openai-chatkit", specifier = ">=1.0.0,<2.0.0" }, ] [[package]] @@ -317,7 +325,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "claude-agent-sdk", specifier = ">=0.1.25,<0.1.26" }, + { name = "claude-agent-sdk", specifier = ">=0.1.0,<0.1.26" }, ] [[package]] @@ -332,7 +340,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "microsoft-agents-copilotstudio-client", specifier = ">=0.3.1,<0.3.2" }, + { name = "microsoft-agents-copilotstudio-client", specifier = ">=0.0.0,<0.3.2" }, ] [[package]] @@ -403,15 +411,15 @@ requires-dist = [ { name = "agent-framework-redis", marker = "extra == 'all'", editable = "packages/redis" }, { name = "azure-ai-projects", specifier = "==2.0.0b4" }, { name = "azure-identity", specifier = ">=1,<2" }, - { name = "mcp", extras = ["ws"], specifier = ">=1.24.0,<2" }, - { name = "openai", specifier = ">=1.99.0,<3" }, - { name = "opentelemetry-api", specifier = ">=1.39.0,<2" }, - { name = "opentelemetry-sdk", specifier = ">=1.39.0,<2" }, - { name = "opentelemetry-semantic-conventions-ai", specifier = ">=0.4.13,<0.4.14" }, - { name = "packaging", specifier = ">=24.1,<26.0" }, + { name = "mcp", extras = ["ws"], specifier = ">=1.0.0,<2" }, + { name = "openai", specifier = ">=1.0.0,<3" }, + { name = "opentelemetry-api", 
specifier = ">=1.0.0,<2" }, + { name = "opentelemetry-sdk", specifier = ">=1.0.0,<2" }, + { name = "opentelemetry-semantic-conventions-ai", specifier = ">=0.4.0,<0.4.14" }, + { name = "packaging", specifier = ">=24.0,<26.0" }, { name = "pydantic", specifier = ">=2,<3" }, { name = "python-dotenv", specifier = ">=1,<2" }, - { name = "typing-extensions", specifier = ">=4.15.0,<5" }, + { name = "typing-extensions", specifier = ">=4.0.0,<5" }, ] provides-extras = ["all"] @@ -499,9 +507,9 @@ dev = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "durabletask", specifier = ">=1.3.0,<2" }, - { name = "durabletask-azuremanaged", specifier = ">=1.3.0,<2" }, - { name = "python-dateutil", specifier = ">=2.8.0,<3" }, + { name = "durabletask", specifier = ">=0.1.0,<2" }, + { name = "durabletask-azuremanaged", specifier = ">=0.1.1,<2" }, + { name = "python-dateutil", specifier = ">=1.4,<3" }, ] [package.metadata.requires-dev] @@ -519,7 +527,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "foundry-local-sdk", specifier = ">=0.5.1,<0.5.2" }, + { name = "foundry-local-sdk", specifier = ">=0.3.0,<0.5.2" }, ] [[package]] @@ -644,7 +652,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "ollama", specifier = ">=0.5.3,<0.5.4" }, + { name = "ollama", specifier = ">=0.0.0,<0.5.4" }, ] [[package]] @@ -671,7 +679,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "azure-core", specifier = ">=1.30.0,<2" }, + { name = "azure-core", specifier = ">=1.0.0,<2" }, { name = "httpx", specifier = ">=0.27.0,<0.29" }, ] @@ -690,9 +698,9 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "numpy", specifier = ">=2.2.6,<3" }, - { name = "redis", specifier = ">=6.4.0,<7.2.1" }, - { name = "redisvl", specifier = ">=0.8.2,<0.8.3" }, + { name = "numpy", specifier = ">=2.1.0,<3" }, + { name = "redis", specifier = ">=6.0.0,<7.2.1" }, + { name = "redisvl", specifier = ">=0.8.0,<0.8.3" }, ] [[package]] @@ -1413,7 +1421,7 @@ name = "clr-loader" version = "0.2.10" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cffi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "cffi", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or (python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/18/24/c12faf3f61614b3131b5c98d3bf0d376b49c7feaa73edca559aeb2aee080/clr_loader-0.2.10.tar.gz", hash = "sha256:81f114afbc5005bafc5efe5af1341d400e22137e275b042a8979f3feb9fc9446", size = 83605, upload-time = "2026-01-03T23:13:06.984Z" } wheels = [ @@ -1892,7 +1900,7 @@ name = "exceptiongroup" version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform 
== 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ @@ -4653,7 +4661,7 @@ name = "powerfx" version = "0.0.31" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pythonnet", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pythonnet", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or (python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/56/1d/40228886242df10c10ed69faf27e973d020c586aa723a51afbe48542d535/powerfx-0.0.31.tar.gz", hash = "sha256:fa9637f315d71163dd900d16f97fce562d550049713d2fc358f8d446bb23906f", size = 3235618, upload-time = "2025-09-16T15:10:13.159Z" } wheels = [ @@ -5316,7 +5324,7 @@ name = "pythonnet" version = "3.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "clr-loader", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "clr-loader", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or (python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/9a/d6/1afd75edd932306ae9bd2c2d961d603dc2b52fcec51b04afea464f1f6646/pythonnet-3.0.5.tar.gz", hash = "sha256:48e43ca463941b3608b32b4e236db92d8d40db4c58a75ace902985f76dac21cf", size = 239212, upload-time = "2024-12-13T08:30:44.393Z" } wheels = [ From a841df6cbbcef7cae06b657539843c1248c9fce9 Mon Sep 17 00:00:00 2001 From: eavanvalkenburg Date: Fri, 27 Feb 2026 17:17:29 +0100 Subject: [PATCH 6/8] fix tiktoken --- python/packages/lab/pyproject.toml | 2 +- python/uv.lock | 89 +++++++++++++++++++----------- 2 files changed, 58 insertions(+), 33 deletions(-) diff --git a/python/packages/lab/pyproject.toml b/python/packages/lab/pyproject.toml index 1db05d0123..af6d9aad28 100644 --- a/python/packages/lab/pyproject.toml +++ b/python/packages/lab/pyproject.toml @@ -43,7 +43,7 @@ lightning = [ # TAU2 benchmark module dependencies tau2 = [ "pydantic>=2,<4", - "tiktoken>=0.11.0,<0.11.1", + "tiktoken>=0.12.0,<0.12.1", "loguru>=0.7.3,<0.7.4", "numpy>=2.2.6,<3", ] diff --git a/python/uv.lock b/python/uv.lock index 6346479fd9..1509ccd9e0 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -604,7 +604,7 @@ requires-dist = [ { name = "pydantic", marker = "extra == 'gaia'", specifier = ">=2,<4" }, { name = "pydantic", marker = "extra == 'tau2'", specifier = ">=2,<4" }, { name = "sympy", marker = "extra == 'math'", specifier = ">=1.13.0,<2" }, - { name = "tiktoken", marker = "extra == 'tau2'", specifier = ">=0.11.0,<0.11.1" }, + { name = "tiktoken", marker = "extra == 'tau2'", specifier = ">=0.12.0,<0.12.1" }, { name = "tqdm", marker = "extra == 'gaia'", specifier = ">=4.60.0,<5" }, ] provides-extras = ["gaia", "lightning", "tau2", "math"] @@ -1421,7 +1421,7 @@ name = "clr-loader" version = "0.2.10" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cffi", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or 
(python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, + { name = "cffi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/18/24/c12faf3f61614b3131b5c98d3bf0d376b49c7feaa73edca559aeb2aee080/clr_loader-0.2.10.tar.gz", hash = "sha256:81f114afbc5005bafc5efe5af1341d400e22137e275b042a8979f3feb9fc9446", size = 83605, upload-time = "2026-01-03T23:13:06.984Z" } wheels = [ @@ -1900,7 +1900,7 @@ name = "exceptiongroup" version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, + { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ @@ -4661,7 +4661,7 @@ name = "powerfx" version = "0.0.31" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pythonnet", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or (python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, + { name = "pythonnet", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/56/1d/40228886242df10c10ed69faf27e973d020c586aa723a51afbe48542d535/powerfx-0.0.31.tar.gz", hash = "sha256:fa9637f315d71163dd900d16f97fce562d550049713d2fc358f8d446bb23906f", size = 3235618, upload-time = "2025-09-16T15:10:13.159Z" } wheels = [ @@ -5324,7 +5324,7 @@ name = "pythonnet" version = "3.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "clr-loader", marker = "(python_full_version < '3.14' and sys_platform == 'darwin') or (python_full_version < '3.14' and sys_platform == 'linux') or (python_full_version < '3.14' and sys_platform == 'win32')" }, + { name = "clr-loader", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/9a/d6/1afd75edd932306ae9bd2c2d961d603dc2b52fcec51b04afea464f1f6646/pythonnet-3.0.5.tar.gz", hash = "sha256:48e43ca463941b3608b32b4e236db92d8d40db4c58a75ace902985f76dac21cf", size = 239212, upload-time = "2024-12-13T08:30:44.393Z" } wheels = [ @@ -6400,38 +6400,63 @@ wheels = [ [[package]] name = "tiktoken" -version = "0.11.0" +version = "0.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/86/ad0155a37c4f310935d5ac0b1ccf9bdb635dcb906e0a9a26b616dd55825a/tiktoken-0.11.0.tar.gz", hash = 
"sha256:3c518641aee1c52247c2b97e74d8d07d780092af79d5911a6ab5e79359d9b06a", size = 37648, upload-time = "2025-08-08T23:58:08.495Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/4d/c6a2e7dca2b4f2e9e0bfd62b3fe4f114322e2c028cfba905a72bc76ce479/tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8a9b517d6331d7103f8bef29ef93b3cca95fa766e293147fe7bacddf310d5917", size = 1059937, upload-time = "2025-08-08T23:57:28.57Z" }, - { url = "https://files.pythonhosted.org/packages/41/54/3739d35b9f94cb8dc7b0db2edca7192d5571606aa2369a664fa27e811804/tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b4ddb1849e6bf0afa6cc1c5d809fb980ca240a5fffe585a04e119519758788c0", size = 999230, upload-time = "2025-08-08T23:57:30.241Z" }, - { url = "https://files.pythonhosted.org/packages/dd/f4/ec8d43338d28d53513004ebf4cd83732a135d11011433c58bf045890cc10/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10331d08b5ecf7a780b4fe4d0281328b23ab22cdb4ff65e68d56caeda9940ecc", size = 1130076, upload-time = "2025-08-08T23:57:31.706Z" }, - { url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882", size = 1183942, upload-time = "2025-08-08T23:57:33.142Z" }, - { url = "https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c", size = 1244705, upload-time = "2025-08-08T23:57:34.594Z" }, - { url = "https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1", size = 884152, upload-time = "2025-08-08T23:57:36.18Z" }, - { url = "https://files.pythonhosted.org/packages/8a/91/912b459799a025d2842566fe1e902f7f50d54a1ce8a0f236ab36b5bd5846/tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ae374c46afadad0f501046db3da1b36cd4dfbfa52af23c998773682446097cf", size = 1059743, upload-time = "2025-08-08T23:57:37.516Z" }, - { url = "https://files.pythonhosted.org/packages/8c/e9/6faa6870489ce64f5f75dcf91512bf35af5864583aee8fcb0dcb593121f5/tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25a512ff25dc6c85b58f5dd4f3d8c674dc05f96b02d66cdacf628d26a4e4866b", size = 999334, upload-time = "2025-08-08T23:57:38.595Z" }, - { url = "https://files.pythonhosted.org/packages/a1/3e/a05d1547cf7db9dc75d1461cfa7b556a3b48e0516ec29dfc81d984a145f6/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2130127471e293d385179c1f3f9cd445070c0772be73cdafb7cec9a3684c0458", size = 1129402, upload-time = "2025-08-08T23:57:39.627Z" }, - { url = "https://files.pythonhosted.org/packages/34/9a/db7a86b829e05a01fd4daa492086f708e0a8b53952e1dbc9d380d2b03677/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e43022bf2c33f733ea9b54f6a3f6b4354b909f5a73388fb1b9347ca54a069c", size = 1184046, upload-time = "2025-08-08T23:57:40.689Z" }, - { url = "https://files.pythonhosted.org/packages/9d/bb/52edc8e078cf062ed749248f1454e9e5cfd09979baadb830b3940e522015/tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:adb4e308eb64380dc70fa30493e21c93475eaa11669dea313b6bbf8210bfd013", size = 1244691, upload-time = "2025-08-08T23:57:42.251Z" }, - { url = "https://files.pythonhosted.org/packages/60/d9/884b6cd7ae2570ecdcaffa02b528522b18fef1cbbfdbcaa73799807d0d3b/tiktoken-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:ece6b76bfeeb61a125c44bbefdfccc279b5288e6007fbedc0d32bfec602df2f2", size = 884392, upload-time = "2025-08-08T23:57:43.628Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9e/eceddeffc169fc75fe0fd4f38471309f11cb1906f9b8aa39be4f5817df65/tiktoken-0.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fd9e6b23e860973cf9526544e220b223c60badf5b62e80a33509d6d40e6c8f5d", size = 1055199, upload-time = "2025-08-08T23:57:45.076Z" }, - { url = "https://files.pythonhosted.org/packages/4f/cf/5f02bfefffdc6b54e5094d2897bc80efd43050e5b09b576fd85936ee54bf/tiktoken-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a76d53cee2da71ee2731c9caa747398762bda19d7f92665e882fef229cb0b5b", size = 996655, upload-time = "2025-08-08T23:57:46.304Z" }, - { url = "https://files.pythonhosted.org/packages/65/8e/c769b45ef379bc360c9978c4f6914c79fd432400a6733a8afc7ed7b0726a/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef72aab3ea240646e642413cb363b73869fed4e604dcfd69eec63dc54d603e8", size = 1128867, upload-time = "2025-08-08T23:57:47.438Z" }, - { url = "https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd", size = 1183308, upload-time = "2025-08-08T23:57:48.566Z" }, - { url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e", size = 1244301, upload-time = "2025-08-08T23:57:49.642Z" }, - { url = "https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f", size = 884282, upload-time = "2025-08-08T23:57:50.759Z" }, - { url = "https://files.pythonhosted.org/packages/cc/cd/a9034bcee638716d9310443818d73c6387a6a96db93cbcb0819b77f5b206/tiktoken-0.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a5f3f25ffb152ee7fec78e90a5e5ea5b03b4ea240beed03305615847f7a6ace2", size = 1055339, upload-time = "2025-08-08T23:57:51.802Z" }, - { url = "https://files.pythonhosted.org/packages/f1/91/9922b345f611b4e92581f234e64e9661e1c524875c8eadd513c4b2088472/tiktoken-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dc6e9ad16a2a75b4c4be7208055a1f707c9510541d94d9cc31f7fbdc8db41d8", size = 997080, upload-time = "2025-08-08T23:57:53.442Z" }, - { url = "https://files.pythonhosted.org/packages/d0/9d/49cd047c71336bc4b4af460ac213ec1c457da67712bde59b892e84f1859f/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a0517634d67a8a48fd4a4ad73930c3022629a85a217d256a6e9b8b47439d1e4", size = 1128501, upload-time = "2025-08-08T23:57:54.808Z" }, - { url = "https://files.pythonhosted.org/packages/52/d5/a0dcdb40dd2ea357e83cb36258967f0ae96f5dd40c722d6e382ceee6bba9/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7fb4effe60574675118b73c6fbfd3b5868e5d7a1f570d6cc0d18724b09ecf318", size = 1182743, upload-time = "2025-08-08T23:57:56.307Z" }, - { url = "https://files.pythonhosted.org/packages/3b/17/a0fc51aefb66b7b5261ca1314afa83df0106b033f783f9a7bcbe8e741494/tiktoken-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94f984c9831fd32688aef4348803b0905d4ae9c432303087bae370dc1381a2b8", size = 1244057, upload-time = "2025-08-08T23:57:57.628Z" }, - { url = "https://files.pythonhosted.org/packages/50/79/bcf350609f3a10f09fe4fc207f132085e497fdd3612f3925ab24d86a0ca0/tiktoken-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2177ffda31dec4023356a441793fed82f7af5291120751dee4d696414f54db0c", size = 883901, upload-time = "2025-08-08T23:57:59.359Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/b3/2cb7c17b6c4cf8ca983204255d3f1d95eda7213e247e6947a0ee2c747a2c/tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3de02f5a491cfd179aec916eddb70331814bd6bf764075d39e21d5862e533970", size = 1051991, upload-time = "2025-10-06T20:21:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/27/0f/df139f1df5f6167194ee5ab24634582ba9a1b62c6b996472b0277ec80f66/tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6cfb6d9b7b54d20af21a912bfe63a2727d9cfa8fbda642fd8322c70340aad16", size = 995798, upload-time = "2025-10-06T20:21:35.579Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5d/26a691f28ab220d5edc09b9b787399b130f24327ef824de15e5d85ef21aa/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:cde24cdb1b8a08368f709124f15b36ab5524aac5fa830cc3fdce9c03d4fb8030", size = 1129865, upload-time = "2025-10-06T20:21:36.675Z" }, + { url = "https://files.pythonhosted.org/packages/b2/94/443fab3d4e5ebecac895712abd3849b8da93b7b7dec61c7db5c9c7ebe40c/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6de0da39f605992649b9cfa6f84071e3f9ef2cec458d08c5feb1b6f0ff62e134", size = 1152856, upload-time = "2025-10-06T20:21:37.873Z" }, + { url = "https://files.pythonhosted.org/packages/54/35/388f941251b2521c70dd4c5958e598ea6d2c88e28445d2fb8189eecc1dfc/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6faa0534e0eefbcafaccb75927a4a380463a2eaa7e26000f0173b920e98b720a", size = 1195308, upload-time = "2025-10-06T20:21:39.577Z" }, + { url = "https://files.pythonhosted.org/packages/f8/00/c6681c7f833dd410576183715a530437a9873fa910265817081f65f9105f/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:82991e04fc860afb933efb63957affc7ad54f83e2216fe7d319007dab1ba5892", size = 1255697, upload-time = "2025-10-06T20:21:41.154Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d2/82e795a6a9bafa034bf26a58e68fe9a89eeaaa610d51dbeb22106ba04f0a/tiktoken-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:6fb2995b487c2e31acf0a9e17647e3b242235a20832642bb7a9d1a181c0c1bb1", size = 879375, upload-time = "2025-10-06T20:21:43.201Z" }, + { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = 
"2025-10-06T20:21:44.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, + { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, + { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, ] [[package]] From b860e5317e6df14cb1a3c1c3da1911957c8634df Mon Sep 17 00:00:00 2001 From: eavanvalkenburg Date: Mon, 2 Mar 2026 12:39:02 +0100 Subject: [PATCH 7/8] chore(python): refine dependency validation workflows Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .../python-dependency-range-validation.yml | 4 +-- .../skills/python-package-management/SKILL.md | 33 ++++++++++++++++- python/CODING_STANDARD.md | 1 + python/DEV_SETUP.md | 27 +++++++++++++- python/pyproject.toml | 35 ++++++++++++++++++- 5 files changed, 95 insertions(+), 5 deletions(-) diff --git a/.github/workflows/python-dependency-range-validation.yml b/.github/workflows/python-dependency-range-validation.yml index 78c4fce11a..08c238474f 100644 --- a/.github/workflows/python-dependency-range-validation.yml +++ b/.github/workflows/python-dependency-range-validation.yml @@ -161,7 +161,7 @@ jobs: - name: Refresh lockfile if: steps.validate_ranges.outcome == 'success' - run: uv lock + run: uv lock --upgrade working-directory: ./python - name: Commit and push 
dependency updates @@ -197,7 +197,7 @@ jobs: - Ran `uv run poe validate-dependency-ranges` - Updated package dependency bounds - - Refreshed `python/uv.lock` + - Refreshed `python/uv.lock` with `uv lock --upgrade` EOF PR_NUMBER="$(gh pr list --head "${BRANCH}" --base main --state open --json number --jq '.[0].number')" diff --git a/python/.github/skills/python-package-management/SKILL.md b/python/.github/skills/python-package-management/SKILL.md index 8784aed453..a4fc7ae4c1 100644 --- a/python/.github/skills/python-package-management/SKILL.md +++ b/python/.github/skills/python-package-management/SKILL.md @@ -33,13 +33,32 @@ Uses [uv](https://github.com/astral-sh/uv) for dependency management and # Full setup (venv + install + prek hooks) uv run poe setup -# Install/update all dependencies +# Install dependencies from lockfile (frozen resolution with prerelease policy) uv run poe install # Create venv with specific Python version uv run poe venv --python 3.12 + +# Intentionally upgrade a specific dependency to reduce lockfile conflicts +uv lock --upgrade-package <package> && uv run poe install + +# After adding/changing an external dependency, extend min then max bounds +uv run poe validate-dependency-lower-bounds +uv run poe validate-dependency-ranges + +# Add a dependency to one project and run both validators for that project/dependency +uv run poe add-dependency-and-validate-bounds --project <project> --dependency "<dependency>" ``` ### Dependency Bound Notes - Stable dependencies (`>=1.0`) should typically be bounded as `>=<lower>,<<upper>`. - Prerelease (`dev`/`a`/`b`/`rc`) and `<1.0` dependencies should use hard bounds on a known-good line (avoid open-ended ranges). - Prefer supporting multiple majors when practical; if APIs diverge across supported majors, use version-conditional imports/paths. - For dependency changes, run lower-bound discovery first, then upper-bound validation to keep both minimum and maximum constraints current. - Prefer targeted lock updates with `uv lock --upgrade-package <package>` to reduce `uv.lock` merge conflicts. - Use `add-dependency-and-validate-bounds` for package-scoped dependency additions plus bound validation in one command. ## Lazy Loading Pattern Provider folders in core use `__getattr__` to lazy load from connector packages: @@ -74,6 +93,18 @@ def __getattr__(name: str) -> Any: 4. Do **NOT** add to `[all]` extra in `packages/core/pyproject.toml` 5. Do **NOT** create lazy loading in core yet +Recommended dependency workflow during connector implementation: + +1. Add the dependency to the target package: + `uv run poe add-dependency-to-project --project <project> --dependency "<dependency>"` +2. Implement connector code and tests. +3. Validate dependency bounds for that package/dependency: + - `uv run poe validate-dependency-lower-bounds-project --project <project> --dependency "<dependency>"` + - `uv run poe validate-dependency-ranges-project --project <project> --dependency "<dependency>"` +4. If the package has meaningful tests/checks that validate dependency compatibility, you can use the add + validation flow in one command: + `uv run poe add-dependency-and-validate-bounds --project <project> --dependency "<dependency>"` + If compatibility checks are not in place yet, add the dependency first, then implement tests before running bound validation. + ### Promotion to Stable 1. 
Move samples to root `samples/` folder diff --git a/python/CODING_STANDARD.md b/python/CODING_STANDARD.md index e654504f27..9eb89f304b 100644 --- a/python/CODING_STANDARD.md +++ b/python/CODING_STANDARD.md @@ -397,6 +397,7 @@ So we use bounded ranges for external package dependencies in `pyproject.toml`: - For prerelease (`dev`/`a`/`b`/`rc`) dependencies, use a known-good lower bound with a hard upper boundary in the same prerelease line (for example: `azure-ai-projects>=2.0.0b3,<2.0.0b4`). - For `<1.0.0` dependencies, use patch-bounded caps (`>=x.y.z,<x.y.(z+1)`), not minor-bounded caps (for example: `a2a-sdk>=0.3.5,<0.3.6`). - Prefer keeping support for multiple major versions when practical. This may mean that the upper bound spans multiple major versions when the dependency maintains backward compatibility; if APIs differ between supported majors, version-conditional imports/branches are acceptable to preserve compatibility. For `<1.0.0` and prerelease dependencies, also make the bounds as broad as possible but only for known packages, not for new ones, as the odds of breaking changes being introduced are higher. +- When adding or changing an external dependency, run `uv run poe validate-dependency-lower-bounds` first to extend the minimum supported bound, then run `uv run poe validate-dependency-ranges` to raise/set the maximum supported bound. ### Installation Options diff --git a/python/DEV_SETUP.md b/python/DEV_SETUP.md index 3769a5df9e..f47d45da6a 100644 --- a/python/DEV_SETUP.md +++ b/python/DEV_SETUP.md @@ -217,10 +217,11 @@ uv run poe setup --python 3.12 ``` #### `install` -Install all dependencies including extras and dev dependencies, including updates: +Install all dependencies (including extras and dev dependencies) from the lockfile using frozen resolution: ```bash uv run poe install ``` +For intentional dependency upgrades, run `uv lock --upgrade-package <package>` and then run `uv run poe install`. 
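The patch-bounded versus minor-bounded distinction in the CODING_STANDARD.md hunk above is easy to sanity-check with the `packaging` library, which implements the PEP 440 specifier semantics these bounds rely on. A minimal sketch (the concrete versions are illustrative, mirroring the `a2a-sdk` example, not real requirements):

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Patch-bounded cap, as recommended for <1.0.0 dependencies.
patch_capped = SpecifierSet(">=0.3.5,<0.3.6")
# Minor-bounded cap, which also admits every future patch release on the 0.3 line.
minor_capped = SpecifierSet(">=0.3.5,<0.4.0")

for candidate in ("0.3.5", "0.3.6", "0.3.9", "0.4.0"):
    version = Version(candidate)
    print(candidate, version in patch_capped, version in minor_capped)
```

Only `0.3.5` satisfies the patch cap, while `0.3.6` and `0.3.9` still satisfy the minor cap; that extra window is where fast-moving `<1.0.0` packages tend to introduce breaking changes, which is why the validation scripts re-test candidates before widening any bound.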
#### `venv` Create a virtual environment with specified Python version or switch python version: @@ -278,6 +279,30 @@ Lint markdown code blocks: uv run poe markdown-code-lint ``` +#### `validate-dependency-ranges` +Validate and extend external dependency upper bounds by running package checks/tests in isolated environments: +```bash +uv run poe validate-dependency-ranges +``` + +#### `validate-dependency-lower-bounds` +Validate and extend external dependency lower bounds by running package checks/tests in isolated environments: +```bash +uv run poe validate-dependency-lower-bounds +``` + +When adding or changing an external dependency, run lower bounds first, then upper bounds: +```bash +uv run poe validate-dependency-lower-bounds +uv run poe validate-dependency-ranges +``` + +#### `add-dependency-and-validate-bounds` +Add an external dependency to a workspace project and run both validators for that same project/dependency: +```bash +uv run poe add-dependency-and-validate-bounds --project <project> --dependency "<dependency>" +``` + ### Comprehensive Checks #### `check-packages` diff --git a/python/pyproject.toml b/python/pyproject.toml index 1f8707dfcc..498ac35221 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -211,7 +211,7 @@ executor.type = "uv" [tool.poe.tasks] markdown-code-lint = "uv run python scripts/check_md_code_blocks.py 'README.md' './packages/**/README.md' './samples/**/*.md' --exclude cookiecutter-agent-framework-lab --exclude tau2 --exclude 'packages/devui/frontend' --exclude context_providers/azure_ai_search" prek-install = "prek install --overwrite" -install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit" +install = "uv sync --all-packages --all-extras --dev --frozen --prerelease=if-necessary-or-explicit" test = "python scripts/run_tasks_in_packages_if_exists.py test" fmt = "python scripts/run_tasks_in_packages_if_exists.py fmt" format.ref = "fmt" @@ -222,6 +222,7 @@ mypy = "python scripts/run_tasks_in_packages_if_exists.py mypy" samples-syntax = "pyright -p pyrightconfig.samples.json --warnings" typing = ["pyright", "mypy"] validate-dependency-ranges = "python scripts/validate_dependency_ranges.py" +validate-dependency-lower-bounds = "python scripts/validate_dependency_lower_bounds.py" # cleaning clean-dist-packages = "python scripts/run_tasks_in_packages_if_exists.py clean-dist" clean-dist-meta = "rm -rf dist" @@ -286,6 +287,38 @@ sequence = [ ] args = [{ name = "python", default = "3.13", options = ['-p', '--python'] }] +[tool.poe.tasks.add-dependency-to-project] +cmd = "uv add --package ${project} ${dependency}" +args = [ + { name = "project", options = ["-p", "--project"] }, + { name = "dependency", options = ["-d", "--dependency"] }, +] + +[tool.poe.tasks.validate-dependency-lower-bounds-project] +cmd = "python scripts/validate_dependency_lower_bounds.py --packages ${project} --dependencies ${dependency}" +args = [ + { name = "project", options = ["-p", "--project"] }, + { name = "dependency", options = ["-d", "--dependency"] }, +] + +[tool.poe.tasks.validate-dependency-ranges-project] +cmd = "python scripts/validate_dependency_ranges.py --packages ${project} --dependencies ${dependency}" +args = [ + { name = "project", options = ["-p", "--project"] }, + { name = "dependency", options = ["-d", "--dependency"] }, +] + +[tool.poe.tasks.add-dependency-and-validate-bounds] +sequence = [ + { ref = "add-dependency-to-project --project ${project} --dependency ${dependency}" }, + { ref = "validate-dependency-lower-bounds-project --project 
${project} --dependency ${dependency}" }, + { ref = "validate-dependency-ranges-project --project ${project} --dependency ${dependency}" }, +] +args = [ + { name = "project", options = ["-p", "--project"] }, + { name = "dependency", options = ["-d", "--dependency"] }, +] + [tool.poe.tasks.prek-pyright] cmd = "uv run python scripts/run_tasks_in_changed_packages.py pyright --files ${files}" args = [{ name = "files", default = ".", positional = true, multiple = true }] From d42b45a4205ce4a0b6e888c73240af7f58292c5a Mon Sep 17 00:00:00 2001 From: eavanvalkenburg Date: Tue, 3 Mar 2026 13:36:21 +0100 Subject: [PATCH 8/8] docs(python): add high-level dependency validation comments Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .github/workflows/python-dependency-range-validation.yml | 5 +++++ python/scripts/validate_dependency_lower_bounds.py | 7 +++++++ python/scripts/validate_dependency_ranges.py | 6 ++++++ 3 files changed, 18 insertions(+) diff --git a/.github/workflows/python-dependency-range-validation.yml b/.github/workflows/python-dependency-range-validation.yml index 08c238474f..919226e2ad 100644 --- a/.github/workflows/python-dependency-range-validation.yml +++ b/.github/workflows/python-dependency-range-validation.yml @@ -33,11 +33,13 @@ jobs: - name: Run dependency range validation id: validate_ranges + # Keep workflow running so we can still publish diagnostics from this run. continue-on-error: true run: uv run poe validate-dependency-ranges working-directory: ./python - name: Upload dependency range report + # Always publish the report so failures are inspectable even when validation fails. if: always() uses: actions/upload-artifact@v4 with: @@ -46,6 +48,7 @@ jobs: if-no-files-found: warn - name: Create issues for failed dependency candidates + # Always process the report so failed candidates create actionable tracking issues. if: always() uses: actions/github-script@v8 with: @@ -160,6 +163,7 @@ jobs: } - name: Refresh lockfile + # Only refresh lockfile after a clean validation to avoid committing known-bad ranges. if: steps.validate_ranges.outcome == 'success' run: uv lock --upgrade working-directory: ./python @@ -186,6 +190,7 @@ jobs: echo "has_changes=true" >> "$GITHUB_OUTPUT" - name: Create or update pull request with GitHub CLI + # Only open/update PRs for validated updates to keep automation branches trustworthy. if: steps.validate_ranges.outcome == 'success' && steps.commit_updates.outputs.has_changes == 'true' run: | BRANCH="automation/python-dependency-range-updates" diff --git a/python/scripts/validate_dependency_lower_bounds.py b/python/scripts/validate_dependency_lower_bounds.py index 54c19024bb..eae4fdd87b 100644 --- a/python/scripts/validate_dependency_lower_bounds.py +++ b/python/scripts/validate_dependency_lower_bounds.py @@ -578,6 +578,7 @@ def _optimize_dependency( attempted_versions: list[str] = [] attempts: list[DependencyAttempt] = [] + # Establish a validated baseline before searching for lower acceptable bounds. baseline_version = dependency.lower_version attempted_versions.append(str(baseline_version)) print(f"[cyan]{package_label} :: {dependency.name} :: baseline current_lower [{baseline_version}] [/cyan]") @@ -637,6 +638,7 @@ def _optimize_dependency( skipped_reason="No lower candidate bounds found within allowed boundary.", ) + # Probe older bounds with a binary-search-style loop: keep successful tighter lowers, revert failures. 
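+    # Sketch of the loop's contract, assuming `candidates` is sorted ascending
+    # (oldest candidate lower bound first): `low`/`high` bracket the untested
+    # candidates; a passing trial at `mid` records that candidate as the new
+    # lower bound and keeps searching older versions (high = mid - 1), while a
+    # failing trial is reverted and the search moves newer (low = mid + 1).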
low = 0 high = len(candidates) - 1 while low <= high: @@ -710,6 +712,7 @@ def _process_package( skipped=["No check/test task combination found."], ) + # Build the per-package optimization target set from eligible bounded dependency specifications. targets, skipped = _collect_targets(pyproject_file, dependency_filters=dependency_filters) if not targets: return PackageOutcome( @@ -758,6 +761,7 @@ def _process_package( if candidate.exists(): temp_internal_editables.append(candidate) + # Execute lower-bound trials per dependency and accumulate final replacement strings for persistence. dependency_results: list[DependencyOutcome] = [] replacements: dict[str, str] = {} package_label = f"{plan.project_path} ({plan.package_name})" @@ -893,6 +897,7 @@ def main() -> None: dependency_filters = {name.lower() for name in args.dependencies} if args.dependencies else None output_json_path = (workspace_root / args.output_json).resolve() + # Phase 1: prepare shared workspace metadata and collect package execution plans. package_map = _build_workspace_package_map(workspace_root) internal_graph = _build_internal_graph(workspace_root, package_map) lock_versions = _load_lock_versions(workspace_root) @@ -927,6 +932,7 @@ def main() -> None: print("[yellow]No packages matched the selection.[/yellow]") return + # Phase 2: initialize incremental report state before running package validations in parallel. report: dict = { "started_at": _utc_now(), "workspace_root": str(workspace_root), @@ -978,6 +984,7 @@ def main() -> None: if outcome.changed and not args.dry_run: _apply_package_replacements(plan.pyproject_path, outcome.replacements) + # Phase 3: aggregate outcomes, persist incremental JSON snapshots, and emit per-package progress. report["packages"].append(_to_json(outcome)) report["summary"]["packages_changed"] = sum(1 for value in package_outcomes if value.changed) report["summary"]["dependencies_changed"] = sum( diff --git a/python/scripts/validate_dependency_ranges.py b/python/scripts/validate_dependency_ranges.py index 6e12e220cf..cf753ce21e 100644 --- a/python/scripts/validate_dependency_ranges.py +++ b/python/scripts/validate_dependency_ranges.py @@ -576,6 +576,7 @@ def _optimize_dependency( include_dev_group: bool, include_dev_extra: bool, ) -> DependencyOutcome: + # Build descending candidate trial bounds from the current constraint window. candidates = _build_trial_bounds( available_versions, lower=dependency.lower_version, @@ -674,6 +675,7 @@ def _optimize_dependency( skipped_reason="No higher candidate bounds found.", ) + # Probe candidates from highest to lowest; keep the first passing upper-bound rewrite. for candidate in candidates: attempted_versions.append(str(candidate)) trial_requirements = [entry.with_upper(candidate) for entry in dependency.entries] @@ -793,6 +795,7 @@ def _process_package( replacements: dict[str, str] = {} package_label = f"{plan.project_path} ({plan.package_name})" + # Run per-dependency trial generation + validation in the isolated temp workspace. for target in targets: versions = catalog.get(target.name) outcome = _optimize_dependency( @@ -918,6 +921,7 @@ def main() -> None: parser.add_argument("--dry-run", action="store_true", help="Do not execute uv commands or update pyprojects.") args = parser.parse_args() + # Preparation/target collection: resolve workspace metadata and package execution plans. 
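+    # The workspace root is derived from this script's location, and the optional
+    # --packages/--dependencies arguments become filters that scope which projects
+    # and dependencies are validated.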
workspace_pyproject = Path(__file__).parent.parent / "pyproject.toml" workspace_root = workspace_pyproject.parent package_filters = set(args.packages) if args.packages else None @@ -958,6 +962,7 @@ def main() -> None: print("[yellow]No packages matched the selection.[/yellow]") return + # Aggregation + persistence/reporting: initialize the incremental JSON report. report: dict = { "started_at": _utc_now(), "workspace_root": str(workspace_root), @@ -1009,6 +1014,7 @@ def main() -> None: if outcome.changed and not args.dry_run: _apply_package_replacements(plan.pyproject_path, outcome.replacements) + # Persist each completed package outcome so long runs keep a live report. report["packages"].append(_to_json(outcome)) report["summary"]["packages_changed"] = sum(1 for value in package_outcomes if value.changed) report["summary"]["dependencies_changed"] = sum(