diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 71d6f30d5..cd70ca474 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,5 +1,5 @@ *Note*: Before submitting this pull request, please review our [contributing -guidelines](https://fauststream.com/en/latest/contributing.html). +guidelines](https://faust-streaming.github.io/faust/contributing.html). ## Description diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 566158075..4f6360b71 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -1,3 +1,4 @@ +--- # For most projects, this workflow file will not need changing; you simply need # to commit it to your repository. # @@ -9,17 +10,15 @@ # the `language` matrix defined below to confirm you have the correct set of # supported CodeQL languages. # -name: "CodeQL" - +name: CodeQL on: push: - branches: [ master ] + branches: [master] pull_request: # The branches below must be a subset of the branches above - branches: [ master ] + branches: [master] schedule: - - cron: '19 10 * * 6' - + - cron: 19 10 * * 6 jobs: analyze: name: Analyze @@ -28,24 +27,22 @@ jobs: actions: read contents: read security-events: write - strategy: fail-fast: false matrix: - language: [ 'python' ] + language: [python] # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] # Learn more: # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed - steps: - - name: Checkout repository - uses: actions/checkout@v2 + - name: Checkout repository + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. 
- - name: Initialize CodeQL - uses: github/codeql-action/init@v1 - with: - languages: ${{ matrix.language }} + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. @@ -53,8 +50,8 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v1 + - name: Autobuild + uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -66,6 +63,5 @@ #- run: | # make bootstrap # make release - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml new file mode 100644 index 000000000..8ac306d81 --- /dev/null +++ b/.github/workflows/gh-pages.yml @@ -0,0 +1,47 @@ +--- +name: Pages +on: + push: + branches: [master] + pull_request: + branches: [master] + release: + types: [created] + branches: [master] +jobs: + build: + name: Build docs + runs-on: ubuntu-latest + steps: + - uses: actions/setup-python@v5 + - uses: actions/checkout@v4 + with: + fetch-depth: 0 # otherwise, you will fail to push refs to the dest repo + - name: Install runtime dependencies + run: | + pip install . 
+ pip install -r requirements/docs.txt + - name: Install doc build deps and build with Sphinx + run: make docs + - name: Upload artifacts + uses: actions/upload-pages-artifact@v3 + with: + # Upload built docs + path: ./Documentation + deploy: + name: Deploy docs + if: github.event_name == 'release' && github.event.action == 'created' + needs: build + runs-on: ubuntu-latest + # Grant GITHUB_TOKEN the permissions required to make a Pages deployment + permissions: + pages: write # to deploy to Pages + id-token: write # to verify the deployment originates from an appropriate source + # Deploy to the github-pages environment + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - uses: actions/deploy-pages@v4 + id: deployment + name: Deploy to GitHub Pages diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 7d6100611..03d5642b1 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -1,39 +1,143 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Python package - +--- +name: CI/CD on: push: - branches: ["master"] + branches: [master] pull_request: - branches: ["master"] + branches: [master] + release: + types: [created] + branches: [master] +env: + FORCE_COLOR: '1' # Make tools pretty. 
+ PIP_DISABLE_PIP_VERSION_CHECK: '1' + PIP_NO_PYTHON_VERSION_WARNING: '1' + PYTHON_LATEST: '3.12' + # For re-actors/checkout-python-sdist + sdist-artifact: python-package-distributions jobs: - tests: - name: "Python ${{ matrix.python-version }}/Cython: ${{ matrix.use-cython }}" - runs-on: "ubuntu-latest" - + lint: + name: Check linting + runs-on: ubuntu-latest + steps: + - name: Checkout project + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_LATEST }} + cache: pip + - run: | + python -m pip install build + python -m pip install -r requirements/test.txt + name: Install core libraries for build and install + - name: Run linting checks + run: scripts/check + test-pytest: + name: 'Python ${{ matrix.python-version }}/Cython: ${{ matrix.use-cython }}' + runs-on: ubuntu-latest + timeout-minutes: 10 # Maybe we should remove this someday but the PyPy tests are acting strange strategy: # Complete all jobs even if one fails, allows us to see # for example if a test fails only when Cython is enabled fail-fast: false matrix: - python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] - use-cython: ["true", "false"] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + use-cython: ['true', 'false'] + experimental: [false] + include: + - python-version: 'pypy3.9' + use-cython: false + experimental: true + - python-version: ~3.13.0-0 + experimental: true + use-cython: false env: USE_CYTHON: ${{ matrix.use-cython }} - + continue-on-error: ${{ matrix.experimental }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: pip + cache-dependency-path: requirements/test.txt + - name: Install dependencies + run: | + pip install -r requirements/test.txt + pip install . 
+ - name: Run tests + run: scripts/tests + - name: Enforce coverage + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + check: # This job does nothing and is only used for the branch protection + name: ✅ Ensure the required checks passing + if: always() + needs: [lint, test-pytest] + runs-on: ubuntu-latest steps: - - uses: "actions/checkout@v2" - - uses: "actions/setup-python@v2" - with: - python-version: "${{ matrix.python-version }}" - - name: "Install dependencies" - run: "scripts/install" - - name: "Run linting checks" - run: "scripts/check" - - name: "Run tests" - run: "scripts/tests" - - name: "Enforce coverage" - run: "scripts/coverage" + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + jobs: ${{ toJSON(needs) }} + build_wheels: + name: 📦 Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + needs: check + if: github.event_name == 'release' && github.event.action == 'created' + strategy: + matrix: + os: [ubuntu-20.04, macos-11] + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Build wheels + uses: pypa/cibuildwheel@v2.10.1 + - uses: actions/upload-artifact@v4 + with: + path: ./wheelhouse/*.whl + build_sdist: + name: 📦 Build the source distribution + runs-on: ubuntu-latest + needs: check + if: github.event_name == 'release' && github.event.action == 'created' + steps: + - uses: actions/checkout@v4 + name: Checkout source repository + with: + fetch-depth: 0 + - uses: actions/setup-python@v5 + - name: Build sdist + run: > + pip3 install pkgconfig cython --upgrade && + python3 setup.py sdist + - uses: actions/upload-artifact@v4 + name: Upload build artifacts + with: + path: dist/*.tar.gz + publish: + name: 📦 Publish to PyPI + runs-on: ubuntu-latest + needs: [build_wheels, build_sdist] + permissions: + id-token: write + environment: pypi + if: github.event_name == 'release' && github.event.action == 'created' + steps: + - name: Download the 
sdist artifact + uses: actions/download-artifact@v4 + with: + name: artifact + path: dist + - name: Publish package to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..506fa3f5e --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,26 @@ +# See https://pre-commit.com/ for usage and config +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.4.0 + hooks: + - id: check-case-conflict + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-added-large-files + +# - repo: local +# hooks: +# # - id: black +# # name: black +# # stages: [commit] +# # language: system +# # entry: black +# # types: [python] +# # files: .*\.py$ +# +# - id: flake8 +# name: flake8 +# stages: [commit] +# language: system +# entry: flake8 +# types: [python] diff --git a/AUTHORS.md b/AUTHORS.md index 212d66e16..e2a78a6b3 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -12,47 +12,48 @@ The award for doing this in any significant capacity for one year or longer, is ## Contributors -| Name | Email | -|---------------------|---------------------------------------| -| Allison Wang | | -| Jamshed Vesuna | | -| Jaren Glover | | -| Jerry Li | | -| Prithvi Narasimhan | | -| Ruby Wang | | -| Shrey Kumar Shahi | | -| Mika Eloranta | | -| Omar Rayward | | -| Alexander Oberegger | | -| Matthew Stump | | -| Martin Maillard | | -| Mattias Karlsson | | -| Matthias Wutte | | -| Thibault Serot | | -| Ryan Whitten | | -| Nimi Wariboko Jr | | -| Chris Seto | | -| Amit Ripshtos | | -| Miha Troha | | -| Perk Lim | | -| Julien Surloppe | | -| Bob Haddleton | | -| Nimish Telang | | -| Cesar Pantoja | | -| Tomasz Nguyen | | -| Artak Papikyan | | -| Andrei Tuppitcyn | | -| Vikram Patki | | -| Victor Miroshnikov | | -| Tobias Rauter | | -| DhruvaPatil98 | | -| Leandro Vonwerra | | -| Ignacio Peluffo | | -| Sergej Herbert | | -| Marcos Schroh | 
| -| Denis Kovalev | | -| Jonathan A. Booth | | -| Eran Kampf | | -| Wjatscheslaw Kewlin | | -| Ramkumar M | | -| Mark Lambert | | +| Name | Email | +|---------------------|-----------------------------------| +| Allison Wang | | +| Jamshed Vesuna | | +| Jaren Glover | | +| Jerry Li | | +| Prithvi Narasimhan | | +| Ruby Wang | | +| Shrey Kumar Shahi | | +| Mika Eloranta | | +| Omar Rayward | | +| Alexander Oberegger | | +| Matthew Stump | | +| Martin Maillard | | +| Mattias Karlsson | | +| Matthias Wutte | | +| Thibault Serot | | +| Ryan Whitten | | +| Nimi Wariboko Jr | | +| Chris Seto | | +| Amit Ripshtos | | +| Miha Troha | | +| Perk Lim | | +| Julien Surloppe | | +| Bob Haddleton | | +| Nimish Telang | | +| Cesar Pantoja | | +| Tomasz Nguyen | | +| Artak Papikyan | | +| Andrei Tuppitcyn | | +| Vikram Patki | | +| Victor Miroshnikov | | +| Tobias Rauter | | +| DhruvaPatil98 | | +| Leandro Vonwerra | | +| Ignacio Peluffo | | +| Sergej Herbert | | +| Marcos Schroh | | +| Denis Kovalev | | +| Jonathan A. Booth | | +| Eran Kampf | | +| Wjatscheslaw Kewlin | | +| Ramkumar M | | +| Mark Lambert | | +| William Barnhart | | diff --git a/CHANGELOG.md b/CHANGELOG.md index 293bec1b2..0907bc736 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,137 +1,490 @@ # Changelog - All notable changes to this project will be documented in this file. -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
-## 0.6.9 -### Changed -- Fixed [166](https://github.com/faust-streaming/faust/issues/166) +## [v0.8.10](https://github.com/faust-streaming/faust/releases/tag/v0.8.10) - 2022-09-14 + +[Compare with v0.8.9](https://github.com/faust-streaming/faust/compare/v0.8.9...v0.8.10) -## 0.6.8 ### Changed -- Fixed [166](https://github.com/faust-streaming/faust/issues/166) +- Change `aioeventlet` extension to `faust-aioeventlet` +- Update versioning of `click>=6.7,<8.2` + +## [v0.8.9](https://github.com/faust-streaming/faust/releases/tag/v0.8.9) - 2022-09-04 + +[Compare with v0.8.8](https://github.com/faust-streaming/faust/compare/v0.8.8...v0.8.9) + +### Added +- Add improvements to documentation (#351) ([2091326](https://github.com/faust-streaming/faust/commit/20913264c916d26f0ce50ba28f66e7008e38787e) by William Barnhart). +- Added aerospike to stores (#350) ([ba000ee](https://github.com/faust-streaming/faust/commit/ba000ee319d969070d396ad99f24f4f3265abbb9) by William Barnhart). +- Add aerospike docs (#348) ([3680bbd](https://github.com/faust-streaming/faust/commit/3680bbdb6f3c251a270365d023b1229dd0432d7e) by William Barnhart). +- Add noqa: b024 to base classes to pass lint check (#344) ([7648c6e](https://github.com/faust-streaming/faust/commit/7648c6e053dc7e6daff9e7cb86a4baa5cd411965) by William Barnhart). +- Address topics.py error in #175 by callling message.message (#342) ([76f720a](https://github.com/faust-streaming/faust/commit/76f720ad1dfc9366c7a1cd308cf29171888d65ae) by William Barnhart). -## 0.6.7 ### Changed -- aiokafka dependency is set to >=0.7.1,<0.8.0 +- Change version to 0.8.9 in preparaion of next release ([6475f8c](https://github.com/faust-streaming/faust/commit/6475f8ca9ee8b713996661f6a0fd88cda05493f3) by William Barnhart). + +### Fixed +- Fix running `make develop` and restore `pre-commit` tool (#145) ([85534ec](https://github.com/faust-streaming/faust/commit/85534ec2031f06ba0bdebabb77688476ffe9f806) by Taybin Rutkin). 
+ + +## [v0.8.8](https://github.com/faust-streaming/faust/releases/tag/v0.8.8) - 2022-08-15 + +[Compare with v0.8.7](https://github.com/faust-streaming/faust/compare/v0.8.7...v0.8.8) + +### Added +- Adding intervaltree to manage gaps in topics to prevent oom (#282) ([1e7be3a](https://github.com/faust-streaming/faust/commit/1e7be3a1dba51e13e23dd804bfda9d630cc729bd) by Vikram Patki). + + +## [v0.8.7](https://github.com/faust-streaming/faust/releases/tag/v0.8.7) - 2022-08-11 + +[Compare with v0.8.6](https://github.com/faust-streaming/faust/compare/v0.8.6...v0.8.7) + +### Added +- Add dist.yml for uploading builds to pypi (#338) ([79d672c](https://github.com/faust-streaming/faust/commit/79d672cda7785803af43c9f2c0c28483788b7dd5) by William Barnhart). + +### Fixed +- Fix recovery of partitions for large volume writes (#335) ([5a2ba13](https://github.com/faust-streaming/faust/commit/5a2ba13ddc45bcfdf7948d6f77daaba34dcaef97) by William Barnhart). Related issues/PRs: [#333](https://github.com/faust-streaming/faust/issues/333) + +### Removed +- Remove a broken link in readme.md (#332) ([9eb770a](https://github.com/faust-streaming/faust/commit/9eb770a028bc63396eb33b97e3ee3c692723701b) by Sefa Degirmenci). + + +## [v0.8.6](https://github.com/faust-streaming/faust/releases/tag/v0.8.6) - 2022-07-19 + +[Compare with v0.8.5](https://github.com/faust-streaming/faust/compare/v0.8.5...v0.8.6) + +### Added +- Add method to rocksdb for backing up partitions (#304) ([0bb2685](https://github.com/faust-streaming/faust/commit/0bb2685e545d28c4e8a604cc748c12e0911c1260) by William Barnhart). + +### Fixed +- Fixed filter not acking filtered out messages. (#208) ([a887571](https://github.com/faust-streaming/faust/commit/a887571e143f875c3ab4df488964e2ebde6dc5d2) by Matthew Drago). +- Fix twisted link in readme.md (#309) ([49574b2](https://github.com/faust-streaming/faust/commit/49574b264a7c6dfdff03e6fb5f58a5033bc0c5b4) by William Barnhart). 
+- Fix flake warning with bound loop var (#326) ([1e9d4a5](https://github.com/faust-streaming/faust/commit/1e9d4a5bfc768cce0ed0766fe709c47de019288b) by William Barnhart). + + +## [v0.8.5](https://github.com/faust-streaming/faust/releases/tag/v0.8.5) - 2022-06-02 + +[Compare with v0.8.4](https://github.com/faust-streaming/faust/compare/v0.8.4...v0.8.5) + +### Added +- Add support for additional sentry tags and arguments (#285) ([c230076](https://github.com/faust-streaming/faust/commit/c230076202e48cab273b656f01f938d3e12db54f) by Ran K). +- Added connections_max_idle_ms for consumer and producer (#281) ([84302f5](https://github.com/faust-streaming/faust/commit/84302f57e8a232ca81c2689805b9ee8b63bcc202) by Magnus Zotterman). +- Added metadata_max_age_ms option for both consumer and producer (#279) ([3b7f079](https://github.com/faust-streaming/faust/commit/3b7f079996977f852eaef96daaf94f9cb7d7e366) by Roman). + +### Fixed +- Fix readme typos (#308) ([26ff8fc](https://github.com/faust-streaming/faust/commit/26ff8fc6d98a9154b5fe9b4daad5610314b0144b) by William Barnhart). +- Fix user guide create channel example to run (#299) ([f52b783](https://github.com/faust-streaming/faust/commit/f52b783b4f6308b947666bcb1bd9f79aa19e9c15) by Robbie Palmer). +- Fix (#287) ([ed026a0](https://github.com/faust-streaming/faust/commit/ed026a0e1865a7f927fdd16cdf30cd38241f3b1b) by ameen-orca). +- Fix: rewrite non enabled unit tests (#272) ([d15d349](https://github.com/faust-streaming/faust/commit/d15d349c2f53c48e7f5dfa6ae24d43309005c21f) by Christoph Brand). +- Fix aiohttp threadedproducer driver python36, add unit tests (#277) ([784bee7](https://github.com/faust-streaming/faust/commit/784bee7f843fd830c3724c2ed55a6eda26fef785) by Christoph Brand). 
+ + +## [v0.8.4](https://github.com/faust-streaming/faust/releases/tag/v0.8.4) - 2022-02-25 + +[Compare with v0.8.2](https://github.com/faust-streaming/faust/compare/v0.8.2...v0.8.4) + +### Fixed +- Fix: producer send pending check with threads (#270) ([4e32327](https://github.com/faust-streaming/faust/commit/4e32327ed784ab0d242a095eb2f45f760ecee3a1) by Christoph Brand). +- Fix name_prefix usage (#266) ([ea8be8c](https://github.com/faust-streaming/faust/commit/ea8be8c8ffeaa1f9f69a700e4f4fb67db5a5317a) by Julien Surloppe). + + +## [v0.8.2](https://github.com/faust-streaming/faust/releases/tag/v0.8.2) - 2022-02-04 + +[Compare with v0.8.1](https://github.com/faust-streaming/faust/compare/v0.8.1...v0.8.2) + +### Fixed +- Fix errors in seek when flow is not active (#264) ([a313821](https://github.com/faust-streaming/faust/commit/a31382119c57da006b2ff073094358119f0d7f3d) by Vikram Patki). + + +## [v0.8.1](https://github.com/faust-streaming/faust/releases/tag/v0.8.1) - 2022-01-19 + +[Compare with v0.8.0](https://github.com/faust-streaming/faust/compare/v0.8.0...v0.8.1) + + +## [v0.8.0](https://github.com/faust-streaming/faust/releases/tag/v0.8.0) - 2022-01-11 + +[Compare with v0.7.9](https://github.com/faust-streaming/faust/compare/v0.7.9...v0.8.0) + + +## [v0.7.9](https://github.com/faust-streaming/faust/releases/tag/v0.7.9) - 2022-01-07 + +[Compare with v0.7.8](https://github.com/faust-streaming/faust/compare/v0.7.8...v0.7.9) + +### Fixed +- Fix condition to delete old table keys (#251) ([a0e9a31](https://github.com/faust-streaming/faust/commit/a0e9a31c8f831b99a40ed5245219d053831571e3) by Dima Kovalchuk). 
+ + +## [v0.7.8](https://github.com/faust-streaming/faust/releases/tag/v0.7.8) - 2021-12-18 + +[Compare with v0.7.7](https://github.com/faust-streaming/faust/compare/v0.7.7...v0.7.8) + + +## [v0.7.7](https://github.com/faust-streaming/faust/releases/tag/v0.7.7) - 2021-12-17 + +[Compare with v0.7.6](https://github.com/faust-streaming/faust/compare/v0.7.6...v0.7.7) + +### Fixed +- Fix race conditions on rebalance (#241) ([6f3c783](https://github.com/faust-streaming/faust/commit/6f3c783022b612d171de33003968fa056f48b325) by Vikram Patki). + + +## [v0.7.6](https://github.com/faust-streaming/faust/releases/tag/v0.7.6) - 2021-12-15 + +[Compare with v0.7.5](https://github.com/faust-streaming/faust/compare/v0.7.5...v0.7.6) + +### Removed +- Remove wait_first and extra log (#240) ([6e5c301](https://github.com/faust-streaming/faust/commit/6e5c301e08ff0a8b9fc693ba9cc88d11e5951cd2) by ekerstens). + + +## [v0.7.5](https://github.com/faust-streaming/faust/releases/tag/v0.7.5) - 2021-12-14 + +[Compare with v0.7.4](https://github.com/faust-streaming/faust/compare/v0.7.4...v0.7.5) + + +## [v0.7.4](https://github.com/faust-streaming/faust/releases/tag/v0.7.4) - 2021-12-13 + +[Compare with v0.7.3](https://github.com/faust-streaming/faust/compare/v0.7.3...v0.7.4) + +### Fixed +- Fix race condition when buffers are full (#237) ([7d861dc](https://github.com/faust-streaming/faust/commit/7d861dc9616a26d0105b102eb70c793c69af2759) by Vikram Patki). 
Related issues/PRs: [#166](https://github.com/faust-streaming/faust/issues/166) + + +## [v0.7.3](https://github.com/faust-streaming/faust/releases/tag/v0.7.3) - 2021-12-10 + +[Compare with v0.7.2](https://github.com/faust-streaming/faust/compare/v0.7.2...v0.7.3) + + +## [v0.7.2](https://github.com/faust-streaming/faust/releases/tag/v0.7.2) - 2021-12-10 + +[Compare with v0.7.1](https://github.com/faust-streaming/faust/compare/v0.7.1...v0.7.2) + + +## [v0.7.1](https://github.com/faust-streaming/faust/releases/tag/v0.7.1) - 2021-12-10 + +[Compare with v0.7.0](https://github.com/faust-streaming/faust/compare/v0.7.0...v0.7.1) + + +## [v0.7.0](https://github.com/faust-streaming/faust/releases/tag/v0.7.0) - 2021-12-03 + +[Compare with v0.6.14](https://github.com/faust-streaming/faust/compare/v0.6.14...v0.7.0) + + +## [v0.6.14](https://github.com/faust-streaming/faust/releases/tag/v0.6.14) - 2021-11-30 + +[Compare with v0.6.13](https://github.com/faust-streaming/faust/compare/v0.6.13...v0.6.14) + +### Fixed +- Fix for out of order events (#228) ([7e3c60c](https://github.com/faust-streaming/faust/commit/7e3c60cad80b79fd177ca1e1448d1b33385a1f10) by Vikram Patki). +- Fix flake8 (#227) ([3caf810](https://github.com/faust-streaming/faust/commit/3caf8104948827b110a94e50f9535df7b32737a2) by ekerstens). +- Fixed the hello world example ([02afc3c](https://github.com/faust-streaming/faust/commit/02afc3c0ac529da87a14ffe70033d53708afbad6) by Luís Braga). 
+ + +## [v0.6.13](https://github.com/faust-streaming/faust/releases/tag/v0.6.13) - 2021-11-29 + +[Compare with v0.6.12](https://github.com/faust-streaming/faust/compare/v0.6.12...v0.6.13) + + +## [v0.6.12](https://github.com/faust-streaming/faust/releases/tag/v0.6.12) - 2021-11-18 + +[Compare with v0.6.11](https://github.com/faust-streaming/faust/compare/v0.6.11...v0.6.12) + +### Added +- Add default to external_topic_distribution (#222) ([23c2d90](https://github.com/faust-streaming/faust/commit/23c2d902380419ec8804cd05b3f9e8c47279e5d0) by Stegallo). + + +## [v0.6.11](https://github.com/faust-streaming/faust/releases/tag/v0.6.11) - 2021-11-17 + +[Compare with v0.6.10](https://github.com/faust-streaming/faust/compare/v0.6.10...v0.6.11) + +### Added +- Add all partitions of global tables as active in order to let them recover in the beginning (#213) ([60a8696](https://github.com/faust-streaming/faust/commit/60a869654d1e13914d36f3db0c66fef28911c7ce) by aoberegg). + +### Fixed +- Fix recovery._resume_streams (#217) ([e3bd128](https://github.com/faust-streaming/faust/commit/e3bd128e5631657c58d301943754b9921d3cbe95) by ekerstens). +- Fix worker default web_host to be none (#210) ([41200e4](https://github.com/faust-streaming/faust/commit/41200e44cb80000e5a2f09f2a94de18168239dde) by Ran K). + +### Removed +- Remove outdated colorclass library (#204) ([9385c81](https://github.com/faust-streaming/faust/commit/9385c8166256bc85c5c4b985fab6b749d070ba68) by Taybin Rutkin). + + +## [v0.6.10](https://github.com/faust-streaming/faust/releases/tag/v0.6.10) - 2021-10-21 + +[Compare with v0.6.9](https://github.com/faust-streaming/faust/compare/v0.6.9...v0.6.10) + +### Added +- Adding new feature to retry on aerospike runtime exceptions issue#202 (#203) ([868d7a4](https://github.com/faust-streaming/faust/commit/868d7a40c04450924237914591ea26dc88eb00ba) by Vikram Patki). 
+ +### Fixed +- Fix canonical url when setting either port of host from the cli (#196) ([808312b](https://github.com/faust-streaming/faust/commit/808312b4093c9e99a4b28e848574240290de5d12) by Ran K). +- Fixes #197 (#198) ([3959268](https://github.com/faust-streaming/faust/commit/395926869a372d81ec6564fc4d117e57a159e0d6) by David Parker). +- Fix routing method #188 (#191) ([835f37e](https://github.com/faust-streaming/faust/commit/835f37e7d451d08ee8d3bfad86d8361b785d2333) by Ondřej Chmelař). +- Fix for reassign table keys faust-streaming#171 (#174) ([c1a6b0e](https://github.com/faust-streaming/faust/commit/c1a6b0e216f62fe01f898f7dca7054bb7622161f) by Philipp Jaschke). +- Fix for #121 (#168) ([fe343c0](https://github.com/faust-streaming/faust/commit/fe343c01a7640d2d33ae7514de0c172bba759a4d) by Taybin Rutkin). +- Fix markdown formatting in changelog ([727987b](https://github.com/faust-streaming/faust/commit/727987b76ccfbaa61809af971e0fcb66e16e76cf) by Taybin Rutkin). +- Fix lint warning ([e80829c](https://github.com/faust-streaming/faust/commit/e80829c645bbd543c3c4769a272a852148386544) by Taybin Rutkin). + + +## [v0.6.9](https://github.com/faust-streaming/faust/releases/tag/v0.6.9) - 2021-07-06 + +[Compare with v0.6.8](https://github.com/faust-streaming/faust/compare/v0.6.8...v0.6.9) + +### Fixed +- Fix error messages in faust app #166 ([b06e579](https://github.com/faust-streaming/faust/commit/b06e5799653d75021aa8a409c3d3e6f83a266f1f) by Vikram Patki 24489). + + +## [v0.6.8](https://github.com/faust-streaming/faust/releases/tag/v0.6.8) - 2021-06-29 + +[Compare with v0.6.7](https://github.com/faust-streaming/faust/compare/v0.6.7...v0.6.8) + +### Fixed +- Fix for consumer errors in app #166 (#167) ([761713b](https://github.com/faust-streaming/faust/commit/761713b475f1ca4df45b1a0c3d833e4d18735184) by Vikram Patki). 
+- Fixed a few small mistakes in streams userguide (#161) ([c45a464](https://github.com/faust-streaming/faust/commit/c45a46429340a4edb199af01637ce19fdf16a873) by Dongkuo Ma). + + +## [v0.6.7](https://github.com/faust-streaming/faust/releases/tag/v0.6.7) - 2021-06-07 + +[Compare with v0.6.6](https://github.com/faust-streaming/faust/compare/v0.6.6...v0.6.7) + + +## [v0.6.6](https://github.com/faust-streaming/faust/releases/tag/v0.6.6) - 2021-06-03 + +[Compare with v0.6.5](https://github.com/faust-streaming/faust/compare/v0.6.5...v0.6.6) + +### Fixed +- Fix string formatting error when logging slow processing (#156) ([e71145c](https://github.com/faust-streaming/faust/commit/e71145c2053c4a9bf0b9f74eec0f5180f8f6b877) by Erik Forsberg). Related issues/PRs: [#153](https://github.com/faust-streaming/faust/issues/153) +- Fix record instances deserialize properly when returned by agent.ask (#152) ([df0856f](https://github.com/faust-streaming/faust/commit/df0856fbc9e3d4c667c05cc49ae4883fc4076db6) by tarbaig). + + +## [v0.6.5](https://github.com/faust-streaming/faust/releases/tag/v0.6.5) - 2021-05-14 + +[Compare with v0.6.4](https://github.com/faust-streaming/faust/compare/v0.6.4...v0.6.5) + +### Added +- Adding replication factor to the leader topic (#150) ([7ab0647](https://github.com/faust-streaming/faust/commit/7ab0647b66e7c45ef255810e2cd5648cd631759e) by Vikram Patki). + +### Fixed +- Fix tests directory name in makefile (#134) ([1a5c431](https://github.com/faust-streaming/faust/commit/1a5c4314ab5394f3a5777c874c85c73c82f06854) by Taybin Rutkin). + + +## [v0.6.4](https://github.com/faust-streaming/faust/releases/tag/v0.6.4) - 2021-04-26 + +[Compare with v0.6.3](https://github.com/faust-streaming/faust/compare/v0.6.3...v0.6.4) + +### Fixed +- Fix rocksdb for use with global tables or tables that use_partitioner (#130) ([ec3ac3e](https://github.com/faust-streaming/faust/commit/ec3ac3ec5946be54134703bab53fe87a16abaa53) by aoberegg). 
-## 0.6.6 -### Bug Fixes -- Error when logging slow process in aiokafka with python 3.9 [153](https://github.com/faust-streaming/faust/issues/153) -- FIX Record instances deserialize properly when returned by agent.ask[151](https://github.com/faust-streaming/faust/issues/151) -## 0.6.5 -### Bug Fixes -- Fix leader topic replication [149](https://github.com/faust-streaming/faust/issues/149) +## [v0.6.3](https://github.com/faust-streaming/faust/releases/tag/v0.6.3) - 2021-04-06 -## 0.6.4 -### Bug Fixes -- Fix partition from message for globaltables or tables that use partitioner [129](https://github.com/faust-streaming/faust/issues/129) -- Calling window close callback after popping the value from store [137](https://github.com/faust-streaming/faust/pull/137) +[Compare with v0.6.2](https://github.com/faust-streaming/faust/compare/v0.6.2...v0.6.3) -## 0.6.3 -### Bug Fixes -- Fix for [126](https://github.com/faust-streaming/faust/issues/126) +### Added +- Adding changelog ([72c59b7](https://github.com/faust-streaming/faust/commit/72c59b78553f5ee85e84f07c6962a7deaac1a22f) by Vikram Patki 24489). -## 0.6.2 -### Bug Fixes -- Add app_name to prometheus sensors[120](https://github.com/faust-streaming/faust/pull/120) --DatadogMonitor - IndexError: deque index out of range[113](https://github.com/faust-streaming/faust/issues/113) +### Fixed +- Fix for https://github.com/faust-streaming/faust/issues/126 (#127) ([159ad62](https://github.com/faust-streaming/faust/commit/159ad62978e84fb5f7d5a47afcb45446123a0255) by Vikram Patki). 
-## 0.6.1 -### Bug Fixes -- fix agents with multiple topics[116](https://github.com/faust-streaming/faust/pull/116) --Simplify the code by getting rid of deque_pushpopmax in favour of using maxlen= parameter of deque[115](https://github.com/faust-streaming/faust/pull/115) --fixes a race condition in writing messages to topics that resulted in a violation of the ordering guarantee (especially changelog topics)[112](https://github.com/faust-streaming/faust/pull/112) -## 0.6.0 -### Bug Fixes --Adding support for aerospike [114](https://github.com/faust-streaming/faust/issues/114) +## [v0.6.2](https://github.com/faust-streaming/faust/releases/tag/v0.6.2) - 2021-03-12 -## 0.4.7 -### Bug Fixes -- Allow multiple workers to share rocksdb data dir[98](https://github.com/faust-streaming/faust/issues/98) -- Fix rebalance and recovery issues [83](https://github.com/faust-streaming/faust/issues/83) --[92](https://github.com/faust-streaming/faust/issues/92) +[Compare with v0.6.1](https://github.com/faust-streaming/faust/compare/v0.6.1...v0.6.2) -## 0.4.6 -### Bug Fixes -- Fix for [85](https://github.com/faust-streaming/faust/issues/85) -- Fix for [88](https://github.com/faust-streaming/faust/issues/88) -- Fix for [91](https://github.com/faust-streaming/faust/issues/91) -- Enabled Tests [79](https://github.com/faust-streaming/faust/issues/79) -- Fix for [84](https://github.com/faust-streaming/faust/issues/84) +### Added +- Add app_name to prometheus metrics (#120) ([e0010f7](https://github.com/faust-streaming/faust/commit/e0010f72b0b0084c24c328962184d455bf2a02c8) by Alexey Kuzyashin). 
-## 0.4.5 -### Features -- Peformance improvements in rocksdb by ignore bloom filters within an event context -## 0.4.3 -### Features -- New Sensor Support to monitor ThreadedProducer -[84](https://github.com/faust-streaming/faust/issues/84) -- Support for new rocksdb library faust-streaming-rocksdb -[85](https://github.com/faust-streaming/faust/issues/85) +## [v0.6.1](https://github.com/faust-streaming/faust/releases/tag/v0.6.1) - 2021-02-28 -## 0.4.2 -### Features -- New threaded producer for applications using send_soon for sending messages +[Compare with v0.6.0](https://github.com/faust-streaming/faust/compare/v0.6.0...v0.6.1) -## 0.4.1 ### Fixed -- Adding Prometheus Sensor -- Stability fixes for table recovery when the stream buffer is full -[75](https://github.com/faust-streaming/faust/pull/75) +- Fix scan options (#117) ([750e3ad](https://github.com/faust-streaming/faust/commit/750e3ad2303bf6fa21fb4e1020b8f15dec3101a2) by Vikram Patki). +- Fix iterating over keys when changelog topic is set. (#106) ([7d29cad](https://github.com/faust-streaming/faust/commit/7d29cada3471fb665a1ae0093d4a0fa4f71c9e5d) by aoberegg). +- Fix race (out of order) in flushing topics (wrong state is stored in changelog) (#112) ([a841a0e](https://github.com/faust-streaming/faust/commit/a841a0e0ca8e8ab0c4884ffe9423ac26df8e1df0) by trauter). +- Fix agents with multiple topics (#116) ([70e5516](https://github.com/faust-streaming/faust/commit/70e5516d48b7d44c0cb389c14e5639781069d613) by Stevan Milic). 
+ + +## [v0.6.0](https://github.com/faust-streaming/faust/releases/tag/v0.6.0) - 2021-02-25 + +[Compare with v0.5.2](https://github.com/faust-streaming/faust/compare/v0.5.2...v0.6.0) -## 0.4.0 ### Fixed -- Prevent stream buffer overflow by lowering the rate of incoming partitions -[53](https://github.com/faust-streaming/faust/issues/53) --Recovery thread updating standby partition writes in single writes instead of using writeBatch -[51](https://github.com/faust-streaming/faust/issues/51) -- IllegalStateException on seek to offset of a partition that was removed by a rebalance -[54](https://github.com/faust-streaming/faust/issues/54) +- Fix table rebalance issue (#110) ([cd136ad](https://github.com/faust-streaming/faust/commit/cd136ad791546c3b0e1fdac44a27cb6cd2caea21) by Bob Haddleton). -## 0.3.1 +## [v0.5.2](https://github.com/faust-streaming/faust/releases/tag/v0.5.2) - 2021-02-19 + +[Compare with v0.5.1](https://github.com/faust-streaming/faust/compare/v0.5.1...v0.5.2) + ### Fixed --Updating opentracing dependency[50](https://github.com/faust-streaming/faust/issues/50) --New-offset is off between 0.2.0 and 0.3.0, resulting in reprocessing last record (or many records) on worker restart - [48](https://github.com/faust-streaming/faust/issues/48) --Worker fails to recover table with exactly_once guarantee [47](https://github.com/faust-streaming/faust/issues/47) +- Fix extra parameter (#101) (#109) ([d3f28bd](https://github.com/faust-streaming/faust/commit/d3f28bdfa657131877e8c8a7ad59c73397ac7797) by Bob Haddleton). 
+ + +## [v0.5.1](https://github.com/faust-streaming/faust/releases/tag/v0.5.1) - 2021-02-18 + +[Compare with v0.5.0](https://github.com/faust-streaming/faust/compare/v0.5.0...v0.5.1) + + +## [v0.5.0](https://github.com/faust-streaming/faust/releases/tag/v0.5.0) - 2021-02-18 + +[Compare with v0.4.7](https://github.com/faust-streaming/faust/compare/v0.4.7...v0.5.0) + + +## [v0.4.7](https://github.com/faust-streaming/faust/releases/tag/v0.4.7) - 2021-02-10 + +[Compare with v0.4.6](https://github.com/faust-streaming/faust/compare/v0.4.6...v0.4.7) + +### Added +- Adding changelog for release ([4f7735d](https://github.com/faust-streaming/faust/commit/4f7735ddf8d591eeab5d54a0bbe3d38cd43644c4) by Vikram Patki 24489). -## 0.3.0 ### Fixed -Recovery Crash [44](https://github.com/faust-streaming/faust/issues/44) -[37](https://github.com/faust-streaming/faust/issues/37) +- Fix ipv6 address case in server_list function (#66) ([05c0ff9](https://github.com/faust-streaming/faust/commit/05c0ff987df1ea3e68c84f42a38f6fc5fb55b2f4) by Damien Nadé). +- Fix commit exceptions (#94) (#95) ([5e799c2](https://github.com/faust-streaming/faust/commit/5e799c2e6746745da07e67ae35fa6abf3279bb0c) by Bob Haddleton). + + +## [v0.4.6](https://github.com/faust-streaming/faust/releases/tag/v0.4.6) - 2021-01-29 + +[Compare with v0.4.5](https://github.com/faust-streaming/faust/compare/v0.4.5...v0.4.6) -## 0.2.2 ### Fixed -- Consumer offsets not progressing for certain partitions -- Agent dies silenty when mode cancels pending tasks [678](https://github.com/robinhood/faust/issues/678) +- Fixing race conditions in opening rocksdb (#90) ([f51a850](https://github.com/faust-streaming/faust/commit/f51a8508836ed0aa46e13fe9bb3d64a35d261b42) by Vikram Patki). +- Fix dropped messages when topic backpressure is enabled (#88) (#89) ([5263720](https://github.com/faust-streaming/faust/commit/5263720825a26c2871513b3524226f7eb2cc51be) by Bob Haddleton). 
+ + +## [v0.4.5](https://github.com/faust-streaming/faust/releases/tag/v0.4.5) - 2021-01-28 + +[Compare with v0.4.3](https://github.com/faust-streaming/faust/compare/v0.4.3...v0.4.5) + +### Added +- Adding performance improvement for getters (#87) ([d4392d5](https://github.com/faust-streaming/faust/commit/d4392d5b84c99d654caf8b2848029948c1709a7d) by Vikram Patki). -## 0.2.1 + +## [v0.4.3](https://github.com/faust-streaming/faust/releases/tag/v0.4.3) - 2021-01-26 + +[Compare with v0.4.2](https://github.com/faust-streaming/faust/compare/v0.4.2...v0.4.3) + + +## [v0.4.2](https://github.com/faust-streaming/faust/releases/tag/v0.4.2) - 2021-01-21 + +[Compare with v0.4.1](https://github.com/faust-streaming/faust/compare/v0.4.1...v0.4.2) + +### Added +- Adding threaded producer (#68) ([fda7e52](https://github.com/faust-streaming/faust/commit/fda7e5264a4e19321e286d3151a0e0bb56ee5545) by Vikram Patki). + + +## [v0.4.1](https://github.com/faust-streaming/faust/releases/tag/v0.4.1) - 2021-01-15 + +[Compare with 0.4.0](https://github.com/faust-streaming/faust/compare/0.4.0...v0.4.1) + +### Added +- Add web_ssl_context to allow internal web server to support https (#69) ([fcb6b18](https://github.com/faust-streaming/faust/commit/fcb6b185105239329fd21e4af0f23e5244a2c8eb) by Bob Haddleton). ### Fixed +- Fix for outdated sensors after rebalancing (#72) ([0a4d059](https://github.com/faust-streaming/faust/commit/0a4d059d3234bd2ceccf3845f206d35a706bbefc) by krzysieksulejczak). 
-- Prometheus rebalance typo [#26](https://github.com/faust-streaming/faust/pull/26) -- Make SCRAM-SHA-256/512 SASL Mechanisms available [#29](https://github.com/faust-streaming/faust/pull/29) -## 0.2.0 +## [0.4.0](https://github.com/faust-streaming/faust/releases/tag/0.4.0) - 2020-12-22 + +[Compare with v0.4.0](https://github.com/faust-streaming/faust/compare/v0.4.0...0.4.0) + +### Changed +- Change requirements from mode to mode-streaming (#65) ([e239534](https://github.com/faust-streaming/faust/commit/e23953473e3aca2c56f77963926ceec49aba1e57) by Thomas Sarboni). + + +## [v0.4.0](https://github.com/faust-streaming/faust/releases/tag/v0.4.0) - 2020-12-18 + +[Compare with v0.3.1](https://github.com/faust-streaming/faust/compare/v0.3.1...v0.4.0) ### Added +- Add backpressure to slow stream processing to avoid filing up the buffer (#55) ([ccaf0a7](https://github.com/faust-streaming/faust/commit/ccaf0a77d37738873e39235d738172c6f6386035) by Vikram Patki). + +### Fixed +- Fix for writebatch when not in recovery for standby writes (#56) ([979ca5a](https://github.com/faust-streaming/faust/commit/979ca5a360d082ff767032daa76b41fcf0c9b092) by Vikram Patki). +- Fix for rebalance exception causing recovery to crash (#57) ([8d6758f](https://github.com/faust-streaming/faust/commit/8d6758f13944bd50ba65cbe65259f47e7816edc1) by Vikram Patki). + + +## [v0.3.1](https://github.com/faust-streaming/faust/releases/tag/v0.3.1) - 2020-12-07 + +[Compare with v0.3.0](https://github.com/faust-streaming/faust/compare/v0.3.0...v0.3.1) + +### Fixed +- Fix recovery issue in transaction and reprocessing message in consumer (#49) ([be1e6db](https://github.com/faust-streaming/faust/commit/be1e6dbe2507fd068a40391dcd2ddd436372661e) by Vikram Patki). 
+ + +## [v0.3.0](https://github.com/faust-streaming/faust/releases/tag/v0.3.0) - 2020-11-24 + +[Compare with v0.2.2](https://github.com/faust-streaming/faust/compare/v0.2.2...v0.3.0) + -- Hanging of workers on kafka rebalance [#21](https://github.com/faust-streaming/faust/pull/21) +## [v0.2.2](https://github.com/faust-streaming/faust/releases/tag/v0.2.2) - 2020-11-20 -## 0.1.1 +[Compare with v0.2.1](https://github.com/faust-streaming/faust/compare/v0.2.1...v0.2.2) ### Fixed +- Fix for canceled from mode (#42) ([1131106](https://github.com/faust-streaming/faust/commit/1131106b8c954c6df9f5a9d4f7321f3211f65327) by Vikram Patki). +- Fixed ack for tombstones in cython-stream (#39) ([41d3e6b](https://github.com/faust-streaming/faust/commit/41d3e6b97baa5287c336d0997701e396400ab508) by trauter). +- Fix commited offset is always behind the real offset by 1 (#33) ([18230a7](https://github.com/faust-streaming/faust/commit/18230a729a717a05aad2a2df58dfe494917de2ab) by Vikram Patki). +- Fix(aiokafka): release all fetch waiters (#32) ([9fe472f](https://github.com/faust-streaming/faust/commit/9fe472f4d7c4b870cdc76ae5400a4017b7985b34) by Vikram Patki). +- Fix commited offset is always behind the real offset by 1 ([361b09d](https://github.com/faust-streaming/faust/commit/361b09dde7de3c7314d7cac7447e029a8e54c97b) by Maxim Musayev). +- Fix(aiokafka): release all fetch waiters ([8cd7ad4](https://github.com/faust-streaming/faust/commit/8cd7ad4b8ec398dfc3916961cc37064e3f8c3223) by Bob Haddleton). +- Fix: readme updated ([7493b6e](https://github.com/faust-streaming/faust/commit/7493b6e033822b64b80d2c890df5f0a8468a625c) by marcosschroh). + + +## [v0.2.1](https://github.com/faust-streaming/faust/releases/tag/v0.2.1) - 2020-11-13 + +[Compare with v0.2.0](https://github.com/faust-streaming/faust/compare/v0.2.0...v0.2.1) + +### Fixed +- Fix typo (#26) ([0db67af](https://github.com/faust-streaming/faust/commit/0db67af566335f257c5bf1dbef7f3d352ad8218c) by M). 
+ + +## [v0.2.0](https://github.com/faust-streaming/faust/releases/tag/v0.2.0) - 2020-11-11 + +[Compare with v0.1.1](https://github.com/faust-streaming/faust/compare/v0.1.1...v0.2.0) + + +## [v0.1.1](https://github.com/faust-streaming/faust/releases/tag/v0.1.1) - 2020-11-11 + +[Compare with v0.1.0rc6](https://github.com/faust-streaming/faust/compare/v0.1.0rc6...v0.1.1) + +### Fixed +- Fixed recovery hang ([107142c](https://github.com/faust-streaming/faust/commit/107142cedf2b193590847b52d3326d952e60f34b) by Vikram Patki 24489). +- Fix: web bind to should be 0.0.0.0 instead of localhost ([ea9a322](https://github.com/faust-streaming/faust/commit/ea9a3225e2857b5a76cff75f5e9bb185696fa580) by marcosschroh). + + +## [v0.1.0rc6](https://github.com/faust-streaming/faust/releases/tag/v0.1.0rc6) - 2020-11-10 + +[Compare with v0.1.0rc5](https://github.com/faust-streaming/faust/compare/v0.1.0rc5...v0.1.0rc6) + +### Fixed +- Fixed recovery hang ([1f3111a](https://github.com/faust-streaming/faust/commit/1f3111a9cc692e4a25ea29d316b55acd95a889b0) by Vikram Patki 24489). + + +## [v0.1.0rc5](https://github.com/faust-streaming/faust/releases/tag/v0.1.0rc5) - 2020-11-09 + +[Compare with v0.1.0rc3-2](https://github.com/faust-streaming/faust/compare/v0.1.0rc3-2...v0.1.0rc5) + +### Fixed +- Fixed recovery hang ([40ea0b0](https://github.com/faust-streaming/faust/commit/40ea0b0967ad5a4fee2754bacefa8ee8f9d45ee5) by Vikram Patki 24489). + + +## [v0.1.0rc3-2](https://github.com/faust-streaming/faust/releases/tag/v0.1.0rc3-2) - 2020-11-08 + +[Compare with v0.1.0](https://github.com/faust-streaming/faust/compare/v0.1.0...v0.1.0rc3-2) + +### Fixed +- Fixed recovery hang ([7902e02](https://github.com/faust-streaming/faust/commit/7902e0280971410ff50e5ebc4a4b911872e4bccf) by Vikram Patki 24489). +- Fixed recovery hang and undefined set_close method in aiokafka ([06755bc](https://github.com/faust-streaming/faust/commit/06755bc76c901e67b6f5c02e6a3e90373e445b64) by Vikram Patki 24489). 
+- Fix: manifest and setup.py fixed. close #20 ([d34b084](https://github.com/faust-streaming/faust/commit/d34b084f8e3e88afe5e15ff84a5182ac40dda3aa) by marcosschroh). +- Fix: codecov added ([2caf669](https://github.com/faust-streaming/faust/commit/2caf669067fe10c05e5a92375f6a501e423a35be) by marcosschroh). +- Fixed typo ([774e36d](https://github.com/faust-streaming/faust/commit/774e36d2a78e7d4e67af054b4c9e70fea81ae7b4) by Vikram Patki 24489). + -- web bind to should be 0.0.0.0 instead of localhost [#24](https://github.com/faust-streaming/faust/pull/24) +## [v0.1.0](https://github.com/faust-streaming/faust/releases/tag/v0.1.0) - 2020-11-04 -## 0.1.0 +[Compare with first commit](https://github.com/faust-streaming/faust/compare/b4d2fde2f5170aecf56c46c502a95266486bff04...v0.1.0) ### Added diff --git a/Makefile b/Makefile index 0aef79033..2eea4310d 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,6 @@ FLAKE8 ?= flake8 PYDOCSTYLE ?= pydocstyle MYPY ?= mypy SPHINX2RST ?= sphinx2rst -BUMPVERSION ?= bumpversion VULTURE ?= vulture VULTURE_MIN_CONFIDENCE ?= 100 PRE_COMMIT ?= pre-commit @@ -39,7 +38,7 @@ help: @echo "livedocs - Start documentation live web server." @echo "develop - Start contributing to Faust" @echo " develop-hooks - Install Git commit hooks (required)" - @echo " reqs" - Install requirements + @echo " reqs - Install requirements" @echo " setup-develop - Run setup.py develop" @echo "cdevelop - Like develop but installs C extensions" @echo " reqs-rocksdb - Install python-rocksdb (require rocksdb)" @@ -59,6 +58,7 @@ help: @echo " pep257check - Run pep257 on the source code." @echo " vulture - Run vulture to find unused code." @echo "readme - Regenerate README.rst file." + @echo "changelog - Regenerate CHANGELOG.md file." @echo "contrib - Regenerate CONTRIBUTING.rst file" @echo "configref - Regenerate docs/userguide/settings.rst" @echo "coc - Regenerate CODE_OF_CONDUCT.rst file" @@ -71,22 +71,13 @@ help: @echo "bump - Bump patch version number." 
@echo "bump-minor - Bump minor version number." @echo "bump-major - Bump major version number." - @echo "hooks" - Update pre-commit hooks + @echo "hooks - Update pre-commit hooks" @echo "release - Make PyPI release." clean: clean-docs clean-pyc clean-build clean-dist: clean clean-git-force -bump: - $(BUMPVERSION) patch - -bump-minor: - $(BUMPVERSION) minor - -bump-major: - $(BUMPVERSION) major - release: $(PYTHON) setup.py register sdist bdist_wheel upload --sign --identity="$(PGPIDENT)" @@ -107,6 +98,7 @@ livedocs: clean-docs: -rm -rf "$(SPHINX_BUILDDIR)" + -rm -rf "$(DOCUMENTATION)" lint: flakecheck apicheck configcheck readmecheck pep257check vulture @@ -145,6 +137,9 @@ $(README): readme: clean-readme $(README) readmecheck +changelog: + git-changelog . -o CHANGELOG.md -t path:"$(SPHINX_DIR)/keepachangelog/" + clean-contrib: -rm -f "$(CONTRIBUTING)" @@ -223,7 +218,7 @@ reqs: reqs-default reqs-test reqs-dist reqs-docs reqs-ci reqs-debug .PHONY: reqs-default: - $(PIP) install -U -r requirements/default.txt + $(PIP) install -U -r requirements/requirements.txt .PHONY: reqs-test: diff --git a/README.md b/README.md index 5622ef009..c1391f2b6 100644 --- a/README.md +++ b/README.md @@ -2,9 +2,14 @@ # Python Stream Processing Fork -![python versions](https://img.shields.io/badge/python-3.6%203.7%203.8%203.9-blue) -![version](https://img.shields.io/badge/version-0.6.9-green) +![python versions](https://img.shields.io/pypi/pyversions/faust-streaming.svg) +![version](https://img.shields.io/pypi/v/faust-streaming) [![codecov](https://codecov.io/gh/faust-streaming/faust/branch/master/graph/badge.svg?token=QJFBYNN0JJ)](https://codecov.io/gh/faust-streaming/faust) +[![slack](https://img.shields.io/badge/slack-Faust-brightgreen.svg?logo=slack)](https://join.slack.com/t/fauststreaming/shared_invite/zt-1q1jhq4kh-Q1t~rJgpyuMQ6N38cByE9g) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) 
+![pre-commit](https://img.shields.io/badge/pre--commit-enabled-green) +![license](https://img.shields.io/pypi/l/faust-streaming) +![downloads](https://img.shields.io/pypi/dw/faust-streaming) ## Installation @@ -12,19 +17,19 @@ ## Documentation -- `introduction`: https://fauststream.com/en/latest/introduction.html -- `quickstart`: https://fauststream.com/en/latest/playbooks/quickstart.html -- `User Guide`: https://fauststream.com/en/latest/userguide/index.html +- `introduction`: https://faust-streaming.github.io/faust/introduction.html +- `quickstart`: https://faust-streaming.github.io/faust/playbooks/quickstart.html +- `User Guide`: https://faust-streaming.github.io/faust/userguide/index.html ## Why the fork -We have decided to fork the original `Faust` project because there is a critical process of releasing new versions which causes uncertainty in the community. Everybody is welcome to contribute to this `fork`, and you can be added as a manitainer. +We have decided to fork the original `Faust` project because there is a critical process of releasing new versions which causes uncertainty in the community. Everybody is welcome to contribute to this `fork`, and you can be added as a maintainer. We want to: - Ensure continues release - Code quality -- Use of latests versions of kafka drivers (for now only [aiokafka](https://github.com/aio-libs/aiokafka)) +- Use of latest versions of kafka drivers (for now only [aiokafka](https://github.com/aio-libs/aiokafka)) - Support kafka transactions - Update the documentation @@ -95,7 +100,7 @@ of "number of clicks from the last day," or we support tumbling, hopping and sliding windows of time, and old windows can be expired to stop data from filling up. -For reliability we use a Kafka topic as "write-ahead-log". +For reliability, we use a Kafka topic as "write-ahead-log". Whenever a key is changed we publish to the changelog. 
Standby nodes consume from this changelog to keep an exact replica of the data and enables instant recovery should any of the nodes fail. @@ -253,7 +258,7 @@ increase once we can support a more optimized Kafka client. Faust is just Python, and a stream is an infinite asynchronous iterator. If you know how to use Python, you already know how to use Faust, and it works with your favorite Python libraries like Django, Flask, -SQLAlchemy, NTLK, NumPy, SciPy, TensorFlow, etc. +SQLAlchemy, NLTK, NumPy, SciPy, TensorFlow, etc. ## Bundles @@ -264,9 +269,9 @@ You can specify these in your requirements or on the ``pip`` command-line by using brackets. Separate multiple bundles using the comma: ```sh -pip install "faust[rocksdb]" +pip install "faust-streaming[rocksdb]" -pip install "faust[rocksdb,uvloop,fast,redis, aerospike]" +pip install "faust-streaming[rocksdb,uvloop,fast,redis,aerospike]" ``` The following bundles are available: @@ -275,17 +280,26 @@ The following bundles are available: ### Stores -`pip install faust[rocksdb]` for using `RocksDB` for storing Faust table state. **Recommended in production.** +#### RocksDB -`pip install faust[aerospike]` for using `Aerospike` for storing Faust table state. **Recommended if supported** +For using `RocksDB` for storing Faust table state. **Recommended in production.** + +`pip install faust-streaming[rocksdb]` (uses RocksDB 6) + +`pip install faust-streaming[rocksdict]` (uses RocksDB 8, not backwards compatible with 6) + + +#### Aerospike + +`pip install faust-streaming[aerospike]` for using `Aerospike` for storing Faust table state. 
**Recommended if supported** ### Aerospike Configuration Aerospike can be enabled as the state store by specifying `store="aerospike://"` -By default all tables backed by Aerospike use `use_partitioner=True` and generate changelog topic events similar +By default, all tables backed by Aerospike use `use_partitioner=True` and generate changelog topic events similar to a state store backed by RocksDB. -The following configuration options should be passed in as keys to the options parameter in [Table](https://fauststream.com/en/latest/reference/faust.tables.html) +The following configuration options should be passed in as keys to the options parameter in [Table](https://faust-streaming.github.io/faust/reference/faust.tables.html) `namespace` : aerospike namespace `ttl`: TTL for all KV's in the table @@ -302,45 +316,45 @@ The following configuration options should be passed in as keys to the options p ### Caching -`faust[redis]` for using `Redis` as a simple caching backend (Memcached-style). +`faust-streaming[redis]` for using `Redis` as a simple caching backend (Memcached-style). ### Codecs -`faust[yaml]` for using YAML and the `PyYAML` library in streams. +`faust-streaming[yaml]` for using YAML and the `PyYAML` library in streams. ### Optimization -`faust[fast]` for installing all the available C speedup extensions to Faust core. +`faust-streaming[fast]` for installing all the available C speedup extensions to Faust core. ### Sensors -`faust[datadog]` for using the `Datadog` Faust monitor. +`faust-streaming[datadog]` for using the `Datadog` Faust monitor. -`faust[statsd]` for using the `Statsd` Faust monitor. +`faust-streaming[statsd]` for using the `Statsd` Faust monitor. -`faust[prometheus]` for using the `Prometheus` Faust monitor. +`faust-streaming[prometheus]` for using the `Prometheus` Faust monitor. ### Event Loops -`faust[uvloop]` for using Faust with `uvloop`. +`faust-streaming[uvloop]` for using Faust with `uvloop`. 
-`faust[eventlet]` for using Faust with `eventlet` +`faust-streaming[eventlet]` for using Faust with `eventlet` ### Debugging -`faust[debug]` for using `aiomonitor` to connect and debug a running Faust worker. +`faust-streaming[debug]` for using `aiomonitor` to connect and debug a running Faust worker. -`faust[setproctitle]`when the `setproctitle` module is installed the Faust worker will use it to set a nicer process name in `ps`/`top` listings.vAlso installed with the `fast` and `debug` bundles. +`faust-streaming[setproctitle]` when the `setproctitle` module is installed the Faust worker will use it to set a nicer process name in `ps`/`top` listings. Also installed with the `fast` and `debug` bundles. ## Downloading and installing from source -Download the latest version of Faust from http://pypi.org/project/faust +Download the latest version of Faust from https://pypi.org/project/faust-streaming/ You can install it by doing: ```sh -$ tar xvfz faust-0.0.0.tar.gz -$ cd faust-0.0.0 +$ tar xvfz faust-streaming-0.0.0.tar.gz +$ cd faust-streaming-0.0.0 $ python setup.py build # python setup.py install ``` @@ -368,11 +382,11 @@ Yes! Use ``eventlet`` as a bridge to integrate with ``asyncio``. This approach works with any blocking Python library that can work with `eventlet` -Using `eventlet` requires you to install the `aioeventlet` module, +Using `eventlet` requires you to install the `faust-aioeventlet` module, and you can install this as a bundle along with Faust: ```sh -pip install -U faust[eventlet] +pip install -U faust-streaming[eventlet] ``` Then to actually use eventlet as the event loop you have to either @@ -398,17 +412,24 @@ Yes! Use the `tornado.platform.asyncio` [bridge](http://www.tornadoweb.org/en/st ### Can I use Faust with Twisted -Yes! Use the `asyncio` reactor implementation: https://twistedmatrix.com/documents/17.1.0/api/twisted internet.asyncioreactor.html +Yes!
Use the `asyncio` reactor implementation: https://twistedmatrix.com/documents/current/api/twisted.internet.asyncioreactor.html ### Will you support Python 2.7 or Python 3.5 -No. Faust requires Python 3.6 or later, since it heavily uses features that were +No. Faust requires Python 3.8 or later, since it heavily uses features that were introduced in Python 3.6 (`async`, `await`, variable type annotations). ### I get a maximum number of open files exceeded error by RocksDB when running a Faust app locally. How can I fix this -You may need to increase the limit for the maximum number of open files. The -following post explains how to do so on OS X: https://blog.dekstroza.io/ulimit-shenanigans-on-osx-el-capitan/ +You may need to increase the limit for the maximum number of open files. +On macOS and Linux you can use: + +```ulimit -n max_open_files``` to increase the open files limit to max_open_files. + +On docker, you can use the --ulimit flag: + +```docker run --ulimit nofile=50000:100000 ``` +where 50000 is the soft limit, and 100000 is the hard limit [See the difference](https://unix.stackexchange.com/a/29579). ### What kafka versions faust supports @@ -421,7 +442,7 @@ Faust supports kafka with version >= 0.10. For discussions about the usage, development, and future of Faust, please join the `fauststream` Slack. 
- https://fauststream.slack.com -- Sign-up: https://join.slack.com/t/fauststream/shared_invite/enQtNDEzMTIyMTUyNzU2LTIyMjNjY2M2YzA2OWFhMDlmMzVkODk3YTBlYThlYmZiNTUwZDJlYWZiZTdkN2Q4ZGU4NWM4YWMyNTM5MGQ5OTg +- Sign-up: https://join.slack.com/t/fauststreaming/shared_invite/zt-1q1jhq4kh-Q1t~rJgpyuMQ6N38cByE9g ## Resources diff --git a/docs/_ext/typehints.py b/docs/_ext/typehints.py index 8be172b56..38791e59f 100644 --- a/docs/_ext/typehints.py +++ b/docs/_ext/typehints.py @@ -1,6 +1,10 @@ import inspect +import logging from typing import Any, AnyStr, TypeVar, get_type_hints -from sphinx.util.inspect import Signature +try: + from sphinx.ext.autodoc import Signature +except ImportError: + from sphinx.util.inspect import signature as Signature try: from typing import GenericMeta # Py3.7 @@ -127,6 +131,9 @@ def process_docstring(app, what, name, obj, options, lines): except (AttributeError, TypeError): # Introspecting a slot wrapper will raise TypeError return + except Exception as e: # FIXME: A new exception is being raised, likely due to upgrading sphinx + logging.exception(e) + return for argname, annotation in type_hints.items(): formatted_annotation = format_annotation(annotation) diff --git a/docs/authors.rst b/docs/authors.rst index 3f34ddecf..ababb0541 100644 --- a/docs/authors.rst +++ b/docs/authors.rst @@ -7,4 +7,4 @@ .. contents:: :local: -.. include:: ../AUTHORS.rst +.. mdinclude:: ../AUTHORS.md diff --git a/docs/changelog.rst b/docs/changelog.rst index 93efd55ea..6336d7666 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -1 +1 @@ -.. include:: ../Changelog.rst +.. 
mdinclude:: ../CHANGELOG.md diff --git a/docs/conf.py b/docs/conf.py index db6b1a4f0..badf67f8a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -11,10 +11,10 @@ project='Faust', version_dev='1.1', version_stable='1.0', - canonical_url='https://fauststream.com/en/latest', + canonical_url='https://faust-streaming.github.io/faust', webdomain='', - github_project='robinhood/faust', - copyright='2017-2020', + github_project='faust-streaming/faust', + copyright='2017-2020, 2021-2022 Community', html_logo='images/logo.png', html_favicon='images/favicon.ico', html_prepend_sidebars=[], @@ -24,6 +24,7 @@ 'alabaster', 'typehints', 'faustdocs', + 'm2r2', ], extra_intersphinx_mapping={ 'aiohttp': ('https://aiohttp.readthedocs.io/en/stable/', None), @@ -62,7 +63,7 @@ r'faust.web.apps.*', 'faust.web.apps.stats.app', 'faust.web.apps.router.app', - 'faust' + 'faust', 'faust.web.drivers', r'.*\._cython.*', ], @@ -88,7 +89,7 @@ def configcheck_project_settings(): 'github_banner': True, 'travis_button': True, 'show_related': True, - 'github_user': 'robinhood', + 'github_user': 'faust-streaming', 'pre_bg': '#4c4c4c', 'github_repo': 'faust', 'github_type': 'star', diff --git a/docs/contributing.rst b/docs/contributing.rst index 6155e1f6b..e1bcbcfa7 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -159,7 +159,7 @@ Bugs for a package in the Faust ecosystem should be reported to the relevant issue tracker. * :pypi:`Faust` - https://github.com/faust-streaming/faust/issues -* :pypi:`Mode` - https://github.com/ask/mode/issues +* :pypi:`Mode` - https://github.com/faust-streaming/mode/issues If you're unsure of the origin of the bug you can ask the :ref:`mailing-list`, or just use the Faust issue tracker. 
@@ -795,18 +795,18 @@ Packages --------- :git: https://github.com/faust-streaming/faust -:CI: http://travis-ci.org/#!/robinhood/faust +:CI: https://app.travis-ci.com/github/faust-streaming/faust :Windows-CI: https://ci.appveyor.com/project/ask/faust -:PyPI: :pypi:`faust` -:docs: https://fauststream.com/en/latest/ +:PyPI: :pypi:`faust-streaming` +:docs: https://faust-streaming.github.io/faust/ ``Mode`` -------- -:git: https://github.com/ask/mode -:CI: http://travis-ci.org/#!/ask/mode -:Windows-CI: https://ci.appveyor.com/project/ask/mode -:PyPI: :pypi:`Mode` +:git: https://github.com/faust-streaming/mode/ +:CI: https://www.travis-ci.com/github/faust-streaming/mode +:Windows-CI: https://ci.appveyor.com/project/faust-streaming/mode +:PyPI: :pypi:`mode-streaming` :docs: http://mode.readthedocs.io/ .. _release-procedure: @@ -817,54 +817,11 @@ Release Procedure Updating the version number --------------------------- -The version number must be updated two places: +This is done automatically by `setuptools_scm`. - * :file:`faust/__init__.py` - * :file:`docs/include/introduction.txt` - -After you have changed these files you must render -the :file:`README` files. There's a script to convert sphinx syntax -to generic reStructured Text syntax, and the make target `readme` -does this for you: - -.. sourcecode:: console - - $ make readme - -Now commit the changes: - -.. sourcecode:: console - - $ git commit -a -m "Bumps version to X.Y.Z" - -and make a new version tag: - -.. sourcecode:: console - - $ git tag vX.Y.Z - $ git push --tags Releasing --------- -Commands to make a new public stable release: - -.. sourcecode:: console - - $ make distcheck # checks pep8, autodoc index, runs tests and more - $ make dist # NOTE: Runs git clean -xdf and removes files not in the repo. 
- $ python setup.py sdist upload --sign --identity='Celery Security Team' - $ python setup.py bdist_wheel upload --sign --identity='Celery Security Team' - -If this is a new release series then you also need to do the -following: - -* Go to the Read The Docs management interface at: - http://readthedocs.org/projects/faust/?fromdocs=faust - -* Enter "Edit project" - - Change default branch to the branch of this series, for example, use - the ``1.0`` branch for the 1.0 series. - -* Also add the previous version under the "versions" tab. +This is all done automatically on GitHub when a release +is tagged in https://github.com/faust-streaming/faust/releases. diff --git a/docs/includes/faq.txt b/docs/includes/faq.txt index 3d4d22687..7641cb64a 100644 --- a/docs/includes/faq.txt +++ b/docs/includes/faq.txt @@ -13,12 +13,12 @@ Using :pypi:`eventlet` This approach works with any blocking Python library that can work with :pypi:`eventlet`. -Using :pypi:`eventlet` requires you to install the :pypi:`aioeventlet` module, +Using :pypi:`eventlet` requires you to install the :pypi:`faust-aioeventlet` module, and you can install this as a bundle along with Faust: .. sourcecode:: console - $ pip install -U faust[eventlet] + $ pip install -U faust-streaming[eventlet] Then to actually use eventlet as the event loop you have to either use the :option:`-L ` argument to the :program:`faust` program: @@ -55,7 +55,7 @@ https://twistedmatrix.com/documents/17.1.0/api/twisted.internet.asyncioreactor.h Will you support Python 2.7 or Python 3.5? ------------------------------------------ -No. Faust requires Python 3.6 or later, since it heavily uses features that were +No. Faust requires Python 3.8 or later, since it heavily uses features that were introduced in Python 3.6 (`async`, `await`, variable type annotations). 
diff --git a/docs/includes/installation.txt b/docs/includes/installation.txt index cf67eb279..a75702494 100644 --- a/docs/includes/installation.txt +++ b/docs/includes/installation.txt @@ -8,7 +8,7 @@ To install using `pip`: .. sourcecode:: console - $ pip install -U faust + $ pip install -U faust-streaming .. _bundles: @@ -23,9 +23,9 @@ command-line by using brackets. Separate multiple bundles using the comma: .. sourcecode:: console - $ pip install "faust[rocksdb]" + $ pip install "faust-streaming[rocksdb]" - $ pip install "faust[rocksdb,uvloop,fast,redis]" + $ pip install "faust-streaming[rocksdb,uvloop,fast,redis]" The following bundles are available: diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 87b9d8118..ab5ab7368 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -58,4 +58,4 @@ Faust is just Python, and a stream is an infinite asynchronous iterator. If you know how to use Python, you already know how to use Faust, and it works with your favorite Python libraries like Django, Flask, - SQLAlchemy, NTLK, NumPy, SciPy, TensorFlow, etc. + SQLAlchemy, NLTK, NumPy, SciPy, TensorFlow, etc. diff --git a/docs/includes/resources.txt b/docs/includes/resources.txt index fc7ea7ad4..02345bf37 100644 --- a/docs/includes/resources.txt +++ b/docs/includes/resources.txt @@ -9,10 +9,9 @@ Slack ----- For discussions about the usage, development, and future of Faust, -please join the `fauststream`_ Slack. +please join the faust-streaming Slack at https://fauststream.slack.com. 
-* https://fauststream.slack.com -* Sign-up: https://join.slack.com/t/fauststream/shared_invite/enQtNDEzMTIyMTUyNzU2LTIyMjNjY2M2YzA2OWFhMDlmMzVkODk3YTBlYThlYmZiNTUwZDJlYWZiZTdkN2Q4ZGU4NWM4YWMyNTM5MGQ5OTg +* Sign-up: https://join.slack.com/t/fauststreaming/shared_invite/zt-1q1jhq4kh-Q1t~rJgpyuMQ6N38cByE9g Resources ========= diff --git a/docs/includes/settingref.txt b/docs/includes/settingref.txt index 5ae599b61..91bec6c02 100644 --- a/docs/includes/settingref.txt +++ b/docs/includes/settingref.txt @@ -128,7 +128,7 @@ The value for this argument can be: :type: :class:`str` / :class:`~pathlib.Path` :default: ``'{conf.name}-data'`` :environment: :envvar:`APP_DATADIR` -:related-command-options: :option:`faust -`, :option:`faust -`, :option:`faust d`, :option:`faust a`, :option:`faust t`, :option:`faust a`, :option:`faust d`, :option:`faust i`, :option:`faust r` +:related-command-options: :option:`faust --data-dir` Application data directory. @@ -171,7 +171,7 @@ setting. :type: :class:`bool` :default: :const:`False` :environment: :envvar:`APP_DEBUG` -:related-command-options: :option:`faust -`, :option:`faust -`, :option:`faust d`, :option:`faust e`, :option:`faust b`, :option:`faust u`, :option:`faust g` +:related-command-options: :option:`faust --debug` Use in development to expose sensor information endpoint. @@ -290,7 +290,7 @@ the second version is 2, and so on. :type: :class:`float` / :class:`~datetime.timedelta` :default: :const:`None` :environment: :envvar:`BLOCKING_TIMEOUT` -:related-command-options: :option:`faust -`, :option:`faust -`, :option:`faust b`, :option:`faust l`, :option:`faust o`, :option:`faust c`, :option:`faust k`, :option:`faust i`, :option:`faust n`, :option:`faust g`, :option:`faust -`, :option:`faust t`, :option:`faust i`, :option:`faust m`, :option:`faust e`, :option:`faust o`, :option:`faust u`, :option:`faust t` +:related-command-options: :option:`faust --blocking-timeout` Blocking timeout (in seconds). 
@@ -439,6 +439,41 @@ SASL Authentication password=BROKER_PASSWORD, )) +OAuth2 Authentication + You can enable SASL authentication via OAuth2 Bearer tokens: + + .. sourcecode:: python + + import faust + from asyncio import get_running_loop + from aiokafka.helpers import create_ssl_context + from aiokafka.conn import AbstractTokenProvider + + class TokenProvider(AbstractTokenProvider): + async def token(self): + return await get_running_loop().run_in_executor( + None, self.get_token) + + def get_token(self): + return 'token' + + app = faust.App( + broker_credentials=faust.OAuthCredentials( + oauth_cb=TokenProvider(), + ssl_context=create_ssl_context() + ) + ) + + .. note:: + + The implementation should ensure token reuse so that multiple + calls at connect time do not create multiple tokens. + The implementation should also periodically refresh the token in order to + guarantee that each call returns an unexpired token. + + Token Providers MUST implement the :meth:`token` method + + GSSAPI Authentication GSSAPI authentication over plain text: @@ -746,7 +781,7 @@ Automatically check the CRC32 of the records consumed. -------------------- :type: :class:`str` -:default: ``'faust-1.11.0a1'`` +:default: ``'faust-0.8.9'`` :environment: :envvar:`BROKER_CLIENT_ID` Broker client ID. @@ -1029,6 +1064,45 @@ If not set, default assignment strategy is used. Otherwise, each consumer instance has to have a unique id. +.. setting:: consumer_metadata_max_age_ms + +``consumer_metadata_max_age_ms`` +-------------------------------- + +.. versionadded:: 0.8.5 + +:type: :class:`int` +:default: ``300000`` +:environment: :envvar:`CONSUMER_METADATA_MAX_AGE_MS` + +Consumer metadata max age milliseconds + +The period of time in milliseconds after which we force +a refresh of metadata even if we haven’t seen any partition +leadership changes to proactively discover any new brokers or partitions. + +Default: 300000 + + +.. 
setting:: consumer_connections_max_idle_ms + +``consumer_connections_max_idle_ms`` +------------------------------------ + +.. versionadded:: 0.8.5 + +:type: :class:`int` +:default: ``540000`` +:environment: :envvar:`CONSUMER_CONNECTIONS_MAX_IDLE_MS` + +Consumer connections max idle milliseconds. + +Close idle connections after the number of milliseconds +specified by this config. + +Default: 540000 (9 minutes). + + .. setting:: ConsumerScheduler ``ConsumerScheduler`` @@ -1295,72 +1369,62 @@ This is set high by default, as this is also the time when producer batches expire and will no longer be retried. +.. setting:: producer_threaded -.. _settings-rpc: - -Agent RPC Settings -================== - -.. setting:: reply_create_topic +``producer_threaded`` +--------------------- -``reply_create_topic`` ----------------------- +.. versionadded:: 0.4.5 :type: :class:`bool` :default: :const:`False` -:environment: :envvar:`APP_REPLY_CREATE_TOPIC` +:environment: :envvar:`PRODUCER_THREADED` -Automatically create reply topics. - -Set this to :const:`True` if you plan on using the RPC with agents. +Thread separate producer for send_soon. -This will create the internal topic used for RPC replies on that -instance at startup. +If True, spin up a different producer in a different thread +to be used for messages buffered up for producing via +send_soon function. -.. setting:: reply_expires +.. setting:: producer_metadata_max_age_ms -``reply_expires`` ------------------ +``producer_metadata_max_age_ms`` +-------------------------------- -:type: :class:`float` / :class:`~datetime.timedelta` -:default: ``86400.0`` -:environment: :envvar:`APP_REPLY_EXPIRES` +.. versionadded:: 0.8.5 -RPC reply expiry time in seconds. +:type: :class:`int` +:default: ``300000`` +:environment: :envvar:`PRODUCER_METADATA_MAX_AGE_MS` -The expiry time (in seconds :class:`float`, -or :class:`~datetime.timedelta`), for how long replies will stay -in the instances local reply topic before being removed. 
+Producer metadata max age milliseconds +The period of time in milliseconds after which we force +a refresh of metadata even if we haven’t seen any partition +leadership changes to proactively discover any new brokers or partitions. -.. setting:: reply_to +Default: 300000 -``reply_to`` ------------- -:type: :class:`str` -:default: :const:`None` -Reply to address. +.. setting:: producer_connections_max_idle_ms -The name of the reply topic used by this instance. -If not set one will be automatically generated when the app -is created. +``producer_connections_max_idle_ms`` +------------------------------------ +.. versionadded:: 0.8.5 -.. setting:: reply_to_prefix - -``reply_to_prefix`` -------------------- +:type: :class:`int` +:default: ``540000`` +:environment: :envvar:`PRODUCER_CONNECTIONS_MAX_IDLE_MS` -:type: :class:`str` -:default: ``'f-reply-'`` -:environment: :envvar:`APP_REPLY_TO_PREFIX` +Producer connections max idle milliseconds. -Reply address topic name prefix. +Close idle connections after the number of milliseconds +specified by this config. -The prefix used when generating reply topic names. +Default: 540000 (9 minutes). @@ -1369,6 +1433,92 @@ The prefix used when generating reply topic names. Advanced Stream Settings ======================== +.. setting:: recovery_consistency_check + +``recovery_consistency_check`` +------------------------------ + +.. versionadded:: 0.4.7 + +:type: :class:`bool` +:default: :const:`True` +:environment: :envvar:`RECOVERY_CONSISTENCY_CHECK` + +Check Kafka and local offsets for consistency. + +If True, assert that Kafka highwater offsets >= local offset +in the rocksdb state storee + + +.. setting:: store_check_exists + +``store_check_exists`` +---------------------- + +.. versionadded:: 0.6.0 + +:type: :class:`bool` +:default: :const:`True` +:environment: :envvar:`STORE_CHECK_EXISTS` + +Execute exists on the underlying store. + +If True, executes exists on the underlying store. 
If False, +client has to catch KeyError + + +.. setting:: crash_app_on_aerospike_exception + +``crash_app_on_aerospike_exception`` +------------------------------------ + +.. versionadded:: 0.6.3 + +:type: :class:`bool` +:default: :const:`True` +:environment: :envvar:`CRASH_APP_ON_AEROSPIKE_EXCEPTION` + +Crashes the app on aerospike exceptions. + +If True, crashes the app and prevents the commit offset from progressing. If False, +client has to catch the Error and implement a dead letter queue + + +.. setting:: aerospike_retries_on_exception + +``aerospike_retries_on_exception`` +---------------------------------- + +.. versionadded:: 0.6.10 + +:type: :class:`int` +:default: ``60`` +:environment: :envvar:`AEROSPIKE_RETRIES_ON_EXCEPTION` + +Number of retries to aerospike on a runtime error from the aerospike client. + +Set this to the number of retries using the aerospike client on a runtime +Exception thrown by the client + + +.. setting:: aerospike_sleep_seconds_between_retries_on_exception + +``aerospike_sleep_seconds_between_retries_on_exception`` +-------------------------------------------------------- + +.. versionadded:: 0.6.10 + +:type: :class:`int` +:default: ``1`` +:environment: :envvar:`AEROSPIKE_SLEEP_SECONDS_BETWEEN_RETRIES_ON_EXCEPTION` + +Seconds to sleep between retries to aerospike on a runtime error from +the aerospike client. + +Set this to the sleep in seconds between retries using the aerospike +client on a runtime Exception thrown by the client + + .. setting:: stream_buffer_maxsize ``stream_buffer_maxsize`` @@ -1520,6 +1670,74 @@ streams are idempotent you can disable it using this setting. +.. _settings-rpc: + +Agent RPC Settings +================== + +.. setting:: reply_create_topic + +``reply_create_topic`` +---------------------- + +:type: :class:`bool` +:default: :const:`False` +:environment: :envvar:`APP_REPLY_CREATE_TOPIC` + +Automatically create reply topics. + +Set this to :const:`True` if you plan on using the RPC with agents. 
+ +This will create the internal topic used for RPC replies on that +instance at startup. + + +.. setting:: reply_expires + +``reply_expires`` +----------------- + +:type: :class:`float` / :class:`~datetime.timedelta` +:default: ``86400.0`` +:environment: :envvar:`APP_REPLY_EXPIRES` + +RPC reply expiry time in seconds. + +The expiry time (in seconds :class:`float`, +or :class:`~datetime.timedelta`), for how long replies will stay +in the instances local reply topic before being removed. + + +.. setting:: reply_to + +``reply_to`` +------------ + +:type: :class:`str` +:default: :const:`None` + +Reply to address. + +The name of the reply topic used by this instance. +If not set one will be automatically generated when the app +is created. + + +.. setting:: reply_to_prefix + +``reply_to_prefix`` +------------------- + +:type: :class:`str` +:default: ``'f-reply-'`` +:environment: :envvar:`APP_REPLY_TO_PREFIX` + +Reply address topic name prefix. + +The prefix used when generating reply topic names. + + + .. _settings-table: Advanced Table Settings @@ -1820,6 +2038,21 @@ This option is usually set by :option:`faust worker --web-port`, not by passing it as a keyword argument to :class:`app`. +.. setting:: web_ssl_context + +``web_ssl_context`` +------------------- + +.. versionadded:: 0.5.0 + +:type: :class:`~ssl.SSLContext` +:default: :const:`None` + +Web server SSL configuration. + +See :setting:`credentials`. + + .. 
setting:: web_transport ``web_transport`` @@ -2370,6 +2603,3 @@ Example using a class:: Example using the string path to a class:: app = faust.App(..., Monitor='myproj.monitors.Monitor') - - - diff --git a/docs/includes/tags.txt b/docs/includes/tags.txt index 1b4e4dd7f..397665e71 100644 --- a/docs/includes/tags.txt +++ b/docs/includes/tags.txt @@ -1,6 +1,4 @@ -:Version: 1.10.4 -:Web: https://fauststream.com/en/latest -:Download: http://pypi.org/project/faust -:Source: http://github.com/faust-streaming/faust +:Web: https://faust-streaming.github.io/faust +:Download: https://pypi.org/project/faust-streaming +:Source: https://github.com/faust-streaming/faust :Keywords: distributed, stream, async, processing, data, queue, state management - diff --git a/docs/introduction.rst b/docs/introduction.rst index b044ee3c2..5932baa23 100644 --- a/docs/introduction.rst +++ b/docs/introduction.rst @@ -4,6 +4,15 @@ Introducing Faust ===================== +.. image:: https://img.shields.io/pypi/v/faust-streaming.svg + :target: https://pypi.python.org/pypi/faust-streaming/ + +.. image:: https://img.shields.io/pypi/pyversions/faust-streaming.svg + :target: https://pypi.org/project/faust-streaming/ + +.. image:: https://img.shields.io/pypi/dm/faust-streaming + :target: https://pypi.python.org/pypi/faust-streaming/ + .. include:: includes/tags.txt **Table of Contents** @@ -146,14 +155,14 @@ What do I need? **Core** - - Python 3.6 or later. + - Python 3.8 or later. - Kafka 0.10.1 or later. **Extensions** - RocksDB 5.0 or later, :pypi:`python-rocksdb` -Faust requires Python 3.6 or later, and a running Kafka broker. +Faust requires Python 3.8 or later, and a running Kafka broker. There's no plan to support earlier Python versions. Please get in touch if this is something you want to work on. 
@@ -165,53 +174,53 @@ Extensions +--------------+-------------+--------------------------------------------------+ | **Name** | **Version** | **Bundle** | +--------------+-------------+--------------------------------------------------+ -| ``rocksdb`` | 5.0 | ``pip install faust[rocksdb]`` | +| ``rocksdb`` | 5.0 | ``pip install faust-streaming[rocksdb]`` | +--------------+-------------+--------------------------------------------------+ -| ``redis`` | aredis 1.1 | ``pip install faust[redis]`` | +| ``redis`` | aredis 1.1 | ``pip install faust-streaming[redis]`` | +--------------+-------------+--------------------------------------------------+ -| ``datadog`` | 0.20.0 | ``pip install faust[datadog]`` | +| ``datadog`` | 0.20.0 | ``pip install faust-streaming[datadog]`` | +--------------+-------------+--------------------------------------------------+ -| ``statsd`` | 3.2.1 | ``pip install faust[statsd]`` | +| ``statsd`` | 3.2.1 | ``pip install faust-streaming[statsd]`` | +--------------+-------------+--------------------------------------------------+ -| ``uvloop`` | 0.8.1 | ``pip install faust[uvloop]`` | +| ``uvloop`` | 0.8.1 | ``pip install faust-streaming[uvloop]`` | +--------------+-------------+--------------------------------------------------+ -| ``eventlet`` | 1.16.0 | ``pip install faust[eventlet]`` | +| ``eventlet`` | 1.16.0 | ``pip install faust-streaming[eventlet]`` | +--------------+-------------+--------------------------------------------------+ -| ``yaml`` | 5.1.0 | ``pip install faust[yaml]`` | +| ``yaml`` | 5.1.0 | ``pip install faust-streaming[yaml]`` | +--------------+-------------+--------------------------------------------------+ Optimizations ------------- -These can be all installed using ``pip install faust[fast]``: +These can be all installed using ``pip install faust-streaming[fast]``: +------------------+-------------+--------------------------------------------------+ | **Name** | **Version** | **Bundle** | 
+------------------+-------------+--------------------------------------------------+ -| ``aiodns`` | 1.1.0 | ``pip install faust[aiodns]`` | +| ``aiodns`` | 1.1.0 | ``pip install faust-streaming[aiodns]`` | +------------------+-------------+--------------------------------------------------+ -| ``cchardet`` | 1.1.0 | ``pip install faust[cchardet]`` | +| ``cchardet`` | 1.1.0 | ``pip install faust-streaming[cchardet]`` | +------------------+-------------+--------------------------------------------------+ -| ``ciso8601`` | 2.1.0 | ``pip install faust[ciso8601]`` | +| ``ciso8601`` | 2.1.0 | ``pip install faust-streaming[ciso8601]`` | +------------------+-------------+--------------------------------------------------+ -| ``cython`` | 0.9.26 | ``pip install faust[cython]`` | +| ``cython`` | 0.9.26 | ``pip install faust-streaming[cython]`` | +------------------+-------------+--------------------------------------------------+ -| ``orjson`` | 2.0.0 | ``pip install faust[orjson]`` | +| ``orjson`` | 2.0.0 | ``pip install faust-streaming[orjson]`` | +------------------+-------------+--------------------------------------------------+ -| ``setproctitle`` | 1.1.0 | ``pip install faust[setproctitle]`` | +| ``setproctitle`` | 1.1.0 | ``pip install faust-streaming[setproctitle]`` | +------------------+-------------+--------------------------------------------------+ Debugging extras ---------------- -These can be all installed using ``pip install faust[debug]``: +These can be all installed using ``pip install faust-streaming[debug]``: +------------------+-------------+--------------------------------------------------+ | **Name** | **Version** | **Bundle** | +------------------+-------------+--------------------------------------------------+ -| ``aiomonitor`` | 0.3 | ``pip install faust[aiomonitor]`` | +| ``aiomonitor`` | 0.3 | ``pip install faust-streaming[aiomonitor]`` | +------------------+-------------+--------------------------------------------------+ -| ``setproctitle`` 
| 1.1.0 | ``pip install faust[setproctitle]`` | +| ``setproctitle`` | 1.1.0 | ``pip install faust-streaming[setproctitle]`` | +------------------+-------------+--------------------------------------------------+ .. note:: @@ -225,7 +234,7 @@ These can be all installed using ``pip install faust[debug]``: .. sourcecode:: console - $ pip install faust[uvloop,fast,rocksdb,datadog,redis] + $ pip install faust-streaming[uvloop,fast,rocksdb,datadog,redis] .. admonition:: RocksDB On MacOS Sierra diff --git a/docs/keepachangelog/changelog.md b/docs/keepachangelog/changelog.md new file mode 100644 index 000000000..ae3e8753a --- /dev/null +++ b/docs/keepachangelog/changelog.md @@ -0,0 +1,9 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +{% for version in changelog.versions_list -%} +{% include 'version.md' with context %} +{% endfor -%} diff --git a/docs/keepachangelog/commit.md b/docs/keepachangelog/commit.md new file mode 100644 index 000000000..f308d52d7 --- /dev/null +++ b/docs/keepachangelog/commit.md @@ -0,0 +1,4 @@ +- {{ commit.style.subject|default(commit.subject)|capitalize }} ([{{ commit.hash|truncate(7, True, '') }}]({{ commit.url }}) by {{ commit.author_name }}). 
+{%- if commit.text_refs.issues_not_in_subject %} Related issues/PRs: {% for issue in commit.text_refs.issues_not_in_subject -%} +{% if issue.url %}[{{ issue.ref }}]({{ issue.url }}){%else %}{{ issue.ref }}{% endif %}{% if not loop.last %}, {% endif -%} +{%- endfor -%}{%- endif -%} diff --git a/docs/keepachangelog/section.md b/docs/keepachangelog/section.md new file mode 100644 index 000000000..9203dd83f --- /dev/null +++ b/docs/keepachangelog/section.md @@ -0,0 +1,4 @@ +### {{ section.type or "Misc" }} +{% for commit in section.commits|sort(attribute='author_date',reverse=true)|unique(attribute='subject') -%} +{% include 'commit.md' with context %} +{% endfor %} diff --git a/docs/keepachangelog/version.md b/docs/keepachangelog/version.md new file mode 100644 index 000000000..a888e3683 --- /dev/null +++ b/docs/keepachangelog/version.md @@ -0,0 +1,15 @@ +{%- if version.tag or version.planned_tag -%} +## [{{ version.tag or version.planned_tag }}]({{ version.url }}){% if version.date %} - {{ version.date }}{% endif %} + +[Compare with {{ version.previous_version.tag|default("first commit") }}]({{ version.compare_url }}) +{%- else -%} +## Unreleased + +[Compare with latest]({{ version.compare_url }}) +{%- endif %} + +{% for type, section in version.sections_dict|dictsort -%} +{%- if type and type in changelog.style.DEFAULT_RENDER -%} +{% include 'section.md' with context %} +{% endif -%} +{%- endfor -%} diff --git a/docs/reference/faust.stores.aerospike.rst b/docs/reference/faust.stores.aerospike.rst new file mode 100644 index 000000000..d63eb5e92 --- /dev/null +++ b/docs/reference/faust.stores.aerospike.rst @@ -0,0 +1,11 @@ +===================================================== + ``faust.stores.aerospike`` +===================================================== + +.. contents:: + :local: +.. currentmodule:: faust.stores.aerospike + +.. 
automodule:: faust.stores.aerospike + :members: + :undoc-members: diff --git a/docs/reference/index.rst b/docs/reference/index.rst index 4d403e786..979616bda 100644 --- a/docs/reference/index.rst +++ b/docs/reference/index.rst @@ -128,6 +128,7 @@ Stores faust.stores.base faust.stores.memory faust.stores.rocksdb + faust.stores.aerospike Tables ====== diff --git a/docs/templates/readme.txt b/docs/templates/readme.txt index bce9fcc36..47ed3e3f3 100644 --- a/docs/templates/readme.txt +++ b/docs/templates/readme.txt @@ -42,7 +42,7 @@ Be sure to also read the `Contributing to Faust`_ section in the documentation. .. _`Contributing to Faust`: - https://fauststream.com/en/latest/contributing.html + https://faust-streaming.github.io/faust/contributing.html .. include:: ../includes/code-of-conduct.txt diff --git a/docs/userguide/application.rst b/docs/userguide/application.rst index 70615252d..832739c81 100644 --- a/docs/userguide/application.rst +++ b/docs/userguide/application.rst @@ -1374,7 +1374,7 @@ setuptools to install a command-line program for your project. include_package_data=True, zip_safe=False, install_requires=['faust'], - python_requires='~=3.6', + python_requires='~=3.8', ) For inspiration you can also look to the `setup.py` files in the diff --git a/docs/userguide/debugging.rst b/docs/userguide/debugging.rst index 6d11c6513..07fb24f61 100644 --- a/docs/userguide/debugging.rst +++ b/docs/userguide/debugging.rst @@ -27,7 +27,7 @@ You can also install it as part of a :ref:`bundle `: .. 
sourcecode:: console - $ pip install -U faust[debug] + $ pip install -U faust-streaming[debug] After :pypi:`aiomonitor` is installed you may start the worker with the diff --git a/docs/userguide/models.rst b/docs/userguide/models.rst index 7945764df..5831a74b5 100644 --- a/docs/userguide/models.rst +++ b/docs/userguide/models.rst @@ -186,13 +186,13 @@ a schema that reads message key and value type from Kafka headers: serializer=None): if loads is None: loads = app.serializers.loads_value - # try to get key_type and serializer from Kafka headers + # try to get value_type and serializer from Kafka headers headers = dict(message.headers) value_type_name = headers.get('ValueType') serializer = serializer or headers.get('ValueSerializer') if value_type_name: - value_type = registry[value_type] - return loads(value_type, message.key, + value_type = registry[value_type_name] + return loads(value_type, message.value, serializer=serializer) else: return super().loads_value( diff --git a/docs/userguide/testing.rst b/docs/userguide/testing.rst index b055507fc..05bb27c9f 100644 --- a/docs/userguide/testing.rst +++ b/docs/userguide/testing.rst @@ -117,9 +117,10 @@ first test ``foo`` with ``bar`` mocked, then in a different test do ``bar``: from example import app, foo, bar - @pytest.fixture() + @pytest.fixture(scope="function") def test_app(event_loop): """passing in event_loop helps avoid 'attached to a different loop' error""" + app.loop = event_loop app.finalize() app.conf.store = 'memory://' app.flow_control.resume() @@ -144,6 +145,24 @@ first test ``foo`` with ``bar`` mocked, then in a different test do ``bar``: async with bar.test_context() as agent: event = await agent.put('hey') assert agent.results[event.message.offset] == 'heyYOLO' + + +You can put the `test_app` fixture into a [`conftest.py` file](https://docs.pytest.org/en/6.2.x/fixture.html#scope-sharing-fixtures-across-classes-modules-packages-or-session). 
If the fixture is not in the same file as the app's definition (which should be the case) you must import the app in the fixture definition: + +.. sourcecode:: python + from example import app + + @pytest.fixture(scope="function") + def test_app(event_loop): + """passing in event_loop helps avoid 'attached to a different loop' error""" + + from example import app + + app.loop = event_loop + app.finalize() + app.conf.store = 'memory://' + app.flow_control.resume() + return app .. note:: diff --git a/examples/advanced/service.py b/examples/advanced/service.py index 439e8d582..2d8b49242 100755 --- a/examples/advanced/service.py +++ b/examples/advanced/service.py @@ -41,7 +41,7 @@ async def consumer(stream): @app.timer(1.0) async def producer(): - await consumer.send('hello') + await consumer.send(value='hello') if __name__ == '__main__': diff --git a/examples/bound_agent.py b/examples/bound_agent.py new file mode 100644 index 000000000..f4e63dc89 --- /dev/null +++ b/examples/bound_agent.py @@ -0,0 +1,79 @@ +import logging +from random import randint +import time +from functools import partial +from typing import AsyncIterator + +import faust +from faust import ChannelT, StreamT + +app = faust.App("bound_agent") +logger = logging.getLogger(__name__) + +class DeviceAction(faust.Record): + device_id: str + +class DeadLetter(faust.Record): + stage: str + record: DeviceAction + +async def stage1_agent( + dead_letters: ChannelT, + stream: StreamT[DeviceAction] +) -> AsyncIterator[DeviceAction]: + async for action in stream: + now_ts = int(time.time()) + try: + if now_ts % 3 == 0: + raise Exception("!!!") + + logger.info(f"[stage1] action arrived: {action}") + yield action + except: + await dead_letters.send(value=DeadLetter(stage="stage1", record=action)) + +async def stage2_agent( + dead_letters: ChannelT, + stream: StreamT[DeviceAction] +) -> AsyncIterator[DeviceAction]: + async for action in stream: + now_ts = int(time.time()) + try: + if now_ts % 3 == 1: + raise 
Exception("!!!") + + logger.info(f"[stage2] action arrived: {action}") + yield action + except: + await dead_letters.send(value=DeadLetter(stage="stage2", record=action)) + +async def deadletter_agent(stream: StreamT[DeviceAction]) -> AsyncIterator[DeviceAction]: + async for dl in stream: + logger.error(f"[dead letter] arrived: {dl}") + yield dl + +async def action_generator(device_actions: ChannelT): + for i in range(0, randint(3, 101)): + await device_actions.send(value=DeviceAction(device_id=i)) + + +def main(): + channel_device_action = app.channel(value_type=DeviceAction) + channel_stage1_stage2 = app.channel(value_type=DeviceAction) + channel_deadletter = app.channel(value_type=DeadLetter) + + app.timer(interval=3, on_leader=True)(partial(action_generator, channel_device_action)) + app.agent(channel_deadletter, name="dead-letter-agent")(deadletter_agent) + + app.agent(channel_device_action, name="stage1-agent", sink=[channel_stage1_stage2])( + partial(stage1_agent, channel_deadletter) + ) + app.agent(channel_stage1_stage2, name="stage2-agent")( + partial(stage2_agent, channel_deadletter) + ) + + app.main() + + +if __name__ == "__main__": + main() diff --git a/examples/django/proj/__init__.py b/examples/django/proj/__init__.py index a11c44581..6f05102a4 100644 --- a/examples/django/proj/__init__.py +++ b/examples/django/proj/__init__.py @@ -14,7 +14,7 @@ __version__ = '0.9.3' __author__ = 'Robinhood Markets, Inc.' 
__contact__ = 'opensource@robinhood.com' -__homepage__ = 'https://fauststream.com/en/latest' +__homepage__ = 'https://faust-streaming.github.io/faust/' __docformat__ = 'restructuredtext' # -eof meta- diff --git a/examples/django/requirements/default.txt b/examples/django/requirements/default.txt index 1f7355e24..e5aed2ea6 100644 --- a/examples/django/requirements/default.txt +++ b/examples/django/requirements/default.txt @@ -1,4 +1,4 @@ django -faust[rocksdb] +faust[rocksdict] eventlet -aioeventlet +faust-aioeventlet diff --git a/examples/django/setup.py b/examples/django/setup.py index bab4806cc..eda2bfd42 100644 --- a/examples/django/setup.py +++ b/examples/django/setup.py @@ -14,7 +14,6 @@ License :: OSI Approved :: BSD License Programming Language :: Python Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.6 Operating System :: POSIX Operating System :: POSIX :: Linux Operating System :: MacOS :: MacOS X @@ -96,7 +95,7 @@ def reqs(*f): license='BSD', packages=find_packages(exclude=['ez_setup', 'tests', 'tests.*']), include_package_data=True, - python_requires='>=3.6.0', + python_requires='>=3.8.0', keywords=[], zip_safe=False, install_requires=reqs('default.txt'), diff --git a/tests/old/__init__.py b/examples/fastapi/__init__.py similarity index 100% rename from tests/old/__init__.py rename to examples/fastapi/__init__.py diff --git a/examples/fastapi/api/__init__.py b/examples/fastapi/api/__init__.py new file mode 100644 index 000000000..251300510 --- /dev/null +++ b/examples/fastapi/api/__init__.py @@ -0,0 +1,7 @@ +from fastapi import APIRouter + +from api.my_api import router as my_api_router + +router = APIRouter() + +router.include_router(my_api_router) diff --git a/examples/fastapi/api/my_api.py b/examples/fastapi/api/my_api.py new file mode 100644 index 000000000..e03859834 --- /dev/null +++ b/examples/fastapi/api/my_api.py @@ -0,0 +1,16 @@ +from typing import Union +from fastapi import APIRouter + +from 
my_faust.table.my_table import greetings_table + +router = APIRouter() + + +@router.get("/items/{item_id}") +def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} + + +@router.get("/table") +def read_table(): + return [{k: v} for k, v in greetings_table.items()] diff --git a/examples/fastapi/main.py b/examples/fastapi/main.py new file mode 100644 index 000000000..82e3c6b2c --- /dev/null +++ b/examples/fastapi/main.py @@ -0,0 +1,42 @@ +from contextlib import asynccontextmanager +from fastapi import FastAPI +from api import router as api_router + +from my_faust.timer import router as timer_router +from my_faust.app import faust_app + + +# This is just hello_world.py integrated with a FastAPI application + + +def fake_answer_to_everything_ml_model(x: float): + return x * 42 + + +ml_models = {} + + +@asynccontextmanager +async def lifespan(app: FastAPI): + faust_app.discover() + await faust_app.start() + yield + await faust_app.stop() + + +# You MUST have "app" defined in order for Faust to discover the app +# if you're using "faust" on CLI, but this doesn't work yet +app = fastapi_app = FastAPI( + lifespan=lifespan, +) + +# For now, run via "uvicorn fastapi_example:app" +# then visit http://127.0.0.1:8000/docs + +app.include_router(router=api_router) +app.include_router(router=timer_router) + + +@app.get("/") +def read_root(): + return {"Hello": "World"} diff --git a/examples/fastapi/my_faust/__init__.py b/examples/fastapi/my_faust/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/examples/fastapi/my_faust/agent/__init__.py b/examples/fastapi/my_faust/agent/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/examples/fastapi/my_faust/agent/my_agent.py b/examples/fastapi/my_faust/agent/my_agent.py new file mode 100644 index 000000000..ee4ccb6af --- /dev/null +++ b/examples/fastapi/my_faust/agent/my_agent.py @@ -0,0 +1,10 @@ +from my_faust.app import faust_app +from 
my_faust.table.my_table import greetings_table +from my_faust.topic.my_topic import greetings_topic + + +@faust_app.agent(greetings_topic) +async def print_greetings(greetings): + async for greeting in greetings: + print(f"greeting: {greeting}") + greetings_table[greeting] = {"hello world"} diff --git a/examples/fastapi/my_faust/app.py b/examples/fastapi/my_faust/app.py new file mode 100644 index 000000000..8a5b5f79b --- /dev/null +++ b/examples/fastapi/my_faust/app.py @@ -0,0 +1,18 @@ +import faust + + +def get_all_packages_to_scan(): + return ["my_faust"] + + +# You MUST have "app" defined in order for Faust to discover the app +# if you're using "faust" on CLI, but this doesn't work yet +# autodiscover https://faust-streaming.github.io/faust/userguide/settings.html#autodiscover +app = faust_app = faust.App( + 'hello-world-fastapi', + broker='kafka://localhost:9092', + web_enabled=False, + autodiscover=get_all_packages_to_scan, +) + +# For now, run via "faust -A my_faust.app worker -l info" diff --git a/examples/fastapi/my_faust/table/__init__.py b/examples/fastapi/my_faust/table/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/examples/fastapi/my_faust/table/my_table.py b/examples/fastapi/my_faust/table/my_table.py new file mode 100644 index 000000000..fda9b0fc3 --- /dev/null +++ b/examples/fastapi/my_faust/table/my_table.py @@ -0,0 +1,8 @@ +from my_faust.app import faust_app + +greetings_table = faust_app.GlobalTable( + name="greetings_table", + default=dict, + partitions=1, + recovery_buffer_size=1, +) diff --git a/examples/fastapi/my_faust/timer/__init__.py b/examples/fastapi/my_faust/timer/__init__.py new file mode 100644 index 000000000..d27e8c112 --- /dev/null +++ b/examples/fastapi/my_faust/timer/__init__.py @@ -0,0 +1,6 @@ +from fastapi import APIRouter +from my_faust.timer.my_timer import router as my_timer_router + +router = APIRouter() + +router.include_router(my_timer_router) diff --git 
a/examples/fastapi/my_faust/timer/my_timer.py b/examples/fastapi/my_faust/timer/my_timer.py new file mode 100644 index 000000000..57261aafd --- /dev/null +++ b/examples/fastapi/my_faust/timer/my_timer.py @@ -0,0 +1,14 @@ +from uuid import uuid4 +from fastapi import APIRouter + +from my_faust.app import faust_app +from my_faust.topic.my_topic import greetings_topic + +router = APIRouter() + + +@faust_app.timer(5) # make sure you *always* add the timer above if you're using one +@router.get("/produce") +async def produce(): + await greetings_topic.send(value=uuid4().hex) + return {"success": True} diff --git a/examples/fastapi/my_faust/topic/__init__.py b/examples/fastapi/my_faust/topic/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/examples/fastapi/my_faust/topic/my_topic.py b/examples/fastapi/my_faust/topic/my_topic.py new file mode 100644 index 000000000..b5e21a288 --- /dev/null +++ b/examples/fastapi/my_faust/topic/my_topic.py @@ -0,0 +1,3 @@ +from my_faust.app import faust_app + +greetings_topic = faust_app.topic("greetings", value_type=str) diff --git a/examples/fastapi_example.py b/examples/fastapi_example.py new file mode 100755 index 000000000..3e666fcda --- /dev/null +++ b/examples/fastapi_example.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python +import asyncio +from contextlib import asynccontextmanager +from typing import Union + +from fastapi import FastAPI + +import faust + + +# This is just hello_world.py integrated with a FastAPI application + + +def fake_answer_to_everything_ml_model(x: float): + return x * 42 + + +ml_models = {} + + +# You MUST have "app" defined in order for Faust to discover the app +# if you're using "faust" on CLI, but this doesn't work yet +faust_app = faust.App( + 'hello-world-fastapi', + broker='kafka://localhost:9092', + web_enabled=False, +) +# app = faust_app + +greetings_topic = faust_app.topic('greetings', value_type=str) + + +@asynccontextmanager +async def lifespan(app: FastAPI): + # Load the ML 
model + ml_models["answer_to_everything"] = fake_answer_to_everything_ml_model + await faust_app.start() + yield + # Clean up the ML models and release the resources + ml_models.clear() + await faust_app.stop() + + +app = fastapi_app = FastAPI( + lifespan=lifespan, +) +# For now, run via "uvicorn fastapi_example:app" +# then visit http://127.0.0.1:8000/docs + + +@fastapi_app.get("/") +def read_root(): + return {"Hello": "World"} + + +@fastapi_app.get("/items/{item_id}") +def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} + + +@faust_app.agent(greetings_topic) +async def print_greetings(greetings): + async for greeting in greetings: + print(greeting) + + +@faust_app.timer(5) # make sure you *always* add the timer above if you're using one +@fastapi_app.get("/produce") +async def produce(): + for i in range(100): + await greetings_topic.send(value=f'hello {i}') + return {"success": True} diff --git a/examples/kubernetes/producer/producer.py b/examples/kubernetes/producer/producer.py index 317485c06..94fa5dbbb 100644 --- a/examples/kubernetes/producer/producer.py +++ b/examples/kubernetes/producer/producer.py @@ -1,20 +1,20 @@ import json from random import random -from kafka import KafkaProducer +from aiokafka import KafkaProducer -TOPIC = 'test' -KEY = 'score' +TOPIC = "test" +KEY = "score" def publish_message(producer_instance, topic_name, key, value): try: - key_bytes = bytes(key, encoding='utf-8') - value_bytes = bytes(value, encoding='utf-8') + key_bytes = bytes(key, encoding="utf-8") + value_bytes = bytes(value, encoding="utf-8") producer_instance.send(topic_name, key=key_bytes, value=value_bytes) producer_instance.flush() - print('Message published successfully.') + print("Message published successfully.") except Exception as ex: - print('Exception in publishing message') + print("Exception in publishing message") print(ex) @@ -26,20 +26,20 @@ def connect_kafka_producer(): # Don't use in production, this only works 
with Docker for Mac in # development _producer = KafkaProducer( - bootstrap_servers=['host.docker.internal:9092'], - api_version=(0, 10)) + bootstrap_servers=["host.docker.internal:9092"], api_version=(0, 10) + ) except Exception as ex: - print('Exception while connecting Kafka') + print("Exception while connecting Kafka") print(ex) return _producer -if __name__ == '__main__': +if __name__ == "__main__": kafka_producer = connect_kafka_producer() for index in range(0, 10000): message = { - 'index': index, - 'value': round(random(), 2), + "index": index, + "value": round(random(), 2), } publish_message(kafka_producer, TOPIC, KEY, json.dumps(message)) if kafka_producer is not None: diff --git a/examples/windowing/hopping_2.py b/examples/windowing/hopping_2.py new file mode 100644 index 000000000..39a0871fc --- /dev/null +++ b/examples/windowing/hopping_2.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python + +# In this exapmple we have a function `publish_every_2secs` publishing a +# message every 2 seconds to topic `hopping_topic`. +# We have created an agent `print_windowed_events` consuming events from +# `hopping_topic` that mutates the windowed table `hopping_table`. + +# `hopping_table` is a table with hopping (overlaping) windows. Each of +# its windows is 10 seconds of duration, and we create a new window every 5 +# seconds. +# |----------| +# |-----------| +# |-----------| +# |-----------| +# Since we produce an event every 2 seconds and our windows are 10 +# seconds of duration we expect different the following results per method +# called in `WindowWrapper`: +# - now(): Gets the closest value to current local time. It will always be +# between 1 and 3. +# - current(): Gets the value relative to the event's timestamp. It will +# always be between 1 and 3. +# - value(): Gets the value relative to default relative option. It will +# always be between 1 and 3. +# - delta(30): Gets the value of window 30 secs before the current event. 
For +# the first 20 seconds it will be 0 and after second 30 it will always be 5. + +import sys +from datetime import datetime, timedelta +from time import time +import faust + + +class RawModel(faust.Record): + date: datetime + value: float + + +class WinModel(faust.Record): + win: list + + +TOPIC = 'raw-event' +TABLE = 'hopping_table' +KAFKA = 'kafka://localhost:9092' +CLEANUP_INTERVAL = 10 +WINDOW = 6 +STEP = 3 +WINDOW_EXPIRES = 60 +PARTITIONS = 1 + +app = faust.App('windowed-hopping', broker=KAFKA, topic_partitions=PARTITIONS) + +app.conf.table_cleanup_interval = CLEANUP_INTERVAL +source = app.topic(TOPIC, value_type=RawModel) + + +def window_processor(key, events): + timestamp = key[1][0] + count = len(events) + + print( + f'processing window:' + f'{count} events,' + f'timestamp {timestamp}', + ) + + +hopping_table = ( + app.Table( + TABLE, + default=list, + partitions=PARTITIONS, + on_window_close=window_processor, + ) + .hopping(WINDOW, STEP, expires=timedelta(seconds=WINDOW_EXPIRES)) + .relative_to_field(RawModel.date) +) + + +@app.agent(source) +async def print_windowed_events(stream): + async for event in stream: + value_list = hopping_table['events'].value() + + if len(value_list) > 0: + event.value = value_list[-1].value + 1 + print("Receive message : " + str(event)) + + value_list.append(event) + hopping_table['events'] = value_list + + +@app.timer(0.1) +async def produce(): + value = 1 + await source.send(value=RawModel(value=value, date=int(time()))) + # print(f'Produce Message :: send messge {value}') + + +if __name__ == '__main__': + if len(sys.argv) < 2: + sys.argv.extend(['worker', '-l', 'info']) + app.main() diff --git a/extra/release/sphinx2rst_config.py b/extra/release/sphinx2rst_config.py index d7eaa34b8..a9229ce35 100644 --- a/extra/release/sphinx2rst_config.py +++ b/extra/release/sphinx2rst_config.py @@ -1,4 +1,4 @@ -REFBASE = 'https://fauststream.com/en/latest' +REFBASE = 'https://faust-streaming.github.io/faust' REFS = { 'slack-channel': 
'getting-started/resources.html#slack-channel', 'bundles': 'introduction/installation.html#bundles', diff --git a/extra/tools/verify_doc_defaults.py b/extra/tools/verify_doc_defaults.py index 84f87266a..4385ed1f5 100644 --- a/extra/tools/verify_doc_defaults.py +++ b/extra/tools/verify_doc_defaults.py @@ -15,36 +15,36 @@ from yarl import URL -SETTINGS: Path = Path('docs/userguide/settings.rst') +SETTINGS: Path = Path("docs/userguide/settings.rst") -app = faust.App('verify_defaults') +app = faust.App("verify_defaults") ignore_settings: Set[str] = { - 'id', - 'tabledir', - 'reply_to', - 'broker_consumer', - 'broker_producer', + "id", + "tabledir", + "reply_to", + "broker_consumer", + "broker_producer", } builtin_locals: Dict[str, Any] = { - 'aiohttp': aiohttp, - 'app': app, - 'datetime': datetime, - 'datadir': app.conf.datadir, - 'faust': faust, - 'logging': logging, - 'mode': mode, - 'socket': socket, - 'timedelta': timedelta, - 'web_host': socket.gethostname(), - 'web_port': 6066, - 'VERSION': faust.__version__, - 'uuid': uuid, - 'URL': URL, + "aiohttp": aiohttp, + "app": app, + "datetime": datetime, + "datadir": app.conf.datadir, + "faust": faust, + "logging": logging, + "mode": mode, + "socket": socket, + "timedelta": timedelta, + "web_host": socket.gethostname(), + "web_port": 6066, + "VERSION": faust.__version__, + "uuid": uuid, + "URL": URL, } -RE_REF = re.compile(r'^:(\w+):`') +RE_REF = re.compile(r"^:(\w+):`") class Error(NamedTuple): @@ -63,7 +63,7 @@ def verify_settings(rst_path: Path) -> Iterator[Error]: actual = actual.value if actual != default: yield Error( - reason='mismatch', + reason="mismatch", setting=setting_name, default=default, actual=actual, @@ -74,14 +74,14 @@ def report_errors(errors: Iterator[Error]) -> int: num_errors: int = 0 for num_errors, e in enumerate(errors, start=1): if num_errors == 1: - carp(f'{sys.argv[0]}: Errors in docs/userguide/settings.rst:') - carp(f' + Setting {e.reason} {e.setting}:') - carp(f' documentation: 
{e.default!r}') - carp(f' actual: {e.actual!r}') + carp(f"{sys.argv[0]}: Errors in docs/userguide/settings.rst:") + carp(f" + Setting {e.reason} {e.setting}:") + carp(f" documentation: {e.default!r}") + carp(f" actual: {e.actual!r}") if num_errors: - carp(f'Found {num_errors} error(s).', file=sys.stderr) + carp(f"Found {num_errors} error(s).", file=sys.stderr) else: - print(f'{sys.argv[0]}: All OK :-)', file=sys.stdout) + print(f"{sys.argv[0]}: All OK :-)", file=sys.stdout) return num_errors @@ -89,38 +89,39 @@ def carp(msg, *, file: IO = sys.stderr, **kwargs: Any) -> None: print(msg, file=file, **kwargs) -def find_settings_in_rst(rst_path: Path, - locals: Dict[str, Any] = None, - builtin_locals: Dict[str, Any] = builtin_locals, - ignore_settings: Set[str] = ignore_settings): +def find_settings_in_rst( + rst_path: Path, + locals: Dict[str, Any] = None, + builtin_locals: Dict[str, Any] = builtin_locals, + ignore_settings: Set[str] = ignore_settings, +): setting: str = None default: Any = None - app = faust.App('_verify_doc_defaults') + app = faust.App("_verify_doc_defaults") _globals = dict(globals()) # Add setting default to globals # so that defaults referencing another setting work. # E.g.: # :default: :setting:`broker_api_version` - _globals.update({ - name: getattr(app.conf, name) - for name in app.conf.setting_names() - }) + _globals.update( + {name: getattr(app.conf, name) for name in app.conf.setting_names()} + ) local_ns: Dict[str, Any] = {**builtin_locals, **(locals or {})} for line in rst_path.read_text().splitlines(): - if line.startswith('.. setting::'): + if line.startswith(".. 
setting::"): if setting and not default and setting not in ignore_settings: - raise Exception(f'No default value for {setting}') - setting = line.split('::')[-1].strip() - elif ':default:' in line: - if '``' in line: - line, sep, rest = line.rpartition('``') - default = line.split(':default:')[-1].strip() - default = default.strip('`') - default = RE_REF.sub('', default) + raise Exception(f"No default value for {setting}") + setting = line.split("::")[-1].strip() + elif ":default:" in line: + if "``" in line: + line, sep, rest = line.rpartition("``") + default = line.split(":default:")[-1].strip() + default = default.strip("`") + default = RE_REF.sub("", default) default_value = eval(default, _globals, local_ns) if setting not in ignore_settings: yield setting, default_value -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(report_errors(verify_settings(SETTINGS))) diff --git a/faust/__init__.py b/faust/__init__.py index ba5e4e194..c20b05903 100644 --- a/faust/__init__.py +++ b/faust/__init__.py @@ -21,11 +21,17 @@ import re import sys import typing + +if sys.version_info < (3, 8): + from importlib_metadata import version +else: + from importlib.metadata import version + from typing import Any, Mapping, NamedTuple, Optional, Sequence, Tuple -__version__ = "0.8.5" +__version__ = version("faust-streaming") __author__ = "Robinhood Markets, Inc." -__contact__ = "schrohm@gmail.com, vpatki@wayfair.com" +__contact__ = "schrohm@gmail.com, vpatki@wayfair.com, williambbarnhart@gmail.com" __homepage__ = "https://github.com/faust-streaming/faust" __docformat__ = "markdown" @@ -36,8 +42,8 @@ class VersionInfo(NamedTuple): major: int minor: int micro: int - releaselevel: str - serial: str + releaselevel: Optional[str] = None + serial: Optional[str] = None version_info_t = VersionInfo # XXX compat @@ -45,13 +51,11 @@ class VersionInfo(NamedTuple): # bumpversion can only search for {current_version} # so we have to parse the version here. 
-_match = re.match(r"(\d+)\.(\d+).(\d+)(.+)?", __version__) +_match = re.match(r"^(?Pv)?(?P[^\+]+)(?P.*)?$", __version__) if _match is None: # pragma: no cover raise RuntimeError("THIS IS A BROKEN RELEASE!") _temp = _match.groups() -VERSION = version_info = VersionInfo( - int(_temp[0]), int(_temp[1]), int(_temp[2]), _temp[3] or "", "" -) +VERSION = version_info = VersionInfo(*_temp) del _match del _temp del re @@ -129,7 +133,12 @@ def _extract_arg_from_argv( # pragma: no cover from .agents import Agent # noqa: E402 from .app import App # noqa: E402 - from .auth import GSSAPICredentials, SASLCredentials, SSLCredentials # noqa: E402 + from .auth import ( # noqa: E402 + GSSAPICredentials, + OAuthCredentials, + SASLCredentials, + SSLCredentials, + ) from .channels import Channel, ChannelT # noqa: E402 from .events import Event, EventT # noqa: E402 from .models import Model, ModelOptions, Record # noqa: E402 @@ -180,6 +189,7 @@ def _extract_arg_from_argv( # pragma: no cover "TopicT", "GSSAPICredentials", "SASLCredentials", + "OAuthCredentials", "SSLCredentials", "Settings", "HoppingWindow", @@ -215,6 +225,7 @@ def _extract_arg_from_argv( # pragma: no cover "GSSAPICredentials", "SASLCredentials", "SSLCredentials", + "OAuthCredentials", ], "faust.types.settings": ["Settings"], "faust.windows": [ diff --git a/faust/agents/agent.py b/faust/agents/agent.py index ba9ef5255..9ebd896af 100644 --- a/faust/agents/agent.py +++ b/faust/agents/agent.py @@ -226,7 +226,7 @@ def __init__( "Agent concurrency must be 1 when using isolated partitions" ) self.use_reply_headers = use_reply_headers - Service.__init__(self) + Service.__init__(self, loop=app.loop) def on_init_dependencies(self) -> Iterable[ServiceT]: """Return list of services dependencies required to start agent.""" @@ -509,7 +509,7 @@ def _prepare_channel( has_prefix=has_prefix, **kwargs, ) - raise TypeError(f"Channel must be channel, topic, or str; not {type(channel)}") + raise TypeError(f"Channel must be channel, topic, or 
str, not {type(channel)}") def __call__( self, @@ -661,7 +661,9 @@ async def _prepare_actor(self, aref: ActorRefT, beacon: NodeT) -> ActorRefT: else: # agent yields and is an AsyncIterator so we have to consume it. coro = self._slurp(aref, aiter(aref)) - task = asyncio.Task(self._execute_actor(coro, aref), loop=self.loop) + # Calling asyncio.Task is not proper usage of asyncio, + # we need to create the task directly from the loop + task = self.loop.create_task(self._execute_actor(coro, aref)) task._beacon = beacon # type: ignore aref.actor_task = task self._actors.add(aref) @@ -1090,7 +1092,6 @@ def shortlabel(self) -> str: class AgentTestWrapper(Agent, AgentTestWrapperT): # pragma: no cover - _stream: StreamT def __init__( diff --git a/faust/agents/replies.py b/faust/agents/replies.py index 7ce915c2d..69e87c953 100644 --- a/faust/agents/replies.py +++ b/faust/agents/replies.py @@ -175,6 +175,7 @@ async def _start_fetcher(self, topic_name: str) -> None: # declare the topic topic = self._reply_topic(topic_name) await topic.maybe_declare() + self.app.topics.add(topic) await self.sleep(3.0) # then create the future self._fetchers[topic_name] = self.add_future(self._drain_replies(topic)) diff --git a/faust/app/base.py b/faust/app/base.py index 0a5bb310c..bc99f99a7 100644 --- a/faust/app/base.py +++ b/faust/app/base.py @@ -466,7 +466,7 @@ def __init__( self._default_options = (id, options) # The agent manager manages all agents. - self.agents = AgentManager(self) + self.agents = AgentManager(self, loop=loop) # Sensors monitor Faust using a standard sensor API. 
self.sensors = SensorDelegate(self) @@ -1326,7 +1326,9 @@ def table_route( def _decorator(fun: ViewHandlerFun) -> ViewHandlerFun: _query_param = query_param if shard_param is not None: - warnings.warn(DeprecationWarning(W_DEPRECATED_SHARD_PARAM)) + warnings.warn( + DeprecationWarning(W_DEPRECATED_SHARD_PARAM), stacklevel=2 + ) if query_param: raise TypeError("Cannot specify shard_param and query_param") _query_param = shard_param @@ -1370,7 +1372,9 @@ def topic_route( def _decorator(fun: ViewHandlerFun) -> ViewHandlerFun: _query_param = query_param if shard_param is not None: - warnings.warn(DeprecationWarning(W_DEPRECATED_SHARD_PARAM)) + warnings.warn( + DeprecationWarning(W_DEPRECATED_SHARD_PARAM), stacklevel=2 + ) if query_param: raise TypeError("Cannot specify shard_param and query_param") _query_param = shard_param @@ -1791,7 +1795,7 @@ def _update_assignment(self, assigned: Set[TP]) -> Tuple[Set[TP], Set[TP]]: return revoked, newly_assigned def _new_producer(self) -> ProducerT: - return self.transport.create_producer(beacon=self.beacon) + return self.transport.create_producer(loop=self.loop, beacon=self.beacon) def _new_consumer(self) -> ConsumerT: return self.transport.create_consumer( @@ -1899,7 +1903,8 @@ def _prepare_compat_settings(self, options: MutableMapping) -> Mapping: f"Cannot use both compat option {old!r} and {new!r}" ) warnings.warn( - FutureWarning(W_OPTION_DEPRECATED.format(old=old, new=new)) + FutureWarning(W_OPTION_DEPRECATED.format(old=old, new=new)), + stacklevel=2, ) return options diff --git a/faust/assignor/partition_assignor.py b/faust/assignor/partition_assignor.py index fb8ac4fb6..705079620 100644 --- a/faust/assignor/partition_assignor.py +++ b/faust/assignor/partition_assignor.py @@ -4,9 +4,9 @@ from collections import defaultdict from typing import Iterable, List, Mapping, MutableMapping, Sequence, Set, cast -from kafka.cluster import ClusterMetadata -from kafka.coordinator.assignors.abstract import AbstractPartitionAssignor -from 
kafka.coordinator.protocol import ( +from aiokafka.cluster import ClusterMetadata +from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor +from aiokafka.coordinator.protocol import ( ConsumerProtocolMemberAssignment, ConsumerProtocolMemberMetadata, ) @@ -336,10 +336,7 @@ def _global_table_standby_assignments( # the table and consumer group B, C, D only consuming. With the # synchronize_all_active_partitions flag it's possible to have # shared state over multiple consumer groups. - if ( - table.synchronize_all_active_partitions - or table.use_partitioner - ): + if table.synchronize_all_active_partitions or table.use_partitioner: standby_partitions = all_partitions else: # Only add those partitions as standby which aren't active standby_partitions = all_partitions - active_partitions diff --git a/faust/auth.py b/faust/auth.py index 22c4e859f..0c97e6a60 100644 --- a/faust/auth.py +++ b/faust/auth.py @@ -2,11 +2,14 @@ import ssl from typing import Any, Optional, Union +from aiokafka.conn import AbstractTokenProvider + from faust.types.auth import AuthProtocol, CredentialsT, SASLMechanism __all__ = [ "Credentials", "SASLCredentials", + "OAuthCredentials", "GSSAPICredentials", "SSLCredentials", ] @@ -49,6 +52,33 @@ def __repr__(self) -> str: return f"<{type(self).__name__}: username={self.username}>" +class OAuthCredentials(Credentials): + """Describe OAuth Bearer credentials over SASL""" + + protocol = AuthProtocol.SASL_PLAINTEXT + mechanism: SASLMechanism = SASLMechanism.OAUTHBEARER + + ssl_context: Optional[ssl.SSLContext] + + def __init__( + self, + *, + oauth_cb: AbstractTokenProvider, + ssl_context: Optional[ssl.SSLContext] = None, + ): + self.oauth_cb = oauth_cb + self.ssl_context = ssl_context + + if ssl_context is not None: + self.protocol = AuthProtocol.SASL_SSL + + def __repr__(self) -> str: + return "<{0}: oauth credentials {1} SSL support".format( + type(self).__name__, + "with" if self.protocol == AuthProtocol.SASL_SSL else "without", 
+ ) + + class GSSAPICredentials(Credentials): """Describe GSSAPI credentials over SASL.""" diff --git a/faust/cli/base.py b/faust/cli/base.py index 232dfe9fc..cc5ef4f8b 100644 --- a/faust/cli/base.py +++ b/faust/cli/base.py @@ -442,7 +442,7 @@ def cli(*args: Any, **kwargs: Any) -> None: # pragma: no cover Use --help for help, --version for version information. - https://fauststream.com/en/latest + https://faust-streaming.github.io/faust """ return _prepare_cli(*args, **kwargs) @@ -481,7 +481,7 @@ def _prepare_cli( os.environ["F_DATADIR"] = datadir -class Command(abc.ABC): +class Command(abc.ABC): # noqa: B024 """Base class for subcommands.""" UsageError: Type[Exception] = click.UsageError @@ -527,6 +527,7 @@ class Command(abc.ABC): @classmethod def as_click_command(cls) -> Callable: # pragma: no cover """Convert command into :pypi:`click` command.""" + # This is what actually registers the commands into the # :pypi:`click` command-line interface (the ``def cli`` main above). # __init_subclass__ calls this for the side effect of being @@ -592,8 +593,8 @@ def __init__(self, ctx: click.Context, *args: Any, **kwargs: Any) -> None: self._blocking_timeout = self.state.blocking_timeout self._console_port = self.state.console_port - @no_type_check # Subclasses can omit *args, **kwargs in signature. - async def run(self, *args: Any, **kwargs: Any) -> Any: + @no_type_check # Subclasses can omit *args, **kwargs in signature. # noqa: B027 + async def run(self, *args: Any, **kwargs: Any) -> Any: # noqa: B027 """Override this method to define what your command does.""" # NOTE: If you override __call__ below, you have a non-async command. # This is used by .worker to call the @@ -607,7 +608,7 @@ async def execute(self, *args: Any, **kwargs: Any) -> Any: finally: await self.on_stop() - async def on_stop(self) -> None: + async def on_stop(self) -> None: # noqa: B027 """Call after command executed.""" ... 
@@ -621,7 +622,7 @@ def __call__(self, *args: Any, **kwargs: Any) -> NoReturn: def run_using_worker(self, *args: Any, **kwargs: Any) -> NoReturn: """Execute command using :class:`faust.Worker`.""" - loop = asyncio.get_event_loop() + loop = asyncio.get_event_loop_policy().get_event_loop() args = self.args + args kwargs = {**self.kwargs, **kwargs} service = self.as_service(loop, *args, **kwargs) @@ -629,7 +630,7 @@ def run_using_worker(self, *args: Any, **kwargs: Any) -> NoReturn: self.on_worker_created(worker) raise worker.execute_from_commandline() - def on_worker_created(self, worker: Worker) -> None: + def on_worker_created(self, worker: Worker) -> None: # noqa: B027 """Call when creating :class:`faust.Worker` to execute this command.""" ... @@ -640,7 +641,7 @@ def as_service( return Service.from_awaitable( self.execute(*args, **kwargs), name=type(self).__name__, - loop=loop or asyncio.get_event_loop(), + loop=loop or asyncio.get_event_loop_policy().get_event_loop(), ) def worker_for_service( @@ -659,7 +660,7 @@ def worker_for_service( console_port=self.console_port, redirect_stdouts=self.redirect_stdouts or False, redirect_stdouts_level=self.redirect_stdouts_level, - loop=loop or asyncio.get_event_loop(), + loop=loop or asyncio.get_event_loop_policy().get_event_loop(), daemon=self.daemon, ) diff --git a/faust/cli/completion.py b/faust/cli/completion.py index 9a0f32b4c..d9f18b126 100644 --- a/faust/cli/completion.py +++ b/faust/cli/completion.py @@ -28,7 +28,7 @@ async def run(self) -> None: "Run `pip install click_completion` from your virtualenv\n" "and try again!" 
) - self.say(click_completion.get_code(shell=self.shell())) + self.say(click_completion.get_code(shell=self.shell())) # nosec: B604 def shell(self) -> str: """Return the current shell used in this environment.""" diff --git a/faust/cli/model.py b/faust/cli/model.py index fde0fe530..67a1ecd9c 100644 --- a/faust/cli/model.py +++ b/faust/cli/model.py @@ -58,7 +58,7 @@ def _unknown_model( alt = text.didyoumean( registry, lookup, - fmt_none=f"Please run `{self.prog_name} models` for a list.", + fmt_none=f'Please run "{self.prog_name} models" for a list.', ) return click.UsageError(f"No model {name!r}. {alt}") diff --git a/faust/cli/send.py b/faust/cli/send.py index a52cd4c9e..cc996f589 100644 --- a/faust/cli/send.py +++ b/faust/cli/send.py @@ -82,5 +82,7 @@ async def run( meta: RecordMetadata = await fut_send_complete self.say(self.dumps(meta._asdict())) if i and max_latency: - await asyncio.sleep(random.uniform(min_latency, max_latency)) + await asyncio.sleep( + random.uniform(min_latency, max_latency) # nosec B311 + ) await self.app.producer.stop() diff --git a/faust/cli/worker.py b/faust/cli/worker.py index c992cd093..ae22b6980 100644 --- a/faust/cli/worker.py +++ b/faust/cli/worker.py @@ -57,7 +57,7 @@ class worker(AppCommand): "-h", default=None, type=str, - help=f"Canonical host name for the web server " f"(default: {WEB_BIND})", + help=f"Canonical host name for the web server (default: {WEB_BIND})", ), ] diff --git a/faust/contrib/sentry.py b/faust/contrib/sentry.py index c22921e34..04d260329 100644 --- a/faust/contrib/sentry.py +++ b/faust/contrib/sentry.py @@ -96,7 +96,7 @@ def handler_from_dsn( include_paths: Iterable[str] = None, loglevel: Optional[int] = None, qsize: int = 1000, - **kwargs: Any + **kwargs: Any, ) -> Optional[logging.Handler]: if raven is None: raise ImproperlyConfigured("faust.contrib.sentry requires the `raven` library.") @@ -115,7 +115,7 @@ def handler_from_dsn( qsize=qsize, ), disable_existing_loggers=False, - **kwargs + **kwargs, ) 
handler = _build_sentry_handler()(client) handler.setLevel(level) @@ -130,7 +130,7 @@ def setup( workers: int = 4, max_queue_size: int = 1000, loglevel: Optional[int] = None, - **kwargs + **kwargs, ) -> None: sentry_handler = handler_from_dsn( dsn=dsn, workers=workers, qsize=max_queue_size, loglevel=loglevel, **kwargs diff --git a/faust/fixups/django.py b/faust/fixups/django.py index 5ca7cd20f..804d9018a 100644 --- a/faust/fixups/django.py +++ b/faust/fixups/django.py @@ -63,7 +63,7 @@ def enabled(self) -> bool: try: import django # noqa except ImportError: - warnings.warn(WARN_NOT_INSTALLED) + warnings.warn(WARN_NOT_INSTALLED, stacklevel=2) else: return True return False @@ -73,7 +73,7 @@ def wait_for_django(self) -> None: django.setup() if self.settings.DEBUG: - warnings.warn(WARN_DEBUG_ENABLED) + warnings.warn(WARN_DEBUG_ENABLED, stacklevel=2) def autodiscover_modules(self) -> Iterable[str]: """Return list of additional autodiscover modules. diff --git a/faust/livecheck/case.py b/faust/livecheck/case.py index c71c0b51f..38061a729 100644 --- a/faust/livecheck/case.py +++ b/faust/livecheck/case.py @@ -176,7 +176,7 @@ def __init__( # signal attributes have the correct signal instance. 
self.__dict__.update(self.signals) - Service.__init__(self, **kwargs) + Service.__init__(self, loop=app.loop, **kwargs) @Service.timer(10.0) async def _sampler(self) -> None: @@ -204,7 +204,7 @@ async def maybe_trigger( """Schedule test execution, or not, based on probability setting.""" execution: Optional[TestExecution] = None with ExitStack() as exit_stack: - if uniform(0, 1) < self.probability: + if uniform(0, 1) < self.probability: # nosec B311 execution = await self.trigger(id, *args, **kwargs) exit_stack.enter_context(current_test_stack.push(execution)) yield execution diff --git a/faust/models/base.py b/faust/models/base.py index 3f7ec65c5..6193f115c 100644 --- a/faust/models/base.py +++ b/faust/models/base.py @@ -242,7 +242,8 @@ def loads( """ if default_serializer is not None: warnings.warn( - DeprecationWarning("default_serializer deprecated, use: serializer") + DeprecationWarning("default_serializer deprecated, use: serializer"), + stacklevel=2, ) ser = cls._options.serializer or serializer or default_serializer data = loads(ser, s) diff --git a/faust/models/fields.py b/faust/models/fields.py index a51db66e0..e34b22a7d 100644 --- a/faust/models/fields.py +++ b/faust/models/fields.py @@ -331,7 +331,6 @@ def prepare_value( class NumberField(FieldDescriptor[T]): - max_value: Optional[int] min_value: Optional[int] @@ -439,7 +438,6 @@ def validate(self, value: Decimal) -> Iterable[ValidationError]: class CharField(FieldDescriptor[CharacterType]): - max_length: Optional[int] min_length: Optional[int] trim_whitespace: bool diff --git a/faust/models/record.py b/faust/models/record.py index 2dedb96fa..1d8fd3a54 100644 --- a/faust/models/record.py +++ b/faust/models/record.py @@ -143,7 +143,7 @@ def _contribute_to_options(cls, options: ModelOptions) -> None: ) options.fields = cast(Mapping, fields) options.fieldset = frozenset(fields) - options.fieldpos = {i: k for i, k in enumerate(fields.keys())} + options.fieldpos = {i: k for i, k in enumerate(fields.keys())} 
# noqa: C416 # extract all default values, but only for actual fields. options.defaults = { @@ -592,7 +592,7 @@ def to_representation(self) -> Mapping[str, Any]: payload = self.asdict() options = self._options if options.include_metadata: - payload["__faust"] = {"ns": options.namespace} + payload[self._blessed_key] = {"ns": options.namespace} return payload def asdict(self) -> Dict[str, Any]: # pragma: no cover diff --git a/faust/models/tags.py b/faust/models/tags.py index ac87b0466..ed0a3cc7c 100644 --- a/faust/models/tags.py +++ b/faust/models/tags.py @@ -79,7 +79,6 @@ def __format__(self, format_spec: str) -> str: class _FrameLocal(UserString, Generic[T]): - _field_name: str _tag_type: str _frame: str diff --git a/faust/models/typing.py b/faust/models/typing.py index eaf9ee4a6..4c44deb75 100644 --- a/faust/models/typing.py +++ b/faust/models/typing.py @@ -271,11 +271,13 @@ def __init__(self, expr: Type, root: "RootNode" = None) -> None: if DEBUG: print(f"NODE {self!r}") - def __post_init__(self) -> None: + def __post_init__(self) -> None: # noqa: B027 ... def random_identifier(self, n: int = 8) -> str: - return "".join(random.choice(string.ascii_letters) for _ in range(n)) + return "".join( + random.choice(string.ascii_letters) for _ in range(n) # nosec B311 + ) @abc.abstractmethod def build(self, var: Variable, *args: Type) -> str: diff --git a/faust/sensors/datadog.py b/faust/sensors/datadog.py index ed8bf57b9..3c79174c4 100644 --- a/faust/sensors/datadog.py +++ b/faust/sensors/datadog.py @@ -146,7 +146,7 @@ def __init__( self.rate = rate if datadog is None: raise ImproperlyConfigured( - f"{type(self).__name__} requires `pip install datadog`." + f'{type(self).__name__} requires "pip install datadog".' 
) super().__init__(**kwargs) diff --git a/faust/sensors/distributed_tracing.py b/faust/sensors/distributed_tracing.py new file mode 100644 index 000000000..31ea59510 --- /dev/null +++ b/faust/sensors/distributed_tracing.py @@ -0,0 +1,164 @@ +import traceback +from functools import cached_property +from typing import Any, Dict + +import aiohttp +import opentracing +from mode import get_logger +from mode.utils.compat import want_str +from opentracing import Format +from opentracing.ext import tags + +from faust import App, EventT, Sensor, StreamT +from faust.types import TP, Message, PendingMessage, ProducerT, RecordMetadata +from faust.types.core import OpenHeadersArg, merge_headers +from faust.utils.tracing import current_span, set_current_span + +logger = get_logger(__name__) + + +class TracingSensor(Sensor): + aiohttp_sessions: Dict[str, aiohttp.ClientSession] = None + + @cached_property + def app_tracer(self) -> opentracing.Tracer: + return opentracing.tracer + + async def stop(self) -> None: + if self.aiohttp_sessions: + for session in self.aiohttp_sessions.values(): + await session.close() + await super().stop() + + @cached_property + def kafka_tracer(self) -> opentracing.Tracer: + return opentracing.tracer + + # Message received by a consumer. 
+ def on_message_in(self, tp: TP, offset: int, message: Message) -> None: + carrier_headers = {want_str(k): want_str(v) for k, v in message.headers} + + if carrier_headers: + parent_context = self.app_tracer.extract( + format=Format.TEXT_MAP, carrier=carrier_headers + ) + span = self.app_tracer.start_span( + operation_name=f"consume-from-{message.topic}", + references=opentracing.follows_from(parent_context), + ) + else: + span = self.app_tracer.start_span( + operation_name=f"consume-from-{message.topic}" + ) + set_current_span(span) + span.set_tag("kafka-topic", tp.topic) + span.set_tag("kafka-partition", tp.partition) + span.set_tag("kafka-key", message.key) + span.__enter__() + message.span = span # type: ignore + + # Message sent to a stream as an event. + def on_stream_event_in( + self, tp: TP, offset: int, stream: StreamT, event: EventT + ) -> None: + stream_meta = getattr(event.message, "stream_meta", None) + if stream_meta is None: + stream_meta = event.message.stream_meta = {} # type: ignore + parent_span = event.message.span # type: ignore + if parent_span: + stream_span = opentracing.start_child_span( + parent_span, f"job-{event.message.topic}" + ) + stream_span.set_tag("stream-concurrency-index", stream.concurrency_index) + stream_span.set_tag("stream-prefix", stream.prefix) + spans = stream_meta.get("stream_spans") + if spans is None: + spans = stream_meta["stream_spans"] = {} + spans[stream] = stream_span + stream_span.__enter__() + + # Event was acknowledged by stream. + def on_stream_event_out( + self, tp: TP, offset: int, stream: StreamT, event: EventT, state: Dict = None + ) -> None: + stream_meta = getattr(event.message, "stream_meta", None) + if stream_meta is None: + stream_meta = event.message.stream_meta = {} # type: ignore + stream_spans = stream_meta.get("stream_spans") + if stream_spans: + span = stream_spans.pop(stream, None) + if span is not None: + span.finish() + + # All streams finished processing message. 
+ def on_message_out(self, tp: TP, offset: int, message: Message) -> None: + span = message.span # type: ignore + if span: + span.finish() + + # About to send a message. + def on_send_initiated( + self, + producer: ProducerT, + topic: str, + message: PendingMessage, + keysize: int, + valsize: int, + ) -> Any: + parent_span = current_span() + if parent_span: + span = opentracing.start_child_span(parent_span, f"produce-to-{topic}") + header_map = dict(message.headers) if message.headers else {} + span.set_tag("kafka-headers", header_map) + self.trace_inject_headers(span, message.headers) + span.__enter__() + return {"span": span} + return {"span": None} + + # Message successfully sent. + def on_send_completed( + self, producer: ProducerT, state: Any, metadata: RecordMetadata + ) -> None: + span = state.get("span") + if span is not None: + span.set_tag("kafka-topic", metadata.topic) + span.set_tag("kafka-partition", metadata.partition) + span.set_tag("kafka-offset", metadata.offset) + span.finish() + + # Error while sending message. 
+ def on_send_error( + self, producer: ProducerT, exc: BaseException, state: Any + ) -> None: + span = state.get("span") + if span is not None: + span.set_tag(tags.ERROR, "true") + span.log_kv( + { + "python.exception.type": type(exc), + "python.exception.val": exc, + "python.exception.tb": traceback.format_stack(), + } + ) + span.finish(exception=exc) + + def trace_inject_headers( + self, span: opentracing.Span, headers: OpenHeadersArg + ) -> Any: + try: + if self.app_tracer is not None: + if span is not None: + carrier: Dict = {} + self.app_tracer.inject( + span_context=span.context, + format=opentracing.Format.TEXT_MAP, + carrier=carrier, + ) + merge_headers(headers, carrier) + return headers + except Exception as ex: + logger.warning(f"Exception in trace_inject_headers {ex} ") + return None + + def on_threaded_producer_buffer_processed(self, app: App, size: int) -> None: + pass diff --git a/faust/sensors/monitor.py b/faust/sensors/monitor.py index 89d73125d..efacca358 100644 --- a/faust/sensors/monitor.py +++ b/faust/sensors/monitor.py @@ -68,7 +68,7 @@ def __init__( *, keys_retrieved: int = 0, keys_updated: int = 0, - keys_deleted: int = 0 + keys_deleted: int = 0, ) -> None: self.table: CollectionT = table self.keys_retrieved = keys_retrieved @@ -252,7 +252,7 @@ def __init__( http_response_codes: Counter[HTTPStatus] = None, http_response_latency: Deque[float] = None, http_response_latency_avg: float = 0.0, - **kwargs: Any + **kwargs: Any, ) -> None: if max_avg_history is not None: self.max_avg_history = max_avg_history @@ -417,7 +417,7 @@ def _topic_buffer_full_dict(self) -> MutableMapping[str, int]: return {label(topic): count for topic, count in self.topic_buffer_full.items()} def _metric_counts_dict(self) -> MutableMapping[str, int]: - return {key: count for key, count in self.metric_counts.items()} + return {key: count for key, count in self.metric_counts.items()} # noqa: C416 def _http_response_codes_dict(self) -> MutableMapping[int, int]: return 
{int(code): count for code, count in self.http_response_codes.items()} @@ -636,7 +636,7 @@ def on_web_request_end( response: Optional[web.Response], state: Dict, *, - view: web.View = None + view: web.View = None, ) -> None: """Web server finished working on request.""" status_code = HTTPStatus(response.status if response is not None else 500) @@ -659,7 +659,7 @@ def _normalize( name: str, *, pattern: Pattern = RE_NORMALIZE, - substitution: str = RE_NORMALIZE_SUBSTITUTION + substitution: str = RE_NORMALIZE_SUBSTITUTION, ) -> str: return pattern.sub(substitution, name) diff --git a/faust/serializers/codecs.py b/faust/serializers/codecs.py index 5175add57..e642f3be4 100644 --- a/faust/serializers/codecs.py +++ b/faust/serializers/codecs.py @@ -159,7 +159,7 @@ def msgpack() -> codecs.Codec: At this point may want to publish this on PyPI to share the extension with other Faust users. """ -import pickle as _pickle +import pickle as _pickle # nosec B403 from base64 import b64decode, b64encode from types import ModuleType from typing import Any, Dict, MutableMapping, Optional, Tuple, cast @@ -279,10 +279,10 @@ class raw_pickle(Codec): """:mod:`pickle` serializer with no encoding.""" def _loads(self, s: bytes) -> Any: - return _pickle.loads(s) + return _pickle.loads(s) # nosec B301 def _dumps(self, obj: Any) -> bytes: - return _pickle.dumps(obj) + return _pickle.dumps(obj) # nosec B403 def pickle() -> Codec: @@ -313,7 +313,7 @@ def _dumps(self, s: bytes) -> bytes: #: Codec registry, mapping of name to :class:`Codec` instance. 
codecs: MutableMapping[str, CodecT] = { "json": json(), - "pickle": pickle(), + "pickle": pickle(), # nosec B403 "binary": binary(), "raw": raw(), "yaml": yaml(), diff --git a/faust/stores/aerospike.py b/faust/stores/aerospike.py index f95de6d56..e713b4068 100644 --- a/faust/stores/aerospike.py +++ b/faust/stores/aerospike.py @@ -1,3 +1,4 @@ +"""Aerospike storage.""" import time import typing from typing import Any, Dict, Iterator, Optional, Tuple, Union @@ -41,13 +42,13 @@ class AeroSpikeStore(base.SerializedStore): ttl: int policies: typing.Mapping[str, Any] BIN_KEY = "value_key" - USERNAME_KEY = "username" - HOSTS_KEY = "hosts" - PASSWORD_KEY = "password" # nosec - NAMESPACE_KEY = "namespace" - TTL_KEY = "ttl" - POLICIES_KEY = "policies" - CLIENT_OPTIONS_KEY = "client" + USERNAME_KEY: str = "user" + HOSTS_KEY: str = "hosts" + PASSWORD_KEY: str = "password" # nosec + NAMESPACE_KEY: str = "namespace" + TTL_KEY: str = "ttl" + POLICIES_KEY: str = "policies" + CLIENT_OPTIONS_KEY: str = "client" def __init__( self, @@ -70,18 +71,23 @@ def __init__( @staticmethod def get_aerospike_client(aerospike_config: Dict[Any, Any]) -> Client: + """Try to get Aerospike client instance.""" global aerospike_client if aerospike_client: return aerospike_client else: - client = aerospike.client( - aerospike_config.get(AeroSpikeStore.CLIENT_OPTIONS_KEY) + client_config: Dict[Any, Any] = aerospike_config.get( + AeroSpikeStore.CLIENT_OPTIONS_KEY, {} ) + client_config[AeroSpikeStore.USERNAME_KEY] = aerospike_config.get( + AeroSpikeStore.USERNAME_KEY, None + ) + client_config[AeroSpikeStore.PASSWORD_KEY] = aerospike_config.get( + AeroSpikeStore.PASSWORD_KEY, None + ) + try: - client.connect( - aerospike_config.get(AeroSpikeStore.USERNAME_KEY), - aerospike_config.get(AeroSpikeStore.PASSWORD_KEY), - ) + client = aerospike.client(client_config) aerospike_client = client return client except Exception as e: @@ -200,6 +206,7 @@ def _iteritems(self) -> Iterator[Tuple[bytes, bytes]]: raise ex def 
_size(self) -> int: + """Always returns 0 for Aerospike.""" return 0 def _contains(self, key: bytes) -> bool: @@ -224,10 +231,20 @@ def _contains(self, key: bytes) -> bool: raise ex def _clear(self) -> None: - pass + """This is typically used to clear data. + + This does nothing when using the Aerospike store. + + """ + ... def reset_state(self) -> None: - pass + """Remove system state. + + This does nothing when using the Aerospike store. + + """ + ... def persisted_offset(self, tp: TP) -> Optional[int]: """Return the persisted offset. @@ -237,6 +254,7 @@ def persisted_offset(self, tp: TP) -> Optional[int]: return None def aerospike_fun_call_with_retry(self, fun, *args, **kwargs): + """Call function and retry until Aerospike throws exception.""" f_tries = self.app.conf.aerospike_retries_on_exception f_delay = self.app.conf.aerospike_sleep_seconds_between_retries_on_exception while f_tries > 1: @@ -262,3 +280,23 @@ def aerospike_fun_call_with_retry(self, fun, *args, **kwargs): ex ) # crash the app to prevent the offset from progressing raise ex + + async def backup_partition( + self, tp: Union[TP, int], flush: bool = True, purge: bool = False, keep: int = 1 + ) -> None: + """Backup partition from this store. + + Not yet implemented for Aerospike. + + """ + raise NotImplementedError("Not yet implemented for Aerospike.") + + def restore_backup( + self, tp: Union[TP, int], latest: bool = True, backup_id: int = 0 + ) -> None: + """Restore partition backup from this store. + + Not yet implemented for Aerospike. 
+ + """ + raise NotImplementedError("Not yet implemented for Aerospike.") diff --git a/faust/stores/memory.py b/faust/stores/memory.py index d0aefc6b7..484517628 100644 --- a/faust/stores/memory.py +++ b/faust/stores/memory.py @@ -1,5 +1,5 @@ """In-memory table storage.""" -from typing import Any, Callable, Iterable, MutableMapping, Optional, Set, Tuple +from typing import Any, Callable, Iterable, MutableMapping, Optional, Set, Tuple, Union from faust.types import TP, EventT from faust.types.stores import KT, VT @@ -82,3 +82,23 @@ def reset_state(self) -> None: """ ... + + async def backup_partition( + self, tp: Union[TP, int], flush: bool = True, purge: bool = False, keep: int = 1 + ) -> None: + """Backup partition from this store. + + This does nothing when using the in-memory store. + + """ + ... + + def restore_backup( + self, tp: Union[TP, int], latest: bool = True, backup_id: int = 0 + ) -> None: + """Restore partition backup from this store. + + This does nothing when using the in-memory store. + + """ + ... 
diff --git a/faust/stores/rocksdb.py b/faust/stores/rocksdb.py index e7150d120..d614b32c4 100644 --- a/faust/stores/rocksdb.py +++ b/faust/stores/rocksdb.py @@ -2,7 +2,9 @@ import asyncio import gc import math +import os import shutil +import tempfile import typing from collections import defaultdict from contextlib import suppress @@ -44,14 +46,19 @@ DEFAULT_BLOCK_CACHE_SIZE = 2 * 1024**3 DEFAULT_BLOCK_CACHE_COMPRESSED_SIZE = 500 * 1024**2 DEFAULT_BLOOM_FILTER_SIZE = 3 +ERRORS_ROCKS_IO_ERROR = ( + Exception # use general exception to avoid missing exception issues +) try: # pragma: no cover import rocksdb + + ERRORS_ROCKS_IO_ERROR = rocksdb.errors.RocksIOError except ImportError: # pragma: no cover rocksdb = None # noqa if typing.TYPE_CHECKING: # pragma: no cover - from rocksdb import DB, Options + from rocksdb import DB, Options, WriteBatch else: class DB: # noqa @@ -61,6 +68,16 @@ class Options: # noqa """Dummy Options.""" +try: # pragma: no cover + import rocksdict + from rocksdict import Options, Rdict as DB, WriteBatch # noqa F811 + + USE_ROCKSDICT = True +except ImportError: # pragma: no cover + USE_ROCKSDICT = False + rocksdict = None # noqa + + class PartitionDB(NamedTuple): """Tuple of ``(partition, rocksdb.DB)``.""" @@ -83,6 +100,7 @@ class RocksDBOptions: block_cache_size: int = DEFAULT_BLOCK_CACHE_SIZE block_cache_compressed_size: int = DEFAULT_BLOCK_CACHE_COMPRESSED_SIZE bloom_filter_size: int = DEFAULT_BLOOM_FILTER_SIZE + use_rocksdict: bool = USE_ROCKSDICT extra_options: Mapping def __init__( @@ -94,6 +112,8 @@ def __init__( block_cache_size: Optional[int] = None, block_cache_compressed_size: Optional[int] = None, bloom_filter_size: Optional[int] = None, + use_rocksdict: Optional[bool] = None, + ttl: Optional[int] = None, **kwargs: Any, ) -> None: if max_open_files is not None: @@ -110,33 +130,95 @@ def __init__( self.block_cache_compressed_size = block_cache_compressed_size if bloom_filter_size is not None: self.bloom_filter_size = 
bloom_filter_size + if use_rocksdict is not None: + self.use_rocksdict = use_rocksdict + self.ttl = ttl self.extra_options = kwargs def open(self, path: Path, *, read_only: bool = False) -> DB: """Open RocksDB database using this configuration.""" - return rocksdb.DB(str(path), self.as_options(), read_only=read_only) + if self.use_rocksdict: + db_options = self.as_options() + db_options.set_db_paths( + [rocksdict.DBPath(str(path), self.target_file_size_base)] + ) + db_access_type = ( + rocksdict.AccessType.read_write() + if self.ttl is None + else rocksdict.AccessType.with_ttl(self.ttl) + ) + db = DB(str(path), options=self.as_options(), access_type=db_access_type) + db.set_read_options(rocksdict.ReadOptions()) + return db + else: + return rocksdb.DB(str(path), self.as_options(), read_only=read_only) def as_options(self) -> Options: """Return :class:`rocksdb.Options` object using this configuration.""" - return rocksdb.Options( - create_if_missing=True, - max_open_files=self.max_open_files, - write_buffer_size=self.write_buffer_size, - max_write_buffer_number=self.max_write_buffer_number, - target_file_size_base=self.target_file_size_base, - table_factory=rocksdb.BlockBasedTableFactory( - filter_policy=rocksdb.BloomFilterPolicy(self.bloom_filter_size), - block_cache=rocksdb.LRUCache(self.block_cache_size), - block_cache_compressed=rocksdb.LRUCache( - self.block_cache_compressed_size + if self.use_rocksdict: + db_options = Options(raw_mode=True) + db_options.create_if_missing(True) + db_options.set_max_open_files(self.max_open_files) + db_options.set_write_buffer_size(self.write_buffer_size) + db_options.set_target_file_size_base(self.target_file_size_base) + db_options.set_max_write_buffer_number(self.max_write_buffer_number) + table_factory_options = rocksdict.BlockBasedOptions() + table_factory_options.set_bloom_filter( + self.bloom_filter_size, block_based=True + ) + table_factory_options.set_block_cache( + rocksdict.Cache(self.block_cache_size) + ) + 
table_factory_options.set_index_type( + rocksdict.BlockBasedIndexType.binary_search() + ) + db_options.set_block_based_table_factory(table_factory_options) + return db_options + else: + return rocksdb.Options( + create_if_missing=True, + max_open_files=self.max_open_files, + write_buffer_size=self.write_buffer_size, + max_write_buffer_number=self.max_write_buffer_number, + target_file_size_base=self.target_file_size_base, + table_factory=rocksdb.BlockBasedTableFactory( + filter_policy=rocksdb.BloomFilterPolicy(self.bloom_filter_size), + block_cache=rocksdb.LRUCache(self.block_cache_size), + block_cache_compressed=rocksdb.LRUCache( + self.block_cache_compressed_size + ), ), - ), - **self.extra_options, - ) + wal_ttl_seconds=self.ttl if self.ttl is not None else 0, + **self.extra_options, + ) class Store(base.SerializedStore): - """RocksDB table storage.""" + """RocksDB table storage. + + .. tip:: + You can specify 'read_only' as an option into a Table class + to allow a RocksDB store be used by multiple apps:: + + app.App(..., store="rocksdb://") + app.GlobalTable(..., options={'read_only': True}) + + You can also switch between RocksDB drivers this way:: + + app.GlobalTable(..., options={'driver': 'rocksdict'}) + app.GlobalTable(..., options={'driver': 'python-rocksdb'}) + + If you wish to remove the WAL files after a certain amount of + time, you can set a TTL this way:: + + app.GlobalTable(..., options={'ttl': 60 * 60 * 24}) # 1 day + + Note that the TTL is in seconds. + + .. warning:: + Note that rocksdict uses RocksDB 8. You won't be able to + return to using python-rocksdb, which uses RocksDB 6. 
+ """ offset_key = b"__faust\0offset__" @@ -159,11 +241,14 @@ def __init__( *, key_index_size: Optional[int] = None, options: Optional[Mapping[str, Any]] = None, + read_only: Optional[bool] = False, + driver: Optional[str] = None, **kwargs: Any, ) -> None: - if rocksdb is None: + if rocksdb is None and rocksdict is None: error = ImproperlyConfigured( - "RocksDB bindings not installed? pip install python-rocksdb" + "RocksDB bindings not installed? pip install faust-streaming-rocksdb" + " or rocksdict" ) try: import rocksdb as _rocksdb # noqa: F401 @@ -175,7 +260,19 @@ def __init__( if not self.url.path: self.url /= self.table_name self.options = options or {} - self.rocksdb_options = RocksDBOptions(**self.options) + self.read_only = self.options.pop("read_only", read_only) + + self.driver = self.options.pop("driver", driver) + if self.driver == "rocksdict": + self.use_rocksdict = True + elif self.driver == "python-rocksdb": + self.use_rocksdict = False + else: + self.use_rocksdict = USE_ROCKSDICT + + self.rocksdb_options = RocksDBOptions( + **self.options, use_rocksdict=self.use_rocksdict + ) if key_index_size is None: key_index_size = app.conf.table_key_index_size self.key_index_size = key_index_size @@ -183,6 +280,103 @@ def __init__( self._key_index = LRUCache(limit=self.key_index_size) self.db_lock = asyncio.Lock() self.rebalance_ack = False + self._backup_path = os.path.join(self.path, f"{str(self.basename)}-backups") + try: + self._backup_engine = None + if not os.path.isdir(self._backup_path): + os.makedirs(self._backup_path, exist_ok=True) + testfile = tempfile.TemporaryFile(dir=self._backup_path) + testfile.close() + except PermissionError: + self.log.warning( + f'Unable to make directory for path "{self._backup_path}",' + f"disabling backups." 
+ ) + except OSError: + self.log.warning( + f'Unable to create files in "{self._backup_path}",' f"disabling backups" + ) + else: + if rocksdb: + self._backup_engine = rocksdb.BackupEngine(self._backup_path) + + async def backup_partition( + self, tp: Union[TP, int], flush: bool = True, purge: bool = False, keep: int = 1 + ) -> None: + """Backup partition from this store. + + This will be saved in a separate directory in the data directory called + '{table-name}-backups'. + + This is only available in python-rocksdb. + + Arguments: + tp: Partition to backup + flush: Flush the memset before backing up the state of the table. + purge: Purge old backups in the process + keep: How many backups to keep after purging + + This is only supported in newer versions of python-rocksdb which can read + the RocksDB database using multi-process read access. + See https://github.com/facebook/rocksdb/wiki/How-to-backup-RocksDB to know more. + + Example usage:: + + table = app.GlobalTable(..., partitions=1) + table.data.backup_partition(0, flush=True, purge=True, keep=1) + + """ + if not self.use_rocksdict and self._backup_engine: + partition = tp + if isinstance(tp, TP): + partition = tp.partition + try: + if flush: + db = await self._try_open_db_for_partition(partition) + else: + db = self.rocksdb_options.open( + self.partition_path(partition), read_only=True + ) + self._backup_engine.create_backup(db, flush_before_backup=flush) + if purge: + self._backup_engine.purge_old_backups(keep) + except Exception: + self.log.info(f"Unable to backup partition {partition}.") + else: + raise NotImplementedError("Backups not supported in rocksdict yet") + + def restore_backup( + self, tp: Union[TP, int], latest: bool = True, backup_id: int = 0 + ) -> None: + """Restore partition backup from this store. 
+ + Arguments: + tp: Partition to restore + latest: Restore the latest backup, set as False to restore a specific ID + backup_id: Backup to restore + + An example of how the method can be accessed:: + + table = app.GlobalTable(..., partitions=1) + table.data.restore_backup(0) + + """ + if not self.use_rocksdict and self._backup_engine: + partition = tp + if isinstance(tp, TP): + partition = tp.partition + if latest: + self._backup_engine.restore_latest_backup( + str(self.partition_path(partition)), self._backup_path + ) + else: + self._backup_engine.restore_backup( + backup_id, str(self.partition_path(partition)), self._backup_path + ) + else: + raise NotImplementedError( + "Backup restoration not supported in rocksdict yet" + ) def persisted_offset(self, tp: TP) -> Optional[int]: """Return the last persisted offset. @@ -211,7 +405,7 @@ async def need_active_standby_for(self, tp: TP) -> bool: we can decide to not actively read standby messages, since that database file is already being populated. - Currently it is recommended that you use + Currently, it is recommended that you use separate data directories for multiple workers on the same machine. For example if you have a 4 CPU core machine, you can run @@ -227,7 +421,7 @@ async def need_active_standby_for(self, tp: TP) -> bool: """ try: self._db_for_partition(tp.partition) - except rocksdb.errors.RocksIOError as exc: + except ERRORS_ROCKS_IO_ERROR as exc: if "lock" not in repr(exc): raise return False @@ -249,8 +443,11 @@ def apply_changelog_batch( to_value: A callable you can use to deserialize the value of a changelog event. 
""" - batches: DefaultDict[int, rocksdb.WriteBatch] - batches = defaultdict(rocksdb.WriteBatch) + batches: DefaultDict[int, WriteBatch] + if self.use_rocksdict: + batches = defaultdict(lambda: rocksdict.WriteBatch(raw_mode=True)) + else: + batches = defaultdict(rocksdb.WriteBatch) tp_offsets: Dict[TP, int] = {} for event in batch: tp, offset = event.message.tp, event.message.offset @@ -285,7 +482,10 @@ def _db_for_partition(self, partition: int) -> DB: return db def _open_for_partition(self, partition: int) -> DB: - return self.rocksdb_options.open(self.partition_path(partition)) + path = self.partition_path(partition) + return self.rocksdb_options.open( + path, read_only=self.read_only if os.path.isfile(path) else False + ) def _get(self, key: bytes) -> Optional[bytes]: event = current_event() @@ -308,7 +508,11 @@ def _get(self, key: bytes) -> Optional[bytes]: db, value = dbvalue if value is None: - if db.key_may_exist(key)[0]: + if self.use_rocksdict: + key_may_exist = db.key_may_exist(key) + else: + key_may_exist = db.key_may_exist(key)[0] + if key_may_exist: return db.get(key) return value @@ -321,7 +525,11 @@ def _get_bucket_for_key(self, key: bytes) -> Optional[_DBValueTuple]: dbs = cast(Iterable[PartitionDB], self._dbs.items()) for partition, db in dbs: - if db.key_may_exist(key)[0]: + if self.use_rocksdict: + key_may_exist = db.key_may_exist(key) + else: + key_may_exist = db.key_may_exist(key)[0] + if key_may_exist: value = db.get(key) if value is not None: self._key_index[key] = partition @@ -410,7 +618,7 @@ async def _try_open_db_for_partition( f"{generation_id} app id {self.app.consumer_generation_id}" ) return self._db_for_partition(partition) - except rocksdb.errors.RocksIOError as exc: + except ERRORS_ROCKS_IO_ERROR as exc: if i == max_retries - 1 or "lock" not in repr(exc): # release all the locks and crash self.log.warning( @@ -449,7 +657,11 @@ def _contains(self, key: bytes) -> bool: else: for db in self._dbs_for_key(key): # bloom filter: false 
positives possible, but not false negatives - if db.key_may_exist(key)[0] and db.get(key) is not None: + if self.use_rocksdict: + key_may_exist = db.key_may_exist(key) + else: + key_may_exist = db.key_may_exist(key)[0] + if key_may_exist and db.get(key) is not None: return True return False @@ -475,15 +687,23 @@ def _size(self) -> int: return sum(self._size1(db) for db in self._dbs_for_actives()) def _visible_keys(self, db: DB) -> Iterator[bytes]: - it = db.iterkeys() # noqa: B301 - it.seek_to_first() + if self.use_rocksdict: + it = db.keys() + iter = db.iter() + iter.seek_to_first() + else: + it = db.iterkeys() # noqa: B301 + it.seek_to_first() for key in it: if key != self.offset_key: yield key def _visible_items(self, db: DB) -> Iterator[Tuple[bytes, bytes]]: - it = db.iteritems() # noqa: B301 - it.seek_to_first() + if self.use_rocksdict: + it = db.items() + else: + it = db.iteritems() # noqa: B301 + it.seek_to_first() for key, value in it: if key != self.offset_key: yield key, value diff --git a/faust/streams.py b/faust/streams.py index 1de5afb57..f23d874ad 100644 --- a/faust/streams.py +++ b/faust/streams.py @@ -206,7 +206,7 @@ def _iter_ll(self, dir_: _LinkedListDirection) -> Iterator[StreamT]: seen: Set[StreamT] = set() while node: if node in seen: - raise RuntimeError("Loop in Stream.{dir_.attr}: Call support!") + raise RuntimeError(f"Loop in Stream.{dir_.attr}: Call support!") seen.add(node) yield node node = dir_.getter(node) @@ -391,11 +391,104 @@ async def add_to_buffer(value: T) -> T: self.enable_acks = stream_enable_acks self._processors.remove(add_to_buffer) + async def take_events( + self, max_: int, within: Seconds + ) -> AsyncIterable[Sequence[EventT]]: + """Buffer n events at a time and yield a list of buffered events. + Arguments: + max_: Max number of messages to receive. When more than this + number of messages are received within the specified number of + seconds then we flush the buffer immediately. 
+ within: Timeout for when we give up waiting for another value, + and process the values we have. + Warning: If there's no timeout (i.e. `timeout=None`), + the agent is likely to stall and block buffered events for an + unreasonable length of time(!). + """ + buffer: List[T_co] = [] + events: List[EventT] = [] + buffer_add = buffer.append + event_add = events.append + buffer_size = buffer.__len__ + buffer_full = asyncio.Event() + buffer_consumed = asyncio.Event() + timeout = want_seconds(within) if within else None + stream_enable_acks: bool = self.enable_acks + + buffer_consuming: Optional[asyncio.Future] = None + + channel_it = aiter(self.channel) + + # We add this processor to populate the buffer, and the stream + # is passively consumed in the background (enable_passive below). + async def add_to_buffer(value: T) -> T: + try: + # buffer_consuming is set when consuming buffer after timeout. + nonlocal buffer_consuming + if buffer_consuming is not None: + try: + await buffer_consuming + finally: + buffer_consuming = None + buffer_add(cast(T_co, value)) + event = self.current_event + if event is None: + raise RuntimeError("Take buffer found current_event is None") + event_add(event) + if buffer_size() >= max_: + # signal that the buffer is full and should be emptied. + buffer_full.set() + # strict wait for buffer to be consumed after buffer full. + # If max is 1000, we are not allowed to return 1001 values. 
+ buffer_consumed.clear() + await self.wait(buffer_consumed) + except CancelledError: # pragma: no cover + raise + except Exception as exc: + self.log.exception("Error adding to take buffer: %r", exc) + await self.crash(exc) + return value + + # Disable acks to ensure this method acks manually + # events only after they are consumed by the user + self.enable_acks = False + + self.add_processor(add_to_buffer) + self._enable_passive(cast(ChannelT, channel_it)) + try: + while not self.should_stop: + # wait until buffer full, or timeout + await self.wait_for_stopped(buffer_full, timeout=timeout) + if buffer: + # make sure background thread does not add new items to + # buffer while we read. + buffer_consuming = self.loop.create_future() + try: + yield list(events) + finally: + buffer.clear() + for event in events: + await self.ack(event) + events.clear() + # allow writing to buffer again + notify(buffer_consuming) + buffer_full.clear() + buffer_consumed.set() + else: # pragma: no cover + pass + else: # pragma: no cover + pass + + finally: + # Restore last behaviour of "enable_acks" + self.enable_acks = stream_enable_acks + self._processors.remove(add_to_buffer) + async def take_with_timestamp( self, max_: int, within: Seconds, timestamp_field_name: str ) -> AsyncIterable[Sequence[T_co]]: - """Buffer n values at a time and yield a list of buffered values with the timestamp - when the message was added to kafka. + """Buffer n values at a time and yield a list of buffered values with the + timestamp when the message was added to kafka. Arguments: max_: Max number of messages to receive. 
When more than this @@ -689,7 +782,10 @@ def echo(self, *channels: Union[str, ChannelT]) -> StreamT: async def echoing(value: T) -> T: await asyncio.wait( - [maybe_forward(value, channel) for channel in _channels], + [ + asyncio.ensure_future(maybe_forward(value, channel)) + for channel in _channels + ], return_when=asyncio.ALL_COMPLETED, ) return value @@ -770,7 +866,6 @@ def get_key(withdrawal): if topic is not None: channel = topic else: - prefix = "" if self.prefix and not cast(TopicT, self.channel).has_prefix: prefix = self.prefix + "-" @@ -970,7 +1065,7 @@ async def _c_aiter(self) -> AsyncIterator[T_co]: # pragma: no cover yield value finally: event, self.current_event = self.current_event, None - it.after(event, do_ack, sensor_state) + it.after(event, do_ack or value is skipped_value, sensor_state) except StopAsyncIteration: # We are not allowed to propagate StopAsyncIteration in __aiter__ # (if we do, it'll be converted to RuntimeError by CPython). @@ -1113,6 +1208,8 @@ async def _py_aiter(self) -> AsyncIterator[T_co]: value = await _maybe_async(processor(value)) value = await on_merge(value) except Skip: + # We want to ack the filtered message + # otherwise the lag would increase value = skipped_value try: @@ -1121,7 +1218,9 @@ async def _py_aiter(self) -> AsyncIterator[T_co]: yield value finally: self.current_event = None - if do_ack and event is not None: + # We want to ack the filtered out message + # otherwise the lag would increase + if event is not None and (do_ack or value is skipped_value): # This inlines self.ack last_stream_to_ack = event.ack() message = event.message @@ -1130,6 +1229,7 @@ async def _py_aiter(self) -> AsyncIterator[T_co]: on_stream_event_out(tp, offset, self, event, sensor_state) if last_stream_to_ack: on_message_out(tp, offset, message) + except StopAsyncIteration: # We are not allowed to propagate StopAsyncIteration in __aiter__ # (if we do, it'll be converted to RuntimeError by CPython). 
diff --git a/faust/tables/base.py b/faust/tables/base.py index 45f7f63f2..7f90672fa 100644 --- a/faust/tables/base.py +++ b/faust/tables/base.py @@ -125,7 +125,7 @@ def __init__( synchronize_all_active_partitions: bool = False, **kwargs: Any, ) -> None: - Service.__init__(self, **kwargs) + Service.__init__(self, loop=app.loop, **kwargs) self.app = app self.name = cast(str, name) # set lazily so CAN BE NONE! self.default = default diff --git a/faust/tables/globaltable.py b/faust/tables/globaltable.py index 94a41a5d2..34a7e9c75 100644 --- a/faust/tables/globaltable.py +++ b/faust/tables/globaltable.py @@ -3,4 +3,11 @@ class GlobalTable(Table, GlobalTableT): - pass + """ + .. warning:: + Using a GlobalTable with multiple app instances may cause an + app to be stuck in an infinite recovery loop. The current fix + for this is to run the table with the following options:: + + app.GlobalTable(..., partitions=1, recovery_buffer_size=1) + """ diff --git a/faust/tables/objects.py b/faust/tables/objects.py index d52869397..3fb7d92a2 100644 --- a/faust/tables/objects.py +++ b/faust/tables/objects.py @@ -65,20 +65,18 @@ class ChangeloggedObjectManager(Store): data: MutableMapping _storage: Optional[StoreT] = None - _dirty: Set def __init__(self, table: Table, **kwargs: Any) -> None: self.table = table self.table_name = self.table.name self.data = {} - self._dirty = set() - Service.__init__(self, **kwargs) + Service.__init__(self, loop=table.loop, **kwargs) def send_changelog_event(self, key: Any, operation: int, value: Any) -> None: """Send changelog event to the tables changelog topic.""" event = current_event() - self._dirty.add(key) self.table._send_changelog(event, (operation, key), value) + self.storage[key] = self[key].as_stored_value() def __getitem__(self, key: Any) -> ChangeloggedObject: if key in self.data: @@ -100,10 +98,6 @@ async def on_start(self) -> None: """Call when the changelogged object manager starts.""" await self.add_runtime_dependency(self.storage) - 
async def on_stop(self) -> None: - """Call when the changelogged object manager stops.""" - self.flush_to_storage() - def persisted_offset(self, tp: TP) -> Optional[int]: """Get the last persisted offset for changelog topic partition.""" return self.storage.persisted_offset(tp) @@ -133,18 +127,6 @@ def sync_from_storage(self) -> None: for key, value in self.storage.items(): self[key].sync_from_storage(value) - def flush_to_storage(self) -> None: - """Flush set contents to storage.""" - for key in self._dirty: - self.storage[key] = self.data[key].as_stored_value() - self._dirty.clear() - - @Service.task - async def _periodic_flush(self) -> None: # pragma: no cover - async for sleep_time in self.itertimer(2.0, name="SetManager.flush"): - await self.sleep(sleep_time) - self.flush_to_storage() - def reset_state(self) -> None: """Reset table local state.""" # delegate to underlying RocksDB store. @@ -183,3 +165,16 @@ def apply_changelog_batch( for tp, offset in tp_offsets.items(): self.set_persisted_offset(tp, offset) + + async def backup_partition( + self, tp, flush: bool = True, purge: bool = False, keep: int = 1 + ) -> None: + raise NotImplementedError + + def restore_backup( + self, + tp, + latest: bool = True, + backup_id: int = 0, + ) -> None: + raise NotImplementedError diff --git a/faust/tables/recovery.py b/faust/tables/recovery.py index 83e9f2051..51ec3cb9c 100644 --- a/faust/tables/recovery.py +++ b/faust/tables/recovery.py @@ -18,7 +18,7 @@ ) import opentracing -from kafka.errors import IllegalStateError +from aiokafka.errors import IllegalStateError from mode import Service, get_logger from mode.services import WaitArgT from mode.utils.times import humanize_seconds, humanize_seconds_ago @@ -470,6 +470,21 @@ async def _restart_recovery(self) -> None: T(self.app.flow_control.resume)() T(consumer.resume_flow)() self._set_recovery_ended() + + # The changelog partitions only in the active_tps set need to be resumed + active_only_partitions = active_tps - 
standby_tps + if active_only_partitions: + # Support for the specific scenario where recovery_buffer=1 + tps_resuming = [ + tp + for tp in active_only_partitions + if self.tp_to_table[tp].recovery_buffer_size == 1 + ] + if tps_resuming: + T(consumer.resume_partitions)(tps_resuming) + T(self.app.flow_control.resume)() + T(consumer.resume_flow)() + self.log.info("Recovery complete") if span: span.set_tag("Recovery-Completed", True) @@ -600,9 +615,11 @@ async def on_recovery_completed(self, generation_id: int = 0) -> None: self._set_recovery_ended() # This needs to happen if all goes well callback_coros = [ - table.on_recovery_completed( - self.actives_for_table[table], - self.standbys_for_table[table], + asyncio.ensure_future( + table.on_recovery_completed( + self.actives_for_table[table], + self.standbys_for_table[table], + ) ) for table in self.tables.values() ] @@ -690,12 +707,18 @@ async def _build_offsets( # Offsets may have been compacted, need to get to the recent ones earliest = await consumer.earliest_offsets(*tps) # FIXME To be consistent with the offset -1 logic - earliest = {tp: offset - 1 for tp, offset in earliest.items()} + earliest = { + tp: offset - 1 if offset is not None else None + for tp, offset in earliest.items() + } + for tp in tps: last_value = destination[tp] - new_value = earliest[tp] + new_value = earliest.get(tp, None) - if last_value is None: + if last_value is None and new_value is None: + destination[tp] = -1 + elif last_value is None: destination[tp] = new_value elif new_value is None: destination[tp] = last_value diff --git a/faust/tables/sets.py b/faust/tables/sets.py index 1cca3679d..8da417a62 100644 --- a/faust/tables/sets.py +++ b/faust/tables/sets.py @@ -84,7 +84,7 @@ def on_change(self, added: Set[VT], removed: Set[VT]) -> None: self.manager.send_changelog_event(self.key, OPERATION_UPDATE, [added, removed]) def sync_from_storage(self, value: Any) -> None: - self.data = cast(Set, value) + self.data = set(value) def 
as_stored_value(self) -> Any: return self.data @@ -204,19 +204,19 @@ async def symmetric_difference_update(self, key: KT, members: Iterable[VT]) -> N await self._send_operation(SetAction.SYMDIFF, key, members) def _update(self, key: KT, members: List[VT]) -> None: - self.set_table[key].update(members) + self.set_table[key].update(set(members)) def _difference_update(self, key: KT, members: List[VT]) -> None: - self.set_table[key].difference_update(members) + self.set_table[key].difference_update(set(members)) def _clear(self, key: KT, members: List[VT]) -> None: self.set_table[key].clear() def _intersection_update(self, key: KT, members: List[VT]) -> None: - self.set_table[key].intersection_update(members) + self.set_table[key].intersection_update(set(members)) def _symmetric_difference_update(self, key: KT, members: List[VT]) -> None: - self.set_table[key].symmetric_difference_update(members) + self.set_table[key].symmetric_difference_update(set(members)) async def _send_operation( self, action: SetAction, key: KT, members: Iterable[VT] diff --git a/faust/tables/wrappers.py b/faust/tables/wrappers.py index 4dfb18d08..5d400d3bd 100644 --- a/faust/tables/wrappers.py +++ b/faust/tables/wrappers.py @@ -341,6 +341,7 @@ def __init__( key_type=self.table.key_type, window=None, ) + self.table.app.tables.add(self.key_index_table) self._get_relative_timestamp = self._relative_handler(relative_to) def clone(self, relative_to: RelativeArg) -> WindowWrapperT: diff --git a/faust/topics.py b/faust/topics.py index 76eb46ebc..90b5e1bd6 100644 --- a/faust/topics.py +++ b/faust/topics.py @@ -465,7 +465,8 @@ def _on_published( message.set_exception(exc) logger.warning( f"_on_published error for message topic " - f"{message.channel.get_topic_name()} error {exc} message {message}" + f"{message.message.channel.get_topic_name()} " + f"error {exc} message {message}" ) self.app.sensors.on_send_error(producer, exc, state) else: @@ -481,27 +482,39 @@ async def maybe_declare(self) -> None: 
async def declare(self) -> None: """Declare/create this topic on the server.""" - partitions = self.partitions - if partitions is None: + partitions: int + if self.partitions: + partitions = self.partitions + else: partitions = self.app.conf.topic_partitions replicas: int - if self.replicas is None: - replicas = self.app.conf.topic_replication_factor - else: + if self.replicas: replicas = self.replicas + else: + replicas = self.app.conf.topic_replication_factor if self.app.conf.topic_allow_declare: producer = await self._get_producer() for topic in self.topics: await producer.create_topic( topic=topic, partitions=partitions, - replication=replicas or 0, + replication=replicas or 1, config=self.config, compacting=self.compacting, deleting=self.deleting, retention=self.retention, ) + def on_stop_iteration(self) -> None: + """Signal that iteration over this channel was stopped. + Tip: + Remember to call ``super`` when overriding this method. + """ + super().on_stop_iteration() + if self.active_partitions is not None: + # Remove topics for isolated partitions from the Conductor. 
+ self.app.topics.discard(cast(TopicT, self)) + def __aiter__(self) -> ChannelT: if self.is_iterator: return self diff --git a/faust/transport/_cython/conductor.pyx b/faust/transport/_cython/conductor.pyx index 38e6c75b9..9e871e3bf 100644 --- a/faust/transport/_cython/conductor.pyx +++ b/faust/transport/_cython/conductor.pyx @@ -1,5 +1,5 @@ # cython: language_level=3 -from asyncio import ALL_COMPLETED, wait +from asyncio import ALL_COMPLETED, ensure_future, wait from faust.exceptions import KeyDecodeError, ValueDecodeError @@ -75,7 +75,7 @@ cdef class ConductorHandler: continue delivered.add(chan) if full: - await wait([self._handle_full(event, chan, delivered) + await wait([ensure_future(self._handle_full(event, chan, delivered)) for event, chan in full], return_when=ALL_COMPLETED) except KeyDecodeError as exc: diff --git a/faust/transport/base.py b/faust/transport/base.py index 6b5d173ca..ac70afb3e 100644 --- a/faust/transport/base.py +++ b/faust/transport/base.py @@ -61,7 +61,7 @@ def __init__( ) -> None: self.url = url self.app = app - self.loop = loop or asyncio.get_event_loop() + self.loop = loop or asyncio.get_event_loop_policy().get_event_loop() def create_consumer(self, callback: ConsumerCallback, **kwargs: Any) -> ConsumerT: """Create new consumer.""" diff --git a/faust/transport/conductor.py b/faust/transport/conductor.py index 8eb4e6c00..ff7d3d99f 100644 --- a/faust/transport/conductor.py +++ b/faust/transport/conductor.py @@ -168,7 +168,7 @@ async def on_message(message: Message) -> None: on_topic_buffer_full(dest_chan) await asyncio.wait( [ - dest_chan.put(dest_event) + asyncio.ensure_future(dest_chan.put(dest_event)) for dest_event, dest_chan in full ], return_when=asyncio.ALL_COMPLETED, @@ -214,6 +214,12 @@ class Conductor(ConductorT, Service): #: to call here. _tp_to_callback: MutableMapping[TP, ConsumerCallback] + #: Lock used to synchronize access to _tp_to_callback. 
+ #: Resubscriptions and updates to the indices may modify the mapping, and + #: while that is happening, the mapping should not be accessed by message + #: handlers. + _tp_to_callback_lock: asyncio.Lock + #: Whenever a change is made, i.e. a Topic is added/removed, we notify #: the background task responsible for resubscribing. _subscription_changed: Optional[asyncio.Event] @@ -224,8 +230,21 @@ class Conductor(ConductorT, Service): _compiler: ConductorCompiler - #: We wait for 45 seconds after a resubscription request, to make - #: sure any later requests are handled at the same time. + # `_resubscribe_sleep_lock_seconds` trades off between the latency of + # receiving messages for newly added topics and the cost of resubscribing + # to topics. Note that this resubscription flow only occurs when the topic + # list has changed (see the `_subscription_changed` event). This mechanism + # attempts to coalesce topic list changes that happen in quick succession + # and prevents the framework from constantly resubscribing to topics after + # every change. + # + # If the value is set too low and an agent is adding topics very + # frequently, then resubscription will happen very often and will issue + # unnecessary work on the async loop. + # If the value is set too high, it will take a long time for a newly added + # agent to start receiving messages; this time is bounded by the value of + # `_resubscribe_sleep_lock_seconds`, barring something hogging the async + # loop. 
_resubscribe_sleep_lock_seconds: float = 45.0 def __init__(self, app: AppT, **kwargs: Any) -> None: @@ -235,6 +254,7 @@ def __init__(self, app: AppT, **kwargs: Any) -> None: self._topic_name_index = defaultdict(set) self._tp_index = defaultdict(set) self._tp_to_callback = {} + self._tp_to_callback_lock = asyncio.Lock() self._acking_topics = set() self._subscription_changed = None self._subscription_done = None @@ -266,12 +286,18 @@ def _compile_message_handler(self) -> ConsumerCallback: async def on_message(message: Message) -> None: tp = TP(topic=message.topic, partition=0) - return await get_callback_for_tp(tp)(message) + async with self._tp_to_callback_lock: + callback = get_callback_for_tp(tp) + + return await callback(message) else: async def on_message(message: Message) -> None: - return await get_callback_for_tp(message.tp)(message) + async with self._tp_to_callback_lock: + callback = get_callback_for_tp(message.tp) + + return await callback(message) return on_message @@ -309,11 +335,14 @@ async def _subscriber(self) -> None: # pragma: no cover # further subscription requests will happen during the same # rebalance. await self.sleep(self._resubscribe_sleep_lock_seconds) + + # Clear the event before updating indices. This way, new events + # that get triggered during the update will be handled the next + # time around. + ev.clear() subscribed_topics = await self._update_indices() await self.app.consumer.subscribe(subscribed_topics) - # clear the subscription_changed flag, so we can wait on it again. - ev.clear() # wake-up anything waiting for the subscription to be done. 
notify(self._subscription_done) @@ -328,15 +357,23 @@ async def maybe_wait_for_subscriptions(self) -> None: await self._subscription_done async def _update_indices(self) -> Iterable[str]: - self._topic_name_index.clear() - self._tp_to_callback.clear() - for channel in self._topics: - if channel.internal: - await channel.maybe_declare() - for topic in channel.topics: - if channel.acks: - self._acking_topics.add(topic) - self._topic_name_index[topic].add(channel) + async with self._tp_to_callback_lock: + self._topic_name_index.clear() + self._tp_to_callback.clear() + + # Make a (shallow) copy of the topics, so new additions to the set + # won't poison the iterator. Additions can come in while this + # function yields during an await. + topics = list(self._topics) + for channel in topics: + if channel.internal: + await channel.maybe_declare() + for topic in channel.topics: + if channel.acks: + self._acking_topics.add(topic) + self._topic_name_index[topic].add(channel) + + self._update_callback_map() return self._topic_name_index @@ -413,11 +450,12 @@ def add(self, topic: TopicT) -> None: def _topic_contain_unsubscribed_topics(self, topic: TopicT) -> bool: index = self._topic_name_index - return bool(index and any(t not in index for t in topic.topics)) + return bool(any(t not in index for t in topic.topics)) def discard(self, topic: Any) -> None: """Unregister topic from conductor.""" self._topics.discard(topic) + self._flag_changes() def _flag_changes(self) -> None: if self._subscription_changed is not None: diff --git a/faust/transport/consumer.py b/faust/transport/consumer.py index 4d79d9a86..0feba69c0 100644 --- a/faust/transport/consumer.py +++ b/faust/transport/consumer.py @@ -72,6 +72,7 @@ from weakref import WeakSet from aiokafka.errors import ProducerFenced +from intervaltree import IntervalTree from mode import Service, ServiceT, flight_recorder, get_logger from mode.threads import MethodQueue, QueueServiceThread from mode.utils.futures import notify @@ 
-363,17 +364,20 @@ async def create_topic( ensure_created: bool = False, ) -> None: """Create/declare topic on server.""" - return await self.producer.create_topic( - topic, - partitions, - replication, - config=config, - timeout=timeout, - retention=retention, - compacting=compacting, - deleting=deleting, - ensure_created=ensure_created, - ) + if self.app.conf.topic_allow_declare: + return await self.producer.create_topic( + topic, + partitions, + replication, + config=config, + timeout=timeout, + retention=retention, + compacting=compacting, + deleting=deleting, + ensure_created=ensure_created, + ) + else: + logger.warning(f"Topic creation disabled! Can't create topic {topic}") def supports_headers(self) -> bool: """Return :const:`True` if the Kafka server supports headers.""" @@ -392,7 +396,7 @@ class Consumer(Service, ConsumerT): consumer_stopped_errors: ClassVar[Tuple[Type[BaseException], ...]] = () # Mapping of TP to list of gap in offsets. - _gap: MutableMapping[TP, List[int]] + _gap: MutableMapping[TP, IntervalTree] # Mapping of TP to list of acked offsets. 
_acked: MutableMapping[TP, List[int]] @@ -465,7 +469,7 @@ def __init__( commit_livelock_soft_timeout or self.app.conf.broker_commit_livelock_soft_timeout ) - self._gap = defaultdict(list) + self._gap = defaultdict(IntervalTree) self._acked = defaultdict(list) self._acked_index = defaultdict(set) self._read_offset = defaultdict(lambda: None) @@ -481,7 +485,7 @@ def __init__( self.not_waiting_next_records = Event() self.not_waiting_next_records.set() self._reset_state() - super().__init__(loop=loop or self.transport.loop, **kwargs) + super().__init__(loop=loop, **kwargs) self.transactions = self.transport.create_transaction_manager( consumer=self, producer=self.app.producer, @@ -1087,16 +1091,35 @@ def _new_offset(self, tp: TP) -> Optional[int]: # the return value will be: 37 if acked: max_offset = max(acked) - gap_for_tp = self._gap[tp] + gap_for_tp: IntervalTree = self._gap[tp] if gap_for_tp: - gap_index = next( - (i for i, x in enumerate(gap_for_tp) if x > max_offset), - len(gap_for_tp), - ) - gaps = gap_for_tp[:gap_index] - acked.extend(gaps) - gap_for_tp[:gap_index] = [] + # find all the ranges up to the max of acked, add them in to acked, + # and chop them off the gap. + candidates = gap_for_tp.overlap(0, max_offset) + # note: merge_overlaps will sort the intervaltree and will ensure that + # the intervals left over don't overlap each other. So can sort by their + # start without worrying about ends overlapping. + sorted_candidates = sorted(candidates, key=lambda x: x.begin) + if sorted_candidates: + stuff_to_add = [] + for entry in sorted_candidates: + stuff_to_add.extend(range(entry.begin, entry.end)) + new_max_offset = max(stuff_to_add[-1], max_offset + 1) + acked.extend(stuff_to_add) + gap_for_tp.chop(0, new_max_offset) acked.sort() + + # We iterate over it until we handle gap in the head of acked queue + # then return the previous committed offset. 
+ # For example if acked[tp] is: + # 34 35 36 37 + # ^-- gap + # self._committed_offset[tp] is 31 + # the return value will be None (the same as 31) + if self._committed_offset[tp]: + if min(acked) - self._committed_offset[tp] > 1: + return None + # Note: acked is always kept sorted. # find first list of consecutive numbers batch = next(consecutive_numbers(acked)) @@ -1111,12 +1134,18 @@ async def on_task_error(self, exc: BaseException) -> None: """Call when processing a message failed.""" await self.commit() - def _add_gap(self, tp: TP, offset_from: int, offset_to: int) -> None: + async def _add_gap(self, tp: TP, offset_from: int, offset_to: int) -> None: committed = self._committed_offset[tp] gap_for_tp = self._gap[tp] - for offset in range(offset_from, offset_to): - if committed is None or offset > committed: - gap_for_tp.append(offset) + if committed is not None: + offset_from = max(offset_from, committed + 1) + # intervaltree intervals exclude the end + if offset_from <= offset_to: + gap_for_tp.addi(offset_from, offset_to + 1) + # sleep 0 to allow other coroutines to get some loop time + # for example, to answer health checks while building the gap + await asyncio.sleep(0) + gap_for_tp.merge_overlaps() async def _drain_messages(self, fetcher: ServiceT) -> None: # pragma: no cover # This is the background thread started by Fetcher, used to @@ -1163,7 +1192,7 @@ async def _drain_messages(self, fetcher: ServiceT) -> None: # pragma: no cover if gap > 1 and r_offset: acks_enabled = acks_enabled_for(message.topic) if acks_enabled: - self._add_gap(tp, r_offset + 1, offset) + await self._add_gap(tp, r_offset + 1, offset) if commit_every is not None: if self._n_acked >= commit_every: self._n_acked = 0 @@ -1334,7 +1363,6 @@ def verify_recovery_event_path(self, now: float, tp: TP) -> None: class ThreadDelegateConsumer(Consumer): - _thread: ConsumerThread #: Main thread method queue. 
diff --git a/faust/transport/drivers/aiokafka.py b/faust/transport/drivers/aiokafka.py index cb1dc62fb..2bfc84078 100644 --- a/faust/transport/drivers/aiokafka.py +++ b/faust/transport/drivers/aiokafka.py @@ -25,25 +25,23 @@ import aiokafka import aiokafka.abc import opentracing +from aiokafka import TopicPartition from aiokafka.consumer.group_coordinator import OffsetCommitRequest +from aiokafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor from aiokafka.errors import ( CommitFailedError, ConsumerStoppedError, IllegalStateError, KafkaError, - ProducerFenced, -) -from aiokafka.structs import OffsetAndMetadata, TopicPartition as _TopicPartition -from aiokafka.util import parse_kafka_version -from kafka import TopicPartition -from kafka.errors import ( NotControllerError, + ProducerFenced, TopicAlreadyExistsError as TopicExistsError, for_code, ) -from kafka.partitioner import murmur2 -from kafka.partitioner.default import DefaultPartitioner -from kafka.protocol.metadata import MetadataRequest_v1 +from aiokafka.partitioner import DefaultPartitioner, murmur2 +from aiokafka.protocol.metadata import MetadataRequest_v1 +from aiokafka.structs import OffsetAndMetadata, TopicPartition as _TopicPartition +from aiokafka.util import parse_kafka_version from mode import Service, get_logger from mode.threads import ServiceThread, WorkerThread from mode.utils import text @@ -54,7 +52,12 @@ from opentracing.ext import tags from yarl import URL -from faust.auth import GSSAPICredentials, SASLCredentials, SSLCredentials +from faust.auth import ( + GSSAPICredentials, + OAuthCredentials, + SASLCredentials, + SSLCredentials, +) from faust.exceptions import ( ConsumerNotStarted, ImproperlyConfigured, @@ -234,17 +237,20 @@ async def create_topic( ensure_created: bool = False, ) -> None: """Create/declare topic on server.""" - await self._thread.create_topic( - topic, - partitions, - replication, - config=config, - timeout=timeout, - retention=retention, - 
compacting=compacting, - deleting=deleting, - ensure_created=ensure_created, - ) + if self.app.conf.topic_allow_declare: + await self._thread.create_topic( + topic, + partitions, + replication, + config=config, + timeout=timeout, + retention=retention, + compacting=compacting, + deleting=deleting, + ensure_created=ensure_created, + ) + else: + logger.warning(f"Topic creation disabled! Can't create topic {topic}") def _new_topicpartition(self, topic: str, partition: int) -> TP: return cast(TP, _TopicPartition(topic, partition)) @@ -301,7 +307,6 @@ def __init__( ) -> None: super().__init__( executor=executor, - loop=loop, thread_loop=thread_loop, Worker=Worker, **kwargs, @@ -484,18 +489,22 @@ def _create_consumer( ) -> aiokafka.AIOKafkaConsumer: transport = cast(Transport, self.transport) if self.app.client_only: - return self._create_client_consumer(transport, loop=loop) + return self._create_client_consumer(transport) else: - return self._create_worker_consumer(transport, loop=loop) + return self._create_worker_consumer(transport) def _create_worker_consumer( - self, transport: "Transport", loop: asyncio.AbstractEventLoop + self, transport: "Transport" ) -> aiokafka.AIOKafkaConsumer: isolation_level: str = "read_uncommitted" conf = self.app.conf if self.consumer.in_transaction: isolation_level = "read_committed" - self._assignor = self.app.assignor + self._assignor = ( + self.app.assignor + if self.app.conf.table_standby_replicas > 0 + else RoundRobinPartitionAssignor + ) auth_settings = credentials_to_aiokafka_auth( conf.broker_credentials, conf.ssl_context ) @@ -513,7 +522,6 @@ def _create_worker_consumer( ) return aiokafka.AIOKafkaConsumer( - loop=loop, api_version=conf.consumer_api_version, client_id=conf.broker_client_id, group_id=conf.id, @@ -543,7 +551,7 @@ def _create_worker_consumer( ) def _create_client_consumer( - self, transport: "Transport", loop: asyncio.AbstractEventLoop + self, transport: "Transport" ) -> aiokafka.AIOKafkaConsumer: conf = 
self.app.conf auth_settings = credentials_to_aiokafka_auth( @@ -551,7 +559,6 @@ def _create_client_consumer( ) max_poll_interval = conf.broker_max_poll_interval or 0 return aiokafka.AIOKafkaConsumer( - loop=loop, client_id=conf.broker_client_id, bootstrap_servers=server_list(transport.url, transport.default_port), request_timeout_ms=int(conf.broker_request_timeout * 1000.0), @@ -701,13 +708,15 @@ async def commit(self, offsets: Mapping[TP, int]) -> bool: async def _commit(self, offsets: Mapping[TP, int]) -> bool: consumer = self._ensure_consumer() now = monotonic() + commitable_offsets = { + tp: offset for tp, offset in offsets.items() if tp in self.assignment() + } try: aiokafka_offsets = { - tp: OffsetAndMetadata(offset, "") - for tp, offset in offsets.items() - if tp in self.assignment() + ensure_aiokafka_TP(tp): OffsetAndMetadata(offset, "") + for tp, offset in commitable_offsets.items() } - self.tp_last_committed_at.update({tp: now for tp in aiokafka_offsets}) + self.tp_last_committed_at.update({tp: now for tp in commitable_offsets}) await consumer.commit(aiokafka_offsets) except CommitFailedError as exc: if "already rebalanced" in str(exc): @@ -824,7 +833,7 @@ def _verify_aiokafka_event_path(self, now: float, tp: TP) -> bool: secs_since_started = now - self.time_started aiotp = TopicPartition(tp.topic, tp.partition) assignment = consumer._fetcher._subscriptions.subscription.assignment - if not assignment and not assignment.active: + if not assignment or not assignment.active: self.log.error(f"No active partitions for {tp}") return True poll_at = None @@ -1026,27 +1035,30 @@ async def create_topic( ensure_created: bool = False, ) -> None: """Create/declare topic on server.""" - transport = cast(Transport, self.consumer.transport) - _consumer = self._ensure_consumer() - _retention = int(want_seconds(retention) * 1000.0) if retention else None - if len(topic) > TOPIC_LENGTH_MAX: - raise ValueError( - f"Topic name {topic!r} is too long (max={TOPIC_LENGTH_MAX})" + 
if self.app.conf.topic_allow_declare: + transport = cast(Transport, self.consumer.transport) + _consumer = self._ensure_consumer() + _retention = int(want_seconds(retention) * 1000.0) if retention else None + if len(topic) > TOPIC_LENGTH_MAX: + raise ValueError( + f"Topic name {topic!r} is too long (max={TOPIC_LENGTH_MAX})" + ) + await self.call_thread( + transport._create_topic, + self, + _consumer._client, + topic, + partitions, + replication, + config=config, + timeout=int(want_seconds(timeout) * 1000.0), + retention=_retention, + compacting=compacting, + deleting=deleting, + ensure_created=ensure_created, ) - await self.call_thread( - transport._create_topic, - self, - _consumer._client, - topic, - partitions, - replication, - config=config, - timeout=int(want_seconds(timeout) * 1000.0), - retention=_retention, - compacting=compacting, - deleting=deleting, - ensure_created=ensure_created, - ) + else: + logger.warning(f"Topic creation disabled! Can't create topic {topic}") def key_partition( self, topic: str, key: Optional[bytes], partition: Optional[int] = None @@ -1138,7 +1150,6 @@ async def commit_transaction(self, transactional_id: str) -> None: """Commit transaction by id.""" try: async with self._trn_locks[transactional_id]: - transaction_producer = self._transaction_producers.get(transactional_id) if transaction_producer: await transaction_producer.commit_transaction() @@ -1162,7 +1173,6 @@ async def abort_transaction(self, transactional_id: str) -> None: """Abort and rollback transaction by id.""" try: async with self._trn_locks[transactional_id]: - transaction_producer = self._transaction_producers.get(transactional_id) if transaction_producer: await transaction_producer.abort_transaction() @@ -1202,7 +1212,6 @@ async def commit_transactions( for transactional_id, offsets in tid_to_offset_map.items(): # get the producer async with self._trn_locks[transactional_id]: - transaction_producer = self._transaction_producers.get(transactional_id) if 
transaction_producer: logger.debug( @@ -1259,22 +1268,25 @@ async def create_topic( ensure_created: bool = False, ) -> None: """Create/declare topic on server.""" - _retention = int(want_seconds(retention) * 1000.0) if retention else None - producer = self._ensure_producer() - await cast(Transport, self.transport)._create_topic( - self, - producer.client, - topic, - partitions, - replication, - config=config, - timeout=int(want_seconds(timeout) * 1000.0), - retention=_retention, - compacting=compacting, - deleting=deleting, - ensure_created=ensure_created, - ) - await producer.client.force_metadata_update() # Fixes #499 + if self.app.conf.topic_allow_declare: + _retention = int(want_seconds(retention) * 1000.0) if retention else None + producer = self._ensure_producer() + await cast(Transport, self.transport)._create_topic( + self, + producer.client, + topic, + partitions, + replication, + config=config, + timeout=int(want_seconds(timeout) * 1000.0), + retention=_retention, + compacting=compacting, + deleting=deleting, + ensure_created=ensure_created, + ) + await producer.client.force_metadata_update() # Fixes #499 + else: + logger.warning(f"Topic creation disabled! 
Can't create topic {topic}") def _ensure_producer(self) -> aiokafka.AIOKafkaProducer: if self._producer is None: @@ -1335,7 +1347,6 @@ async def send( try: if transactional_id: async with self._trn_locks[transactional_id]: - return cast( Awaitable[RecordMetadata], await transaction_producer.send( @@ -1471,7 +1482,7 @@ async def _create_topic( topic, partitions, replication, - loop=asyncio.get_event_loop(), + loop=self.loop, **kwargs, ) try: @@ -1577,6 +1588,13 @@ def credentials_to_aiokafka_auth( "security_protocol": credentials.protocol.value, "ssl_context": credentials.context, } + elif isinstance(credentials, OAuthCredentials): + return { + "security_protocol": credentials.protocol.value, + "sasl_mechanism": credentials.mechanism.value, + "sasl_oauth_token_provider": credentials.oauth_cb, + "ssl_context": credentials.ssl_context, + } elif isinstance(credentials, SASLCredentials): return { "security_protocol": credentials.protocol.value, @@ -1602,3 +1620,17 @@ def credentials_to_aiokafka_auth( } else: return {"security_protocol": "PLAINTEXT"} + + +def ensure_aiokafka_TP(tp: TP) -> _TopicPartition: + """Convert Faust ``TP`` to aiokafka ``TopicPartition``.""" + return ( + tp + if isinstance(tp, _TopicPartition) + else _TopicPartition(tp.topic, tp.partition) + ) + + +def ensure_aiokafka_TPset(tps: Iterable[TP]) -> Set[_TopicPartition]: + """Convert set of Faust ``TP`` to aiokafka ``TopicPartition``.""" + return {ensure_aiokafka_TP(tp) for tp in tps} diff --git a/faust/transport/producer.py b/faust/transport/producer.py index 044b637c6..e36803a52 100644 --- a/faust/transport/producer.py +++ b/faust/transport/producer.py @@ -91,7 +91,7 @@ async def wait_until_ebb(self) -> None: start_time = time.time() await self.flush_atmost(self.max_messages) end_time = time.time() - logger.info(f"producer flush took {end_time-start_time}") + logger.info(f"producer flush took {end_time - start_time}") @Service.task async def _handle_pending(self) -> None: @@ -144,7 +144,7 @@ def 
__init__( self.partitioner = conf.producer_partitioner api_version = self._api_version = conf.producer_api_version assert api_version is not None - super().__init__(loop=loop or self.transport.loop, **kwargs) + super().__init__(loop=loop, **kwargs) self.buffer = ProducerBuffer(loop=self.loop, beacon=self.beacon) if conf.producer_threaded: self.threaded_producer = self.create_threaded_producer() diff --git a/faust/types/agents.py b/faust/types/agents.py index 0e59c4a5d..4c533c9e7 100644 --- a/faust/types/agents.py +++ b/faust/types/agents.py @@ -72,7 +72,6 @@ class _AppT: class ActorT(ServiceT, Generic[_T]): - agent: "AgentT" stream: StreamT it: _T @@ -89,7 +88,7 @@ def __init__( stream: StreamT, it: _T, active_partitions: Optional[Set[TP]] = None, - **kwargs: Any + **kwargs: Any, ) -> None: ... @@ -122,7 +121,6 @@ class AwaitableActorT(ActorT[Awaitable], Awaitable): class AgentT(ServiceT, Generic[_T]): - name: str app: _AppT concurrency: int @@ -147,7 +145,7 @@ def __init__( key_type: ModelArg = None, value_type: ModelArg = None, isolated_partitions: bool = False, - **kwargs: Any + **kwargs: Any, ) -> None: self.fun: AgentFun = fun @@ -162,7 +160,7 @@ def __call__( index: Optional[int] = None, active_partitions: Optional[Set[TP]] = None, stream: Optional[StreamT] = None, - channel: Optional[ChannelT] = None + channel: Optional[ChannelT] = None, ) -> ActorRefT: ... @@ -171,7 +169,7 @@ def test_context( self, channel: Optional[ChannelT] = None, supervisor_strategy: SupervisorStrategyT = None, - **kwargs: Any + **kwargs: Any, ) -> "AgentTestWrapperT": ... @@ -199,7 +197,7 @@ async def cast( key: K = None, partition: Optional[int] = None, timestamp: Optional[float] = None, - headers: HeadersArg = None + headers: HeadersArg = None, ) -> None: ... @@ -213,7 +211,7 @@ async def ask( timestamp: Optional[float] = None, headers: HeadersArg = None, reply_to: ReplyToArg = None, - correlation_id: Optional[str] = None + correlation_id: Optional[str] = None, ) -> Any: ... 
@@ -229,7 +227,7 @@ async def send( key_serializer: CodecArg = None, value_serializer: CodecArg = None, reply_to: ReplyToArg = None, - correlation_id: Optional[str] = None + correlation_id: Optional[str] = None, ) -> Awaitable[RecordMetadata]: ... @@ -325,7 +323,6 @@ def human_tracebacks(self) -> str: class AgentTestWrapperT(AgentT, AsyncIterable): - new_value_processed: asyncio.Condition original_channel: ChannelT results: MutableMapping[int, Any] @@ -351,7 +348,7 @@ async def put( *, reply_to: ReplyToArg = None, correlation_id: Optional[str] = None, - wait: bool = True + wait: bool = True, ) -> EventT: ... @@ -365,7 +362,7 @@ def to_message( offset: int = 0, timestamp: Optional[float] = None, timestamp_type: int = 0, - headers: HeadersArg = None + headers: HeadersArg = None, ) -> Message: ... diff --git a/faust/types/assignor.py b/faust/types/assignor.py index c5fe66d93..d00ffd412 100644 --- a/faust/types/assignor.py +++ b/faust/types/assignor.py @@ -27,7 +27,6 @@ class _AppT: class PartitionAssignorT(abc.ABC): - replicas: int app: _AppT @@ -69,7 +68,6 @@ def tables_metadata(self) -> HostToPartitionMap: class LeaderAssignorT(ServiceT): - app: _AppT @abc.abstractmethod diff --git a/faust/types/auth.py b/faust/types/auth.py index 4a78f5bdd..30b99a56b 100644 --- a/faust/types/auth.py +++ b/faust/types/auth.py @@ -25,6 +25,7 @@ class SASLMechanism(Enum): GSSAPI = "GSSAPI" SCRAM_SHA_256 = "SCRAM-SHA-256" SCRAM_SHA_512 = "SCRAM-SHA-512" + OAUTHBEARER = "OAUTHBEARER" AUTH_PROTOCOLS_SSL = {AuthProtocol.SSL, AuthProtocol.SASL_SSL} diff --git a/faust/types/channels.py b/faust/types/channels.py index f8468f692..553d4f394 100644 --- a/faust/types/channels.py +++ b/faust/types/channels.py @@ -60,7 +60,7 @@ def __init__( maxsize: Optional[int] = None, root: "ChannelT" = None, active_partitions: Optional[Set[TP]] = None, - loop: Optional[asyncio.AbstractEventLoop] = None + loop: Optional[asyncio.AbstractEventLoop] = None, ) -> None: ... 
@@ -95,7 +95,7 @@ async def send( key_serializer: CodecArg = None, value_serializer: CodecArg = None, callback: Optional[MessageSentCallback] = None, - force: bool = False + force: bool = False, ) -> Awaitable[RecordMetadata]: ... @@ -113,7 +113,7 @@ def send_soon( value_serializer: CodecArg = None, callback: Optional[MessageSentCallback] = None, force: bool = False, - eager_partitioning: bool = False + eager_partitioning: bool = False, ) -> FutureMessage: ... diff --git a/faust/types/core.py b/faust/types/core.py index 869075d80..ae3ea2103 100644 --- a/faust/types/core.py +++ b/faust/types/core.py @@ -77,6 +77,6 @@ def merge_headers(target: OpenHeadersArg, source: Mapping[str, Any]) -> None: source = {want_str(k): want_bytes(v) for k, v in source.items()} if isinstance(target, Mapping): target = cast(MutableMapping, target) - target.update({k: v for k, v in source.items()}) + target.update({k: v for k, v in source.items()}) # noqa: C416 elif isinstance(target, list): target.extend((h for h in source.items())) diff --git a/faust/types/events.py b/faust/types/events.py index 6894137dc..96f566ba7 100644 --- a/faust/types/events.py +++ b/faust/types/events.py @@ -28,7 +28,6 @@ class _SchemaT: class EventT(Generic[T], AsyncContextManager): - app: _AppT key: K value: V diff --git a/faust/types/fixups.py b/faust/types/fixups.py index a7855980f..1cbb5e3a2 100644 --- a/faust/types/fixups.py +++ b/faust/types/fixups.py @@ -14,7 +14,6 @@ class _AppT: class FixupT(abc.ABC): - app: _AppT @abc.abstractmethod diff --git a/faust/types/models.py b/faust/types/models.py index 12737eec7..b0a432270 100644 --- a/faust/types/models.py +++ b/faust/types/models.py @@ -43,8 +43,8 @@ # Workaround for https://bugs.python.org/issue29581 try: - @typing.no_type_check # type: ignore - class _InitSubclassCheck(metaclass=abc.ABCMeta): + @typing.no_type_check # type: ignore # noqa: B024 + class _InitSubclassCheck(metaclass=abc.ABCMeta): # noqa: B024 ident: int def __init_subclass__( @@ -54,7 +54,7 
@@ def __init_subclass__( super().__init__(*args, **kwargs) @typing.no_type_check # type: ignore - class _UsingKwargsInNew(_InitSubclassCheck, ident=909): + class _UsingKwargsInNew(_InitSubclassCheck, ident=909): # noqa: B024 ... except TypeError: @@ -68,7 +68,7 @@ class _UsingKwargsInNew(_InitSubclassCheck, ident=909): CoercionMapping = MutableMapping[IsInstanceArgT, CoercionHandler] -class ModelOptions(abc.ABC): +class ModelOptions(abc.ABC): # noqa: B024 serializer: Optional[CodecArg] = None namespace: str include_metadata: bool = True @@ -155,7 +155,7 @@ def loads( s: bytes, *, default_serializer: CodecArg = None, # XXX use serializer - serializer: CodecArg = None + serializer: CodecArg = None, ) -> "ModelT": ... @@ -194,7 +194,6 @@ def validation_errors(self) -> List[ValidationError]: class FieldDescriptorT(Generic[T]): - field: str input_name: str output_name: str @@ -219,7 +218,7 @@ def __init__( parent: "FieldDescriptorT" = None, exclude: Optional[bool] = None, date_parser: Callable[[Any], datetime] = None, - **kwargs: Any + **kwargs: Any, ) -> None: # we have to do this in __init__ or mypy will think # this is a method diff --git a/faust/types/sensors.py b/faust/types/sensors.py index 5132e1621..47a08f675 100644 --- a/faust/types/sensors.py +++ b/faust/types/sensors.py @@ -133,7 +133,7 @@ def on_web_request_end( response: Optional[web.Response], state: Dict, *, - view: web.View = None + view: web.View = None, ) -> None: ... @@ -147,7 +147,6 @@ class SensorT(SensorInterfaceT, ServiceT): class SensorDelegateT(SensorInterfaceT, Iterable): - # Delegate calls to many sensors. 
@abc.abstractmethod diff --git a/faust/types/serializers.py b/faust/types/serializers.py index 4b434f31c..79163045a 100644 --- a/faust/types/serializers.py +++ b/faust/types/serializers.py @@ -28,7 +28,6 @@ class _Message: class RegistryT(abc.ABC): - key_serializer: CodecArg value_serializer: CodecArg @@ -44,7 +43,7 @@ def loads_key( typ: Optional[_ModelArg], key: Optional[bytes], *, - serializer: CodecArg = None + serializer: CodecArg = None, ) -> K: ... @@ -54,7 +53,7 @@ def loads_value( typ: Optional[_ModelArg], value: Optional[bytes], *, - serializer: CodecArg = None + serializer: CodecArg = None, ) -> Any: ... @@ -72,7 +71,6 @@ def dumps_value( class SchemaT(Generic[KT, VT]): - key_type: Optional[_ModelArg] = None value_type: Optional[_ModelArg] = None @@ -88,7 +86,7 @@ def __init__( value_type: _ModelArg = None, key_serializer: CodecArg = None, value_serializer: CodecArg = None, - allow_empty: Optional[bool] = None + allow_empty: Optional[bool] = None, ) -> None: ... @@ -100,7 +98,7 @@ def update( value_type: _ModelArg = None, key_serializer: CodecArg = None, value_serializer: CodecArg = None, - allow_empty: Optional[bool] = None + allow_empty: Optional[bool] = None, ) -> None: ... @@ -111,7 +109,7 @@ def loads_key( message: _Message, *, loads: Optional[Callable] = None, - serializer: CodecArg = None + serializer: CodecArg = None, ) -> KT: ... @@ -122,7 +120,7 @@ def loads_value( message: _Message, *, loads: Optional[Callable] = None, - serializer: CodecArg = None + serializer: CodecArg = None, ) -> VT: ... @@ -133,7 +131,7 @@ def dumps_key( key: K, *, serializer: CodecArg = None, - headers: OpenHeadersArg + headers: OpenHeadersArg, ) -> Tuple[Any, OpenHeadersArg]: ... @@ -144,7 +142,7 @@ def dumps_value( value: V, *, serializer: CodecArg = None, - headers: OpenHeadersArg + headers: OpenHeadersArg, ) -> Tuple[Any, OpenHeadersArg]: ... 
diff --git a/faust/types/settings/base.py b/faust/types/settings/base.py index d07247d8f..b87b67e72 100644 --- a/faust/types/settings/base.py +++ b/faust/types/settings/base.py @@ -175,7 +175,7 @@ def _on_after_init(self) -> None: object.__setattr__(self, "_accessed", set()) object.__setattr__(self, "_initializing", False) - def __post_init__(self) -> None: + def __post_init__(self) -> None: # noqa: B027 ... def __getattribute__(self, key: str) -> Any: diff --git a/faust/types/settings/params.py b/faust/types/settings/params.py index 1b85607f6..7e7e1014d 100644 --- a/faust/types/settings/params.py +++ b/faust/types/settings/params.py @@ -374,7 +374,7 @@ def __get__(self, obj: Any, type: Type = None) -> OT: if self.version_deprecated: # we use UserWarning because DeprecationWarning is silenced # by default in Python. - warnings.warn(UserWarning(self.build_deprecation_warning())) + warnings.warn(UserWarning(self.build_deprecation_warning()), stacklevel=2) return self.on_get(obj) def __set__(self, obj: Any, value: IT) -> None: diff --git a/faust/types/stores.py b/faust/types/stores.py index 238c8cee5..85da70f62 100644 --- a/faust/types/stores.py +++ b/faust/types/stores.py @@ -33,7 +33,6 @@ class _CollectionT: class StoreT(ServiceT, FastUserDict[KT, VT]): - url: URL app: _AppT table: _CollectionT @@ -57,7 +56,7 @@ def __init__( key_serializer: CodecArg = "", value_serializer: CodecArg = "", options: Optional[Mapping[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> None: ... @@ -101,3 +100,18 @@ async def on_recovery_completed( self, active_tps: Set[TP], standby_tps: Set[TP] ) -> None: ... + + @abc.abstractmethod + async def backup_partition( + self, tp: Union[TP, int], flush: bool = True, purge: bool = False, keep: int = 1 + ) -> None: + ... + + @abc.abstractmethod + def restore_backup( + self, + tp: Union[TP, int], + latest: bool = True, + backup_id: int = 0, + ) -> None: + ... 
diff --git a/faust/types/streams.py b/faust/types/streams.py index 48875292d..ebc255f72 100644 --- a/faust/types/streams.py +++ b/faust/types/streams.py @@ -104,7 +104,6 @@ def _human_channel(self) -> str: class StreamT(AsyncIterable[T_co], JoinableT, ServiceT): - app: _AppT channel: AsyncIterator[T_co] outbox: Optional[asyncio.Queue] = None @@ -146,7 +145,7 @@ def __init__( active_partitions: Optional[Set[TP]] = None, enable_acks: bool = True, prefix: str = "", - loop: Optional[asyncio.AbstractEventLoop] = None + loop: Optional[asyncio.AbstractEventLoop] = None, ) -> None: ... @@ -199,7 +198,7 @@ def group_by( key: GroupByKeyArg, *, name: Optional[str] = None, - topic: Optional[TopicT] = None + topic: Optional[TopicT] = None, ) -> "StreamT": ... @@ -212,7 +211,7 @@ def derive_topic( key_type: ModelArg = None, value_type: ModelArg = None, prefix: str = "", - suffix: str = "" + suffix: str = "", ) -> TopicT: ... diff --git a/faust/types/tables.py b/faust/types/tables.py index 6d16e03eb..d5eef08e3 100644 --- a/faust/types/tables.py +++ b/faust/types/tables.py @@ -125,7 +125,7 @@ def __init__( options: Optional[Mapping[str, Any]] = None, use_partitioner: bool = False, on_window_close: Optional[WindowCloseCallback] = None, - **kwargs: Any + **kwargs: Any, ) -> None: ... @@ -464,7 +464,7 @@ def __init__( *, relative_to: RelativeArg = None, key_index: bool = False, - key_index_table: Optional[TableT] = None + key_index_table: Optional[TableT] = None, ) -> None: ... diff --git a/faust/types/topics.py b/faust/types/topics.py index 9d9e390df..9250747eb 100644 --- a/faust/types/topics.py +++ b/faust/types/topics.py @@ -30,7 +30,6 @@ class _SchemaT: class TopicT(ChannelT): - #: Iterable/Sequence of topic names to subscribe to. 
topics: Sequence[str] @@ -89,7 +88,7 @@ def __init__( active_partitions: Optional[Set[TP]] = None, allow_empty: bool = False, has_prefix: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None + loop: Optional[asyncio.AbstractEventLoop] = None, ) -> None: ... @@ -131,6 +130,6 @@ def derive_topic( config: Optional[Mapping[str, Any]] = None, prefix: str = "", suffix: str = "", - **kwargs: Any + **kwargs: Any, ) -> "TopicT": ... diff --git a/faust/types/transports.py b/faust/types/transports.py index 9911b8b3a..db2030bc9 100644 --- a/faust/types/transports.py +++ b/faust/types/transports.py @@ -78,7 +78,6 @@ class _AppT: class ProducerBufferT(ServiceT): - max_messages: int pending: asyncio.Queue @@ -106,7 +105,6 @@ def size(self) -> int: class ProducerT(ServiceT): - #: The transport that created this Producer. transport: "TransportT" @@ -127,7 +125,7 @@ def __init__( self, transport: "TransportT", loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any + **kwargs: Any, ) -> None: ... @@ -141,7 +139,7 @@ async def send( timestamp: Optional[float], headers: Optional[HeadersArg], *, - transactional_id: Optional[str] = None + transactional_id: Optional[str] = None, ) -> Awaitable[RecordMetadata]: ... @@ -159,7 +157,7 @@ async def send_and_wait( timestamp: Optional[float], headers: Optional[HeadersArg], *, - transactional_id: Optional[str] = None + transactional_id: Optional[str] = None, ) -> RecordMetadata: ... @@ -175,7 +173,7 @@ async def create_topic( retention: Optional[Seconds] = None, compacting: Optional[bool] = None, deleting: Optional[bool] = None, - ensure_created: bool = False + ensure_created: bool = False, ) -> None: ... @@ -234,7 +232,7 @@ def __init__( *, consumer: "ConsumerT", producer: "ProducerT", - **kwargs: Any + **kwargs: Any, ) -> None: ... @@ -289,7 +287,6 @@ def iterate(self, records: Mapping[TP, List]) -> Iterator[Tuple[TP, Any]]: class ConsumerT(ServiceT): - #: The transport that created this Consumer. 
transport: "TransportT" @@ -318,7 +315,7 @@ def __init__( *, commit_interval: Optional[float] = None, loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any + **kwargs: Any, ) -> None: self._on_partitions_revoked: PartitionsRevokedCallback self._on_partitions_assigned: PartitionsAssignedCallback @@ -335,7 +332,7 @@ async def create_topic( retention: Optional[Seconds] = None, compacting: Optional[bool] = None, deleting: Optional[bool] = None, - ensure_created: bool = False + ensure_created: bool = False, ) -> None: ... @@ -451,7 +448,6 @@ def on_buffer_drop(self, tp: TP) -> None: class ConductorT(ServiceT, MutableSet[TopicT]): - # The topic conductor delegates messages from the Consumer # to the various Topic instances subscribed to a topic. @@ -492,7 +488,6 @@ def acking_topics(self) -> Set[str]: class TransportT(abc.ABC): - #: The Consumer class used for this type of transport. Consumer: ClassVar[Type[ConsumerT]] diff --git a/faust/types/tuples.py b/faust/types/tuples.py index bbd46c481..73142370f 100644 --- a/faust/types/tuples.py +++ b/faust/types/tuples.py @@ -113,7 +113,6 @@ def _get_len(s: Optional[bytes]) -> int: class Message: - __slots__ = ( "topic", "partition", diff --git a/faust/types/web.py b/faust/types/web.py index f42e7bd2e..6027a4aa1 100644 --- a/faust/types/web.py +++ b/faust/types/web.py @@ -96,7 +96,6 @@ class ResourceOptions(NamedTuple): class CacheBackendT(ServiceT): - Unavailable: Type[BaseException] @abc.abstractmethod @@ -121,7 +120,6 @@ async def delete(self, key: str) -> None: class CacheT(abc.ABC): - timeout: Optional[Seconds] include_headers: bool key_prefix: str @@ -133,7 +131,7 @@ def __init__( timeout: Optional[Seconds] = None, key_prefix: Optional[str] = None, backend: Union[Type[CacheBackendT], str] = None, - **kwargs: Any + **kwargs: Any, ) -> None: ... 
@@ -143,7 +141,7 @@ def view( timeout: Optional[Seconds] = None, include_headers: bool = False, key_prefix: Optional[str] = None, - **kwargs: Any + **kwargs: Any, ) -> Callable[[Callable], Callable]: ... @@ -174,7 +172,7 @@ def static( uri: str, file_or_directory: Union[str, Path], *, - name: Optional[str] = None + name: Optional[str] = None, ) -> None: ... diff --git a/faust/utils/agent_stopper.py b/faust/utils/agent_stopper.py new file mode 100644 index 000000000..820ae4d20 --- /dev/null +++ b/faust/utils/agent_stopper.py @@ -0,0 +1,17 @@ +"""agent_stopper for Faust when app fails""" +import logging +import traceback + +log = logging.getLogger(__name__) + + +async def agent_stopper(app) -> None: + """ + Raise exception and crash app + """ + log.error("%s", traceback.format_exc()) + log.warning("Closing application") + + # force the exit code of the application not to be 0 + # and prevent offsets from progressing + app._crash(RuntimeError) diff --git a/faust/utils/codegen.py b/faust/utils/codegen.py index 9cf834c07..1999eb777 100644 --- a/faust/utils/codegen.py +++ b/faust/utils/codegen.py @@ -96,7 +96,7 @@ def build_closure( assert locals is not None if return_type is not MISSING: locals["_return_type"] = return_type - exec(source, globals, locals) + exec(source, globals, locals) # nosec: B102 obj = locals[outer_name](*args) obj.__sourcecode__ = source return cast(Callable, obj) @@ -114,7 +114,7 @@ def build_function( assert locals is not None if return_type is not MISSING: locals["_return_type"] = return_type - exec(source, globals, locals) + exec(source, globals, locals) # nosec: B102 obj = locals[name] obj.__sourcecode__ = source return cast(Callable, obj) diff --git a/faust/utils/json.py b/faust/utils/json.py index 10b5756d0..66eb5a0ca 100644 --- a/faust/utils/json.py +++ b/faust/utils/json.py @@ -28,7 +28,10 @@ if typing.TYPE_CHECKING: import orjson else: # pragma: no cover - orjson = None # noqa + try: + import orjson + except ImportError: + orjson = None # 
noqa DEFAULT_TEXTUAL_TYPES: List[Type] = [Decimal, uuid.UUID, bytes] @@ -172,7 +175,7 @@ def dumps( return json_dumps( obj, default=on_default, - option=orjson.OPT_NAIVE_UTC, + option=orjson.OPT_NON_STR_KEYS | orjson.OPT_UTC_Z, ) def loads(s: str, json_loads: Callable = orjson.loads, **kwargs: Any) -> Any: @@ -188,7 +191,12 @@ def dumps( **kwargs: Any, ) -> str: """Serialize to json. See :func:`json.dumps`.""" - return json_dumps(obj, cls=cls, **dict(_JSON_DEFAULT_KWARGS, **kwargs)) + return json_dumps( + obj, + cls=cls, + **dict(_JSON_DEFAULT_KWARGS, **kwargs), + separators=(",", ":"), + ) def loads(s: str, json_loads: Callable = json.loads, **kwargs: Any) -> Any: """Deserialize json string. See :func:`json.loads`.""" diff --git a/faust/utils/kafka/protocol/admin.py b/faust/utils/kafka/protocol/admin.py index b7d7a778f..3e418e960 100644 --- a/faust/utils/kafka/protocol/admin.py +++ b/faust/utils/kafka/protocol/admin.py @@ -1,5 +1,5 @@ """Admin related Kafka protocol extensions.""" -from kafka.protocol import types +from aiokafka.protocol import types from .api import Request, Response diff --git a/faust/utils/kafka/protocol/api.py b/faust/utils/kafka/protocol/api.py index 8e33bed57..523d9ef0f 100644 --- a/faust/utils/kafka/protocol/api.py +++ b/faust/utils/kafka/protocol/api.py @@ -3,8 +3,8 @@ import abc from typing import Type -from kafka.protocol.struct import Struct -from kafka.protocol.types import Schema +from aiokafka.protocol.struct import Struct +from aiokafka.protocol.types import Schema class Response(Struct, metaclass=abc.ABCMeta): # type: ignore diff --git a/faust/utils/platforms.py b/faust/utils/platforms.py index 6cdfcc8ac..a1d0ec51e 100644 --- a/faust/utils/platforms.py +++ b/faust/utils/platforms.py @@ -1,6 +1,6 @@ """Platform/OS utilities.""" import platform -import subprocess +import subprocess # nosec: B404 from typing import Optional @@ -16,7 +16,7 @@ def max_open_files() -> Optional[int]: # macOS bash always returns infinity, even though there 
# is an actual system limit. if platform.system() == "Darwin": - output = subprocess.check_output( + output = subprocess.check_output( # nosec [ "sysctl", "-q", diff --git a/faust/utils/terminal/spinners.py b/faust/utils/terminal/spinners.py index 5975bb8c9..9a49c73b4 100644 --- a/faust/utils/terminal/spinners.py +++ b/faust/utils/terminal/spinners.py @@ -59,7 +59,7 @@ SPINNER_MOON, ] -ACTIVE_SPINNER: Sequence[str] = random.choice(SPINNERS) +ACTIVE_SPINNER: Sequence[str] = random.choice(SPINNERS) # nosec B311 class Spinner: diff --git a/faust/utils/terminal/tables.py b/faust/utils/terminal/tables.py index 7acf7ef79..e37b98f3a 100644 --- a/faust/utils/terminal/tables.py +++ b/faust/utils/terminal/tables.py @@ -30,7 +30,7 @@ def table( title: str, target: IO = None, tty: Optional[bool] = None, - **kwargs: Any + **kwargs: Any, ) -> Table: """Create suitable :pypi:`terminaltables` table for target. @@ -59,7 +59,7 @@ def logtable( target: IO = None, tty: Optional[bool] = None, headers: Optional[Sequence[str]] = None, - **kwargs: Any + **kwargs: Any, ) -> str: """Prepare table for logging. @@ -87,7 +87,7 @@ def dict_as_ansitable( sort: bool = False, sortkey: Callable[[Any], Any] = DEFAULT_SORT_KEY, target: IO = sys.stdout, - title: Optional[str] = None + title: Optional[str] = None, ) -> str: header = [text.title(key), text.title(value)] data = cast(Iterable[List[str]], d.items()) diff --git a/faust/utils/urls.py b/faust/utils/urls.py index 8d565411e..cad37c863 100644 --- a/faust/utils/urls.py +++ b/faust/utils/urls.py @@ -19,8 +19,8 @@ def urllist(arg: URIListArg, *, default_scheme: Optional[str] = None) -> List[UR # handle scalar URL argument. arg = [arg] elif isinstance(arg, str): - # Handle scalar str, including semi-colon separated lists of URLs. - urls = arg.split(";") + # Handle scalar str, including comma and semi-colon separated lists of URLs. 
+ urls = arg.replace(",", ";").split(";") # When some of the URLs do not have a scheme, we use # the first scheme we find as the default scheme. diff --git a/faust/utils/venusian.py b/faust/utils/venusian.py index 5e7aebddb..e8f7e25b2 100644 --- a/faust/utils/venusian.py +++ b/faust/utils/venusian.py @@ -16,7 +16,7 @@ def attach( category: str, *, callback: Callable[[Scanner, str, Any], None] = None, - **kwargs: Any + **kwargs: Any, ) -> None: """Shortcut for :func:`venusian.attach`. diff --git a/faust/web/base.py b/faust/web/base.py index 79b21453d..809b1a065 100644 --- a/faust/web/base.py +++ b/faust/web/base.py @@ -186,7 +186,7 @@ def __init__(self, app: AppT, **kwargs: Any) -> None: else: blueprints.extend(self.production_blueprints) self.blueprints = BlueprintManager(blueprints) - Service.__init__(self, **kwargs) + Service.__init__(self, loop=app.loop, **kwargs) @abc.abstractmethod def text( diff --git a/faust/web/cache/backends/base.py b/faust/web/cache/backends/base.py index 27a48042b..5cd18195d 100644 --- a/faust/web/cache/backends/base.py +++ b/faust/web/cache/backends/base.py @@ -80,8 +80,10 @@ async def _recovery_context(self, key: str) -> AsyncGenerator: self.log.warning(E_CACHE_INVALIDATING, key, exc, exc_info=1) # noqa: G200 try: await self._delete(key) - except self.operational_errors + self.invalidating_errors as exc: - self.log.exception(E_CANNOT_INVALIDATE, key, exc) # noqa: G200 + except ( # noqa: B030 + self.operational_errors + self.invalidating_errors + ) as exc: + self.log.exception(E_CANNOT_INVALIDATE, key, exc) raise self.Unavailable() except self.operational_errors as exc: self.log.warning(E_CACHE_INOPERATIONAL, exc, exc_info=1) # noqa: G200 diff --git a/faust/web/cache/cache.py b/faust/web/cache/cache.py index f40278739..00aaacfdf 100644 --- a/faust/web/cache/cache.py +++ b/faust/web/cache/cache.py @@ -141,10 +141,12 @@ def build_key( self, request: Request, method: str, prefix: str, headers: Mapping[str, str] ) -> str: """Build cache key 
from web request and environment.""" - context = hashlib.md5( - b"".join(want_bytes(k) + want_bytes(v) for k, v in headers.items()) + context = hashlib.md5( # nosec + b"".join(want_bytes(k) + want_bytes(v) for k, v in headers.items()), + ).hexdigest() + url = hashlib.md5( # nosec + iri_to_uri(str(request.url)).encode("ascii"), ).hexdigest() - url = hashlib.md5(iri_to_uri(str(request.url)).encode("ascii")).hexdigest() return f"{self.ident}.{prefix}.{method}.{url}.{context}" diff --git a/faust/web/exceptions.py b/faust/web/exceptions.py index db78319f6..e103d3695 100644 --- a/faust/web/exceptions.py +++ b/faust/web/exceptions.py @@ -38,7 +38,7 @@ def __init__( detail: Optional[str] = None, *, code: Optional[int] = None, - **extra_context: Any + **extra_context: Any, ) -> None: if detail: self.detail = detail diff --git a/faust/web/views.py b/faust/web/views.py index 7430f900e..cad55413d 100644 --- a/faust/web/views.py +++ b/faust/web/views.py @@ -299,9 +299,11 @@ def notfound(self, reason: str = "Not Found", **kwargs: Any) -> Response: """ return self.error(404, reason, **kwargs) - def error(self, status: int, reason: str, **kwargs: Any) -> Response: + def error( + self, status: int, reason: str, headers: MutableMapping = None, **kwargs: Any + ) -> Response: """Create error JSON response.""" - return self.json({"error": reason, **kwargs}, status=status) + return self.json({"error": reason, **kwargs}, status=status, headers=headers) def takes_model(Model: Type[ModelT]) -> ViewDecorator: diff --git a/faust/worker.py b/faust/worker.py index d5366e001..1cdf65444 100644 --- a/faust/worker.py +++ b/faust/worker.py @@ -164,7 +164,7 @@ async def start_worker(worker: Worker) -> None: await worker.start() def manage_loop(): - loop = asyncio.get_event_loop() + loop = asyncio.get_event_loop_policy().get_event_loop() worker = Worker(app, loop=loop) try: loop.run_until_complete(start_worker(worker) @@ -331,8 +331,8 @@ async def on_first_start(self) -> None: worker is restarted by 
an exception being raised. """ self.change_workdir(self.workdir) - self.autodiscover() await self.default_on_first_start() + self.autodiscover() def change_workdir(self, path: Path) -> None: """Change the current working directory (CWD).""" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..3748de325 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,123 @@ +[project] +name = "faust-streaming" +description = "Python Stream Processing. A Faust fork" +requires-python = ">=3.8" +dynamic = [ + "version", + "optional-dependencies", + "dependencies", + +] +readme = "README.md" +license = { file = "LICENSE" } +keywords = [ + "stream", + "processing", + "asyncio", + "distributed", + "queue", + "kafka", +] +classifiers = [ + "Framework :: AsyncIO", + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Natural Language :: English", + "License :: OSI Approved :: BSD License", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: Implementation :: CPython", + "Operating System :: POSIX", + "Operating System :: POSIX :: Linux", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: BSD", + "Operating System :: Microsoft :: Windows", + "Topic :: System :: Networking", + "Topic :: System :: Distributed Computing", +] + +authors = [ + { name = "Ask Solem Hoel", email= "ask@robinhood.com" }, + { name = "Vineet Goel", email= "vineet@robinhood.com" }, +] + +maintainers = [ + { name = "Vikram Patki", email = "vpatki@wayfair.com" }, + { name = "William Barnhart", email = "williambbarnhart@gmail.com" }, +] + +[project.scripts] +faust = "faust.cli.faust:cli" + +[project.urls] +Documentation = 
"https://faust-streaming.github.io/faust/" +Source = "https://github.com/faust-streaming/faust" +Changes = "https://github.com/faust-streaming/faust/releases" + +[build-system] +requires = [ + "setuptools>=30.3.0", + "setuptools_scm[toml]", + "wheel", + "cython>=0.29; implementation_name == 'cpython'", + "cython>=3.0.0; implementation_name == 'cpython' and python_version >= '3.12'", +] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +# enables setuptools_scm to provide the dynamic version + +[tool.flake8] +ignore = [ + "W503", + "E203", + "E231", + "E225", + "B305", +] +max-line-length = 88 + +[tool.isort] +profile = "black" +combine_as_imports = true + +[tool.mypy] +ignore_missing_imports = true +disallow_untyped_defs = true + +[tool.pytest.ini_options] +asyncio_mode = "auto" +minversion = "6.0" +addopts = [ + "--cov-report=term-missing:skip-covered", + "--cov=faust", + "-rxXs", + "--strict-config", + "--strict-markers", +] +filterwarnings = [ + "error::ResourceWarning", +] +xfail_strict = true + +testpaths = [ + "tests", + "integration", +] + + +[tool.cibuildwheel] +build = "cp3*" + +archs = ["auto64"] +skip = ["*musllinux*"] + +before-build = "pip install Cython" + +manylinux-x86_64-image = "manylinux2014" +manylinux-pypy_x86_64-image = "manylinux2014" diff --git a/requirements/ci.txt b/requirements/ci.txt index ba0ee8309..031a30cc7 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -1,3 +1,2 @@ -r typecheck.txt pytest-cov -codecov diff --git a/requirements/dist.txt b/requirements/dist.txt index f41b125b9..0e086ab03 100644 --- a/requirements/dist.txt +++ b/requirements/dist.txt @@ -1,14 +1,14 @@ --r flakes.txt asyncio-ipython-magic -bumpversion>=0.5.1 packaging pre-commit pydocstyle pytest-sugar -setuptools>=30.3.0 +setuptools>=30.3.0,<69.0.0 sphinx-autobuild sphinx2rst>=1.0 tox>=2.3.1 twine vulture wheel>=0.29.0 +intervaltree +importlib-metadata; python_version<'3.8' diff --git a/requirements/docs.txt b/requirements/docs.txt index 
7ddbce9f7..0edd8a038 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,2 +1,7 @@ --r docs-plugins.txt -sphinx>=2.1,<3.0 +-r requirements.txt +sphinx<6.0.0 +sphinx-celery +sphinx-autodoc-typehints +m2r2 +sphinx_rtd_theme +jinja2<3.2.0 diff --git a/requirements/extras/aerospike.txt b/requirements/extras/aerospike.txt index 9c00e3be1..15df34911 100644 --- a/requirements/extras/aerospike.txt +++ b/requirements/extras/aerospike.txt @@ -1 +1 @@ -aerospike>=4.0.0 +aerospike>=8.0.0 diff --git a/requirements/extras/cchardet.txt b/requirements/extras/cchardet.txt index 3375bdf25..f3ad14508 100644 --- a/requirements/extras/cchardet.txt +++ b/requirements/extras/cchardet.txt @@ -1 +1 @@ -cchardet>=2.1 +faust-cchardet diff --git a/requirements/extras/eventlet.txt b/requirements/extras/eventlet.txt index 9cfd3374b..86b6a1d43 100644 --- a/requirements/extras/eventlet.txt +++ b/requirements/extras/eventlet.txt @@ -1,2 +1,2 @@ -aioeventlet~=0.5.1 +faust-aioeventlet dnspython diff --git a/requirements/extras/rocksdict.txt b/requirements/extras/rocksdict.txt new file mode 100644 index 000000000..48647c157 --- /dev/null +++ b/requirements/extras/rocksdict.txt @@ -0,0 +1 @@ +rocksdict>=0.3.11,<4.0 diff --git a/requirements/extras/statsd.txt b/requirements/extras/statsd.txt index f3cba3fa2..0089fe1e3 100644 --- a/requirements/extras/statsd.txt +++ b/requirements/extras/statsd.txt @@ -1 +1 @@ -statsd~=3.3.0 +statsd~=4.0.1 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index e2e21d41e..cdd50fedd 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -1,12 +1,13 @@ -aiohttp>=3.5.2,<4.0 +aiohttp>=3.8.0,<4.0 aiohttp_cors>=0.7,<2.0 -click>=6.7,<8.1 -mode-streaming==0.2.1 -aiokafka @ git+https://github.com/smaxtec/aiokafka-1@master#egg=aiokafka -#aiokafka>=0.7.1,<0.8.0 +aiokafka>=0.9.0 +click>=6.7,<8.2 +mode-streaming>=0.3.0 opentracing>=1.3.0,<=2.4.0 terminaltables>=3.1,<4.0 yarl>=1.0,<2.0 croniter>=0.3.16 
mypy_extensions -venusian==3.0.0 +venusian==3.1.0 +intervaltree +six diff --git a/requirements/test.txt b/requirements/test.txt index 85d8c1412..9ada8e540 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,30 +1,30 @@ black isort autoflake -flake8 +flake8==5.0.4 # 5.0.4 can be upgraded to 6.0.0+ when we EOL Python 3.7 flake8-bugbear flake8-comprehensions hypothesis>=3.31 freezegun>=0.3.11 +mock; python_version<'3.8' # backport that contains AsyncMock class, includes mock library as dependency +pre-commit pytest-aiofiles>=0.2.0 pytest-aiohttp>=0.3.0 pytest-asyncio pytest-forked -pytest-openfiles>=0.2.0 pytest-picked pytest-cov pytest-random-order>=0.5.4 pytest>=5.4.0 python-dateutil>=2.8 pytz>=2018.7 -codecov -bandit==1.6.2 +bandit twine wheel +intervaltree -r requirements.txt -r extras/datadog.txt -r extras/redis.txt -r extras/statsd.txt -r extras/yaml.txt -r extras/prometheus.txt --r extras/orjson.txt diff --git a/scripts/coverage b/scripts/coverage deleted file mode 100755 index ba5d4dfd4..000000000 --- a/scripts/coverage +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/sh -e - -export PREFIX="" -if [ -d 'venv' ] ; then - export PREFIX="venv/bin/" -fi - -set -x - -${PREFIX}coverage report --show-missing --skip-covered --fail-under=60 -codecov --token=$CODECOV_TOKEN diff --git a/scripts/publish b/scripts/publish deleted file mode 100755 index 5e07c69b4..000000000 --- a/scripts/publish +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh -e - -VERSION_FILE="faust/__init__.py" - -if [ -d 'venv' ] ; then - PREFIX="venv/bin/" -else - PREFIX="" -fi - -VERSION=`grep __version__ ${VERSION_FILE} | grep -o '[0-9][^"]*'` - -set -x - -${PREFIX}twine upload dist/* -# ${PREFIX}mkdocs gh-deploy --force - -git tag -a v${VERSION} -m "release v${VERSION}" -git push origin v${VERSION} diff --git a/scripts/tests b/scripts/tests index c95762d75..c6661a54d 100755 --- a/scripts/tests +++ b/scripts/tests @@ -14,6 +14,6 @@ fi ${PREFIX}pytest tests/unit tests/functional tests/integration 
tests/meticulous/ tests/regression $@ ${PREFIX}bandit -b extra/bandit/baseline.json -c extra/bandit/config.yaml -r faust -if [ -z $GITHUB_ACTIONS ]; then - scripts/coverage -fi +#if [ -z $GITHUB_ACTIONS ]; then +# scripts/coverage +#fi diff --git a/setup.cfg b/setup.cfg index 55827db32..bb412d374 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,24 +1,6 @@ [flake8] -ignore = W503, E203, B305 +ignore = W503, E203, E231, E225, B305 max-line-length = 88 -[mypy] -disallow_untyped_defs = True -ignore_missing_imports = True - [mypy-tests.*] disallow_untyped_defs = False - -[tool:isort] -profile = black -combine_as_imports = True - -[tool:pytest] -addopts = - --cov-report=term-missing:skip-covered - --cov=faust - -rxXs - --strict-config - --strict-markers -xfail_strict=True - diff --git a/setup.py b/setup.py index 0e17eeed9..e8047d085 100644 --- a/setup.py +++ b/setup.py @@ -33,6 +33,7 @@ "prometheus", "redis", "rocksdb", + "rocksdict", "sentry", "setproctitle", "statsd", @@ -45,12 +46,12 @@ LIBRARIES = [] E_UNSUPPORTED_PYTHON = NAME + " 1.0 requires Python %%s or later!" 
-if sys.version_info < (3, 6): - raise Exception(E_UNSUPPORTED_PYTHON % ("3.6",)) # NOQA +if sys.version_info < (3, 8): + raise Exception(E_UNSUPPORTED_PYTHON % ("3.8",)) # NOQA from pathlib import Path # noqa -README = Path("README.rst") +README = Path("README.md") # -*- Compiler Flags -*- @@ -109,7 +110,12 @@ def run(self): def build_extension(self, ext): try: build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsExecError, DistutilsPlatformError, ValueError): + except ( + CCompilerError, + DistutilsExecError, + DistutilsPlatformError, + ValueError, + ): raise BuildFailed() @@ -182,20 +188,16 @@ def extras_require(): def do_setup(**kwargs): setup( name="faust-streaming", - version=meta["version"], + use_scm_version=True, + setup_requires=["setuptools_scm"], description=meta["doc"], long_description=long_description, long_description_content_type="text/markdown", - author=meta["author"], - author_email=meta["contact"], - url=meta["homepage"], - platforms=["any"], - license="BSD 3-Clause", packages=find_packages(exclude=["examples", "ez_setup", "tests", "tests.*"]), # PEP-561: https://www.python.org/dev/peps/pep-0561/ package_data={"faust": ["py.typed"]}, include_package_data=True, - python_requires=">=3.6.0", + python_requires=">=3.8.0", zip_safe=False, install_requires=reqs("requirements.txt"), tests_require=reqs("test.txt"), @@ -205,40 +207,7 @@ def do_setup(**kwargs): "faust = faust.cli.faust:cli", ], }, - project_urls={ - "Bug Reports": "https://github.com/faust-streaming/faust/issues", - "Source": "https://github.com/faust-streaming/faust", - "Documentation": "https://fauststream.com/en/latest", - }, - keywords=[ - "stream", - "processing", - "asyncio", - "distributed", - "queue", - "kafka", - ], - classifiers=[ - "Framework :: AsyncIO", - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "Natural Language :: English", - "License :: OSI Approved :: BSD License", - "Programming Language :: Python", - 
"Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - "Operating System :: POSIX", - "Operating System :: POSIX :: Linux", - "Operating System :: MacOS :: MacOS X", - "Operating System :: POSIX :: BSD", - "Operating System :: Microsoft :: Windows", - "Topic :: System :: Networking", - "Topic :: System :: Distributed Computing", - ], - **kwargs + **kwargs, ) diff --git a/tests/bench/base.py b/tests/bench/base.py index f686feb11..9bad097d6 100644 --- a/tests/bench/base.py +++ b/tests/bench/base.py @@ -94,7 +94,7 @@ async def produce(self, max_latency: float, max_messages: int, **kwargs): time_start = monotonic() time_1st = monotonic() - def on_published(meta): + def on_published(meta, time_1st=time_1st): print(f"1ST OK: {meta} AFTER {monotonic() - time_1st}s") callback = on_published diff --git a/tests/conftest.py b/tests/conftest.py index 508650e43..0cd0c4b36 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,13 +3,15 @@ import time from http import HTTPStatus from typing import Any, NamedTuple, Optional +from unittest.mock import MagicMock, Mock, patch import pytest from _pytest.assertion.util import _compare_eq_dict, _compare_eq_set from aiohttp.client import ClientError, ClientSession from aiohttp.web import Response from mode.utils.futures import all_tasks -from mode.utils.mocks import AsyncContextManagerMock, AsyncMock, MagicMock, Mock, patch + +from tests.helpers import AsyncContextManagerMock, AsyncMock sentinel = object() @@ -131,7 +133,7 @@ def raise_for_status(): if 400 <= options.status_code: raise ClientError() - response = AsyncMock( + response = Mock( autospec=Response, text=AsyncMock(return_value=options.text), read=AsyncMock(return_value=options.text), diff --git a/tests/consistency/consistency_checker.py 
b/tests/consistency/consistency_checker.py index d9b0f073c..76f73ea17 100644 --- a/tests/consistency/consistency_checker.py +++ b/tests/consistency/consistency_checker.py @@ -2,8 +2,8 @@ from collections import defaultdict from aiokafka import AIOKafkaClient, AIOKafkaConsumer -from kafka.protocol.commit import GroupCoordinatorRequest_v0, OffsetFetchRequest_v1 -from kafka.structs import TopicPartition +from aiokafka.protocol.commit import GroupCoordinatorRequest_v0, OffsetFetchRequest_v1 +from aiokafka.structs import TopicPartition from faust.utils import json diff --git a/tests/functional/agents/helpers.py b/tests/functional/agents/helpers.py index 583066bca..ca70c6955 100644 --- a/tests/functional/agents/helpers.py +++ b/tests/functional/agents/helpers.py @@ -59,9 +59,7 @@ def __init__( self.isolated_partitions = isolated_partitions self.topic_name = self.name - self.tps = [TP(self.topic_name, p) for p in self.partitions] - self.next_tp = cycle(self.tps) - self.expected_tp = cycle(self.tps) + self._set_tps_from_partitions() self.seen_offsets = set() self.processed_total = 0 @@ -72,6 +70,11 @@ def __init__( self.agent_stopped_processing = asyncio.Event() self.finished = asyncio.Event() + def _set_tps_from_partitions(self): + self.tps = [TP(self.topic_name, p) for p in self.partitions] + self.next_tp = cycle(self.tps) + self.expected_tp = cycle(self.tps) + async def on_start(self) -> None: app = self.app topic = app.topic(self.topic_name, value_serializer=self.value_serializer) @@ -153,8 +156,10 @@ async def _send(self) -> None: self.finished.set() - async def conductor_setup(self, assigned: Set[TP]) -> None: - await self.app.agents.on_rebalance(set(), assigned) + async def conductor_setup( + self, assigned: Set[TP], revoked: Optional[Set[TP]] = None + ) -> None: + await self.app.agents.on_rebalance(revoked or set(), assigned) await self.app.topics._update_indices() await self.app.topics.on_partitions_assigned(assigned) diff --git 
a/tests/functional/agents/test_isolated_partitions.py b/tests/functional/agents/test_isolated_partitions.py index 33e71f5f1..45d6fb0f7 100644 --- a/tests/functional/agents/test_isolated_partitions.py +++ b/tests/functional/agents/test_isolated_partitions.py @@ -1,11 +1,11 @@ from collections import Counter from pprint import pformat -from typing import List, NamedTuple +from typing import Any, List, Mapping, NamedTuple import pytest from faust.exceptions import ImproperlyConfigured -from faust.types import EventT, StreamT +from faust.types import AppT, EventT, Message as MessageT, StreamT from .helpers import AgentCase @@ -51,6 +51,25 @@ async def test_agent_isolated_partitions__concurrency(*, app, logging): ) +@pytest.mark.asyncio +async def test_agent_isolated_partitions_rebalancing(*, app, logging): + await AgentIsolatedRebalanceCase.run_test( + app=app, + num_messages=100, + concurrency=1, + partitions=[0, 1, 2, 3], + reassign_partitions={ + 10: [0], + 20: [1], + 30: [0, 1], + 40: [2, 3], + 50: [0, 1, 2, 3], + 60: [4, 5, 6, 7], + }, + isolated_partitions=True, + ) + + class AgentIsolatedCase(AgentCase): name = "test_agent_isolated_partitions" @@ -89,3 +108,39 @@ async def assert_success(self) -> None: if max_ is None: max_ = total assert total == max_ + + +class AgentIsolatedRebalanceCase(AgentCase): + name = "test_agent_isolated_partitions_rebalancing" + + @classmethod + async def run_test( + cls, app: AppT, *, reassign_partitions: Mapping[int, List[int]], **kwargs: Any + ) -> "AgentCase": + return await super().run_test( + app, reassign_partitions=reassign_partitions, **kwargs + ) + + def __init__( + self, app: AppT, *, reassign_partitions: Mapping[int, List[int]], **kwargs: Any + ) -> None: + super().__init__(app, **kwargs) + self.reassign_partitions = reassign_partitions + + async def put(self, key: bytes, value: bytes, **kwargs: Any) -> MessageT: + message = await super().put(key, value, **kwargs) + + new_partitions = 
self.reassign_partitions.get(int(message.key)) + if new_partitions is not None: + await self.simulate_rebalance(new_partitions) + + return message + + async def simulate_rebalance(self, partitions: List[int]): + await self.sleep(0.1) + self.partitions = sorted(partitions) + current_tps = set(self.tps) + self._set_tps_from_partitions() + assigned = set(self.tps) + revoked = current_tps - assigned + await self.conductor_setup(assigned=assigned, revoked=revoked) diff --git a/tests/functional/conftest.py b/tests/functional/conftest.py index 957aefc27..4d09169dc 100644 --- a/tests/functional/conftest.py +++ b/tests/functional/conftest.py @@ -2,14 +2,15 @@ import os from copy import copy from typing import IO, Dict, NamedTuple, Union +from unittest.mock import Mock import pytest from mode.utils.logging import setup_logging -from mode.utils.mocks import AsyncMock, Mock import faust from faust.utils.tracing import set_current_span from faust.web.cache.backends.memory import CacheStorage +from tests.helpers import AsyncMock class AppMarks(NamedTuple): diff --git a/tests/functional/serializers/test_registry.py b/tests/functional/serializers/test_registry.py index 013dacbbe..788ca5673 100644 --- a/tests/functional/serializers/test_registry.py +++ b/tests/functional/serializers/test_registry.py @@ -1,8 +1,8 @@ import typing from decimal import Decimal +from unittest.mock import Mock import pytest -from mode.utils.mocks import Mock import faust from faust.exceptions import KeyDecodeError, ValueDecodeError diff --git a/tests/functional/test_app.py b/tests/functional/test_app.py index 39ab6cd9c..6c1b7ad57 100644 --- a/tests/functional/test_app.py +++ b/tests/functional/test_app.py @@ -2,12 +2,12 @@ import sys from pathlib import Path from typing import Any, Mapping, NamedTuple +from unittest.mock import patch import mode import pytest import pytz from mode.supervisors import OneForAllSupervisor -from mode.utils.mocks import patch from yarl import URL import faust diff --git 
a/tests/functional/test_channels.py b/tests/functional/test_channels.py index 6636d5f65..92106714a 100644 --- a/tests/functional/test_channels.py +++ b/tests/functional/test_channels.py @@ -1,9 +1,9 @@ import asyncio +from unittest.mock import Mock import pytest from mode import label from mode.utils.aiter import aiter, anext -from mode.utils.mocks import Mock from mode.utils.queues import FlowControlQueue import faust diff --git a/tests/functional/test_models.py b/tests/functional/test_models.py index d029d64df..97a655a28 100644 --- a/tests/functional/test_models.py +++ b/tests/functional/test_models.py @@ -106,7 +106,7 @@ def __init__(self, x, y, **kwargs): assert p.y == 10 payload = p.dumps(serializer="json") - assert payload == b'{"x": 30, "y": 10}' + assert payload == b'{"x":30,"y":10}' data = json.loads(payload) p2 = Point.from_data(data) @@ -128,7 +128,7 @@ def __post_init__(self): assert p.z == 40 payload = p.dumps(serializer="json") - assert payload == b'{"x": 30, "y": 10}' + assert payload == b'{"x":30,"y":10}' data = json.loads(payload) p2 = Point.from_data(data) @@ -169,7 +169,10 @@ class OptionalListOfDate2(Record, coerce=True): assert OptionalDate.loads(OptionalDate(date=n1).dumps()).date == n1 assert OptionalDate.loads(OptionalDate(date=None).dumps()).date is None n2 = datetime.utcnow() - assert ListOfDate.loads(ListOfDate(dates=[n1, n2]).dumps()).dates == [n1, n2] + assert ListOfDate.loads(ListOfDate(dates=[n1, n2]).dumps()).dates == [ + n1, + n2, + ] assert ( OptionalListOfDate.loads(OptionalListOfDate(dates=None).dumps()).dates is None ) @@ -182,7 +185,10 @@ class OptionalListOfDate2(Record, coerce=True): assert OptionalListOfDate2.loads( OptionalListOfDate2(dates=[n1, n2]).dumps() ).dates == [n1, n2] - assert TupleOfDate.loads(TupleOfDate(dates=(n1, n2)).dumps()).dates == (n1, n2) + assert TupleOfDate.loads(TupleOfDate(dates=(n1, n2)).dumps()).dates == ( + n1, + n2, + ) assert TupleOfDate.loads(TupleOfDate(dates=(n2,)).dumps()).dates == (n2,) 
assert SetOfDate.loads(SetOfDate(dates={n1, n2}).dumps()).dates == {n1, n2} assert MapOfDate.loads(MapOfDate(dates={"A": n1, "B": n2}).dumps()).dates == { @@ -220,7 +226,10 @@ class OptionalListOfDate2(Record, coerce=False, isodates=True): n1 = datetime.utcnow() assert Date.loads(Date(date=n1).dumps()).date == n1 n2 = datetime.utcnow() - assert ListOfDate.loads(ListOfDate(dates=[n1, n2]).dumps()).dates == [n1, n2] + assert ListOfDate.loads(ListOfDate(dates=[n1, n2]).dumps()).dates == [ + n1, + n2, + ] assert ( OptionalListOfDate.loads(OptionalListOfDate(dates=None).dumps()).dates is None ) @@ -233,7 +242,10 @@ class OptionalListOfDate2(Record, coerce=False, isodates=True): assert OptionalListOfDate2.loads( OptionalListOfDate2(dates=[n1, n2]).dumps() ).dates == [n1, n2] - assert TupleOfDate.loads(TupleOfDate(dates=(n1, n2)).dumps()).dates == (n1, n2) + assert TupleOfDate.loads(TupleOfDate(dates=(n1, n2)).dumps()).dates == ( + n1, + n2, + ) assert TupleOfDate.loads(TupleOfDate(dates=(n2,)).dumps()).dates == (n2,) assert SetOfDate.loads(SetOfDate(dates={n1, n2}).dumps()).dates == {n1, n2} assert MapOfDate.loads(MapOfDate(dates={"A": n1, "B": n2}).dumps()).dates == { @@ -646,7 +658,9 @@ def test_getattr(self): Account(id="123", name="123"), Account(id="123", name="123", active=False), User( - id="123", username="foo", account=Account(id="123", name="Foo", active=True) + id="123", + username="foo", + account=Account(id="123", name="Foo", active=True), ), User(id="123", username="foo", account=None), User(id=None, username=None, account=None), @@ -837,7 +851,7 @@ class Y(Record): def test__polymorphic_fields_deeply_nested(): - class BaseAttribution(Record, abc.ABC): + class BaseAttribution(Record, abc.ABC): # noqa: B024 def __post_init__(self, *args, **kwargs) -> None: self.data_store = None @@ -868,7 +882,7 @@ class AdjustRecord(BaseAttribution): def test_compat_blessed_key_deeply_nested(): - class BaseAttribution(Record, abc.ABC): + class BaseAttribution(Record, 
abc.ABC): # noqa: B024 def __post_init__(self, *args, **kwargs) -> None: self.data_store = None @@ -934,12 +948,11 @@ class AdjustRecord(BaseAttribution): def test_adtribute_payload(app): - class BaseAttribution(Record, abc.ABC): + class BaseAttribution(Record, abc.ABC): # noqa: B024 def __post_init__(self) -> None: self.data_store = None class AdjustData(Record): - activity_kind: str network_name: str adid: str @@ -998,7 +1011,6 @@ def __post_init__(self) -> None: def test_overwrite_asdict(): - with pytest.raises(RuntimeError): class R(Record): @@ -1115,7 +1127,6 @@ class MyBase(faust.Record, abstract=True): def test_raises_when_defaults_in_wrong_order(): - with pytest.raises(TypeError): class X(Record): @@ -1453,7 +1464,10 @@ def test_payload_with_reserved_keyword(): class X(Record): location: str = StringField(input_name="in") foo: str = StringField( - required=False, default="BAR", input_name="bar", output_name="foobar" + required=False, + default="BAR", + input_name="bar", + output_name="foobar", ) with pytest.raises(TypeError): diff --git a/tests/functional/test_streams.py b/tests/functional/test_streams.py index 84ae325e8..95185cbeb 100644 --- a/tests/functional/test_streams.py +++ b/tests/functional/test_streams.py @@ -1,14 +1,15 @@ import asyncio from copy import copy +from unittest.mock import Mock, patch import pytest from mode import label from mode.utils.aiter import aiter, anext -from mode.utils.mocks import AsyncMock, Mock import faust from faust.exceptions import ImproperlyConfigured from faust.streams import maybe_forward +from tests.helpers import AsyncMock from .helpers import channel_empty, message, put @@ -222,6 +223,43 @@ async def myfilter(value): assert i == 3 +@pytest.mark.asyncio +async def test_stream_filter_acks_filtered_out_messages(app, event_loop): + """ + Test the filter function acknowledges the filtered out + messages regardless of the ack setting. 
+ """ + values = [1000, 3000, 99, 5000, 3, 9999] + async with app.stream(values).filter(lambda x: x > 1000) as stream: + async for event in stream.events(): + assert event.value > 1000 + assert len(app.consumer.unacked) == 0 + + +@pytest.mark.asyncio +async def test_acks_filtered_out_messages_when_using_take(app, event_loop): + """ + Test the filter function acknowledges the filtered out messages when using take(). + """ + initial_values = [1000, 999, 3000, 99, 5000, 3, 9999] + expected_values = [v for v in initial_values if v > 1000] + original_function = app.create_event + # using patch to intercept message objects, to check if they are acked later + with patch("faust.app.base.App.create_event") as create_event_mock: + create_event_mock.side_effect = original_function + async with new_stream(app) as stream: + for value in initial_values: + await stream.channel.send(value=value) + async for values in stream.filter(lambda x: x > 1000).take( + len(expected_values), within=5 + ): + assert values == expected_values + break + messages = [call[0][3] for call in create_event_mock.call_args_list] + acked = [m.acked for m in messages if m.acked] + assert len(acked) == len(initial_values) + + @pytest.mark.asyncio async def test_events(app): async with new_stream(app) as stream: diff --git a/tests/functional/web/conftest.py b/tests/functional/web/conftest.py index 61beebd38..5101d731a 100644 --- a/tests/functional/web/conftest.py +++ b/tests/functional/web/conftest.py @@ -1,5 +1,6 @@ +from unittest.mock import Mock + import pytest -from mode.utils.mocks import Mock from faust.exceptions import SameNode diff --git a/tests/functional/web/test_cache.py b/tests/functional/web/test_cache.py index 1b2dedd8f..92044312a 100644 --- a/tests/functional/web/test_cache.py +++ b/tests/functional/web/test_cache.py @@ -2,13 +2,13 @@ import aredis import pytest -from mode.utils.mocks import Mock import faust from faust.exceptions import ImproperlyConfigured from faust.web import Blueprint, 
View from faust.web.cache import backends from faust.web.cache.backends import redis +from tests.helpers import Mock DEFAULT_TIMEOUT = 361.363 VIEW_B_TIMEOUT = 64.3 @@ -364,7 +364,7 @@ async def test_redis__start_twice_same_client(*, app, mocked_redis): async def test_redis_get__irrecoverable_errors(*, app, mocked_redis): from aredis.exceptions import AuthenticationError - mocked_redis.return_value.get.coro.side_effect = AuthenticationError() + mocked_redis.return_value.get.side_effect = AuthenticationError() with pytest.raises(app.cache.Unavailable): async with app.cache: @@ -386,10 +386,10 @@ async def test_redis_invalidating_error(operation, delete_error, *, app, mocked_ from aredis.exceptions import DataError mocked_op = getattr(mocked_redis.return_value, operation) - mocked_op.coro.side_effect = DataError() + mocked_op.side_effect = DataError() if delete_error: # then the delete fails - mocked_redis.return_value.delete.coro.side_effect = DataError() + mocked_redis.return_value.delete.side_effect = DataError() with pytest.raises(app.cache.Unavailable): async with app.cache: @@ -416,7 +416,7 @@ async def test_memory_delete(*, app): async def test_redis_get__operational_error(*, app, mocked_redis): from aredis.exceptions import TimeoutError - mocked_redis.return_value.get.coro.side_effect = TimeoutError() + mocked_redis.return_value.get.side_effect = TimeoutError() with pytest.raises(app.cache.Unavailable): async with app.cache: @@ -471,4 +471,7 @@ def test_cluster_client(self, app): client = backend._new_client() assert isinstance(client, aredis.StrictRedisCluster) pool = client.connection_pool - assert {"host": backend.url.host, "port": 3636} in pool.nodes.startup_nodes + assert { + "host": backend.url.host, + "port": 3636, + } in pool.nodes.startup_nodes diff --git a/tests/helpers.py b/tests/helpers.py index 260194bba..dae611da3 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -1,9 +1,46 @@ +import asyncio +import builtins +import sys +import types 
+import unittest.mock +from contextlib import contextmanager +from itertools import count from time import time +from types import ModuleType +from typing import ( + Any, + Callable, + ContextManager, + Iterator, + List, + Optional, + Type, + Union, + cast, +) +from unittest.mock import Mock + +if sys.version_info < (3, 8): + from mock.mock import AsyncMock +else: + from unittest.mock import AsyncMock from faust.events import Event from faust.types.tuples import Message -__all__ = ["message", "new_event"] +__all__ = [ + "message", + "new_event", + "FutureMock", + "mask_module", + "patch_module", + "AsyncMock", + "ContextMock", + "AsyncContextMock", + "AsyncContextManagerMock", +] + +MOCK_CALL_COUNT = count(0) def message( @@ -40,3 +77,220 @@ def new_event(app, key=None, value=None, *, headers=None, **kwargs): headers, message(key=key, value=value, headers=headers, **kwargs), ) + + +class FutureMock(unittest.mock.Mock): + """Mock a :class:`asyncio.Future`.""" + + awaited = False + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self._loop = asyncio.get_event_loop() + + def __await__(self) -> Any: + self.awaited = True + yield self() + + def assert_awaited(self) -> None: + assert self.awaited + + def assert_not_awaited(self) -> None: + assert not self.awaited + + +@contextmanager +def patch_module(*names: str, new_callable: Any = Mock) -> Iterator: + """Mock one or modules such that every attribute is a :class:`Mock`.""" + prev = {} + + class MockModule(types.ModuleType): + def __getattr__(self, attr: str) -> Any: + setattr(self, attr, new_callable()) + return types.ModuleType.__getattribute__(self, attr) + + mods = [] + for name in names: + try: + prev[name] = sys.modules[name] + except KeyError: + pass + mod = sys.modules[name] = MockModule(name) + mods.append(mod) + try: + yield mods + finally: + for name in names: + try: + sys.modules[name] = prev[name] + except KeyError: + try: + del sys.modules[name] + except 
KeyError: + pass + + +@contextmanager +def mask_module(*modnames: str) -> Iterator: + """Ban some modules from being importable inside the context. + + For example:: + + >>> with mask_module('sys'): + ... try: + ... import sys + ... except ImportError: + ... print('sys not found') + sys not found + + >>> import sys # noqa + >>> sys.version + (2, 5, 2, 'final', 0) + + Taken from + http://bitbucket.org/runeh/snippets/src/tip/missing_modules.py + + """ + realimport = builtins.__import__ + + def myimp(name: str, *args: Any, **kwargs: Any) -> ModuleType: + if name in modnames: + raise ImportError(f"No module named {name}") + else: + return cast(ModuleType, realimport(name, *args, **kwargs)) + + builtins.__import__ = myimp + try: + yield + finally: + builtins.__import__ = realimport + + +class AsyncContextMock(unittest.mock.Mock): + """Mock for :class:`typing.AsyncContextManager`. + + You can use this to mock asynchronous context managers, + when an object with a fully defined ``__aenter__`` and ``__aexit__`` + is required. + + Here's an example mocking an :pypi:`aiohttp` client: + + .. 
code-block:: python + + import http + from aiohttp.client import ClientSession + from aiohttp.web import Response + from mode.utils.mocks import AsyncContextManagerMock, AsyncMock, Mock + + @pytest.fixture() + def session(monkeypatch): + session = Mock( + name='http_client', + autospec=ClientSession, + request=Mock( + return_value=AsyncContextManagerMock( + return_value=Mock( + autospec=Response, + status=http.HTTPStatus.OK, + json=AsyncMock( + return_value={'hello': 'json'}, + ), + ), + ), + ), + ) + monkeypatch.setattr('where.is.ClientSession', session) + return session + + @pytest.mark.asyncio + async def test_session(session): + from where.is import ClientSession + session = ClientSession() + async with session.get('http://example.com') as response: + assert response.status == http.HTTPStatus.OK + assert await response.json() == {'hello': 'json'} + """ + + def __init__( + self, + *args: Any, + aenter_return: Any = None, + aexit_return: Any = None, + side_effect: Union[Callable, BaseException] = None, + **kwargs: Any, + ) -> None: + super().__init__(*args, **kwargs) + self.aenter_return = aenter_return + self.aexit_return = aexit_return + self.side_effect = side_effect + + async def __aenter__(self) -> Any: + mgr = self.aenter_return or self.return_value + if self.side_effect: + if isinstance(self.side_effect, BaseException): + raise self.side_effect + else: + return self.side_effect() + if isinstance(mgr, AsyncMock): + return mgr.coro + return mgr + + async def __aexit__(self, *args: Any) -> Any: + return self.aexit_return + + +AsyncContextManagerMock = AsyncContextMock # XXX compat alias + + +class _ContextMock(Mock, ContextManager): + """Internal context mock class. + + Dummy class implementing __enter__ and __exit__ + as the :keyword:`with` statement requires these to be implemented + in the class, not just the instance. 
+ """ + + def __enter__(self) -> "_ContextMock": + return self + + def __exit__( + self, + exc_type: Type[BaseException] = None, + exc_val: BaseException = None, + exc_tb: types.TracebackType = None, + ) -> Optional[bool]: + pass + + +def ContextMock(*args: Any, **kwargs: Any) -> _ContextMock: + """Mock that mocks :keyword:`with` statement contexts.""" + obj = _ContextMock(*args, **kwargs) + obj.attach_mock(_ContextMock(), "__enter__") + obj.attach_mock(_ContextMock(), "__exit__") + obj.__enter__.return_value = obj # type: ignore + # if __exit__ return a value the exception is ignored, + # so it must return None here. + obj.__exit__.return_value = None # type: ignore + return obj + + +class Mock(unittest.mock.Mock): + """Mock object.""" + + global_call_count: Optional[int] = None + call_counts: List[int] = cast(List[int], None) + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + ret = super().__call__(*args, **kwargs) + count = self.global_call_count = next(MOCK_CALL_COUNT) + if self.call_counts is None: + # mypy thinks this is unreachable as we mask that this is Optional + self.call_counts = [count] # type: ignore + else: + self.call_counts.append(count) + return ret + + def reset_mock(self, *args: Any, **kwargs: Any) -> None: + super().reset_mock(*args, **kwargs) + if self.call_counts is not None: + self.call_counts.clear() diff --git a/tests/integration/cli/test_agents.py b/tests/integration/cli/test_agents.py index e25cacec4..227e3c1f9 100644 --- a/tests/integration/cli/test_agents.py +++ b/tests/integration/cli/test_agents.py @@ -8,7 +8,11 @@ def test_json(faust_json): "help": "Foo agent help.", } in agents assert {"name": "@app.add", "topic": "add-topic", "help": ""} in agents - assert {"name": "@app.internal", "topic": "", "help": ""} in agents + assert { + "name": "@app.internal", + "topic": "", + "help": "", + } in agents names = [agent["name"] for agent in agents] assert ( diff --git a/tests/integration/cli/test_model.py 
b/tests/integration/cli/test_model.py index 7f2e4b037..01d1fd583 100644 --- a/tests/integration/cli/test_model.py +++ b/tests/integration/cli/test_model.py @@ -4,7 +4,11 @@ def test_json(self, faust_json): assert not exitcode assert model == [ - {"field": "points", "type": "typing.List[__main__.Point]", "default": "*"}, + { + "field": "points", + "type": "typing.List[__main__.Point]", + "default": "*", + }, {"field": "timestamp", "type": "float", "default": "None"}, ] diff --git a/tests/integration/cli/test_models.py b/tests/integration/cli/test_models.py index 69c9c412b..495140589 100644 --- a/tests/integration/cli/test_models.py +++ b/tests/integration/cli/test_models.py @@ -4,7 +4,10 @@ def test_json(faust_json): assert {"name": "app.Arena", "help": ""} in models assert {"name": "app.Point", "help": ""} in models - assert {"name": "@ReqRepResponse", "help": "Request-Reply response."} in models + assert { + "name": "@ReqRepResponse", + "help": "Request-Reply response.", + } in models names = [model["name"] for model in models] assert ( diff --git a/tests/meticulous/assignor/test_copartitioned_assignor.py b/tests/meticulous/assignor/test_copartitioned_assignor.py index 8179012e1..05c2f0ce8 100644 --- a/tests/meticulous/assignor/test_copartitioned_assignor.py +++ b/tests/meticulous/assignor/test_copartitioned_assignor.py @@ -1,7 +1,9 @@ import copy +import platform from collections import Counter from typing import MutableMapping +import pytest from hypothesis import assume, given, settings from hypothesis.strategies import integers @@ -77,6 +79,9 @@ def client_removal_sticky( return True +@pytest.mark.skipif( + platform.python_implementation() == "PyPy", reason="Not yet supported on PyPy" +) @given( partitions=integers(min_value=0, max_value=256), replicas=integers(min_value=0, max_value=64), @@ -95,6 +100,9 @@ def test_fresh_assignment(partitions, replicas, num_clients): assert is_valid(new_assignments, partitions, replicas) +@pytest.mark.skipif( + 
platform.python_implementation() == "PyPy", reason="Not yet supported on PyPy" +) @given( partitions=integers(min_value=0, max_value=256), replicas=integers(min_value=0, max_value=64), @@ -131,6 +139,9 @@ def test_add_new_clients(partitions, replicas, num_clients, num_additional_clien assert clients_balanced(new_assignments) +@pytest.mark.skipif( + platform.python_implementation() == "PyPy", reason="Not yet supported on PyPy" +) @given( partitions=integers(min_value=0, max_value=256), replicas=integers(min_value=0, max_value=64), diff --git a/tests/old/app.py b/tests/old/app.py deleted file mode 100644 index 3f5d57a48..000000000 --- a/tests/old/app.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Any, AsyncIterator - -import faust - -app = faust.App("faust.stress") -rpc_topic = app.topic("faust.stress.rpc") - - -@app.agent(rpc_topic, concurrency=10) -async def simple(it: AsyncIterator[Any]) -> AsyncIterator[Any]: - async for value in it: - yield value diff --git a/tests/old/test_simple.py b/tests/old/test_simple.py deleted file mode 100644 index 03d158c82..000000000 --- a/tests/old/test_simple.py +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Mapping - -import pytest -from mode import setup_logging - -from .app import simple - -setup_logging(loglevel="INFO") - - -def _build_data(i: int) -> Mapping: - return {"A": {"the": {"quick": {"brown": {"fox": i}}}}} - - -@pytest.mark.asyncio -async def test_simple_ask() -> None: - for i in range(100): - value = _build_data(i) - assert await simple.ask(value=value) == value - - -@pytest.mark.asyncio -async def test_simple_map() -> None: - values = [_build_data(i) for i in range(100)] - check = set(range(100)) - replies = set() - async for reply in simple.map(values): - v = reply["A"]["the"]["quick"]["brown"]["fox"] - assert v in check - replies.add(v) - assert replies == check diff --git a/tests/regression/i323/test_autodiscover.py b/tests/regression/i323/test_autodiscover.py index fff034763..a85322933 100644 --- 
a/tests/regression/i323/test_autodiscover.py +++ b/tests/regression/i323/test_autodiscover.py @@ -3,9 +3,9 @@ import os import sys from contextlib import ExitStack, redirect_stderr, redirect_stdout +from unittest.mock import patch import pytest -from mode.utils.mocks import patch def test_main(*, app, loop): diff --git a/tests/regression/i324/test_autodiscover.py b/tests/regression/i324/test_autodiscover.py index 344b13e8e..7d3c0060b 100644 --- a/tests/regression/i324/test_autodiscover.py +++ b/tests/regression/i324/test_autodiscover.py @@ -3,9 +3,9 @@ import os import sys from contextlib import ExitStack, redirect_stderr, redirect_stdout +from unittest.mock import patch import pytest -from mode.utils.mocks import patch def test_main(*, app, loop): diff --git a/tests/stress/app.py b/tests/stress/app.py index d81d8fbb6..e5e24dd78 100644 --- a/tests/stress/app.py +++ b/tests/stress/app.py @@ -68,7 +68,7 @@ def create_app(name, origin, base=faust.App, **kwargs: Any) -> faust.App: topic_partitions=config.topic_partitions, loghandlers=config.loghandlers(), autodiscover=True, - **kwargs + **kwargs, ) diff --git a/tests/stress/killer.py b/tests/stress/killer.py index 200bcfcd2..9d9a69f92 100644 --- a/tests/stress/killer.py +++ b/tests/stress/killer.py @@ -19,7 +19,6 @@ def seconds_to_sleep(self): class Chaos(Service): - schedule = [ # Signal -TERM/-INT between every 1 and 30 seconds. 
# This period lasts for at least half a minute, but never for more diff --git a/tests/stress/reports/logging.py b/tests/stress/reports/logging.py index b009a3d39..d0564cbe1 100644 --- a/tests/stress/reports/logging.py +++ b/tests/stress/reports/logging.py @@ -12,7 +12,6 @@ class LogPusher(Service): - app: faust.App queue: asyncio.Queue @@ -36,7 +35,6 @@ async def _flush(self) -> None: class LogHandler(logging.Handler): - app: faust.App def __init__(self, app: faust.App, *args: Any, **kwargs: Any) -> None: diff --git a/tests/stress/reports/models.py b/tests/stress/reports/models.py index d04482695..f6db46446 100644 --- a/tests/stress/reports/models.py +++ b/tests/stress/reports/models.py @@ -6,7 +6,6 @@ class Error(faust.Record): - #: Message (the actual formatted log message). message: str @@ -39,7 +38,6 @@ class Error(faust.Record): class Status(faust.Record): - #: The id of the app that is sending this. app_id: str diff --git a/tests/stress/requirements.txt b/tests/stress/requirements.txt index f198ca349..05d8b007a 100644 --- a/tests/stress/requirements.txt +++ b/tests/stress/requirements.txt @@ -2,4 +2,4 @@ raven>=6.7.0 raven-aiohttp>=0.7.0 envoy setproctitle -jinja2 +jinja2<3.1.0 diff --git a/tests/stress/tests/forwarder/app.py b/tests/stress/tests/forwarder/app.py index c8e763a32..3396fca41 100644 --- a/tests/stress/tests/forwarder/app.py +++ b/tests/stress/tests/forwarder/app.py @@ -96,7 +96,9 @@ async def check(forwarded_numbers: Stream[int]) -> None: # number should be larger than the previous number. # if that's not true it means we have a duplicate. 
app.log.error( - "Found duplicate number in %r: %r", event.message.tp, number + "Found duplicate number in %r: %r", + event.message.tp, + number, ) found_duplicates += 1 else: diff --git a/tests/stress/tests/tables/app.py b/tests/stress/tests/tables/app.py index e533301b1..4876389ff 100644 --- a/tests/stress/tests/tables/app.py +++ b/tests/stress/tests/tables/app.py @@ -137,7 +137,9 @@ async def process(numbers: Stream[int]) -> None: # WITH THE OFFSET after all!) # if the number is less than we have a problem. app.log.error( - "Found duplicate number in %r: %r", event.message.tp, number + "Found duplicate number in %r: %r", + event.message.tp, + number, ) found_duplicates += 1 else: diff --git a/tests/unit/agents/test_actor.py b/tests/unit/agents/test_actor.py index ec90b58ec..9527b9b34 100644 --- a/tests/unit/agents/test_actor.py +++ b/tests/unit/agents/test_actor.py @@ -1,12 +1,13 @@ import asyncio import collections.abc +from unittest.mock import Mock import pytest -from mode.utils.mocks import AsyncMock, Mock from faust.agents import Agent from faust.agents.actor import Actor, AsyncIterableActor, AwaitableActor from faust.types import TP +from tests.helpers import AsyncMock class FakeActor(Actor): @@ -17,7 +18,6 @@ def traceback(self) -> str: class Test_Actor: - ActorType = FakeActor @pytest.fixture() @@ -93,7 +93,6 @@ def test_repr(self, *, actor): class Test_AsyncIterableActor(Test_Actor): - ActorType = AsyncIterableActor def test_aiter(self, *, actor, it): @@ -103,7 +102,6 @@ def test_aiter(self, *, actor, it): class Test_AwaitableActor(Test_Actor): - ActorType = AwaitableActor def test_await(self, *, actor, it): diff --git a/tests/unit/agents/test_agent.py b/tests/unit/agents/test_agent.py index 55b0f3405..e58d876ee 100644 --- a/tests/unit/agents/test_agent.py +++ b/tests/unit/agents/test_agent.py @@ -1,11 +1,11 @@ import asyncio +from unittest.mock import ANY, call, patch import pytest from mode import SupervisorStrategy, label from mode.utils.aiter import 
aiter from mode.utils.futures import done_future from mode.utils.logging import CompositeLogger -from mode.utils.mocks import ANY, AsyncMock, FutureMock, Mock, call, patch from mode.utils.trees import Node import faust @@ -21,6 +21,7 @@ from faust.events import Event from faust.exceptions import ImproperlyConfigured from faust.types import TP, Message +from tests.helpers import AsyncMock, FutureMock, Mock class Word(Record): @@ -110,9 +111,9 @@ async def test_on_start_supervisor(self, *, agent): ) await agent._on_start_supervisor() - aref = agent._start_one.coro.return_value + aref = agent._start_one.return_value - agent._start_one.coro.assert_has_calls( + agent._start_one.assert_has_calls( [ call( index=i, @@ -122,9 +123,7 @@ async def test_on_start_supervisor(self, *, agent): for i in range(10) ] ) - agent.supervisor.add.assert_has_calls( - [call(agent._start_one.coro()) for i in range(10)] - ) + agent.supervisor.add.assert_has_calls([call(aref) for _ in range(10)]) agent.supervisor.start.assert_called_once_with() def test_get_active_partitions(self, *, agent): @@ -138,7 +137,7 @@ def test_get_active_partitions(self, *, agent): async def test_replace_actor(self, *, agent): aref = Mock(name="aref", autospec=Actor) agent._start_one = AsyncMock(name="_start_one") - assert await agent._replace_actor(aref, 101) == agent._start_one.coro() + assert await agent._replace_actor(aref, 101) == agent._start_one.return_value agent._start_one.assert_called_once_with( index=101, active_partitions=aref.active_partitions, @@ -345,7 +344,7 @@ async def test_start_isolated(self, *, agent): ) ret = await agent._start_isolated(TP("foo", 0)) agent._start_for_partitions.assert_called_once_with({TP("foo", 0)}) - assert ret is agent._start_for_partitions.coro() + assert ret is agent._start_for_partitions.return_value @pytest.mark.asyncio async def test_on_shared_partitions_revoked(self, *, agent): @@ -390,25 +389,26 @@ async def test_start_task(self, *, agent): agent._prepare_actor = 
AsyncMock(name="_prepare_actor") ret = await agent._start_task(index=0) agent._prepare_actor.assert_called_once_with(ANY, agent.beacon) - assert ret is agent._prepare_actor.coro() + assert ret is agent._prepare_actor.return_value @pytest.mark.asyncio - async def test_prepare_actor__AsyncIterable(self, *, agent): + async def test_prepare_actor__AsyncIterable(self, *, agent, monkeypatch): + async def mock_execute_actor(coro, aref): + await coro + + mock_beacon = Mock(name="beacon", autospec=Node) + mock_slurp = AsyncMock(name="slurp") + monkeypatch.setattr(agent, "_slurp", mock_slurp) + monkeypatch.setattr(agent, "_execute_actor", mock_execute_actor) aref = agent(index=0, active_partitions=None) - with patch("asyncio.Task") as Task: - agent._slurp = Mock(name="_slurp") - agent._execute_actor = Mock(name="_execute_actor") - beacon = Mock(name="beacon", autospec=Node) - ret = await agent._prepare_actor(aref, beacon) - agent._slurp.assert_called() - coro = agent._slurp() - agent._execute_actor.assert_called_once_with(coro, aref) - Task.assert_called_once_with(agent._execute_actor(), loop=agent.loop) - task = Task() - assert task._beacon is beacon - assert aref.actor_task is task - assert aref in agent._actors - assert ret is aref + ret = await agent._prepare_actor(aref, mock_beacon) + task = aref.actor_task + await task + mock_slurp.assert_awaited() + assert mock_slurp.await_args.args[0] is aref + assert task._beacon is mock_beacon + assert aref in agent._actors + assert ret is aref @pytest.mark.asyncio async def test_prepare_actor__Awaitable(self, *, agent2): @@ -429,22 +429,24 @@ async def test_prepare_actor__Awaitable(self, *, agent2): assert ret is aref @pytest.mark.asyncio - async def test_prepare_actor__Awaitable_with_multiple_topics(self, *, agent2): + async def test_prepare_actor__Awaitable_with_multiple_topics( + self, *, agent2, monkeypatch + ): aref = agent2(index=0, active_partitions=None) - asyncio.ensure_future(aref.it).cancel() # silence warning 
agent2.channel.topics = ["foo", "bar"] - with patch("asyncio.Task") as Task: - agent2._execute_actor = Mock(name="_execute_actor") - beacon = Mock(name="beacon", autospec=Node) - ret = await agent2._prepare_actor(aref, beacon) - coro = aref - agent2._execute_actor.assert_called_once_with(coro, aref) - Task.assert_called_once_with(agent2._execute_actor(), loop=agent2.loop) - task = Task() - assert task._beacon is beacon - assert aref.actor_task is task - assert aref in agent2._actors - assert ret is aref + mock_beacon = Mock(name="beacon", autospec=Node) + mock_slurp = AsyncMock(name="slurp") + mock_execute_actor = AsyncMock(name="execute_actor") + monkeypatch.setattr(agent2, "_slurp", mock_slurp) + monkeypatch.setattr(agent2, "_execute_actor", mock_execute_actor) + ret = await agent2._prepare_actor(aref, mock_beacon) + task = aref.actor_task + mock_slurp.assert_not_called() + mock_slurp.assert_not_awaited() + mock_execute_actor.assert_called_with(aref, aref) + assert task._beacon is mock_beacon + assert aref in agent2._actors + assert ret is aref @pytest.mark.asyncio async def test_prepare_actor__Awaitable_cannot_have_sinks(self, *, agent2): @@ -549,7 +551,7 @@ async def __aiter__(self): agent._reply.assert_called_once_with( None, word, word_req.reply_to, word_req.correlation_id ) - agent._delegate_to_sinks.coro.assert_has_calls( + agent._delegate_to_sinks.assert_has_calls( [ call(word), call("bar"), @@ -596,7 +598,7 @@ async def __aiter__(self): await agent._slurp(aref, it) agent._reply.assert_called_once_with(None, word, "reply_to", "correlation_id") - agent._delegate_to_sinks.coro.assert_has_calls( + agent._delegate_to_sinks.assert_has_calls( [ call(word), call("bar"), @@ -741,7 +743,10 @@ def test_create_req(self, *, agent): with patch("faust.agents.agent.uuid4") as uuid4: uuid4.return_value = "vvv" reqrep = agent._create_req( - key=b"key", value=b"value", reply_to="reply_to", headers={"k": "v"} + key=b"key", + value=b"value", + reply_to="reply_to", + 
headers={"k": "v"}, )[0] agent._get_strtopic.assert_called_once_with("reply_to") @@ -756,7 +761,10 @@ def test_create_req__use_reply_headers(self, *, agent): with patch("faust.agents.agent.uuid4") as uuid4: uuid4.return_value = "vvv" value, h = agent._create_req( - key=b"key", value=b"value", reply_to="reply_to", headers={"k": "v"} + key=b"key", + value=b"value", + reply_to="reply_to", + headers={"k": "v"}, ) agent._get_strtopic.assert_called_once_with("reply_to") @@ -772,7 +780,10 @@ def test_create_req__model(self, *, agent): uuid4.return_value = "vvv" value = Word("foo") reqrep = agent._create_req( - key=b"key", value=value, reply_to="reply_to", headers={"h1": "h2"} + key=b"key", + value=value, + reply_to="reply_to", + headers={"h1": "h2"}, )[0] assert isinstance(reqrep, ReqRepRequest) @@ -845,7 +856,7 @@ async def test_send(self, *, agent): force=True, ) - assert ret is agent.channel.send.coro() + assert ret is agent.channel.send.return_value @pytest.mark.asyncio async def test_send__without_reply_to(self, *, agent): @@ -884,7 +895,7 @@ async def test_send__without_reply_to(self, *, agent): force=True, ) - assert ret is agent.channel.send.coro() + assert ret is agent.channel.send.return_value def test_get_strtopic__agent(self, *, agent, agent2): assert agent._get_strtopic(agent2) == agent2.channel.get_topic_name() diff --git a/tests/unit/agents/test_manager.py b/tests/unit/agents/test_manager.py index eee73a0e6..c085ab484 100644 --- a/tests/unit/agents/test_manager.py +++ b/tests/unit/agents/test_manager.py @@ -1,10 +1,11 @@ import asyncio from typing import Hashable +from unittest.mock import Mock import pytest -from mode.utils.mocks import AsyncMock, Mock from faust.types import TP +from tests.helpers import AsyncMock class Test_AgentManager: @@ -46,8 +47,8 @@ def test_constructor(self, *, agents, app): @pytest.mark.asyncio async def test_on_stop__agent_raises_cancel(self, *, many, agent1, agent2): - agent1.stop.coro.side_effect = asyncio.CancelledError() - 
agent2.stop.coro.side_effect = asyncio.CancelledError() + agent1.stop.side_effect = asyncio.CancelledError() + agent2.stop.side_effect = asyncio.CancelledError() await many.on_stop() agent1.stop.assert_called_once_with() agent2.stop.assert_called_once_with() diff --git a/tests/unit/agents/test_replies.py b/tests/unit/agents/test_replies.py index 92a84bbb7..36628d9c0 100644 --- a/tests/unit/agents/test_replies.py +++ b/tests/unit/agents/test_replies.py @@ -1,12 +1,13 @@ import asyncio import json +from unittest.mock import MagicMock, Mock import pytest -from mode.utils.mocks import AsyncMock, Mock from faust import Record from faust.agents.models import ReqRepResponse from faust.agents.replies import BarrierState, ReplyConsumer, ReplyPromise +from tests.helpers import AsyncMock class Account(Record, serializer="json"): @@ -63,7 +64,7 @@ def se(): p.done.return_value = True return None - p._results.get.coro.side_effect = se + p._results.get.side_effect = se assert [x async for x in p.iterate()] == [] @@ -75,17 +76,17 @@ async def test_parallel_iterate(self): done, pending = await asyncio.wait( [ - self.adder(p), - self.fulfiller(p), - self.finalizer(p, 1.0), - self.consumer(p), + asyncio.ensure_future(self.adder(p)), + asyncio.ensure_future(self.fulfiller(p)), + asyncio.ensure_future(self.finalizer(p, 1.0)), + asyncio.ensure_future(self.consumer(p)), ], timeout=5.0, ) if pending: raise Exception( - f"Test did not return in 5s:\n" + f"Test did not return within 5s:\n" f" DONE_TASKS={done}\n" f" PENDING_TASKS={pending}\n" f" size={p.size}\n" @@ -191,7 +192,7 @@ async def test_add(self, *, c): async def test_start_fetcher(self, *, c): c._drain_replies = Mock() c._reply_topic = Mock( - return_value=Mock( + return_value=MagicMock( maybe_declare=AsyncMock(), ), ) diff --git a/tests/unit/app/test_base.py b/tests/unit/app/test_base.py index c29b3ffbb..057668f55 100644 --- a/tests/unit/app/test_base.py +++ b/tests/unit/app/test_base.py @@ -1,10 +1,10 @@ import collections 
import re +from unittest.mock import ANY, Mock, call, patch import pytest from mode import Service from mode.utils.compat import want_bytes -from mode.utils.mocks import ANY, AsyncMock, Mock, call, patch from yarl import URL import faust @@ -30,6 +30,7 @@ from faust.types.settings import Settings from faust.types.tables import GlobalTableT from faust.types.web import ResourceOptions +from tests.helpers import AsyncMock TEST_TOPIC = "test" CONFIG_DICT = { @@ -118,7 +119,7 @@ def test_new_producer(self, *, app): autospec=Transport, ) assert app._new_producer() is transport.create_producer.return_value - transport.create_producer.assert_called_with(beacon=ANY) + transport.create_producer.assert_called_with(loop=app.loop, beacon=ANY) assert app.producer is transport.create_producer.return_value @pytest.mark.parametrize( @@ -424,7 +425,7 @@ async def test_on_partitions_assigned__crashes(self, *, app): app.agents = Mock( on_rebalance=AsyncMock(), ) - app.agents.on_rebalance.coro.side_effect = RuntimeError() + app.agents.on_rebalance.side_effect = RuntimeError() app.crash = AsyncMock() await app._on_partitions_assigned(set()) @@ -677,7 +678,7 @@ async def test_timer__sleep_stopped(self, *, app): def on_sleep(seconds, **kwargs): app._stopped.set() - app.sleep.coro.side_effect = on_sleep + app.sleep.side_effect = on_sleep @app.timer(0.1) async def foo(): @@ -697,7 +698,7 @@ def on_sleep(seconds, **kwargs): # cannot use list side_effect arg as it causes # StopIteration to be raised. - app.sleep.coro.side_effect = on_sleep + app.sleep.side_effect = on_sleep @app.timer(300.0, on_leader=True) async def foo(): @@ -720,7 +721,7 @@ def on_sleep(seconds, **kwargs): # cannot use list side_effect arg as it causes # StopIteration to be raised. 
- app.sleep.coro.side_effect = on_sleep + app.sleep.side_effect = on_sleep @app.timer(300.0, on_leader=True) async def foo(app): @@ -764,7 +765,7 @@ def on_sleep(seconds, **kwargs): if app.sleep.call_count >= 3: app._stopped.set() - app.sleep.coro.side_effect = on_sleep + app.sleep.side_effect = on_sleep await foo() assert app.sleep.call_count == 3 @@ -787,7 +788,7 @@ def on_sleep(seconds, **kwargs): if app.sleep.call_count >= 3: app._stopped.set() - app.sleep.coro.side_effect = on_sleep + app.sleep.side_effect = on_sleep await foo() assert app.sleep.call_count == 3 @@ -827,7 +828,6 @@ def test_SetGlobalTable(self, *, app): assert isinstance(app.tables.data["name"], GlobalTableT) def test_page(self, *, app): - with patch("faust.app.base.venusian") as venusian: @app.page("/foo") @@ -839,7 +839,6 @@ async def view(self, request): venusian.attach.assert_called_once_with(view, category=SCAN_PAGE) def test_page__with_cors_options(self, *, app): - with patch("faust.app.base.venusian") as venusian: @app.page( @@ -882,9 +881,9 @@ async def routed(self, request): request.query = {"q": "KEY"} ret = await routed(view, request) - assert ret is app.router.route_req.coro.return_value + assert ret is app.router.route_req.return_value - app.router.route_req.coro.side_effect = SameNode() + app.router.route_req.side_effect = SameNode() ret = await routed(view, request) assert ret == 42 @@ -902,9 +901,9 @@ async def routed(self, request): request.match_info = {"q": "KEY"} ret = await routed(view, request) - assert ret is app.router.route_req.coro.return_value + assert ret is app.router.route_req.return_value - app.router.route_req.coro.side_effect = SameNode() + app.router.route_req.side_effect = SameNode() ret = await routed(view, request) assert ret == 42 @@ -920,9 +919,9 @@ async def routed(self, request): return 42 ret = await routed(view, request) - assert ret is app.router.route_req.coro.return_value + assert ret is app.router.route_req.return_value - 
app.router.route_req.coro.side_effect = SameNode() + app.router.route_req.side_effect = SameNode() ret = await routed(view, request) assert ret == 42 @@ -965,9 +964,9 @@ async def routed(self, request): request.query = {"q": "KEY"} ret = await routed(view, request) - assert ret is app.router.route_topic_req.coro.return_value + assert ret is app.router.route_topic_req.return_value - app.router.route_topic_req.coro.side_effect = SameNode() + app.router.route_topic_req.side_effect = SameNode() ret = await routed(view, request) assert ret == 42 @@ -985,9 +984,9 @@ async def routed(self, request): request.match_info = {"q": "KEY"} ret = await routed(view, request) - assert ret is app.router.route_topic_req.coro.return_value + assert ret is app.router.route_topic_req.return_value - app.router.route_topic_req.coro.side_effect = SameNode() + app.router.route_topic_req.side_effect = SameNode() ret = await routed(view, request) assert ret == 42 @@ -1003,9 +1002,9 @@ async def routed(self, request): return 42 ret = await routed(view, request) - assert ret is app.router.route_topic_req.coro.return_value + assert ret is app.router.route_topic_req.return_value - app.router.route_topic_req.coro.side_effect = SameNode() + app.router.route_topic_req.side_effect = SameNode() ret = await routed(view, request) assert ret == 42 @@ -1101,7 +1100,7 @@ async def test_maybe_start_producer(self, *, app): app.in_transaction = False app.producer = Mock(maybe_start=AsyncMock()) assert await app.maybe_start_producer() is app.producer - app.producer.maybe_start.coro.assert_called_once_with() + app.producer.maybe_start.assert_called_once_with() def test_repr(self, *, app): assert repr(app) diff --git a/tests/unit/app/test_router.py b/tests/unit/app/test_router.py index 35b389c94..76c881075 100644 --- a/tests/unit/app/test_router.py +++ b/tests/unit/app/test_router.py @@ -1,5 +1,6 @@ +from unittest.mock import ANY, Mock + import pytest -from mode.utils.mocks import ANY, Mock from yarl import URL 
from faust.app.router import Router diff --git a/tests/unit/app/test_service.py b/tests/unit/app/test_service.py index 4e5ee71a3..377e372e3 100644 --- a/tests/unit/app/test_service.py +++ b/tests/unit/app/test_service.py @@ -1,6 +1,9 @@ +from unittest.mock import Mock, call + import pytest from mode import Service, label -from mode.utils.mocks import AsyncMock, Mock, call + +from tests.helpers import AsyncMock class OtherService(Service): @@ -79,10 +82,10 @@ async def test_on_started(self, *, app): app.on_started_init_extra_tasks = AsyncMock(name="osiet") app.on_started_init_extra_services = AsyncMock(name="osies") app.on_startup_finished = None - app._wait_for_table_recovery_completed.coro.return_value = True + app._wait_for_table_recovery_completed.return_value = True await app.on_started() - app._wait_for_table_recovery_completed.coro.return_value = False + app._wait_for_table_recovery_completed.return_value = False await app.on_started() app.on_started_init_extra_tasks.assert_called_once_with() diff --git a/tests/unit/cli/conftest.py b/tests/unit/cli/conftest.py index 03160a705..c78e9b677 100644 --- a/tests/unit/cli/conftest.py +++ b/tests/unit/cli/conftest.py @@ -1,5 +1,6 @@ +from unittest.mock import Mock + import pytest -from mode.utils.mocks import Mock @pytest.fixture() diff --git a/tests/unit/cli/test_base.py b/tests/unit/cli/test_base.py index 155891efe..d2c1ac9d9 100644 --- a/tests/unit/cli/test_base.py +++ b/tests/unit/cli/test_base.py @@ -4,11 +4,11 @@ import sys import types from pathlib import Path +from unittest.mock import Mock, call, patch import click import pytest from mode import Worker -from mode.utils.mocks import AsyncMock, Mock, call, patch from faust.cli import AppCommand, Command, call_command from faust.cli.base import ( @@ -21,6 +21,7 @@ option, ) from faust.types._env import CONSOLE_PORT +from tests.helpers import AsyncMock class Test_argument: diff --git a/tests/unit/cli/test_clean_versions.py b/tests/unit/cli/test_clean_versions.py 
index 880dc3a19..5329a4f35 100644 --- a/tests/unit/cli/test_clean_versions.py +++ b/tests/unit/cli/test_clean_versions.py @@ -1,7 +1,7 @@ from pathlib import Path +from unittest.mock import Mock, call, patch import pytest -from mode.utils.mocks import Mock, call, patch from faust.cli.clean_versions import clean_versions diff --git a/tests/unit/cli/test_completion.py b/tests/unit/cli/test_completion.py index 4d40b4252..b37b4a484 100644 --- a/tests/unit/cli/test_completion.py +++ b/tests/unit/cli/test_completion.py @@ -1,5 +1,6 @@ +from unittest.mock import patch + import pytest -from mode.utils.mocks import patch from faust.cli.completion import completion diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index a2b153cbb..f04deb2f6 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1,11 +1,12 @@ import os +from unittest.mock import Mock import pytest -from mode.utils.mocks import AsyncMock, Mock import faust from faust.transport.producer import Producer from faust.utils.tracing import set_current_span +from tests.helpers import AsyncMock @pytest.fixture() diff --git a/tests/unit/fixups/test_django.py b/tests/unit/fixups/test_django.py index 5a87a9eba..02cf34b09 100644 --- a/tests/unit/fixups/test_django.py +++ b/tests/unit/fixups/test_django.py @@ -1,8 +1,10 @@ +from unittest.mock import Mock + import pytest -from mode.utils.mocks import Mock, mask_module, patch_module from faust.fixups import fixups from faust.fixups.django import Fixup +from tests.helpers import mask_module, patch_module class Test_Fixup: diff --git a/tests/unit/livecheck/conftest.py b/tests/unit/livecheck/conftest.py index 95f51f7f3..f2597f899 100644 --- a/tests/unit/livecheck/conftest.py +++ b/tests/unit/livecheck/conftest.py @@ -1,10 +1,11 @@ from datetime import datetime, timedelta, timezone +from unittest.mock import patch import pytest -from mode.utils.mocks import ContextMock, patch from faust.livecheck.models import TestExecution from faust.livecheck.runners 
import TestRunner +from tests.helpers import ContextMock @pytest.fixture() diff --git a/tests/unit/livecheck/patches/test_aiohttp.py b/tests/unit/livecheck/patches/test_aiohttp.py index bfff20c8f..c73737a5a 100644 --- a/tests/unit/livecheck/patches/test_aiohttp.py +++ b/tests/unit/livecheck/patches/test_aiohttp.py @@ -1,7 +1,7 @@ from types import SimpleNamespace +from unittest.mock import Mock import pytest -from mode.utils.mocks import Mock from faust.livecheck.locals import current_test_stack from faust.livecheck.patches.aiohttp import LiveCheckMiddleware, patch_aiohttp_session @@ -9,7 +9,6 @@ @pytest.mark.asyncio async def test_patch_aiohttp_session(*, execution): - patch_aiohttp_session() from aiohttp.client import ClientSession diff --git a/tests/unit/livecheck/test_app.py b/tests/unit/livecheck/test_app.py index 54fdc57da..2fe40b035 100644 --- a/tests/unit/livecheck/test_app.py +++ b/tests/unit/livecheck/test_app.py @@ -1,9 +1,9 @@ import asyncio from typing import Union +from unittest.mock import Mock, call import pytest from mode.utils.compat import want_bytes -from mode.utils.mocks import AsyncMock, Mock, call from faust.livecheck import LiveCheck from faust.livecheck.app import LiveCheckSensor @@ -11,6 +11,7 @@ from faust.livecheck.locals import current_test_stack from faust.livecheck.models import SignalEvent, TestExecution, TestReport from faust.livecheck.signals import BaseSignal +from tests.helpers import AsyncMock class TestLiveCheckSensor: @@ -112,7 +113,6 @@ class SignalWithNoneOrigin(livecheck.Signal): @livecheck.case() class Test_foo: - signal1: livecheck.Signal signal2: SignalWithNoneOrigin signal3: livecheck.Signal = livecheck.Signal() diff --git a/tests/unit/livecheck/test_case.py b/tests/unit/livecheck/test_case.py index a28233eda..983d1d1db 100644 --- a/tests/unit/livecheck/test_case.py +++ b/tests/unit/livecheck/test_case.py @@ -1,14 +1,15 @@ from contextlib import contextmanager from datetime import datetime, timedelta, timezone from 
statistics import median +from unittest.mock import ANY, Mock, patch import pytest from mode import label -from mode.utils.mocks import ANY, AsyncMock, Mock, patch from faust.livecheck import Case from faust.livecheck.exceptions import SuiteFailed from faust.livecheck.models import State, TestReport +from tests.helpers import AsyncMock class TestCase: @@ -45,7 +46,7 @@ def on_sample(): if case._sample.call_count == 3: case._stopped.set() - case._sample.coro.side_effect = on_sample + case._sample.side_effect = on_sample await case._sampler(case) @@ -78,7 +79,7 @@ async def test_maybe_trigger(self, *, case): uniform.return_value = 0.0 async with case.maybe_trigger() as test: - assert test is case.trigger.coro.return_value + assert test is case.trigger.return_value assert case.current_test is test @pytest.mark.asyncio @@ -91,7 +92,7 @@ async def test_trigger(self, *, case): case.app = Mock(pending_tests=Mock(send=AsyncMock())) t = await case.trigger("id1", 30, kw=2) assert t.id == "id1" - case.app.pending_tests.send.coro.assert_called_once_with( + case.app.pending_tests.send.assert_called_once_with( key="id1", value=t, ) @@ -106,7 +107,7 @@ async def test_resolve_signal(self, *, case): event = Mock(name="event") case.signals[event.signal_name] = Mock(resolve=AsyncMock()) await case.resolve_signal(key, event) - case.signals[event.signal_name].resolve.coro.assert_called_once_with( + case.signals[event.signal_name].resolve.assert_called_once_with( key, event, ) @@ -123,7 +124,7 @@ async def test_execute( case.Runner.assert_called_once_with( case, execution, started=frozen_monotonic.return_value ) - runner.execute.coro.assert_called_once_with() + runner.execute.assert_called_once_with() current_execution_stack.push.assert_called_once_with(runner) @pytest.mark.asyncio @@ -172,19 +173,19 @@ async def test_on_test_skipped(self, *, case, runner, frozen_monotonic): async def test_on_test_failed(self, *, case, runner): case._set_test_error_state = AsyncMock() await 
case.on_test_failed(runner, KeyError()) - case._set_test_error_state.coro.assert_called_once_with(State.FAIL) + case._set_test_error_state.assert_called_once_with(State.FAIL) @pytest.mark.asyncio async def test_on_test_error(self, *, case, runner): case._set_test_error_state = AsyncMock() await case.on_test_error(runner, KeyError()) - case._set_test_error_state.coro.assert_called_once_with(State.ERROR) + case._set_test_error_state.assert_called_once_with(State.ERROR) @pytest.mark.asyncio async def test_on_test_timeout(self, *, case, runner): case._set_test_error_state = AsyncMock() await case.on_test_timeout(runner, KeyError()) - case._set_test_error_state.coro.assert_called_once_with(State.TIMEOUT) + case._set_test_error_state.assert_called_once_with(State.TIMEOUT) @pytest.mark.asyncio @pytest.mark.parametrize( @@ -208,7 +209,7 @@ async def test__set_test_error_state(self, state, failures, fail_suite, *, case) assert case.total_failures == 1 assert case.total_by_state[state] == 1 if fail_suite: - case.on_suite_fail.coro.assert_called_once_with(ANY) + case.on_suite_fail.assert_called_once_with(ANY) @pytest.mark.asyncio @pytest.mark.parametrize( @@ -223,7 +224,16 @@ async def test__set_test_error_state(self, state, failures, fail_suite, *, case) ], ) async def test_on_suite_pass( - self, initial_state, ts, now, failed, expected_state, *, case, runner, execution + self, + initial_state, + ts, + now, + failed, + expected_state, + *, + case, + runner, + execution, ): assert runner.test is execution runner.test.timestamp = Mock() @@ -332,7 +342,7 @@ def on_make_fake_request(): if case.make_fake_request.call_count == 3: case._stopped.set() - case.make_fake_request.coro.side_effect = on_make_fake_request + case.make_fake_request.side_effect = on_make_fake_request def on_is_leader(): if case.app.is_leader.call_count >= 2: @@ -442,7 +452,7 @@ async def test_on_suite_fail( assert case.last_fail == now if posts_report: case.log.exception.assert_called_once_with(str(exc)) - 
case.post_report.coro.assert_called_once_with( + case.post_report.assert_called_once_with( TestReport( case_name=case.name, state=State.FAIL, @@ -517,8 +527,8 @@ async def test_post_url(self, *, case): async def assert_url_called(self, case, fut, method, url, **kwargs): case.url_request = AsyncMock("url_request") response = await fut - assert response is case.url_request.coro.return_value - case.url_request.coro.assert_called_once_with( + assert response is case.url_request.return_value + case.url_request.assert_called_once_with( method, url, **kwargs, diff --git a/tests/unit/livecheck/test_locals.py b/tests/unit/livecheck/test_locals.py index f6998d3ec..df28b198b 100644 --- a/tests/unit/livecheck/test_locals.py +++ b/tests/unit/livecheck/test_locals.py @@ -1,4 +1,4 @@ -from mode.utils.mocks import Mock +from unittest.mock import Mock from faust.livecheck.locals import current_execution, current_execution_stack diff --git a/tests/unit/livecheck/test_runners.py b/tests/unit/livecheck/test_runners.py index 3e882f4de..b7ded74f0 100644 --- a/tests/unit/livecheck/test_runners.py +++ b/tests/unit/livecheck/test_runners.py @@ -1,7 +1,7 @@ import asyncio +from unittest.mock import ANY, Mock, patch import pytest -from mode.utils.mocks import ANY, AsyncMock, Mock, patch from faust.livecheck.exceptions import ( LiveCheckError, @@ -11,6 +11,7 @@ LiveCheckTestTimeout, ) from faust.livecheck.models import State +from tests.helpers import AsyncMock class TestTestRunner: @@ -60,7 +61,7 @@ async def test_execute__error_callbacks( async def test_execute__pass(self, *, runner, execution): runner.on_pass = AsyncMock() await self._do_execute(runner, execution) - runner.on_pass.coro.assert_called_once_with() + runner.on_pass.assert_called_once_with() @pytest.mark.asyncio async def test_execute__CancelledError(self, *, runner, execution): @@ -82,7 +83,7 @@ async def test_skip(self, runner): with pytest.raises(LiveCheckTestSkipped): runner.on_skipped = AsyncMock() await 
runner.skip("broken") - runner.on_skipped.coro.assert_called_once_with(ANY) + runner.on_skipped.assert_called_once_with(ANY) def test__prepare_args(self, *, runner): assert runner._prepare_args((1, 2, 3, object())) @@ -136,8 +137,8 @@ async def test_on_failed(self, *, runner): assert runner.error is exc assert runner.state == State.FAIL - runner.case.on_test_failed.coro.assert_called_once_with(runner, exc) - runner._finalize_report.coro.assert_called_once_with() + runner.case.on_test_failed.assert_called_once_with(runner, exc) + runner._finalize_report.assert_called_once_with() @pytest.mark.asyncio async def test_on_error(self, *, runner): @@ -150,8 +151,8 @@ async def test_on_error(self, *, runner): assert runner.error is exc assert runner.state == State.ERROR - runner.case.on_test_error.coro.assert_called_once_with(runner, exc) - runner._finalize_report.coro.assert_called_once_with() + runner.case.on_test_error.assert_called_once_with(runner, exc) + runner._finalize_report.assert_called_once_with() @pytest.mark.asyncio async def test_on_timeout(self, *, runner): @@ -164,8 +165,8 @@ async def test_on_timeout(self, *, runner): assert runner.error is exc assert runner.state == State.TIMEOUT - runner.case.on_test_timeout.coro.assert_called_once_with(runner, exc) - runner._finalize_report.coro.assert_called_once_with() + runner.case.on_test_timeout.assert_called_once_with(runner, exc) + runner._finalize_report.assert_called_once_with() @pytest.mark.asyncio async def test_on_pass(self, *, runner): @@ -177,8 +178,8 @@ async def test_on_pass(self, *, runner): assert runner.state == State.DO_NOT_SHARE assert runner.error is None - runner.case.on_test_pass.coro.assert_called_once_with(runner) - runner._finalize_report.coro.assert_called_once_with() + runner.case.on_test_pass.assert_called_once_with(runner) + runner._finalize_report.assert_called_once_with() @pytest.mark.asyncio async def test__finalize_report(self, *, runner): @@ -201,7 +202,7 @@ async def 
test__finalize_report(self, *, runner): assert report.signal_latency == {} assert report.error == str(exc) assert report.traceback - runner.case.post_report.coro.assert_called_once_with(report) + runner.case.post_report.assert_called_once_with(report) @pytest.mark.asyncio async def test__finalize_report__no_error(self, *, runner): @@ -219,7 +220,7 @@ async def test__finalize_report__no_error(self, *, runner): assert report.signal_latency == {} assert report.error is None assert report.traceback is None - runner.case.post_report.coro.assert_called_once_with(report) + runner.case.post_report.assert_called_once_with(report) def test_log_info(self, *, runner): runner.case.realtime_logs = False diff --git a/tests/unit/livecheck/test_signals.py b/tests/unit/livecheck/test_signals.py index 8f00c14f1..72a4ba45d 100644 --- a/tests/unit/livecheck/test_signals.py +++ b/tests/unit/livecheck/test_signals.py @@ -1,12 +1,13 @@ import asyncio +from unittest.mock import Mock, patch import pytest -from mode.utils.mocks import AsyncMock, Mock, patch from faust.livecheck.exceptions import LiveCheckTestTimeout from faust.livecheck.locals import current_execution_stack, current_test_stack from faust.livecheck.models import SignalEvent from faust.livecheck.signals import BaseSignal, Signal +from tests.helpers import AsyncMock class Test_BaseSignal: @@ -82,7 +83,7 @@ async def test_send__no_test(self, *, signal): async def test_send__no_test_force(self, *, signal): signal.case = Mock(app=Mock(bus=Mock(send=AsyncMock()))) await signal.send("value", key="k", force=True) - signal.case.app.bus.send.coro.assert_called_once_with( + signal.case.app.bus.send.assert_called_once_with( key="k", value=SignalEvent( signal_name=signal.name, @@ -97,7 +98,7 @@ async def test_send(self, *, signal, execution): with current_test_stack.push(execution): signal.case = Mock(app=Mock(bus=Mock(send=AsyncMock()))) await signal.send("value", key=None, force=True) - 
signal.case.app.bus.send.coro.assert_called_once_with( + signal.case.app.bus.send.assert_called_once_with( key=execution.id, value=SignalEvent( signal_name=signal.name, diff --git a/tests/unit/models/test_fields.py b/tests/unit/models/test_fields.py index 26eeea897..5985a7496 100644 --- a/tests/unit/models/test_fields.py +++ b/tests/unit/models/test_fields.py @@ -1,7 +1,7 @@ from decimal import Decimal +from unittest.mock import Mock import pytest -from mode.utils.mocks import Mock from faust import Record from faust.exceptions import ValidationError diff --git a/tests/unit/sensors/test_base.py b/tests/unit/sensors/test_base.py index b6f54ad3b..ad96d9e6e 100644 --- a/tests/unit/sensors/test_base.py +++ b/tests/unit/sensors/test_base.py @@ -1,5 +1,6 @@ +from unittest.mock import Mock + import pytest -from mode.utils.mocks import Mock from faust import Event, Stream, Table, Topic, web from faust.assignor import PartitionAssignor diff --git a/tests/unit/sensors/test_datadog.py b/tests/unit/sensors/test_datadog.py index aa8849f8d..731e74612 100644 --- a/tests/unit/sensors/test_datadog.py +++ b/tests/unit/sensors/test_datadog.py @@ -1,5 +1,6 @@ +from unittest.mock import Mock, call + import pytest -from mode.utils.mocks import Mock, call from faust.exceptions import ImproperlyConfigured from faust.sensors.datadog import DatadogMonitor, DatadogStatsClient # noqa @@ -240,7 +241,10 @@ def test_on_send_initiated_completed(self, *, mon): client.increment.assert_has_calls( [ call( - "messages_send_failed", sample_rate=mon.rate, tags=None, value=1.0 + "messages_send_failed", + sample_rate=mon.rate, + tags=None, + value=1.0, ), ] ) @@ -264,7 +268,10 @@ def test_on_assignment_start_completed(self, *, mon): client.increment.assert_has_calls( [ call( - "assignments_complete", sample_rate=mon.rate, tags=None, value=1.0 + "assignments_complete", + sample_rate=mon.rate, + tags=None, + value=1.0, ), ] ) @@ -283,7 +290,12 @@ def test_on_assignment_start_error(self, *, mon): client = 
mon.client.client client.increment.assert_has_calls( [ - call("assignments_error", sample_rate=mon.rate, tags=None, value=1.0), + call( + "assignments_error", + sample_rate=mon.rate, + tags=None, + value=1.0, + ), ] ) client.timing.assert_called_once_with( @@ -343,7 +355,10 @@ def test_on_rebalance(self, *, mon): [ call("rebalances", sample_rate=mon.rate, tags=None, value=1.0), call( - "rebalances_recovering", sample_rate=mon.rate, tags=None, value=1.0 + "rebalances_recovering", + sample_rate=mon.rate, + tags=None, + value=1.0, ), ] ) @@ -366,7 +381,10 @@ def test_on_rebalance(self, *, mon): [ call("rebalances", sample_rate=mon.rate, tags=None, value=1.0), call( - "rebalances_recovering", sample_rate=mon.rate, tags=None, value=1.0 + "rebalances_recovering", + sample_rate=mon.rate, + tags=None, + value=1.0, ), ] ) diff --git a/tests/unit/sensors/test_monitor.py b/tests/unit/sensors/test_monitor.py index de8245c07..7eefcfe82 100644 --- a/tests/unit/sensors/test_monitor.py +++ b/tests/unit/sensors/test_monitor.py @@ -1,15 +1,16 @@ from http import HTTPStatus from statistics import median from typing import Any +from unittest.mock import Mock import pytest -from mode.utils.mocks import AsyncMock, Mock from faust import Event, Stream, Table, Topic from faust.sensors.monitor import Monitor, TableState from faust.transport.consumer import Consumer from faust.transport.producer import Producer from faust.types import TP, Message +from tests.helpers import AsyncMock TP1 = TP("foo", 0) @@ -66,7 +67,7 @@ def create_populated_monitor( commit_latency=[1.03, 2.33, 16.33], # noqa send_latency=[0.01, 0.04, 0.06, 0.010], # noqa topic_buffer_full={"topic": 808}, # noqa - **kwargs + **kwargs, ): return self.create_monitor( messages_active=messages_active, @@ -83,7 +84,7 @@ def create_populated_monitor( commit_latency=commit_latency, send_latency=send_latency, topic_buffer_full=topic_buffer_full, - **kwargs + **kwargs, ) def test_init_max_avg_history(self): @@ -265,7 +266,11 @@ def 
test_on_commit_completed(self, *, mon, time): def test_on_send_initiated(self, *, mon, time): for i in range(1, 11): state = mon.on_send_initiated( - Mock(name="producer", autospec=Producer), "topic", "message", 2, 4 + Mock(name="producer", autospec=Producer), + "topic", + "message", + 2, + 4, ) assert mon.messages_sent == i assert mon.messages_sent_by_topic["topic"] == i diff --git a/tests/unit/sensors/test_prometheus.py b/tests/unit/sensors/test_prometheus.py index 3d729df55..0df2f41b9 100644 --- a/tests/unit/sensors/test_prometheus.py +++ b/tests/unit/sensors/test_prometheus.py @@ -150,7 +150,9 @@ def test_on_stream_event_out_does_not_measure_latency_without_state( monitor.on_stream_event_out(TP1, 401, stream, event, state=None) self.assert_doesnt_have_sample_values( - metrics.events_runtime_latency, "test_events_runtime_latency_total", {} + metrics.events_runtime_latency, + "test_events_runtime_latency_total", + {}, ) def test_on_table_get( @@ -310,7 +312,10 @@ def test_on_rebalance_return( monitor.on_rebalance_return(app, state) self.assert_has_sample_value( - metrics.total_rebalances, "test_total_rebalances", {}, n_rebalances - 1 + metrics.total_rebalances, + "test_total_rebalances", + {}, + n_rebalances - 1, ) self.assert_has_sample_value( metrics.total_rebalances_recovering, diff --git a/tests/unit/sensors/test_statsd.py b/tests/unit/sensors/test_statsd.py index fa921bb59..c72625615 100644 --- a/tests/unit/sensors/test_statsd.py +++ b/tests/unit/sensors/test_statsd.py @@ -1,5 +1,6 @@ +from unittest.mock import Mock, call + import pytest -from mode.utils.mocks import Mock, call from faust import web from faust.exceptions import ImproperlyConfigured @@ -152,7 +153,9 @@ def test_on_send_initiated_completed(self, *, mon): mon.client.timing.assert_has_calls( [ call( - "send_latency_for_error", mon.ms_since(float(state)), rate=mon.rate + "send_latency_for_error", + mon.ms_since(float(state)), + rate=mon.rate, ), ] ) diff --git 
a/tests/unit/serializers/test_codecs.py b/tests/unit/serializers/test_codecs.py index d640993ad..6abf80538 100644 --- a/tests/unit/serializers/test_codecs.py +++ b/tests/unit/serializers/test_codecs.py @@ -1,11 +1,11 @@ import base64 from typing import Mapping +from unittest.mock import patch import pytest from hypothesis import given from hypothesis.strategies import binary, dictionaries, text from mode.utils.compat import want_str -from mode.utils.mocks import patch from faust.exceptions import ImproperlyConfigured from faust.serializers.codecs import ( @@ -20,7 +20,7 @@ ) from faust.utils import json as _json -DATA = {"a": 1, "b": "string"} +DATA = {"a": 1, "b": "string", 1: 2} def test_interface(): @@ -34,7 +34,15 @@ def test_interface(): @pytest.mark.parametrize("codec", ["json", "pickle", "yaml"]) def test_json_subset(codec: str) -> None: - assert loads(codec, dumps(codec, DATA)) == DATA + if codec == "json": + # special exception for json since integers can be serialized + assert loads(codec, dumps(codec, DATA)) == { + "a": 1, + "b": "string", + "1": 2, + } + else: + assert loads(codec, dumps(codec, DATA)) == DATA def test_missing_yaml_library() -> None: diff --git a/tests/unit/stores/test_aerospike.py b/tests/unit/stores/test_aerospike.py index 82f96b943..ebb0ee866 100644 --- a/tests/unit/stores/test_aerospike.py +++ b/tests/unit/stores/test_aerospike.py @@ -35,7 +35,6 @@ async def test_get_aerospike_client_new(self, aero): config = {"k": "v"} return_value = AeroSpikeStore.get_aerospike_client(config) assert aero.client.called - assert client_mock.connect.called assert return_value == client_mock @pytest.mark.asyncio @@ -45,8 +44,9 @@ async def test_get_aerospike_client_error(self, aero): client_mock.connect = MagicMock(side_effect=Exception) faust.stores.aerospike.aerospike_client = None config = {"k": "v"} - with pytest.raises(Exception): + with pytest.raises(Exception) as exc_info: AeroSpikeStore.get_aerospike_client(config) + assert exc_info.type == 
Exception @pytest.mark.asyncio async def test_get_aerospike_client_instantiated(self, aero): @@ -90,15 +90,15 @@ def test_get_none_value(self, store): def test_get_exception(self, store): store.client.get = MagicMock(side_effect=Exception) - with pytest.raises(Exception): + with pytest.raises(Exception) as exc_info: store._get(b"test_get") + assert exc_info.type == Exception def test_set_success( self, store, ): with patch("faust.stores.aerospike.aerospike", MagicMock()) as aero: - store.client.put = MagicMock() key = b"key" value = b"value" @@ -119,8 +119,9 @@ def test_set_exception(self, store): store.client.put = MagicMock(side_effect=Exception) key = b"key" value = b"value" - with pytest.raises(Exception): + with pytest.raises(Exception) as exc_info: store._set(key, value) + assert exc_info.type == Exception def test_persisted_offset(self, store): return_value = store.persisted_offset(MagicMock()) @@ -136,15 +137,17 @@ def test_del_success(self, store): def test_del_exception(self, store): key = b"key" store.client.remove = MagicMock(side_effect=Exception) - with pytest.raises(Exception): + with pytest.raises(Exception) as exc_info: store._del(key) + assert exc_info.type == Exception def test_iterkeys_error(self, store): scan = MagicMock() store.client.scan = MagicMock(side_effect=Exception) scan.results = MagicMock(side_effect=Exception) - with pytest.raises(Exception): + with pytest.raises(Exception) as exc_info: list(store._iterkeys()) + assert exc_info.type == Exception def test_iterkeys_success(self, store): scan = MagicMock() @@ -167,7 +170,11 @@ def test_itervalues_success(self, store): scan = MagicMock() store.client.scan = MagicMock(return_value=scan) scan_result = [ - (MagicMock(), {"ttl": 4294967295, "gen": 4}, {"value_key": "value1"}), + ( + MagicMock(), + {"ttl": 4294967295, "gen": 4}, + {"value_key": "value1"}, + ), (MagicMock(), {"ttl": 4294967295, "gen": 4}, None), ] scan.results = MagicMock(return_value=scan_result) @@ -181,17 +188,18 @@ def 
test_itervalues_success(self, store): def test_itervalues_error(self, store): store.client.scan = MagicMock(side_effect=Exception) - with pytest.raises(Exception): + with pytest.raises(Exception) as exc_info: set(store._itervalues()) + assert exc_info.type == Exception def test_iteritems_error(self, store): store.client.scan = MagicMock(side_effect=Exception) - with pytest.raises(Exception): + with pytest.raises(Exception) as exc_info: set(store._iteritems()) + assert exc_info.type == Exception def test_iteritems_success(self, store): with patch("faust.stores.aerospike.aerospike", MagicMock()): - scan = MagicMock() store.client.scan = MagicMock(return_value=scan) scan_result = [ @@ -218,8 +226,9 @@ def test_iteritems_success(self, store): def test_contains_error(self, store): store.client.exists = MagicMock(side_effect=Exception) key = b"key" - with pytest.raises(Exception): + with pytest.raises(Exception) as exc_info: store._contains(key) + assert exc_info.type == Exception def test_contains_does_not_exist(self, store): store.client.exists = MagicMock(return_value=(None, None)) diff --git a/tests/unit/stores/test_base.py b/tests/unit/stores/test_base.py index 3c83e2cc3..160b19952 100644 --- a/tests/unit/stores/test_base.py +++ b/tests/unit/stores/test_base.py @@ -1,6 +1,7 @@ +from unittest.mock import Mock + import pytest from mode import label -from mode.utils.mocks import Mock from faust import Event, Table from faust.stores.base import SerializedStore, Store @@ -29,6 +30,19 @@ def apply_changelog_batch(self, *args, **kwargs): def reset_state(self): ... + async def backup_partition( + self, tp, flush: bool = True, purge: bool = False, keep: int = 1 + ) -> None: + ... + + def restore_backup( + self, + tp, + latest: bool = True, + backup_id: int = 0, + ) -> None: + ... 
+ class Test_Store: @pytest.fixture @@ -62,7 +76,7 @@ async def test_on_rebalance(self, *, store): ) def test_encode_key(self, *, store): - assert store._encode_key({"foo": 1}) == b'{"foo": 1}' + assert store._encode_key({"foo": 1}) == b'{"foo":1}' def test_encode_key__cannot_be_None(self, *, store): store.key_serializer = "raw" @@ -70,7 +84,7 @@ def test_encode_key__cannot_be_None(self, *, store): store._encode_key(None) def test_encode_value(self, *, store): - assert store._encode_value({"foo": 1}) == b'{"foo": 1}' + assert store._encode_value({"foo": 1}) == b'{"foo":1}' def test_decode_key(self, *, store): assert store._decode_key(b'{"foo": 1}') == {"foo": 1} @@ -120,6 +134,19 @@ def _clear(self): def reset_state(self): ... + async def backup_partition( + self, tp, flush: bool = True, purge: bool = False, keep: int = 1 + ) -> None: + ... + + def restore_backup( + self, + tp, + latest: bool = True, + backup_id: int = 0, + ) -> None: + ... + class Test_SerializedStore: @pytest.fixture diff --git a/tests/unit/stores/test_memory.py b/tests/unit/stores/test_memory.py index a32f99f31..74753130b 100644 --- a/tests/unit/stores/test_memory.py +++ b/tests/unit/stores/test_memory.py @@ -1,5 +1,6 @@ +from unittest.mock import Mock + import pytest -from mode.utils.mocks import Mock from faust import Event from faust.stores.memory import Store diff --git a/tests/unit/stores/test_rocksdb.py b/tests/unit/stores/test_rocksdb.py index a10cd0816..04d83820b 100644 --- a/tests/unit/stores/test_rocksdb.py +++ b/tests/unit/stores/test_rocksdb.py @@ -1,14 +1,15 @@ from pathlib import Path from typing import List, Mapping, Tuple +from unittest.mock import Mock, call, patch import pytest -from mode.utils.mocks import AsyncMock, Mock, call, patch from yarl import URL from faust.exceptions import ImproperlyConfigured from faust.stores import rocksdb from faust.stores.rocksdb import RocksDBOptions, Store from faust.types import TP +from tests.helpers import AsyncMock TP1 = TP("foo", 0) TP2 
= TP("foo", 1) @@ -57,9 +58,19 @@ def test_defaults(self): ) assert opts.bloom_filter_size == rocksdb.DEFAULT_BLOOM_FILTER_SIZE - def test_open(self): + def test_open_rocksdb(self): with patch("faust.stores.rocksdb.rocksdb", Mock()) as rocks: - opts = RocksDBOptions() + opts = RocksDBOptions(use_rocksdict=False) + db = opts.open(Path("foo.db"), read_only=True) + rocks.DB.assert_called_once_with( + "foo.db", opts.as_options(), read_only=True + ) + assert db is rocks.DB() + + @pytest.mark.skip("Need to make mock BlockBasedOptions") + def test_open_rocksdict(self): + with patch("faust.stores.rocksdb.rocksdict", Mock()) as rocks: + opts = RocksDBOptions(use_rocksdict=True) db = opts.open(Path("foo.db"), read_only=True) rocks.DB.assert_called_once_with( "foo.db", opts.as_options(), read_only=True @@ -67,7 +78,7 @@ def test_open(self): assert db is rocks.DB() -class Test_Store: +class Test_Store_RocksDB: @pytest.fixture() def table(self): table = Mock(name="table") @@ -79,14 +90,24 @@ def rocks(self): with patch("faust.stores.rocksdb.rocksdb") as rocks: yield rocks + @pytest.fixture() + def rocksdict(self): + with patch("faust.stores.rocksdb.rocksdict") as rocksdict: + yield rocksdict + @pytest.fixture() def no_rocks(self): with patch("faust.stores.rocksdb.rocksdb", None) as rocks: yield rocks + @pytest.fixture() + def no_rocksdict(self): + with patch("faust.stores.rocksdb.rocksdict", None) as rocksdict: + yield rocksdict + @pytest.fixture() def store(self, *, app, rocks, table): - return Store("rocksdb://", app, table) + return Store("rocksdb://", app, table, driver="python-rocksdb") @pytest.fixture() def db_for_partition(self, *, store): @@ -101,7 +122,7 @@ def test_set_key_index_size(self, *, app, rocks, table): assert s.key_index_size == 12341 - def test_no_rocksdb(self, *, app, table, no_rocks): + def test_no_rocksdb(self, *, app, table, no_rocks, no_rocksdict): with pytest.raises(ImproperlyConfigured): Store("rocksdb://", app, table) @@ -236,7 +257,7 @@ def 
test_db_for_partition(self, *, store): def test_open_for_partition(self, *, store): open = store.rocksdb_options.open = Mock(name="options.open") assert store._open_for_partition(1) is open.return_value - open.assert_called_once_with(store.partition_path(1)) + open.assert_called_once_with(store.partition_path(1), read_only=False) def test__get__missing(self, *, store): store._get_bucket_for_key = Mock(name="get_bucket_for_key") @@ -328,13 +349,6 @@ def test__get__has_event_value_diff_partition(self, *, store, current_event): # A global table ignores the event partition and pulls from the proper db assert store._get(b"key") == b"value" - store.table.is_global = False - store.table.is_global_global = True - store.table.use_partitioner = False - - # A global table ignores the event partition and pulls from the proper db - assert store._get(b"key") == b"value" - store.table.is_global = False store.table.use_partitioner = True @@ -371,7 +385,6 @@ def new_db(self, name, exists=False): return db def test_get_bucket_for_key__not_in_index(self, *, store): - dbs = { 1: self.new_db(name="db1"), 2: self.new_db(name="db2"), @@ -430,9 +443,7 @@ async def test_assign_partitions(self, *, store, app, table): store._try_open_db_for_partition.assert_has_calls( [ call(TP2.partition, generation_id=generation_id), - call.coro(TP2.partition, generation_id=generation_id), call(TP1.partition, generation_id=generation_id), - call.coro(TP1.partition, generation_id=generation_id), ], any_order=True, ) @@ -451,15 +462,16 @@ async def test_open_db_for_partition(self, *, store, db_for_partition): is db_for_partition.return_value ) - @pytest.mark.skip("Fix is TBD") @pytest.mark.asyncio async def test_open_db_for_partition_max_retries(self, *, store, db_for_partition): store.sleep = AsyncMock(name="sleep") + store._dbs = {"test": None} with patch("faust.stores.rocksdb.rocksdb.errors.RocksIOError", KeyError): db_for_partition.side_effect = KeyError("lock already") with pytest.raises(KeyError): 
await store._try_open_db_for_partition(3) - assert store.sleep.call_count == 4 + assert store.sleep.call_count == 29 + assert len(store._dbs) == 0 @pytest.mark.asyncio async def test_open_db_for_partition__raises_unexpected_error( @@ -666,6 +678,7 @@ def _setup_keys(self, **dbs: Mapping[str, List[bytes]]): def _setup_keys_db(self, name: str, values: List[bytes]): db = self.new_db(name) db.iterkeys.return_value = MockIterator.from_values(values) + db.keys.return_value = MockIterator.from_values(values) # supports rocksdict return db def test__itervalues(self, *, store): @@ -700,6 +713,7 @@ def _setup_items(self, **dbs: Mapping[str, List[Tuple[bytes, bytes]]]): def _setup_items_db(self, name: str, values: List[Tuple[bytes, bytes]]): db = self.new_db(name) db.iteritems.return_value = MockIterator.from_values(values) + db.items.return_value = MockIterator.from_values(values) # supports rocksdict return db def test__iteritems(self, *, store): @@ -736,3 +750,191 @@ def test_reset_state(self, *, store): with patch("shutil.rmtree") as rmtree: store.reset_state() rmtree.assert_called_once_with(store.path.absolute()) + + +class Test_Store_Rocksdict(Test_Store_RocksDB): + @pytest.fixture() + def store(self, *, app, rocks, table): + return Store("rocksdb://", app, table, driver="rocksdict") + + def new_db(self, name, exists=False): + db = Mock(name=name) + db.key_may_exist.return_value = exists + db.get.return_value = name + return db + + def test__contains(self, *, store): + db1 = self.new_db("db1", exists=False) + db2 = self.new_db("db2", exists=True) + dbs = {b"key": [db1, db2]} + store._dbs_for_key = Mock(side_effect=dbs.get) + + db2.get.return_value = None + assert not store._contains(b"key") + + db2.get.return_value = b"value" + assert store._contains(b"key") + + def test__iteritems(self, *, store): + dbs = self._setup_items( + db1=[ + (store.offset_key, b"1001"), + (b"k1", b"foo"), + (b"k2", b"bar"), + ], + db2=[ + (b"k3", b"baz"), + (store.offset_key, b"2002"), + 
(b"k4", b"xuz"), + ], + ) + store._dbs_for_actives = Mock(return_value=dbs) + + assert list(store._iteritems()) == [ + (b"k1", b"foo"), + (b"k2", b"bar"), + (b"k3", b"baz"), + (b"k4", b"xuz"), + ] + + for db in dbs: + # iteritems not available in rocksdict yet + db.items.assert_called_once_with() + + def test__iterkeys(self, *, store): + dbs = self._setup_keys( + db1=[ + store.offset_key, + b"foo", + b"bar", + ], + db2=[ + b"baz", + store.offset_key, + b"xuz", + ], + ) + store._dbs_for_actives = Mock(return_value=dbs) + + assert list(store._iterkeys()) == [ + b"foo", + b"bar", + b"baz", + b"xuz", + ] + + for db in dbs: + # iterkeys not available in rocksdict yet + db.keys.assert_called_once_with() + + def test__itervalues(self, *, store): + dbs = self._setup_items( + db1=[ + (store.offset_key, b"1001"), + (b"k1", b"foo"), + (b"k2", b"bar"), + ], + db2=[ + (b"k3", b"baz"), + (store.offset_key, b"2002"), + (b"k4", b"xuz"), + ], + ) + store._dbs_for_actives = Mock(return_value=dbs) + + assert list(store._itervalues()) == [ + b"foo", + b"bar", + b"baz", + b"xuz", + ] + + for db in dbs: + # items must be used instead of iteritems for now + # TODO: seek_to_first() should be called once rocksdict is updated + db.items.assert_called_once_with() + + def test__get__dbvalue_is_None(self, *, store): + db = Mock(name="db") + store._get_bucket_for_key = Mock(name="get_bucket_for_key") + store._get_bucket_for_key.return_value = (db, None) + + db.key_may_exist.return_value = False + assert store._get(b"key") is None + + db.key_may_exist.return_value = True + db.get.return_value = None + assert store._get(b"key") is None + + db.get.return_value = b"bar" + assert store._get(b"key") == b"bar" + + def test_get_bucket_for_key__is_in_index(self, *, store): + store._key_index[b"key"] = 30 + db = store._dbs[30] = Mock(name="db-p30") + + db.key_may_exist.return_value = False + assert store._get_bucket_for_key(b"key") is None + + db.key_may_exist.return_value = True + db.get.return_value = 
None + assert store._get_bucket_for_key(b"key") is None + + db.get.return_value = b"value" + assert store._get_bucket_for_key(b"key") == (db, b"value") + + def test_apply_changelog_batch(self, *, store, rocksdict, db_for_partition): + def new_event(name, tp: TP, offset, key, value) -> Mock: + return Mock( + name="event1", + message=Mock( + tp=tp, + topic=tp.topic, + partition=tp.partition, + offset=offset, + key=key, + value=value, + ), + ) + + events = [ + new_event("event1", TP1, 1001, "k1", "v1"), + new_event("event2", TP2, 2002, "k2", "v2"), + new_event("event3", TP3, 3003, "k3", "v3"), + new_event("event4", TP4, 4004, "k4", "v4"), + new_event("event5", TP4, 4005, "k5", None), + ] + + dbs = { + TP1.partition: Mock(name="db1"), + TP2.partition: Mock(name="db2"), + TP3.partition: Mock(name="db3"), + TP4.partition: Mock(name="db4"), + } + db_for_partition.side_effect = dbs.get + + store.set_persisted_offset = Mock(name="set_persisted_offset") + + store.apply_changelog_batch(events, None, None) + + rocksdict.WriteBatch.return_value.delete.assert_called_once_with("k5") + rocksdict.WriteBatch.return_value.put.assert_has_calls( + [ + call("k1", "v1"), + call("k2", "v2"), + call("k3", "v3"), + call("k4", "v4"), + ] + ) + + for db in dbs.values(): + db.write.assert_called_once_with(rocksdict.WriteBatch(raw_mode=True)) + + store.set_persisted_offset.assert_has_calls( + [ + call(TP1, 1001), + call(TP2, 2002), + call(TP3, 3003), + call(TP4, 4005), + ] + ) diff --git a/tests/unit/tables/test_base.py b/tests/unit/tables/test_base.py index 8b5733f2a..84e800083 100644 --- a/tests/unit/tables/test_base.py +++ b/tests/unit/tables/test_base.py @@ -1,10 +1,10 @@ import asyncio import operator from copy import copy +from unittest.mock import Mock, call, patch import pytest from mode import label, shortlabel -from mode.utils.mocks import AsyncMock, Mock, call, patch from faust import Event, Record, Stream, Topic, joins from faust.exceptions import PartitionsMismatch @@ -12,6 +12,7 
@@ from faust.tables.base import Collection from faust.types import TP from faust.windows import Window +from tests.helpers import AsyncMock TP1 = TP("foo", 0) @@ -119,7 +120,7 @@ async def test_need_active_standby_for(self, *, table): ) assert ( await table.need_active_standby_for(TP1) - == table._data.need_active_standby_for.coro() + == table._data.need_active_standby_for.return_value ) def test_reset_state(self, *, table): @@ -185,6 +186,43 @@ def test_on_changelog_sent__transactions(self, *, table): fut.result().offset, ) + @pytest.mark.asyncio + async def test_last_closed_window(self, *, table): + assert table.last_closed_window == 0.0 + + table.window = Mock(name="window") + table._data = { + ("boo", (1.1, 1.4)): "BOO", + ("moo", (1.4, 1.6)): "MOO", + ("faa", (1.9, 2.0)): "FAA", + ("bar", (4.1, 4.2)): "BAR", + } + table._partition_timestamps = { + TP1: [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0], + } + table._partition_timestamp_keys = { + (TP1, 2.0): [ + ("boo", (1.1, 1.4)), + ("moo", (1.4, 1.6)), + ("faa", (1.9, 2.0)), + ], + (TP1, 5.0): [ + ("bar", (4.1, 4.2)), + ], + } + + def get_stale(limit): + def is_stale(timestamp, latest_timestamp): + return timestamp < limit + + return is_stale + + table.window.stale.side_effect = get_stale(4.0) + + await table._del_old_keys() + + assert table.last_closed_window == 1.9 + @pytest.mark.asyncio async def test_del_old_keys__empty(self, *, table): table.window = Mock(name="window") @@ -230,12 +268,12 @@ def is_stale(timestamp, latest_timestamp): on_window_close.assert_has_calls( [ + call.__bool__(), call(("boo", (1.1, 1.4)), "BOO"), - call.coro(("boo", (1.1, 1.4)), "BOO"), + call.__bool__(), call(("moo", (1.4, 1.6)), "MOO"), - call.coro(("moo", (1.4, 1.6)), "MOO"), + call.__bool__(), call(("faa", (1.9, 2.0)), "FAA"), - call.coro(("faa", (1.9, 2.0)), "FAA"), ] ) diff --git a/tests/unit/tables/test_manager.py b/tests/unit/tables/test_manager.py index 6325ee7cc..f3536aa09 100644 --- a/tests/unit/tables/test_manager.py +++ 
b/tests/unit/tables/test_manager.py @@ -1,8 +1,10 @@ +from unittest.mock import Mock + import pytest -from mode.utils.mocks import AsyncMock, Mock from faust.tables.recovery import Recovery from faust.types import TP +from tests.helpers import AsyncMock TP1 = TP("foo", 3) TP2 = TP("bar", 4) diff --git a/tests/unit/tables/test_objects.py b/tests/unit/tables/test_objects.py index edba41b8d..8c891dcf3 100644 --- a/tests/unit/tables/test_objects.py +++ b/tests/unit/tables/test_objects.py @@ -1,8 +1,10 @@ +from unittest.mock import MagicMock, Mock, patch + import pytest -from mode.utils.mocks import AsyncMock, Mock, patch from faust.tables.objects import ChangeloggedObjectManager from faust.types import TP +from tests.helpers import AsyncMock TP1 = TP("foo", 3) @@ -52,6 +54,7 @@ class Test_ChangeloggedObjectManager: def man(self, *, table): man = ChangeloggedObjectManager(table) man.ValueType = ValueType + man.storage.__setitem__ = MagicMock() return man @pytest.fixture() @@ -60,7 +63,7 @@ def storage(self, *, table): def test_send_changelog_event(self, *, man, table, key, current_event): man.send_changelog_event(key, 3, "value") - assert key in man._dirty + man.storage.__setitem__.assert_called_once() table._send_changelog.assert_called_once_with( current_event(), (3, key), @@ -96,12 +99,6 @@ async def test_on_start(self, *, man): await man.on_start() man.add_runtime_dependency.assert_called_once_with(man.storage) - @pytest.mark.asyncio - async def test_on_stop(self, *, man): - man.flush_to_storage = Mock() - await man.on_stop() - man.flush_to_storage.assert_called_once_with() - def test_persisted_offset(self, *, man, storage): ret = man.persisted_offset(TP1) storage.persisted_offset.assert_called_once_with(TP1) @@ -133,14 +130,6 @@ def test_sync_from_storage(self, *, man, storage): assert 1 in man["foo"].synced assert 2 in man["bar"].synced - def test_flush_to_storage(self, *, man): - man._storage = {} - man._dirty = {"foo", "bar"} - assert man["foo"] - assert 
man["bar"] - man.flush_to_storage() - assert man._storage["foo"] == "foo-stored" - def test_reset_state(self, *, man, storage): man.reset_state() storage.reset_state.assert_called_once_with() diff --git a/tests/unit/tables/test_recovery.py b/tests/unit/tables/test_recovery.py index e7de15d2e..81fcad8d5 100644 --- a/tests/unit/tables/test_recovery.py +++ b/tests/unit/tables/test_recovery.py @@ -1,11 +1,11 @@ from collections import Counter -from unittest.mock import MagicMock +from unittest.mock import MagicMock, Mock import pytest -from mode.utils.mocks import AsyncMock, Mock from faust.tables.recovery import RebalanceAgain, Recovery, ServiceStopped from faust.types import TP +from tests.helpers import AsyncMock TP1 = TP("foo", 6) TP2 = TP("bar", 3) @@ -135,8 +135,8 @@ async def test__wait__recovery_restart(self, *, recovery): async def assert_wait(self, recovery, stopped=False, done=None, timeout=None): coro = Mock() recovery.wait_first = AsyncMock() - recovery.wait_first.coro.return_value.stopped = stopped - recovery.wait_first.coro.return_value.done = {done} if done else set() + recovery.wait_first.return_value.stopped = stopped + recovery.wait_first.return_value.done = {done} if done else set() ret = await recovery._wait(coro) recovery.wait_first.assert_called_once_with( @@ -237,6 +237,52 @@ async def test__build_offsets(self, *, recovery): } ) + @pytest.mark.asyncio + async def test__build_offsets_with_none(self, *, recovery, app) -> None: + consumer = Mock( + name="consumer", + earliest_offsets=AsyncMock( + return_value={TP1: 0, TP2: 3, TP3: 5, TP4: None} + ), + ) + tps = {TP1, TP2, TP3, TP4} + destination = {TP1: None, TP2: 1, TP3: 8, TP4: -1} + await recovery._build_offsets(consumer, tps, destination, "some-title") + assert len(destination) == 4 + assert destination[TP1] == -1 + assert destination[TP2] == 2 + assert destination[TP3] == 8 + assert destination[TP4] == -1 + + @pytest.mark.asyncio + async def test__build_offsets_both_none(self, *, recovery, 
app) -> None: + consumer = Mock( + name="consumer", + earliest_offsets=AsyncMock(return_value={TP1: None}), + ) + tps = {TP1} + destination = {TP1: None} + await recovery._build_offsets(consumer, tps, destination, "some-title") + assert len(destination) == 1 + assert destination[TP1] == -1 + + @pytest.mark.asyncio + async def test__build_offsets_partial_consumer_response( + self, *, recovery, app + ) -> None: + consumer = Mock( + name="consumer", + earliest_offsets=AsyncMock(return_value={TP1: None}), + ) + tps = {TP1} + destination = {TP1: 3, TP2: 4, TP3: 5, TP4: 20} + await recovery._build_offsets(consumer, tps, destination, "some-title") + assert len(destination) == 4 + assert destination[TP1] == 3 + assert destination[TP2] == 4 + assert destination[TP3] == 5 + assert destination[TP4] == 20 + @pytest.mark.asyncio async def test__seek_offsets(self, *, recovery): consumer = Mock( @@ -346,7 +392,13 @@ def test__is_changelog_tp(self, *, recovery, tables): [ ({TP1: 0, TP2: -1}, {TP1: -1, TP2: -1}, True, 1, {TP1: 1, TP2: 0}), ({TP1: -1, TP2: -1}, {TP1: -1, TP2: -1}, False, 0, {TP1: 0, TP2: 0}), - ({TP1: 100, TP2: -1}, {TP1: -1, TP2: -1}, True, 101, {TP1: 101, TP2: 0}), + ( + {TP1: 100, TP2: -1}, + {TP1: -1, TP2: -1}, + True, + 101, + {TP1: 101, TP2: 0}, + ), ], ) def test_recovery_from_offset_0( diff --git a/tests/unit/tables/test_sets.py b/tests/unit/tables/test_sets.py index 72b1dc7fc..164e7f16b 100644 --- a/tests/unit/tables/test_sets.py +++ b/tests/unit/tables/test_sets.py @@ -1,5 +1,6 @@ +from unittest.mock import Mock, call + import pytest -from mode.utils.mocks import AsyncMock, Mock, call import faust from faust.tables.sets import ( @@ -13,6 +14,7 @@ SetTableManager, SetWindowSet, ) +from tests.helpers import AsyncMock @pytest.fixture() @@ -187,7 +189,7 @@ def test_constructor_disabled(self, *, app, stable): async def test_add(self, *, man): man._send_operation = AsyncMock() await man.add("key", "member") - man._send_operation.coro.assert_called_once_with( + 
man._send_operation.assert_called_once_with( SetAction.ADD, "key", ["member"], @@ -197,7 +199,7 @@ async def test_add(self, *, man): async def test_discard(self, *, man): man._send_operation = AsyncMock() await man.discard("key", "member") - man._send_operation.coro.assert_called_once_with( + man._send_operation.assert_called_once_with( SetAction.DISCARD, "key", ["member"], @@ -207,7 +209,7 @@ async def test_discard(self, *, man): async def test_clear(self, *, man): man._send_operation = AsyncMock() await man.clear("key") - man._send_operation.coro.assert_called_once_with( + man._send_operation.assert_called_once_with( SetAction.CLEAR, "key", [], @@ -217,7 +219,7 @@ async def test_clear(self, *, man): async def test_difference_update(self, *, man): man._send_operation = AsyncMock() await man.difference_update("key", ["v1", "v2"]) - man._send_operation.coro.assert_called_once_with( + man._send_operation.assert_called_once_with( SetAction.DISCARD, "key", ["v1", "v2"], @@ -227,7 +229,7 @@ async def test_difference_update(self, *, man): async def test_intersection_update(self, *, man): man._send_operation = AsyncMock() await man.intersection_update("key", ["v1", "v2"]) - man._send_operation.coro.assert_called_once_with( + man._send_operation.assert_called_once_with( SetAction.INTERSECTION, "key", ["v1", "v2"], @@ -237,7 +239,7 @@ async def test_intersection_update(self, *, man): async def test_symmetric_difference_update(self, *, man): man._send_operation = AsyncMock() await man.symmetric_difference_update("key", ["v1", "v2"]) - man._send_operation.coro.assert_called_once_with( + man._send_operation.assert_called_once_with( SetAction.SYMDIFF, "key", ["v1", "v2"], @@ -246,12 +248,12 @@ async def test_symmetric_difference_update(self, *, man): def test__update(self, *, man): man.set_table = {"a": Mock(name="a"), "b": Mock(name="b")} man._update("a", ["v1"]) - man.set_table["a"].update.assert_called_once_with(["v1"]) + 
man.set_table["a"].update.assert_called_once_with({"v1"}) def test__difference_update(self, *, man): man.set_table = {"a": Mock(name="a"), "b": Mock(name="b")} man._difference_update("a", ["v1"]) - man.set_table["a"].difference_update.assert_called_once_with(["v1"]) + man.set_table["a"].difference_update.assert_called_once_with({"v1"}) def test__clear(self, *, man): man.set_table = {"a": Mock(name="a"), "b": Mock(name="b")} @@ -262,14 +264,14 @@ def test__intersection_update(self, *, man): man.set_table = {"a": Mock(name="a"), "b": Mock(name="b")} man._intersection_update("a", ["v1", "v2", "v3"]) man.set_table["a"].intersection_update.assert_called_once_with( - ["v1", "v2", "v3"], + {"v1", "v2", "v3"}, ) def test__symmetric_difference_update(self, *, man): man.set_table = {"a": Mock(name="a"), "b": Mock(name="b")} man._symmetric_difference_update("a", ["v1", "v2", "v3"]) man.set_table["a"].symmetric_difference_update.assert_called_once_with( - ["v1", "v2", "v3"], + {"v1", "v2", "v3"}, ) @pytest.mark.asyncio @@ -394,29 +396,29 @@ async def stream_items(): await man._modify_set(stream) - man.set_table["k1"].update.assert_called_with(["v"]) - man.set_table["k2"].difference_update.assert_called_with(["v2"]) - man.set_table["k3"].difference_update.assert_called_with([X(10, 30)]) + man.set_table["k1"].update.assert_called_with({"v"}) + man.set_table["k2"].difference_update.assert_called_with({"v2"}) + man.set_table["k3"].difference_update.assert_called_with({X(10, 30)}) man.set_table["k5"].update.assert_called_with( - [ + { X(10, 30), X(20, 40), "v3", - ] + } ) man.set_table["k6"].intersection_update.assert_called_with( - [ + { X(10, 30), X(20, 40), "v3", - ] + } ) man.set_table["k7"].symmetric_difference_update.assert_called_with( - [ + { X(10, 30), X(20, 40), "v3", - ] + } ) man.set_table["k8"].clear.assert_called_once_with() diff --git a/tests/unit/tables/test_table.py b/tests/unit/tables/test_table.py index 093eaa978..29e0419c2 100644 --- 
a/tests/unit/tables/test_table.py +++ b/tests/unit/tables/test_table.py @@ -1,7 +1,7 @@ import datetime +from unittest.mock import Mock, patch import pytest -from mode.utils.mocks import Mock, patch import faust from faust.events import Event @@ -55,7 +55,13 @@ def strict_table(self, *, app): return self.create_table(app, name="strict") def create_table( - self, app, *, name="foo", key_type=TableKey, value_type=TableValue, **kwargs + self, + app, + *, + name="foo", + key_type=TableKey, + value_type=TableValue, + **kwargs, ): return app.Table(name, key_type=key_type, value_type=value_type, **kwargs) diff --git a/tests/unit/tables/test_wrappers.py b/tests/unit/tables/test_wrappers.py index bcc7e06d9..18c233f6d 100644 --- a/tests/unit/tables/test_wrappers.py +++ b/tests/unit/tables/test_wrappers.py @@ -1,9 +1,9 @@ import operator import random from datetime import datetime +from unittest.mock import Mock, patch import pytest -from mode.utils.mocks import Mock, patch import faust from faust.events import Event diff --git a/tests/unit/test_auth.py b/tests/unit/test_auth.py index 89125794f..57ecd84d0 100644 --- a/tests/unit/test_auth.py +++ b/tests/unit/test_auth.py @@ -1,9 +1,16 @@ +import asyncio import ssl +from unittest.mock import Mock, patch import pytest -from mode.utils.mocks import Mock, patch +from aiokafka.conn import AbstractTokenProvider -from faust.auth import GSSAPICredentials, SASLCredentials, SSLCredentials +from faust.auth import ( + GSSAPICredentials, + OAuthCredentials, + SASLCredentials, + SSLCredentials, +) from faust.types.auth import AuthProtocol, SASLMechanism @@ -29,14 +36,18 @@ class Test_SASLCredentials: pytest.param( "mechanism from enum", SASLCredentials( - username="george", password="pw1", mechanism=SASLMechanism.GSSAPI + username="george", + password="pw1", + mechanism=SASLMechanism.GSSAPI, ), {"mechanism": SASLMechanism.GSSAPI}, ), pytest.param( "ssl context gives SASL_SSL", SASLCredentials( - username="george", password="pw1", 
ssl_context={"xxx": "yyy"} + username="george", + password="pw1", + ssl_context={"xxx": "yyy"}, ), { "username": "george", @@ -69,7 +80,8 @@ class Test_GSSAPICredentials: pytest.param( "retains arguments", GSSAPICredentials( - kerberos_service_name="george", kerberos_domain_name="domain" + kerberos_service_name="george", + kerberos_domain_name="domain", ), { "kerberos_service_name": "george", @@ -140,3 +152,41 @@ def test_having_context(self): context = Mock(name="context") c = SSLCredentials(context) assert c.context is context + + +class Test_OAuthCredentials: + class TokenProvider(AbstractTokenProvider): + async def token(self): + return await asyncio.get_running_loop().run_in_executor(None, self._token) + + def _token(self): + return "token" + + @pytest.mark.parametrize( + "reason,credentials,expected_fields", + [ + pytest.param( + "retains arguments", + OAuthCredentials(oauth_cb=TokenProvider()), + { + "mechanism": SASLMechanism.OAUTHBEARER, + "protocol": AuthProtocol.SASL_PLAINTEXT, + }, + ), + pytest.param( + "retains arguments", + OAuthCredentials( + oauth_cb=TokenProvider(), + ssl_context={"xxx": "yyy"}, + ), + { + "mechanism": SASLMechanism.OAUTHBEARER, + "protocol": AuthProtocol.SASL_SSL, + }, + ), + ], + ) + def test_constructor(self, credentials, expected_fields, reason): + assert repr(credentials) + for field, value in expected_fields.items(): + assert getattr(credentials, field) == value, reason diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py index f26e97653..626329893 100644 --- a/tests/unit/test_events.py +++ b/tests/unit/test_events.py @@ -1,7 +1,9 @@ +from unittest.mock import Mock + import pytest -from mode.utils.mocks import AsyncMock, Mock from faust import Event +from tests.helpers import AsyncMock class Test_Event: diff --git a/tests/unit/test_joins.py b/tests/unit/test_joins.py index cd50bf2ee..6d2d928c6 100644 --- a/tests/unit/test_joins.py +++ b/tests/unit/test_joins.py @@ -1,5 +1,6 @@ +from unittest.mock import Mock + 
import pytest -from mode.utils.mocks import Mock from faust import Event, Record, Stream from faust.joins import InnerJoin, Join, LeftJoin, OuterJoin, RightJoin diff --git a/tests/unit/test_streams.py b/tests/unit/test_streams.py index 318f468d2..1a9c03035 100644 --- a/tests/unit/test_streams.py +++ b/tests/unit/test_streams.py @@ -1,14 +1,14 @@ import asyncio from collections import defaultdict +from unittest.mock import Mock, patch import pytest from mode.utils.contexts import ExitStack -from mode.utils.mocks import AsyncMock, Mock, patch import faust from faust import joins from faust.exceptions import Skip -from tests.helpers import new_event +from tests.helpers import AsyncMock, new_event class Model(faust.Record): @@ -51,7 +51,7 @@ def test_outer_join(self, *, stream): @pytest.mark.asyncio async def test_on_merge__with_join_strategy(self, *, stream): join = stream.join_strategy = Mock(process=AsyncMock()) - assert (await stream.on_merge("v")) is join.process.coro.return_value + assert (await stream.on_merge("v")) is join.process.return_value def test_combine__finalized(self, *, stream): stream._finalized = True diff --git a/tests/unit/test_topics.py b/tests/unit/test_topics.py index 39321c0b8..38d290fa6 100644 --- a/tests/unit/test_topics.py +++ b/tests/unit/test_topics.py @@ -1,13 +1,14 @@ import asyncio import re +from unittest.mock import Mock, call, patch import pytest -from mode.utils.mocks import AsyncMock, Mock, call, patch import faust from faust import Event, Record from faust.exceptions import KeyDecodeError, ValueDecodeError from faust.types import Message +from tests.helpers import AsyncMock class Dummy(Record): @@ -151,7 +152,7 @@ async def test_publish_message__wait_enabled(self, *, topic, app): await topic.publish_message(fm, wait=True) key, headers = topic.prepare_key("foo", "json", None, headers) value, headers = topic.prepare_value("bar", "json", None, headers) - producer.send_and_wait.coro.assert_called_once_with( + 
producer.send_and_wait.assert_called_once_with( topic.get_topic_name(), key, value, @@ -394,7 +395,7 @@ async def test_declare__disabled(self, *, topic): topic.replicas = 202 topic.topics = ["foo", "bar"] await topic.declare() - producer.create_topic.coro.assert_not_called() + producer.create_topic.assert_not_called() @pytest.mark.asyncio async def test_declare(self, *, topic): @@ -405,7 +406,7 @@ async def test_declare(self, *, topic): topic.replicas = 202 topic.topics = ["foo", "bar"] await topic.declare() - producer.create_topic.coro.assert_has_calls( + producer.create_topic.assert_has_calls( [ call( topic="foo", @@ -437,7 +438,7 @@ async def test_declare__defaults(self, *, topic): topic.replicas = None topic.topics = ["foo", "bar"] await topic.declare() - producer.create_topic.coro.assert_has_calls( + producer.create_topic.assert_has_calls( [ call( topic="foo", diff --git a/tests/unit/test_worker.py b/tests/unit/test_worker.py index 407b77dc0..541dd1121 100644 --- a/tests/unit/test_worker.py +++ b/tests/unit/test_worker.py @@ -2,16 +2,17 @@ import logging import warnings from pathlib import Path +from unittest.mock import Mock, patch import pytest from mode.utils.logging import CompositeLogger -from mode.utils.mocks import AsyncMock, Mock, patch from mode.utils.trees import Node from yarl import URL from faust import Sensor from faust.utils import terminal from faust.worker import Worker +from tests.helpers import AsyncMock class CoroEq: @@ -152,8 +153,8 @@ async def test_on_first_start(self, worker): worker.default_on_first_start = AsyncMock(name="on_first_start") await worker.on_first_start() worker.change_workdir.assert_called_once_with(worker.workdir) - worker.autodiscover.assert_called_once_with() worker.default_on_first_start.assert_called_once_with() + worker.autodiscover.assert_called_once_with() def test_change_workdir(self, worker): with patch("os.chdir") as chdir: diff --git a/tests/unit/transport/drivers/test_aiokafka.py 
b/tests/unit/transport/drivers/test_aiokafka.py index 6876f7c9a..b524aac23 100644 --- a/tests/unit/transport/drivers/test_aiokafka.py +++ b/tests/unit/transport/drivers/test_aiokafka.py @@ -2,6 +2,7 @@ import string from contextlib import contextmanager from typing import Optional +from unittest.mock import ANY, MagicMock, Mock, call, patch import aiokafka import opentracing @@ -10,7 +11,6 @@ from aiokafka.structs import OffsetAndMetadata, TopicPartition from mode.utils import text from mode.utils.futures import done_future -from mode.utils.mocks import ANY, AsyncMock, MagicMock, Mock, call, patch from opentracing.ext import tags import faust @@ -34,10 +34,12 @@ ThreadedProducer, Transport, credentials_to_aiokafka_auth, + ensure_aiokafka_TPset, server_list, ) from faust.types import TP from faust.types.tuples import FutureMessage, PendingMessage +from tests.helpers import AsyncMock TP1 = TP("topic", 23) TP2 = TP("topix", 23) @@ -136,7 +138,13 @@ def thread(self): @pytest.fixture() def consumer( - self, *, thread, app, callback, on_partitions_revoked, on_partitions_assigned + self, + *, + thread, + app, + callback, + on_partitions_revoked, + on_partitions_assigned, ): consumer = Consumer( app.transport, @@ -528,7 +536,6 @@ def test_timed_out(self, *, cthread, now, tp, logger): @pytest.mark.skip("Needs fixing") class Test_VEP_stream_idle_no_highwater(Test_verify_event_path_base): - highwater = 10 committed_offset = 10 @@ -691,7 +698,6 @@ def test_committing_fine(self, *, app, cthread, now, tp, logger): logger.error.assert_not_called() -@pytest.mark.skip("Needs fixing") class Test_AIOKafkaConsumerThread(AIOKafkaConsumerThreadFixtures): def test_constructor(self, *, cthread): assert cthread._partitioner @@ -726,9 +732,7 @@ def test__create_consumer__client(self, *, cthread, app): cthread._create_client_consumer = Mock(name="_create_client_consumer") c = cthread._create_consumer(loop=loop) assert c is cthread._create_client_consumer.return_value - 
cthread._create_client_consumer.assert_called_once_with( - cthread.transport, loop=loop - ) + cthread._create_client_consumer.assert_called_once_with(cthread.transport) def test__create_consumer__worker(self, *, cthread, app): app.client_only = False @@ -736,9 +740,7 @@ def test__create_consumer__worker(self, *, cthread, app): cthread._create_worker_consumer = Mock(name="_create_worker_consumer") c = cthread._create_consumer(loop=loop) assert c is cthread._create_worker_consumer.return_value - cthread._create_worker_consumer.assert_called_once_with( - cthread.transport, loop=loop - ) + cthread._create_worker_consumer.assert_called_once_with(cthread.transport) def test_session_gt_request_timeout(self, *, cthread, app): app.conf.broker_session_timeout = 90 @@ -771,7 +773,6 @@ def assert_create_worker_consumer( isolation_level="read_uncommitted", api_version=None, ): - loop = Mock(name="loop") transport = cthread.transport conf = app.conf cthread.consumer.in_transaction = in_transaction @@ -779,15 +780,14 @@ def assert_create_worker_consumer( conf.broker_credentials, conf.ssl_context ) with patch("aiokafka.AIOKafkaConsumer") as AIOKafkaConsumer: - c = cthread._create_worker_consumer(transport, loop) + c = cthread._create_worker_consumer(transport) assert c is AIOKafkaConsumer.return_value max_poll_interval = conf.broker_max_poll_interval AIOKafkaConsumer.assert_called_once_with( - loop=loop, api_version=app.conf.consumer_api_version, client_id=conf.broker_client_id, group_id=conf.id, - # group_instance_id=conf.consumer_group_instance_id, + group_instance_id=conf.consumer_group_instance_id, bootstrap_servers=server_list(transport.url, transport.default_port), partition_assignment_strategy=[cthread._assignor], enable_auto_commit=False, @@ -802,6 +802,8 @@ def assert_create_worker_consumer( session_timeout_ms=int(conf.broker_session_timeout * 1000.0), heartbeat_interval_ms=int(conf.broker_heartbeat_interval * 1000.0), isolation_level=isolation_level, + 
connections_max_idle_ms=conf.consumer_connections_max_idle_ms, + metadata_max_age_ms=conf.consumer_metadata_max_age_ms, # traced_from_parent_span=cthread.traced_from_parent_span, # start_rebalancing_span=cthread.start_rebalancing_span, # start_coordinator_span=cthread.start_coordinator_span, @@ -811,18 +813,16 @@ def assert_create_worker_consumer( ) def test__create_client_consumer(self, *, cthread, app): - loop = Mock(name="loop") transport = cthread.transport conf = app.conf auth_settings = credentials_to_aiokafka_auth( conf.broker_credentials, conf.ssl_context ) with patch("aiokafka.AIOKafkaConsumer") as AIOKafkaConsumer: - c = cthread._create_client_consumer(transport, loop) + c = cthread._create_client_consumer(transport) max_poll_interval = conf.broker_max_poll_interval assert c is AIOKafkaConsumer.return_value AIOKafkaConsumer.assert_called_once_with( - loop=loop, client_id=conf.broker_client_id, bootstrap_servers=server_list(transport.url, transport.default_port), request_timeout_ms=int(conf.broker_request_timeout * 1000.0), @@ -854,6 +854,7 @@ def test__start_span(self, *, cthread, app): def test_trace_category(self, *, cthread, app): assert cthread.trace_category == f"{app.conf.name}-_aiokafka" + @pytest.mark.skip("Needs fixing") def test_transform_span_lazy(self, *, cthread, app, tracer): cthread._consumer = Mock(name="_consumer") cthread._consumer._coordinator.generation = -1 @@ -866,6 +867,7 @@ def test_transform_span_lazy(self, *, cthread, app, tracer): cthread.on_generation_id_known() assert not pending + @pytest.mark.skip("Needs fixing") def test_transform_span_flush_spans(self, *, cthread, app, tracer): cthread._consumer = Mock(name="_consumer") cthread._consumer._coordinator.generation = -1 @@ -884,6 +886,7 @@ def test_span_without_operation_name(self, *, cthread): assert cthread._on_span_cancelled_early(span) is None + @pytest.mark.skip("Needs fixing") def test_transform_span_lazy_no_consumer(self, *, cthread, app, tracer): cthread._consumer = 
Mock(name="_consumer") cthread._consumer._coordinator.generation = -1 @@ -897,6 +900,7 @@ def test_transform_span_lazy_no_consumer(self, *, cthread, app, tracer): span = pending.popleft() cthread._on_span_generation_known(span) + @pytest.mark.skip("Needs fixing") def test_transform_span_eager(self, *, cthread, app, tracer): cthread._consumer = Mock(name="_consumer") cthread._consumer._coordinator.generation = 10 @@ -991,6 +995,7 @@ async def test_commit(self, *, cthread, _consumer): with self.assert_calls_thread(cthread, _consumer, cthread._commit, offsets): await cthread.commit(offsets) + @pytest.mark.skip("Needs fixing") @pytest.mark.asyncio async def test__commit(self, *, cthread, _consumer): offsets = {TP1: 1001} @@ -1001,12 +1006,14 @@ async def test__commit(self, *, cthread, _consumer): {TP1: OffsetAndMetadata(1001, "")}, ) + @pytest.mark.skip("Needs fixing") @pytest.mark.asyncio async def test__commit__already_rebalancing(self, *, cthread, _consumer): cthread._consumer = _consumer _consumer.commit.side_effect = CommitFailedError("already rebalanced") assert not (await cthread._commit({TP1: 1001})) + @pytest.mark.skip("Needs fixing") @pytest.mark.asyncio async def test__commit__CommitFailedError(self, *, cthread, _consumer): cthread._consumer = _consumer @@ -1017,6 +1024,7 @@ async def test__commit__CommitFailedError(self, *, cthread, _consumer): cthread.crash.assert_called_once_with(exc) cthread.supervisor.wakeup.assert_called_once() + @pytest.mark.skip("Needs fixing") @pytest.mark.asyncio async def test__commit__IllegalStateError(self, *, cthread, _consumer): cthread._consumer = _consumer @@ -1123,9 +1131,7 @@ async def test_highwaters(self, *, cthread, _consumer): async def test__highwaters(self, *, cthread, _consumer): cthread.consumer.in_transaction = False cthread._consumer = _consumer - assert await cthread._highwaters([TP1]) is ( - _consumer.end_offsets.coro.return_value - ) + assert await cthread._highwaters([TP1]) is 
(_consumer.end_offsets.return_value) @pytest.mark.asyncio async def test__highwaters__in_transaction(self, *, cthread, _consumer): @@ -1185,7 +1191,7 @@ async def test__fetch_records(self, *, cthread, _consumer): ret = await cthread._fetch_records( _consumer, {TP1}, timeout=312.3, max_records=1000 ) - assert ret is fetcher.fetched_records.coro.return_value + assert ret is fetcher.fetched_records.return_value fetcher.fetched_records.assert_called_once_with( {TP1}, timeout=312.3, @@ -1421,7 +1427,10 @@ async def test_commit_transactions(self, *, producer, _producer_call): def test__settings_extra(self, *, producer, app): app.in_transaction = True - assert producer._settings_extra() == {"acks": "all", "enable_idempotence": True} + assert producer._settings_extra() == { + "acks": "all", + "enable_idempotence": True, + } app.in_transaction = False assert producer._settings_extra() == {} @@ -1443,14 +1452,16 @@ def test__new_producer(self, *, app): marks=pytest.mark.conf(broker="kafka://a:9092;b:9092"), ), pytest.param( - {"client_id": "foo"}, marks=pytest.mark.conf(broker_client_id="foo") + {"client_id": "foo"}, + marks=pytest.mark.conf(broker_client_id="foo"), ), pytest.param( {"compression_type": "snappy"}, marks=pytest.mark.conf(producer_compression_type="snappy"), ), pytest.param( - {"linger_ms": 9345}, marks=pytest.mark.conf(producer_linger=9.345) + {"linger_ms": 9345}, + marks=pytest.mark.conf(producer_linger=9.345), ), pytest.param( {"max_batch_size": 41223}, @@ -1513,7 +1524,7 @@ async def test_create_topic(self, *, producer, _producer): deleting=True, ensure_created=True, ) - producer.transport._create_topic.coro.assert_called_once_with( + producer.transport._create_topic.assert_called_once_with( producer, _producer.client, "foo", @@ -1548,7 +1559,7 @@ async def test_on_start(self, *, producer, loop): assert producer._producer is _producer producer._new_producer.assert_called_once_with() producer.beacon.add.assert_called_with(_producer) - 
_producer.start.coro.assert_called_once_with() + _producer.start.assert_called_once_with() @pytest.mark.asyncio async def test_on_stop(self, *, producer, _producer): @@ -1692,7 +1703,7 @@ async def test_send__no_timestamp(self, producer, _producer): @pytest.mark.asyncio async def test_send__KafkaError(self, producer, _producer): - _producer.send.coro.side_effect = KafkaError() + _producer.send.side_effect = KafkaError() with pytest.raises(ProducerSendError): await producer.send( "topic", @@ -1705,7 +1716,7 @@ async def test_send__KafkaError(self, producer, _producer): @pytest.mark.asyncio async def test_send__trn_KafkaError(self, producer, _producer): - _producer.send.coro.side_effect = KafkaError() + _producer.send.side_effect = KafkaError() await producer.begin_transaction("tid") with pytest.raises(ProducerSendError): await producer.send( @@ -1777,32 +1788,46 @@ def mocked_producer(self, *, new_producer_mock: Mock): @pytest.mark.asyncio async def test_on_start( - self, *, threaded_producer: ThreadedProducer, mocked_producer: Mock, loop + self, + *, + threaded_producer: ThreadedProducer, + mocked_producer: Mock, + loop, ): await threaded_producer.on_start() try: assert threaded_producer._producer is mocked_producer threaded_producer._new_producer.assert_called_once_with() - mocked_producer.start.coro.assert_called_once_with() + mocked_producer.start.assert_called_once_with() finally: await threaded_producer.start() await threaded_producer.stop() + @pytest.mark.skip("Needs fixing") @pytest.mark.asyncio async def test_on_thread_stop( - self, *, threaded_producer: ThreadedProducer, mocked_producer: Mock, loop + self, + *, + threaded_producer: ThreadedProducer, + mocked_producer: Mock, + loop, ): await threaded_producer.start() await threaded_producer.on_thread_stop() try: - mocked_producer.flush.coro.assert_called_once_with() - mocked_producer.stop.coro.assert_called_once_with() + # Flush and stop currently are called twice + 
mocked_producer.flush.assert_called_once() + mocked_producer.stop.assert_called_once() finally: await threaded_producer.stop() @pytest.mark.asyncio async def test_publish_message( - self, *, threaded_producer: ThreadedProducer, mocked_producer: Mock, loop + self, + *, + threaded_producer: ThreadedProducer, + mocked_producer: Mock, + loop, ): await threaded_producer.start() try: @@ -1821,13 +1846,17 @@ async def test_publish_message( ) ) ) - mocked_producer.send.coro.assert_called_once() + mocked_producer.send.assert_called_once() finally: await threaded_producer.stop() @pytest.mark.asyncio async def test_publish_message_with_wait( - self, *, threaded_producer: ThreadedProducer, mocked_producer: Mock, loop + self, + *, + threaded_producer: ThreadedProducer, + mocked_producer: Mock, + loop, ): await threaded_producer.start() try: @@ -1847,7 +1876,7 @@ async def test_publish_message_with_wait( ) ), ) - mocked_producer.send_and_wait.coro.assert_called_once() + mocked_producer.send_and_wait.assert_called_once() finally: await threaded_producer.stop() @@ -1896,7 +1925,7 @@ async def test__create_topic(self, *, transport): partitions=100, replication=3, ) - transport._topic_waiters["foo"].coro.assert_called_with() + transport._topic_waiters["foo"].assert_called_with() @pytest.mark.asyncio async def test__create_topic__missing(self, *, transport, loop): @@ -1920,7 +1949,7 @@ async def test__create_topic__missing(self, *, transport, loop): 3, loop=loop, ) - SW.return_value.coro.assert_called_once_with() + SW.return_value.assert_called_once_with() assert transport._topic_waiters["foo"] is SW.return_value @pytest.mark.asyncio @@ -1929,7 +1958,7 @@ async def test__create_topic__raises(self, *, transport, loop): transport._topic_waiters.clear() with patch("faust.transport.drivers.aiokafka.StampedeWrapper") as SW: SW.return_value = AsyncMock() - SW.return_value.coro.side_effect = KeyError("foo") + SW.return_value.side_effect = KeyError("foo") with pytest.raises(KeyError): await 
transport._create_topic( transport, @@ -1991,6 +2020,16 @@ async def test__create_topic__raises(self, *, transport, loop): "ssl_context": None, }, ), + ( + auth.OAuthCredentials(oauth_cb="token"), + None, + { + "security_protocol": "SASL_PLAINTEXT", + "sasl_oauth_token_provider": "token", + "sasl_mechanism": "OAUTHBEARER", + "ssl_context": None, + }, + ), ], ) def test_credentials_to_aiokafka(credentials, ssl_context, expected): @@ -2000,3 +2039,9 @@ def test_credentials_to_aiokafka(credentials, ssl_context, expected): def test_credentials_to_aiokafka__invalid(): with pytest.raises(ImproperlyConfigured): credentials_to_aiokafka_auth(object()) + + +def test_ensure_aiokafka_TPset(): + actual = ensure_aiokafka_TPset({TP(topic="foo", partition=0)}) + assert actual == {TopicPartition("foo", 0)} + assert all(isinstance(tp, TopicPartition) for tp in actual) diff --git a/tests/unit/transport/test_conductor.py b/tests/unit/transport/test_conductor.py index 55533cd19..9f01b8184 100644 --- a/tests/unit/transport/test_conductor.py +++ b/tests/unit/transport/test_conductor.py @@ -1,14 +1,15 @@ import asyncio +from unittest.mock import Mock, patch import pytest from mode import label, shortlabel from mode.utils.futures import done_future -from mode.utils.mocks import AsyncMock, Mock, patch from faust import App, Channel, Topic from faust.transport.conductor import Conductor from faust.transport.consumer import Consumer from faust.types import TP, Message +from tests.helpers import AsyncMock TP1 = TP("foo", 0) TP2 = TP("foo", 1) @@ -48,7 +49,7 @@ async def test_con_client_only(self, *, con_client_only): cb = con_client_only._tp_to_callback[tp] = AsyncMock(name="cb") ret = await con_client_only.on_message(message) - assert ret is cb.coro.return_value + assert ret is cb.return_value cb.assert_called_once_with(message) diff --git a/tests/unit/transport/test_consumer.py b/tests/unit/transport/test_consumer.py index 088acfeff..89c33ae21 100644 --- 
a/tests/unit/transport/test_consumer.py +++ b/tests/unit/transport/test_consumer.py @@ -1,10 +1,11 @@ import asyncio +from unittest.mock import ANY, Mock, call, patch import pytest +from intervaltree import Interval, IntervalTree from mode import Service from mode.threads import MethodQueue from mode.utils.futures import done_future -from mode.utils.mocks import ANY, AsyncMock, Mock, call, patch from faust import App from faust.app._attached import Attachments @@ -21,6 +22,7 @@ TransactionManager, ) from faust.types import TP, Message +from tests.helpers import AsyncMock TP1 = TP("foo", 0) TP2 = TP("foo", 1) @@ -69,7 +71,7 @@ async def test_on_stop__drainer_done(self, *, fetcher): async def test_on_stop_drainer__drainer_done2(self, *, fetcher): fetcher._drainer = Mock(done=Mock(return_value=False)) with patch("asyncio.wait_for", AsyncMock()) as wait_for: - wait_for.coro.return_value = None + wait_for.return_value = None await fetcher.on_stop() fetcher._drainer.cancel.assert_called_once_with() assert wait_for.call_count @@ -89,7 +91,8 @@ async def test_on_stop__drainer_raises_StopIteration(self, *, fetcher): fetcher._drainer = Mock(done=Mock(return_value=False)) with patch("asyncio.wait_for", AsyncMock()) as wait_for: wait_for.side_effect = StopIteration() - await fetcher.on_stop() + with pytest.raises(RuntimeError): + await fetcher.on_stop() wait_for.assert_called_once_with( fetcher._drainer, timeout=1.0, @@ -99,7 +102,7 @@ async def test_on_stop__drainer_raises_StopIteration(self, *, fetcher): async def test_on_stop__drainer_raises_CancelledError(self, *, fetcher): fetcher._drainer = Mock(done=Mock(return_value=False)) with patch("asyncio.wait_for", AsyncMock()) as wait_for: - wait_for.coro.side_effect = asyncio.CancelledError() + wait_for.side_effect = asyncio.CancelledError() await fetcher.on_stop() wait_for.assert_called_once_with( fetcher._drainer, @@ -110,7 +113,7 @@ async def test_on_stop__drainer_raises_CancelledError(self, *, fetcher): async def 
test_on_stop__drainer_raises_TimeoutError(self, *, fetcher): fetcher._drainer = Mock(done=Mock(return_value=False)) with patch("asyncio.wait_for", AsyncMock()) as wait_for: - wait_for.coro.side_effect = [ + wait_for.side_effect = [ asyncio.TimeoutError(), asyncio.TimeoutError(), None, @@ -193,28 +196,42 @@ async def test_on_rebalance(self, *, manager): await manager.on_rebalance(set(), set(), set()) + @pytest.mark.skip("Needs fixing") @pytest.mark.asyncio async def test__stop_transactions(self, *, manager, producer): - await manager._stop_transactions(["0-0", "1-0"]) - producer.stop_transaction.assert_has_calls( + tids = ["0-0", "1-0"] + manager._start_new_producer = AsyncMock() + await manager._stop_transactions(tids) + producer.stop_transaction.assert_called() + producer.stop_transaction.assert_called_once_with( [ + # The problem is that some reason calls with extra + # (commented out) garbage are being included + # call.shortlabel.__bool__(), + # call.shortlabel._str__(), call("0-0"), - call.coro("0-0"), + # call.shortlabel.__bool__(), + # call.shortlabel._str__(), call("1-0"), - call.coro("1-0"), ] ) + @pytest.mark.skip("Needs fixing") @pytest.mark.asyncio async def test_start_transactions(self, *, manager, producer): + tids = ["0-0", "1-0"] manager._start_new_producer = AsyncMock() - await manager._start_transactions(["0-0", "1-0"]) + await manager._start_transactions(tids) producer.maybe_begin_transaction.assert_has_calls( [ + # The problem is that some reason calls with extra + # (commented out) garbage are being included + # call.shortlabel.__bool__(), + # call.shortlabel._str__(), call("0-0"), - call.coro("0-0"), + # call.shortlabel.__bool__(), + # call.shortlabel._str__(), call("1-0"), - call.coro("1-0"), ] ) @@ -922,16 +939,18 @@ async def test_handle_attached(self, *, consumer): TP2: 6006, } ) - consumer.app._attachments.publish_for_tp_offset.coro.assert_has_calls( + consumer.app._attachments.publish_for_tp_offset.assert_has_calls( [ call(TP1, 3003), + 
call(TP1, 3003).__bool__(), call(TP2, 6006), + call(TP2, 6006).__bool__(), ] ) - consumer.app.producer.wait_many.coro.assert_called_with(ANY) + consumer.app.producer.wait_many.assert_called_with(ANY) att = consumer.app._attachments - att.publish_for_tp_offset.coro.return_value = None + att.publish_for_tp_offset.return_value = None await consumer._handle_attached( { TP1: 3003, @@ -990,7 +1009,7 @@ async def test_commit_offsets__in_transaction(self, *, consumer): {TP1: 3003, TP2: 6006}, start_new_transaction=True, ) - assert ret is consumer.transactions.commit.coro() + assert ret is consumer.transactions.commit.return_value @pytest.mark.asyncio async def test_commit_offsets__no_committable_offsets(self, *, consumer): @@ -1023,7 +1042,7 @@ async def test_commit(self, *, consumer): topics, start_new_transaction=start_new_transaction, ) - assert ret is consumer.force_commit.coro() + assert ret is consumer.force_commit.return_value assert consumer._commit_fut is None def test_filter_tps_with_pending_acks(self, *, consumer): @@ -1075,13 +1094,26 @@ def test_new_offset(self, tp, acked, expected_offset, expected_acked, *, consume "tp,acked,gaps,expected_offset", [ (TP1, [], [], None), - (TP1, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], [], 11), - (TP1, [1, 2, 3, 4, 5, 6, 7, 8, 10], [9], 11), - (TP1, [1, 2, 3, 4, 6, 7, 8, 10], [5], 9), - (TP1, [1, 3, 4, 6, 7, 8, 10], [2, 5, 9], 11), + (TP1, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], IntervalTree(), 11), + ( + TP1, + [1, 2, 3, 4, 5, 6, 7, 8, 10], + IntervalTree([Interval(9, 10)]), + 11, + ), + (TP1, [1, 2, 3, 4, 6, 7, 8, 10], IntervalTree([Interval(5, 6)]), 9), + ( + TP1, + [1, 3, 4, 6, 7, 8, 10], + IntervalTree([Interval(2, 3), Interval(5, 6), Interval(9, 10)]), + 11, + ), + (TP1, [3, 4], IntervalTree(), None), + (TP1, [3, 4], IntervalTree([Interval(2, 3)]), 5), ], ) def test_new_offset_with_gaps(self, tp, acked, gaps, expected_offset, *, consumer): + consumer._committed_offset[tp] = 1 consumer._acked[tp] = acked consumer._gap[tp] = gaps assert 
consumer._new_offset(tp) == expected_offset @@ -1092,19 +1124,21 @@ async def test_on_task_error(self, *, consumer): await consumer.on_task_error(KeyError()) consumer.commit.assert_called_once_with() - def test__add_gap(self, *, consumer): + @pytest.mark.asyncio + async def test__add_gap(self, *, consumer): tp = TP1 consumer._committed_offset[tp] = 299 - consumer._add_gap(TP1, 300, 343) + await consumer._add_gap(TP1, 300, 343) - assert consumer._gap[tp] == list(range(300, 343)) + assert consumer._gap[tp] == IntervalTree([Interval(300, 344)]) - def test__add_gap__previous_to_committed(self, *, consumer): + @pytest.mark.asyncio + async def test__add_gap__previous_to_committed(self, *, consumer): tp = TP1 consumer._committed_offset[tp] = 400 - consumer._add_gap(TP1, 300, 343) + await consumer._add_gap(TP1, 300, 343) - assert consumer._gap[tp] == [] + assert consumer._gap[tp] == IntervalTree() @pytest.mark.asyncio async def test_commit_handler(self, *, consumer): @@ -1120,7 +1154,7 @@ def on_sleep(secs, **kwargs): consumer.commit = AsyncMock(name="commit") await consumer._commit_handler(consumer) - consumer.sleep.coro.assert_has_calls( + consumer.sleep.assert_has_calls( [ call(consumer.commit_interval), call(pytest.approx(consumer.commit_interval, rel=1e-1)), @@ -1286,7 +1320,7 @@ async def test__getmany(self, *, consumer): {TP1, TP2}, 30.334, ) - assert ret is consumer._thread.getmany.coro.return_value + assert ret is consumer._thread.getmany.return_value @pytest.mark.asyncio async def test_subscribe(self, *, consumer): @@ -1299,13 +1333,13 @@ async def test_subscribe(self, *, consumer): async def test_seek_to_committed(self, *, consumer): ret = await consumer.seek_to_committed() consumer._thread.seek_to_committed.assert_called_once_with() - assert ret is consumer._thread.seek_to_committed.coro.return_value + assert ret is consumer._thread.seek_to_committed.return_value @pytest.mark.asyncio async def test_position(self, *, consumer): ret = await consumer.position(TP1) 
consumer._thread.position.assert_called_once_with(TP1) - assert ret is consumer._thread.position.coro.return_value + assert ret is consumer._thread.position.return_value @pytest.mark.asyncio async def test_seek_wait(self, *, consumer): @@ -1336,13 +1370,13 @@ def test_topic_partitions(self, *, consumer): async def test_earliest_offsets(self, *, consumer): ret = await consumer.earliest_offsets(TP1, TP2) consumer._thread.earliest_offsets.assert_called_once_with(TP1, TP2) - assert ret is consumer._thread.earliest_offsets.coro.return_value + assert ret is consumer._thread.earliest_offsets.return_value @pytest.mark.asyncio async def test_highwaters(self, *, consumer): ret = await consumer.highwaters(TP1, TP2) consumer._thread.highwaters.assert_called_once_with(TP1, TP2) - assert ret is consumer._thread.highwaters.coro.return_value + assert ret is consumer._thread.highwaters.return_value @pytest.mark.asyncio async def test_commit(self, *, consumer): @@ -1350,7 +1384,7 @@ async def test_commit(self, *, consumer): consumer._thread.commit.assert_called_once_with( {TP1: 301, TP2: 302}, ) - assert ret is consumer._thread.commit.coro.return_value + assert ret is consumer._thread.commit.return_value @pytest.mark.asyncio async def test_maybe_wait_for_commit_to_finish(self, *, loop, consumer): diff --git a/tests/unit/transport/test_producer.py b/tests/unit/transport/test_producer.py index 87bb8697a..0d1b6c3a3 100644 --- a/tests/unit/transport/test_producer.py +++ b/tests/unit/transport/test_producer.py @@ -1,11 +1,11 @@ import asyncio from typing import Any, Optional -from unittest.mock import PropertyMock +from unittest.mock import Mock, PropertyMock, call import pytest -from mode.utils.mocks import AsyncMock, Mock, call from faust.transport.producer import Producer, ProducerBuffer +from tests.helpers import AsyncMock class TestProducerBuffer: @@ -26,14 +26,14 @@ def test_put(self, *, buf): async def test_on_stop(self, *, buf): buf.flush = AsyncMock(name="flush") await 
buf.on_stop() - buf.flush.coro.assert_called_once_with() + buf.flush.assert_called_once_with() @pytest.mark.asyncio async def test__send_pending(self, *, buf): fut = Mock(name="future_message") fut.message.channel.publish_message = AsyncMock() await buf._send_pending(fut) - fut.message.channel.publish_message.coro.assert_called_once_with( + fut.message.channel.publish_message.assert_called_once_with( fut, wait=False, ) @@ -53,8 +53,8 @@ async def on_send(fut): buf._send_pending.assert_has_calls( [ - call(buf.pending.get.coro.return_value), - call(buf.pending.get.coro.return_value), + call(buf.pending.get.return_value), + call(buf.pending.get.return_value), ] ) @@ -121,7 +121,6 @@ def _put(self, buf, items): @pytest.mark.asyncio async def test_flush_atmost(self, *, buf): - sent_messages = 0 def create_send_pending_mock(max_messages): diff --git a/tests/unit/utils/terminal/test_spinners.py b/tests/unit/utils/terminal/test_spinners.py index b1ce5497f..de8e41ea4 100644 --- a/tests/unit/utils/terminal/test_spinners.py +++ b/tests/unit/utils/terminal/test_spinners.py @@ -1,6 +1,5 @@ import logging - -from mode.utils.mocks import Mock, call, patch +from unittest.mock import Mock, call, patch from faust.utils.terminal.spinners import Spinner, SpinnerHandler diff --git a/tests/unit/utils/terminal/test_tables.py b/tests/unit/utils/terminal/test_tables.py index 51f69e76e..f607a7a80 100644 --- a/tests/unit/utils/terminal/test_tables.py +++ b/tests/unit/utils/terminal/test_tables.py @@ -1,8 +1,8 @@ from contextlib import ExitStack, contextmanager +from unittest.mock import Mock, patch import pytest import terminaltables -from mode.utils.mocks import Mock, patch from faust.utils.terminal import tables @@ -59,9 +59,21 @@ def test_table(target, contexts, kwargs, expected_tty): "tty,contexts,headers,expected_tty,expected_data", [ (None, [mock_logging(isatty=True)], None, True, TABLE_DATA), - (None, [mock_logging(isatty=True)], ["foo"], True, [["foo"]] + TABLE_DATA), + ( + None, + 
[mock_logging(isatty=True)], + ["foo"], + True, + [["foo"]] + TABLE_DATA, + ), (None, [mock_logging(isatty=False)], None, False, TABLE_DATA), - (None, [mock_logging(isatty=False)], ["f"], False, [["f"]] + TABLE_DATA), + ( + None, + [mock_logging(isatty=False)], + ["f"], + False, + [["f"]] + TABLE_DATA, + ), (True, [], None, True, TABLE_DATA), (True, [], ["foo"], True, [["foo"]] + TABLE_DATA), (False, [], None, False, TABLE_DATA), diff --git a/tests/unit/utils/test_urls.py b/tests/unit/utils/test_urls.py index f433e3901..18243cb80 100644 --- a/tests/unit/utils/test_urls.py +++ b/tests/unit/utils/test_urls.py @@ -49,6 +49,14 @@ def test_urllist_strsep(): ] +def test_urllist_strsep_comma(): + assert urllist("foo://localhost,bar.com,example.com") == [ + URL("foo://localhost"), + URL("foo://bar.com"), + URL("foo://example.com"), + ] + + def test_urllist_strsep_no_scheme(): assert urllist("localhost;bar.com;example.com", default_scheme="bar") == [ URL("bar://localhost"), diff --git a/tests/unit/utils/test_venusian.py b/tests/unit/utils/test_venusian.py index a7bdfbca5..890dfe9ba 100644 --- a/tests/unit/utils/test_venusian.py +++ b/tests/unit/utils/test_venusian.py @@ -1,4 +1,4 @@ -from mode.utils.mocks import Mock, patch +from unittest.mock import Mock, patch from faust.utils import venusian diff --git a/tests/unit/web/drivers/test_aiohttp.py b/tests/unit/web/drivers/test_aiohttp.py index 34ace4749..1748f1363 100644 --- a/tests/unit/web/drivers/test_aiohttp.py +++ b/tests/unit/web/drivers/test_aiohttp.py @@ -1,11 +1,11 @@ import asyncio import sys from pathlib import Path +from unittest.mock import ANY, Mock, call, patch import aiohttp_cors import pytest from aiohttp.web import Application -from mode.utils.mocks import ANY, AsyncMock, Mock, call, patch from yarl import URL from faust.types.web import ResourceOptions @@ -16,6 +16,7 @@ ServerThread, _prepare_cors_options, ) +from tests.helpers import AsyncMock if sys.platform == "win32": DATAPATH = "c:/Program 
Files/Faust/data" @@ -293,7 +294,8 @@ def test_add_static(self, *, web): def test_route__with_cors_options(self, *, web): handler = Mock() handler.get_methods = Mock( - name="get_methods", return_value=set({"GET", "PUT", "POST", "DELETE"}) + name="get_methods", + return_value=set({"GET", "PUT", "POST", "DELETE"}), ) cors_options = { "http://example.com": ResourceOptions( @@ -320,7 +322,8 @@ def test_route__with_cors_options(self, *, web): web._cors.add.assert_has_calls( [ call( - web.web_app.router.add_route(), _prepare_cors_options(cors_options) + web.web_app.router.add_route(), + _prepare_cors_options(cors_options), ) for _ in NON_OPTIONS_METHODS ], diff --git a/tests/unit/web/test_base.py b/tests/unit/web/test_base.py index 1f28bdd7c..b38588440 100644 --- a/tests/unit/web/test_base.py +++ b/tests/unit/web/test_base.py @@ -1,5 +1,6 @@ +from unittest.mock import Mock, patch + import pytest -from mode.utils.mocks import Mock, patch from yarl import URL from faust.web import Blueprint diff --git a/tests/unit/web/test_blueprints.py b/tests/unit/web/test_blueprints.py index 8cfaa8ee5..9887d3487 100644 --- a/tests/unit/web/test_blueprints.py +++ b/tests/unit/web/test_blueprints.py @@ -1,7 +1,7 @@ from pathlib import Path +from unittest.mock import Mock import pytest -from mode.utils.mocks import Mock from faust import web diff --git a/tests/unit/web/test_views.py b/tests/unit/web/test_views.py index 3d5e24545..ca473c0d1 100644 --- a/tests/unit/web/test_views.py +++ b/tests/unit/web/test_views.py @@ -1,8 +1,10 @@ +from unittest.mock import Mock + import pytest -from mode.utils.mocks import AsyncMock, Mock from faust.web import Request, View, Web from faust.web.exceptions import MethodNotAllowed +from tests.helpers import AsyncMock @View.from_handler @@ -69,7 +71,7 @@ async def test_dispatch(self, method, *, view): request.match_info = {} handler = AsyncMock(name=method) view.methods[method.lower()] = handler - assert await view(request) is handler.coro() + assert await 
view(request) is handler.return_value handler.assert_called_once_with(request) def test_path_for(self, *, view): @@ -202,13 +204,13 @@ def test_route(self, *, view, web): assert res is handler def test_error(self, *, view, web): - response = view.error(303, "foo", arg="bharg") + response = view.error(303, "foo", arg="bharg", headers={"k": "v"}) web.json.assert_called_once_with( {"error": "foo", "arg": "bharg"}, status=303, reason=None, - headers=None, content_type=None, + headers={"k": "v"}, ) assert response is web.json() @@ -229,4 +231,4 @@ async def test_read_request_content(self, *, view, web): web.read_request_content = AsyncMock() ret = await view.read_request_content(request) web.read_request_content.assert_called_once_with(request) - assert ret is web.read_request_content.coro.return_value + assert ret is web.read_request_content.return_value diff --git a/tox.ini b/tox.ini index 9b7a19c4c..59aff06e1 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = 3.10,3.9,3.8,3.7,3.6,flake8,apicheck,configcheck,typecheck,docstyle,bandit,spell +envlist = 3.12,3.11,3.10,3.9,3.8,flake8,apicheck,configcheck,typecheck,docstyle,bandit,spell [testenv] deps= @@ -17,11 +17,11 @@ recreate = False commands = py.test --random-order --open-files -xvv --cov=faust tests/unit tests/functional tests/integration tests/meticulous/ tests/regression basepython = - 3.10: python3.10 + 3.12,flake8,apicheck,linkcheck,configcheck,typecheck,docstyle,bandit,spell: python3.12 + 3.11,flake8,apicheck,linkcheck,configcheck,typecheck,docstyle,bandit,spell: python3.11 + 3.10,flake8,apicheck,linkcheck,configcheck,typecheck,docstyle,bandit,spell: python3.10 3.9,flake8,apicheck,linkcheck,configcheck,typecheck,docstyle,bandit,spell: python3.9 3.8,flake8,apicheck,linkcheck,configcheck,typecheck,docstyle,bandit,spell: python3.8 - 3.7: python3.7 - 3.6: python3.6 [testenv:apicheck] setenv =