diff --git a/.build/choose_gcc.sh b/.build/choose_gcc.sh
index f2922d8..21e143c 100755
--- a/.build/choose_gcc.sh
+++ b/.build/choose_gcc.sh
@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-set -Eeuxo pipefail
+# set -Eeuxo pipefail
 
 # if [[ "$(uname)" == "Linux" ]]; then
 #   sudo update-alternatives \
@@ -20,14 +20,17 @@ set -Eeuxo pipefail
 # fi
 
 if [[ "$(uname)" == "Darwin" ]]; then
-    set -Eeuxo pipefail
-    sudo ln -s /usr/local/bin/gcc-$1 /usr/local/bin/gcc
-    sudo ln -s /usr/local/bin/g++-$1 /usr/local/bin/g++
-    sudo ln -s /usr/local/bin/cpp-$1 /usr/local/bin/cpp
+    BIN_ROOT_DIR="/usr/local/bin"
+    which gcc-$1
+    which gcc
+    sudo ln -s $(which gcc-$1) "${BIN_ROOT_DIR}/gcc"
+    which gcc
+    sudo ln -s $(which g++-$1) "${BIN_ROOT_DIR}/g++"
+    sudo ln -s $(which cpp-$1) "${BIN_ROOT_DIR}/cpp"
     # sudo ln -s /usr/local/bin/gfortran-$1 /usr/local/bin/gfortran # already exists
-    sudo ln -s /usr/local/bin/gcc-ar-$1 /usr/local/bin/gcc-ar
-    sudo ln -s /usr/local/bin/gcc-nm-$1 /usr/local/bin/gcc-nm
-    sudo ln -s /usr/local/bin/gcc-ranlib-$1 /usr/local/bin/gcc-ranlib
-    sudo ln -s /usr/local/bin/gcov-$1 /usr/local/bin/gcov
-    sudo ln -s /usr/local/bin/gcov-dump-$1 /usr/local/bin/gcov-dump
+    sudo ln -s $(which gcc-ar-$1) "${BIN_ROOT_DIR}/gcc-ar"
+    sudo ln -s $(which gcc-nm-$1) "${BIN_ROOT_DIR}/gcc-nm"
+    sudo ln -s $(which gcc-ranlib-$1) "${BIN_ROOT_DIR}/gcc-ranlib"
+    sudo ln -s $(which gcov-$1) "${BIN_ROOT_DIR}/gcov"
+    sudo ln -s $(which gcov-dump-$1) "${BIN_ROOT_DIR}/gcov-dump"
 fi
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..6309476
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,16 @@
+version: 2
+updates:
+  - package-ecosystem: github-actions
+    directory: /
+    schedule:
+      interval: monthly
+  - package-ecosystem: pip
+    directory: /
+    schedule:
+      interval: monthly
+    versioning-strategy: increase-if-necessary
+    ignore:
+      - dependency-name: >-
+          *
+        update-types:
+          - version-update:semver-patch
diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml
new file mode 100644
index 0000000..18280c2
--- /dev/null
+++ b/.github/workflows/python.yml
@@ -0,0 +1,31 @@
+name: actions
+
+on:
+  push:
+  pull_request:
+    branches:
+      - $default-branch
+
+jobs:
+  build:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        # TODO: restore 'macos-latest' and 'windows-latest' after fixing on linux
+        # os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [ubuntu-latest]
+        python-version: ['3.11', '3.12']
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - run: pip install -r requirements_ci.txt
+      - run: python -m coverage run --branch --source . -m unittest -v
+      # TODO: restore C, C++ and Fortran tests after fixing python
+      # - run: pip install -r requirements_all.txt
+      # - run: python -m coverage run --append --branch --source . -m unittest -v
+      - run: python -m coverage report --show-missing
+      - run: codecov
diff --git a/.travis.yml b/.travis.yml
index 129ad69..fb5561e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -28,22 +28,30 @@ matrix:
   - os: linux
     language: python
     python: "3.7"
+  - os: linux
+    language: python
+    python: "3.8"
   - os: osx
-    osx_image: xcode10.2
+    osx_image: xcode11.2
     language: generic
     env: TRAVIS_PYTHON_VERSION="3.5"
   - os: osx
-    osx_image: xcode10.2
+    osx_image: xcode11.2
     language: generic
     env: TRAVIS_PYTHON_VERSION="3.6"
   - os: osx
-    osx_image: xcode10.2
+    osx_image: xcode11.2
     language: generic
     env: TRAVIS_PYTHON_VERSION="3.7"
+  - os: osx
+    osx_image: xcode11.2
+    language: generic
+    env: TRAVIS_PYTHON_VERSION="3.8"
 
 before_install:
-  - if [[ "${TRAVIS_OS_NAME}" == "osx" ]]; then sudo installer -pkg /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg -target /; fi # https://github.com/pyenv/pyenv/issues/1219
-  - .build/choose_gcc.sh 8
+  - echo "${PATH}"
+  - which gcc
+  - .build/choose_gcc.sh 9
   - |
     set -e
     if [[ "$(uname)" == "Darwin" ]]; then
@@ -58,15 +66,16 @@ before_install:
       brew doctor 2>&1 | grep "/usr/local/include" | awk '{$1=$1;print}' | xargs -I _ mv _ /tmp/includes
     fi
     set +e
+  - which gcc
   - gcc --version
   - g++ --version
   - swig -version
   - gfortran --version
   - java --version
   - .build/install_pyenv.sh
 
   # apps
   - git clone "https://github.com/mbdevpl/ffb-mini" "../ffb-mini"
-  - git clone "https://github.com/ECP-Astro/FLASH5.git" "../flash5"
+  - git clone "https://github.com/mbdevpl/FLASH5" "../flash5"
   - git clone "https://github.com/mbdevpl/miranda_io" "../miranda_io"
 install:
diff --git a/MANIFEST.in b/MANIFEST.in
index 599ceb2..5325bc8 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,8 +1,15 @@
-include setup_boilerplate.py
-include *requirements.txt
-include extras_requirements.json
+include requirements.txt
 include LICENSE
 include NOTICE
+include ./*/py.typed
+
+include requirements_all.txt
+include requirements_c.txt
+include requirements_cpp.txt
+include requirements_cython.txt
+include requirements_fortran.txt
+include requirements_opencl.txt
+
+include requirements_test.txt
 recursive-include ./transpyle/resources *.*
 recursive-include ./test/examples *.*
-include ./*/py.typed
diff --git a/NOTICE b/NOTICE
index d4d9d93..ad752bc 100644
--- a/NOTICE
+++ b/NOTICE
@@ -1,4 +1,5 @@
-Copyright 2017-2019 Mateusz Bysiek https://mbdevpl.github.io/
+transpyle
+Copyright (c) 2017-2025 Mateusz Bysiek https://mbdevpl.github.io/
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
diff --git a/README.rst b/README.rst
index ad5faac..ca96635 100644
--- a/README.rst
+++ b/README.rst
@@ -15,8 +15,8 @@ Human-oriented and high-performing transpiler for Python.
     :target: https://pypi.org/project/transpyle
     :alt: package version from PyPI
 
-.. image:: https://travis-ci.org/mbdevpl/transpyle.svg?branch=master
-    :target: https://travis-ci.org/mbdevpl/transpyle
+.. image:: https://travis-ci.com/mbdevpl/transpyle.svg?branch=master
+    :target: https://travis-ci.com/mbdevpl/transpyle
     :alt: build status from Travis CI
 
 .. image:: https://ci.appveyor.com/api/projects/status/github/mbdevpl/transpyle?branch=master&svg=true
@@ -286,7 +286,7 @@ The core functionality of transpyle is platform-independent. However, as support
 depends on presence of additional software, some functionality might be limited/unavailable
 on selected platforms.
 
-Transpyle is fully tested on Linux, and partially tested on OS X and Windows.
+Transpyle is fully tested on Linux, and partially tested on macOS and Windows.
 
 
 Installation
diff --git a/appveyor.yml b/appveyor.yml
index 9869c8d..096a18a 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -2,24 +2,18 @@ version: "{build}"
 
 environment:
   matrix:
-    - ARCHITECTURE: "x86"
-      PYTHON_VERSION: "3.5"
-      PYTHON: "C:\\Python35"
     - ARCHITECTURE: "x64"
       PYTHON_VERSION: "3.5"
       PYTHON: "C:\\Python35-x64"
-    - ARCHITECTURE: "x86"
-      PYTHON_VERSION: "3.6"
-      PYTHON: "C:\\Python36"
     - ARCHITECTURE: "x64"
       PYTHON_VERSION: "3.6"
       PYTHON: "C:\\Python36-x64"
-    - ARCHITECTURE: "x86"
-      PYTHON_VERSION: "3.7"
-      PYTHON: "C:\\Python37"
     - ARCHITECTURE: "x64"
       PYTHON_VERSION: "3.7"
       PYTHON: "C:\\Python37-x64"
+    - ARCHITECTURE: "x64"
+      PYTHON_VERSION: "3.8"
+      PYTHON: "C:\\Python38-x64"
 
 init:
   - set PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%
diff --git a/ci_requirements.txt b/ci_requirements.txt
deleted file mode 100644
index 666e7d4..0000000
--- a/ci_requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-codecov
-coverage
--rtest_requirements.txt
diff --git a/extras_requirements.json b/extras_requirements.json
deleted file mode 100644
index c7c2f4c..0000000
--- a/extras_requirements.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-    "all": ["cython", "numpy", "open_fortran_parser ~= 0.6.0", "pcpp", "pycparser", "pyopencl"],
-    "c": ["cython", "pcpp", "pycparser"],
-    "cpp": [],
-    "cython": ["cython"],
-    "fortran": ["numpy", "open_fortran_parser ~= 0.6.0"],
-    "opencl": ["pyopencl"]
-}
diff --git a/pyproject.toml b/pyproject.toml
index be6034b..fa9155c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,2 +1,37 @@
 [build-system]
-requires=['docutils ~= 0.15.1', 'setuptools >= 41.0', 'version-query >= 1.0.5, == 1.*', 'wheel >= 0.33']
+requires = [
+    'boilerplates[setup] ~= 1.0'
+]
+
+[tool.flake8]
+max-line-length = 100
+max-doc-length = 100
+
+[tool.pydocstyle]
+ignore = [
+    'D102', 'D103', 'D105', 'D107',
+    'D203', 'D213',
+    'D406', 'D407', 'D412', 'D413'
+]
+
+[tool.pylint.MASTER]
+load-plugins = [
+    'pylint.extensions.broad_try_clause',
+    'pylint.extensions.mccabe',
+    'pylint.extensions.no_self_use',
+    'pylint.extensions.redefined_variable_type'
+]
+
+[tool.pylint.'MESSAGES CONTROL']
+docstring-min-length = 5
+
+[tool.pylint.SIMILARITIES]
+ignore-imports = 'yes'
+min-similarity-lines = 5
+
+[tool.pylint.BASIC]
+no-docstring-rgx = '^(test)?_|.*Tests$'
+unsafe-load-any-extension = 'yes'
+
+[tool.pylint.REPORTS]
+output-format = 'colorized'
diff --git a/requirements.txt b/requirements.txt
index 43266a1..ff4d819 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,12 +1,11 @@
-argunparse
-astunparse
-colorama
-colorlog
+argunparse ~= 0.1.2
+astunparse ~= 1.6
+colorama ~= 0.4.3
+colorlog ~= 4.1
 encrypted-config
 horast ~= 0.4.0
-pandas
-setuptools >= 41.0
+pandas ~= 1.0
 static-typing ~= 0.2.7
 typed_ast ~= 1.4
 typed-astunparse >= 2.1.4, == 2.*
-version-query >= 1.0.5, == 1.*
+version-query ~= 1.5
diff --git a/requirements_all.txt b/requirements_all.txt
new file mode 100644
index 0000000..4f7490f
--- /dev/null
+++ b/requirements_all.txt
@@ -0,0 +1,5 @@
+-r requirements_c.txt
+# -r requirements_cpp.txt
+# -r requirements_cython.txt
+-r requirements_fortran.txt
+# -r requirements_opencl.txt
diff --git a/requirements_c.txt b/requirements_c.txt
new file mode 100644
index 0000000..d3117be
--- /dev/null
+++ b/requirements_c.txt
@@ -0,0 +1,3 @@
+-r requirements_cython.txt
+pcpp ~= 1.30
+pycparser ~= 2.22
diff --git a/requirements_ci.txt b/requirements_ci.txt
new file mode 100644
index 0000000..0785ea8
--- /dev/null
+++ b/requirements_ci.txt
@@ -0,0 +1,3 @@
+-r requirements_test.txt
+codecov >= 2.0.15
+coverage >= 5.0.3
diff --git a/requirements_cpp.txt b/requirements_cpp.txt
new file mode 100644
index 0000000..e69de29
diff --git a/requirements_cython.txt b/requirements_cython.txt
new file mode 100644
index 0000000..7624486
--- /dev/null
+++ b/requirements_cython.txt
@@ -0,0 +1 @@
+Cython ~= 0.29.14
diff --git a/requirements_fortran.txt b/requirements_fortran.txt
new file mode 100644
index 0000000..00b680e
--- /dev/null
+++ b/requirements_fortran.txt
@@ -0,0 +1,2 @@
+numpy >= 1.18
+open_fortran_parser ~= 0.6.0
diff --git a/requirements_opencl.txt b/requirements_opencl.txt
new file mode 100644
index 0000000..bdb19ff
--- /dev/null
+++ b/requirements_opencl.txt
@@ -0,0 +1 @@
+pyopencl >= 2019.1
diff --git a/requirements_test.txt b/requirements_test.txt
new file mode 100644
index 0000000..6bead1d
--- /dev/null
+++ b/requirements_test.txt
@@ -0,0 +1,4 @@
+-r requirements.txt
+boilerplates[packaging_tests] ~= 1.0
+numba >= 0.47
+timing ~= 0.4
diff --git a/setup.py b/setup.py
index 38e7aa1..4688fbf 100644
--- a/setup.py
+++ b/setup.py
@@ -1,15 +1,13 @@
 """Setup script for transpyle package."""
 
-import json
-import pathlib
-import setup_boilerplate
+import boilerplates.setup
 
 
-class Package(setup_boilerplate.Package):
+class Package(boilerplates.setup.Package):
     """Package metadata."""
 
     name = 'transpyle'
-    description = 'performance-oriented transpiler for Python'
+    description = 'Performance-oriented transpiler for Python.'
     url = 'https://github.com/mbdevpl/transpyle'
     classifiers = [
         'Development Status :: 2 - Pre-Alpha',
@@ -20,9 +18,8 @@ class Package(setup_boilerplate.Package):
         'Natural Language :: English',
         'Operating System :: MacOS :: MacOS X',
         'Operating System :: POSIX :: Linux',
-        'Programming Language :: Python :: 3.5',
-        'Programming Language :: Python :: 3.6',
-        'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.11',
+        'Programming Language :: Python :: 3.12',
         'Programming Language :: Python :: 3 :: Only',
         'Topic :: Education',
         'Topic :: Scientific/Engineering',
@@ -31,14 +28,15 @@ class Package(setup_boilerplate.Package):
         'Topic :: Software Development :: Pre-processors',
         'Topic :: Utilities']
     keywords = ['compiler', 'just-in-time', 'source-to-source', 'transpilation', 'transpiler']
-    extras_require = {}
-    entry_points = {
-        'console_scripts': ['transpyle = transpyle.__main__:main']}
+    extras_require = {
+        'all': boilerplates.setup.parse_requirements('requirements_all.txt'),
+        'c': boilerplates.setup.parse_requirements('requirements_c.txt'),
+        'cpp': boilerplates.setup.parse_requirements('requirements_cpp.txt'),
+        'cython': boilerplates.setup.parse_requirements('requirements_cython.txt'),
+        'fortran': boilerplates.setup.parse_requirements('requirements_fortran.txt'),
+        'opencl': boilerplates.setup.parse_requirements('requirements_opencl.txt')}
+    entry_points = {'console_scripts': ['transpyle = transpyle.__main__:main']}
 
 
 if __name__ == '__main__':
-    _HERE = pathlib.Path(__file__).parent
-    _EXTRAS = pathlib.Path(_HERE, 'extras_requirements.json')
-    with _EXTRAS.open() as json_file:
-        Package.extras_require = json.load(json_file)
     Package.setup()
diff --git a/setup_boilerplate.py b/setup_boilerplate.py
deleted file mode 100644
index 3a75c23..0000000
--- a/setup_boilerplate.py
+++ /dev/null
@@ -1,318 +0,0 @@
-"""Below code is generic boilerplate and normally should not be changed.
- -To avoid setup script boilerplate, create "setup.py" file with the minimal contents as given -in SETUP_TEMPLATE below, and modify it according to the specifics of your package. - -See the implementation of setup_boilerplate.Package for default metadata values and available -options. -""" - -import pathlib -import runpy -import sys -import typing as t - -import setuptools - -__updated__ = '2019-06-04' - -SETUP_TEMPLATE = '''"""Setup script.""" - -import setup_boilerplate - - -class Package(setup_boilerplate.Package): - - """Package metadata.""" - - name = '' - description = '' - url = 'https://github.com/mbdevpl/...' - classifiers = [ - 'Development Status :: 1 - Planning', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3 :: Only'] - keywords = [] - - -if __name__ == '__main__': - Package.setup() -''' - -HERE = pathlib.Path(__file__).resolve().parent - - -def find_version( - package_name: str, version_module_name: str = '_version', - version_variable_name: str = 'VERSION') -> str: - """Simulate behaviour of "from package_name._version import VERSION", and return VERSION. - - To avoid importing whole package only to read the version, just module containing the version - is imported. Therefore relative imports in that module will break the setup. - """ - version_module_path = '{}/{}.py'.format(package_name.replace('-', '_'), version_module_name) - version_module_vars = runpy.run_path(version_module_path) - return version_module_vars[version_variable_name] - - -def find_packages(root_directory: str = '.') -> t.List[str]: - """Find packages to pack.""" - exclude = ['test', 'test.*'] if ('bdist_wheel' in sys.argv or 'bdist' in sys.argv) else [] - packages_list = setuptools.find_packages(root_directory, exclude=exclude) - return packages_list - - -def parse_requirements( - requirements_path: str = 'requirements.txt') -> t.List[str]: - """Read contents of requirements.txt file and return data from its relevant lines. - - Only non-empty and non-comment lines are relevant. 
- """ - requirements = [] - with HERE.joinpath(requirements_path).open() as reqs_file: - for requirement in [line.strip() for line in reqs_file.read().splitlines()]: - if not requirement or requirement.startswith('#'): - continue - requirements.append(requirement) - return requirements - - -def partition_version_classifiers( - classifiers: t.Sequence[str], version_prefix: str = 'Programming Language :: Python :: ', - only_suffix: str = ' :: Only') -> t.Tuple[t.List[str], t.List[str]]: - """Find version number classifiers in given list and partition them into 2 groups.""" - versions_min, versions_only = [], [] - for classifier in classifiers: - version = classifier.replace(version_prefix, '') - versions = versions_min - if version.endswith(only_suffix): - version = version.replace(only_suffix, '') - versions = versions_only - try: - versions.append(tuple([int(_) for _ in version.split('.')])) - except ValueError: - pass - return versions_min, versions_only - - -def find_required_python_version( - classifiers: t.Sequence[str], version_prefix: str = 'Programming Language :: Python :: ', - only_suffix: str = ' :: Only') -> t.Optional[str]: - """Determine the minimum required Python version.""" - versions_min, versions_only = partition_version_classifiers( - classifiers, version_prefix, only_suffix) - if len(versions_only) > 1: - raise ValueError( - 'more than one "{}" version encountered in {}'.format(only_suffix, versions_only)) - only_version = None - if len(versions_only) == 1: - only_version = versions_only[0] - for version in versions_min: - if version[:len(only_version)] != only_version: - raise ValueError( - 'the "{}" version {} is inconsistent with version {}' - .format(only_suffix, only_version, version)) - min_supported_version = None - for version in versions_min: - if min_supported_version is None or \ - (len(version) >= len(min_supported_version) and version < min_supported_version): - min_supported_version = version - if min_supported_version is None: - if only_version is not None: - return '.'.join([str(_) for _ in only_version]) - else: - return '>=' + '.'.join([str(_) for _ in min_supported_version]) - return None - - -def resolve_relative_rst_links(text: str, base_link: str): - """Resolve all relative links in a given RST document. - - All links of form `link`_ become `link `_. 
- """ - import docutils.nodes - import docutils.parsers.rst - import docutils.utils - - def parse_rst(text: str) -> docutils.nodes.document: - """Parse text assuming it's an RST markup.""" - parser = docutils.parsers.rst.Parser() - components = (docutils.parsers.rst.Parser,) - settings = docutils.frontend.OptionParser(components=components).get_default_values() - document = docutils.utils.new_document('', settings=settings) - parser.parse(text, document) - return document - - class SimpleRefCounter(docutils.nodes.NodeVisitor): - """Find all simple references in a given docutils document.""" - - def __init__(self, *args, **kwargs): - """Initialize the SimpleRefCounter object.""" - super().__init__(*args, **kwargs) - self.references = [] - - def visit_reference(self, node: docutils.nodes.reference) -> None: - """Call for "reference" nodes.""" - if len(node.children) != 1 or not isinstance(node.children[0], docutils.nodes.Text) \ - or not all(_ in node.attributes for _ in ('name', 'refuri')): - return - path = pathlib.Path(node.attributes['refuri']) - try: - if path.is_absolute(): - return - resolved_path = path.resolve() - except FileNotFoundError: # in resolve(), prior to Python 3.6 - return - except OSError: # in is_absolute() and resolve(), on URLs in Windows - return - try: - resolved_path.relative_to(HERE) - except ValueError: - return - if not path.is_file(): - return - assert node.attributes['name'] == node.children[0].astext() - self.references.append(node) - - def unknown_visit(self, node: docutils.nodes.Node) -> None: - """Call for unknown node types.""" - return - - document = parse_rst(text) - visitor = SimpleRefCounter(document) - document.walk(visitor) - for target in visitor.references: - name = target.attributes['name'] - uri = target.attributes['refuri'] - new_link = '`{} <{}{}>`_'.format(name, base_link, uri) - if name == uri: - text = text.replace('`<{}>`_'.format(uri), new_link) - else: - text = text.replace('`{} <{}>`_'.format(name, uri), new_link) - return text - - -class Package: - """Default metadata and behaviour for a Python package setup script.""" - - root_directory = '.' # type: str - """Root directory of the source code of the package, relative to the setup.py file location.""" - - name = None # type: str - - version = None # type: str - """"If None, it will be obtained from "package_name._version.VERSION" variable.""" - - description = None # type: str - - long_description = None # type: str - """If None, it will be generated from readme.""" - - long_description_content_type = None # type: str - """If None, it will be set accodring to readme file extension. - - For this field to be automatically set, also long_description field has to be None. 
- """ - - url = 'https://github.com/mbdevpl' # type: str - download_url = None # type: str - author = 'Mateusz Bysiek' # type: str - author_email = 'mateusz.bysiek@gmail.com' # type: str - # maintainer = None # type: str - # maintainer_email = None # type: str - license_str = 'Apache License 2.0' # type: str - - classifiers = [] # type: t.List[str] - """List of valid project classifiers: https://pypi.org/pypi?:action=list_classifiers""" - - keywords = [] # type: t.List[str] - - packages = None # type: t.List[str] - """If None, determined with help of setuptools.""" - - package_data = {} - exclude_package_data = {} - - install_requires = None # type: t.List[str] - """If None, determined using requirements.txt.""" - - extras_require = {} # type: t.Mapping[str, t.List[str]] - """A dictionary containing entries of type 'some_feature': ['requirement1', 'requirement2'].""" - - python_requires = None # type: str - """If None, determined from provided classifiers.""" - - entry_points = {} # type: t.Mapping[str, t.List[str]] - """A dictionary used to enable automatic creation of console scripts, gui scripts and plugins. - - Example entry: - 'console_scripts': ['script_name = package.subpackage:function'] - """ - - test_suite = 'test' # type: str - - @classmethod - def try_fields(cls, *names) -> t.Optional[t.Any]: - """Return first existing of given class field names.""" - for name in names: - if hasattr(cls, name): - return getattr(cls, name) - raise AttributeError((cls, names)) - - @classmethod - def parse_readme(cls, readme_path: str = 'README.rst', - encoding: str = 'utf-8') -> t.Tuple[str, str]: - """Parse readme and resolve relative links in it if it is feasible. - - Links are resolved if readme is in rst format and the package is hosted on GitHub. - """ - readme_path = pathlib.Path(readme_path) - with HERE.joinpath(readme_path).open(encoding=encoding) as readme_file: - long_description = readme_file.read() # type: str - - if readme_path.suffix.lower() == '.rst' and cls.url.startswith('https://github.com/'): - base_url = '{}/blob/v{}/'.format(cls.url, cls.version) - long_description = resolve_relative_rst_links(long_description, base_url) - - long_description_content_type = {'.rst': 'text/x-rst', '.md': 'text/markdown'}.get( - readme_path.suffix.lower(), 'text/plain') - long_description_content_type += '; charset=UTF-8' - - return long_description, long_description_content_type - - @classmethod - def prepare(cls) -> None: - """Fill in possibly missing package metadata.""" - if cls.version is None: - cls.version = find_version(cls.name) - if cls.long_description is None: - cls.long_description, cls.long_description_content_type = cls.parse_readme() - if cls.packages is None: - cls.packages = find_packages(cls.root_directory) - if cls.install_requires is None: - cls.install_requires = parse_requirements() - if cls.python_requires is None: - cls.python_requires = find_required_python_version(cls.classifiers) - - @classmethod - def setup(cls) -> None: - """Call setuptools.setup with correct arguments.""" - cls.prepare() - setuptools.setup( - name=cls.name, version=cls.version, description=cls.description, - long_description=cls.long_description, - long_description_content_type=cls.long_description_content_type, - url=cls.url, download_url=cls.download_url, - author=cls.author, author_email=cls.author_email, - maintainer=cls.try_fields('maintainer', 'author'), - maintainer_email=cls.try_fields('maintainer_email', 'author_email'), - license=cls.license_str, classifiers=cls.classifiers, 
keywords=cls.keywords, - packages=cls.packages, package_dir={'': cls.root_directory}, - include_package_data=True, - package_data=cls.package_data, exclude_package_data=cls.exclude_package_data, - install_requires=cls.install_requires, extras_require=cls.extras_require, - python_requires=cls.python_requires, - entry_points=cls.entry_points, test_suite=cls.test_suite) diff --git a/test/common.py b/test/common.py index 27609f8..9a6412e 100644 --- a/test/common.py +++ b/test/common.py @@ -2,7 +2,6 @@ import collections.abc import datetime -import io import itertools import os import pathlib @@ -14,7 +13,6 @@ import horast import numpy as np -import pycparser.c_ast # import static_typing as st import typed_ast.ast3 import typed_astunparse @@ -33,9 +31,7 @@ def now_timestamp(): return datetime.datetime.now().strftime('%Y%m%d%H%M%S%f') -def random_data(shape=None, dtype=np.int): - if shape is None: - return dtype(np.random.rand() * 1000) +def random_data(shape, dtype): return (np.random.rand(*shape) * 1000).astype(dtype) @@ -118,16 +114,6 @@ def make_tmp_folder(sub_path: pathlib.Path, input_path: pathlib.Path) -> pathlib return output_dir -def c_ast_dump(node: pycparser.c_ast.Node) -> str: - io_ = io.StringIO() - node.show(io_, attrnames=True, nodenames=True, showcoord=True) - return io_.getvalue() - - -def basic_check_c_ast(case: unittest.TestCase, path, c_tree, **kwargs): - basic_check_ast(case, path, c_tree, pycparser.c_ast.FileAST, '.yaml', c_ast_dump, **kwargs) - - def basic_check_cpp_code(case: unittest.TestCase, path, code, **kwargs): basic_check_code(case, path, code, 'cpp14', **kwargs) diff --git a/test/fortran/__init__.py b/test/fortran/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/test/fortran/test_apps.py b/test/fortran/test_apps.py new file mode 100644 index 0000000..74c14ea --- /dev/null +++ b/test/fortran/test_apps.py @@ -0,0 +1,179 @@ +"""Integration tests based on various scientific applications written in Fortran.""" + +import logging +import os +import pathlib +import unittest + +from transpyle.general import Language, CodeReader, Parser, AstGeneralizer, Unparser, CodeWriter + +from ..common import \ + basic_check_fortran_code, basic_check_fortran_ast, \ + basic_check_python_code, basic_check_python_ast, \ + APPS_ROOT, execute_on_examples +from ..test_apps import _prepare_roundtrip, AppTests + +_LOG = logging.getLogger(__name__) + + +def _roundtrip_fortran(case, path, results_path, parser, ast_generalizer, unparser): + with path.open() as original_file: + basic_check_fortran_code(case, path, original_file.read(), results=results_path, + suffix=None) + fortran_ast = parser.parse('', path) + basic_check_fortran_ast(case, path, fortran_ast, results=results_path) + tree = ast_generalizer.generalize(fortran_ast) + basic_check_python_ast(case, path, tree, results=results_path) + # python_code = python_unparser.unparse(tree) + # basic_check_python_code(self, path, python_code, results=results_path) + # tree = python_parser.parse(python_code) + # basic_check_python_ast(self, path, tree, results=results_path) + fortran_code = unparser.unparse(tree) + basic_check_fortran_code(case, path, fortran_code, results=results_path) + + +def _migrate_fortran(case, path, results_path, parser, ast_generalizer, unparser): + with path.open() as original_file: + basic_check_fortran_code(case, path, original_file.read(), results=results_path, + suffix=None) + fortran_ast = parser.parse('', path) + basic_check_fortran_ast(case, path, fortran_ast, results=results_path) + tree = 
ast_generalizer.generalize(fortran_ast) + basic_check_python_ast(case, path, tree, results=results_path) + python_code = unparser.unparse(tree) + basic_check_python_code(case, path, python_code, results=results_path) + + +@unittest.skipIf( + Language.find('Fortran') is None, 'skipping due to missing Fortran language support') +class FFBMiniTests(AppTests): + + app_name = 'FFB-MINI' + + app_source_folder = APPS_ROOT.joinpath('ffb-mini', 'src') + + paths = [ + pathlib.Path(root, name) for root, _, files in os.walk(str(app_source_folder)) + for name in files if pathlib.Path(name).suffix in ('.f', '.F', '.f90', '.F90') + and name not in { + 'ddcom4.F', # SyntaxError - just not implemented yet + 'ffb_mini_main.F90', # NotImplementedError + 'f_test.F90', # NotImplementedError + 'mod_maprof.F90', # NotImplementedError + # OFP fails for the following files + # issues need to be resolved upstream or files need to be modified + 'bcgs3x.F', 'bcgsxe.F', 'calax3.F', 'callap.F', 'dd_mpi.F', 'e2plst.F', 'extrfn.F', + 'gfutil.f', 'grad3x.F', 'les3x.F', 'lesrop.F', 'lesrpx.F', 'lessfx.F', 'lrfnms.F', + 'makemesh.F90', 'miniapp_util.F', 'mfname.F', 'neibr2.F', 'nodlex.F', 'pres3e.F', + 'rcmelm.F', 'rfname.F', 'srfexx.F', 'vel3d1.F', 'vel3d2.F'}] + + @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test') + def test_roundtrip(self): + self._test_app(_prepare_roundtrip(self, Language.find('Fortran')), _roundtrip_fortran) + + +@unittest.skipIf( + Language.find('Fortran') is None, 'skipping due to missing Fortran language support') +class Flash5Tests(unittest.TestCase): + + app_name = 'FLASH5' + + app_source_folder = APPS_ROOT.joinpath('flash5', 'source') + + paths = path_selection_tree(app_source_folder, { + # pathlib.Path('physics', 'Eos', 'EosMain', 'Helmholtz_starkiller', + # 'SpeciesBased'): 'actual_eos.F90', # access specifiers (i.e public/private) + pathlib.Path('physics', 'Hydro', 'HydroMain', 'unsplit'): [ + 'hy_getFaceFlux.F90', + # 'hy_getRiemannState.F90', # need to preprocess 1 macro + 'hy_TVDslope.F90', + 'hy_upwindTransverseFlux.F90', pathlib.Path('MHD', 'hy_eigenVector.F90')], + pathlib.Path('physics', 'sourceTerms', 'Burn'): { + 'BurnMain': { + 'nuclearBurn': [ + 'Burn.F90', 'bn_burner.F90', 'bn_azbar.F90', 'bn_screen4.F90', 'bn_sneutx.F90', + 'bn_mcord.F90'], + pathlib.Path('nuclearBurn', 'Aprox13'): [ + 'bn_mapNetworkToSpecies.F90', 'bn_networkTable.F90', 'bn_networkRates.F90', + 'bn_networkScreen.F90', 'bn_network.F90', 'bn_networkSparseJakob.F90', + 'bn_networkSparsePointers.F90', 'bn_networkDenseJakob.F90', 'bn_gift.F90']}, + 'BurnIntegrate': ['bn_netIntegrate.F90', 'bn_baderMa28.F90', 'bn_rosenMa28.F90']}, + pathlib.Path('Simulation'): 'Simulation_init.F90' + }) + paths.append(app_source_folder.parent.joinpath('lib', 'ma28', 'source', 'Ma28.F90')) + + @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test') + @execute_on_examples(paths) + def test_roundtrip(self, input_path): + reader = CodeReader() + fortran_code = reader.read_file(input_path) + results_path = pathlib.Path(APPS_RESULTS_ROOT, 'flash5') + results_path.mkdir(exist_ok=True) + basic_check_fortran_code(self, input_path, fortran_code, results=results_path, suffix=None) + parser = Parser.find(Language.find('Fortran'))() + fortran_ast = parser.parse(fortran_code, input_path) + basic_check_fortran_ast(self, input_path, fortran_ast, results=results_path) + ast_generalizer = AstGeneralizer.find(Language.find('Fortran'))() + syntax = ast_generalizer.generalize(fortran_ast) + basic_check_python_ast(self, 
input_path, syntax, results=results_path) + unparser = Unparser.find(Language.find('Fortran'))() + code = unparser.unparse(syntax) + basic_check_fortran_code(self, input_path, code, results=results_path) + + def test_partial_inline_burn(self): + _ = self.app_source_folder.joinpath( + 'physics', 'sourceTerms', 'Burn', 'BurnMain', 'nuclearBurn') + inlined_path = _.joinpath('Aprox13', 'bn_mapNetworkToSpecies.F90') + target_path = _.joinpath('Burn.F90') + + reader = CodeReader() + inlined_code = reader.read_file(inlined_path) + target_code = reader.read_file(target_path) + + parser = Parser.find(Language.find('Fortran'))() + inlined_fortran_ast = parser.parse(inlined_code, inlined_path) + # inlined_fortran_ast = inlined_fortran_ast.find('.//subroutine') + target_fortran_ast = parser.parse(target_code, target_path) + + ast_generalizer = AstGeneralizer.find(Language.find('Fortran'))() + inlined_syntax = ast_generalizer.generalize(inlined_fortran_ast) + inlined_function = inlined_syntax.body[-1] + # TODO: implement object finding to find function + target_syntax = ast_generalizer.generalize(target_fortran_ast) + target_function = target_syntax.body[-1] + # TODO: implement object finding to find function + + # import horast + # print(horast.unparse(inlined_function)) + # print(horast.unparse(target_function)) + # import ipdb; ipdb.set_trace() + + # import static_typing + inlined_syntax = inline_syntax( + target_function, inlined_function, + # globals_={'NSPECIES': 13, 'st': static_typing, **globals()}, + verbose=True) + annotation = horast_nodes.OpenAccPragma('parallel loop') + annotate_loop_syntax(inlined_syntax, annotation) + + unparser = Unparser.find(Language.find('Fortran'))() + transformed_code = unparser.unparse(inlined_syntax) + + results_path = pathlib.Path(APPS_RESULTS_ROOT, 'flash5-inlined') + results_path.mkdir(exist_ok=True) + CodeWriter().write_file(transformed_code, results_path.joinpath('Burn.inlined_some.F90')) + + +@unittest.skipIf( + Language.find('Fortran') is None, 'skipping due to missing Fortran language support') +@unittest.skipUnless(os.environ.get('TEST_MIRANDA'), 'skipping tests on MIRANDA code') +class MirandaIOTests(AppTests): + + app_name = 'miranda_io' + + app_source_folder = APPS_ROOT.joinpath('miranda_io') + + paths = [app_source_folder.joinpath('miranda_io.f90')] + + def test_roundtrip_miranda_io(self): + self._test_app(_prepare_roundtrip(self, Language.find('Fortran')), _roundtrip_fortran) diff --git a/test/test_apps.py b/test/test_apps.py index 2bf4c98..0b5ada4 100644 --- a/test/test_apps.py +++ b/test/test_apps.py @@ -1,7 +1,6 @@ """Integration tests based on various scientific applications.""" import logging -import os import pathlib import typing as t import unittest @@ -10,13 +9,11 @@ import typed_ast.ast3 as typed_ast3 import horast.nodes as horast_nodes -from transpyle.general import Language, CodeReader, Parser, AstGeneralizer, Unparser, CodeWriter +from transpyle.general import Language, Parser, AstGeneralizer, Unparser from transpyle.pair import inline_syntax, annotate_loop_syntax from .common import \ - basic_check_fortran_code, basic_check_fortran_ast, \ - basic_check_python_code, basic_check_python_ast, \ - APPS_ROOT, APPS_RESULTS_ROOT, execute_on_examples + APPS_RESULTS_ROOT _LOG = logging.getLogger(__name__) @@ -95,34 +92,6 @@ def _prepare_roundtrip(case, language: Language): return parser, ast_generalizer, unparser -def _roundtrip_fortran(case, path, results_path, parser, ast_generalizer, unparser): - with path.open() as original_file: - 
basic_check_fortran_code(case, path, original_file.read(), results=results_path, - suffix=None) - fortran_ast = parser.parse('', path) - basic_check_fortran_ast(case, path, fortran_ast, results=results_path) - tree = ast_generalizer.generalize(fortran_ast) - basic_check_python_ast(case, path, tree, results=results_path) - # python_code = python_unparser.unparse(tree) - # basic_check_python_code(self, path, python_code, results=results_path) - # tree = python_parser.parse(python_code) - # basic_check_python_ast(self, path, tree, results=results_path) - fortran_code = unparser.unparse(tree) - basic_check_fortran_code(case, path, fortran_code, results=results_path) - - -def _migrate_fortran(case, path, results_path, parser, ast_generalizer, unparser): - with path.open() as original_file: - basic_check_fortran_code(case, path, original_file.read(), results=results_path, - suffix=None) - fortran_ast = parser.parse('', path) - basic_check_fortran_ast(case, path, fortran_ast, results=results_path) - tree = ast_generalizer.generalize(fortran_ast) - basic_check_python_ast(case, path, tree, results=results_path) - python_code = unparser.unparse(tree) - basic_check_python_code(case, path, python_code, results=results_path) - - class AppTests(unittest.TestCase): app_name = None @@ -140,132 +109,3 @@ def _test_app(self, tools, test, dir_name=None): for path in self.paths: with self.subTest(path=path): test(self, path, results_path, *tools) - - -class FFBMiniTests(AppTests): - - app_name = 'FFB-MINI' - - app_source_folder = APPS_ROOT.joinpath('ffb-mini', 'src') - - paths = [ - pathlib.Path(root, name) for root, _, files in os.walk(str(app_source_folder)) - for name in files if pathlib.Path(name).suffix in ('.f', '.F', '.f90', '.F90') - and name not in { - 'ddcom4.F', # SyntaxError - just not implemented yet - 'ffb_mini_main.F90', # NotImplementedError - 'f_test.F90', # NotImplementedError - 'mod_maprof.F90', # NotImplementedError - # OFP fails for the following files - # issues need to be resolved upstream or files need to be modified - 'bcgs3x.F', 'bcgsxe.F', 'calax3.F', 'callap.F', 'dd_mpi.F', 'e2plst.F', 'extrfn.F', - 'gfutil.f', 'grad3x.F', 'les3x.F', 'lesrop.F', 'lesrpx.F', 'lessfx.F', 'lrfnms.F', - 'makemesh.F90', 'miniapp_util.F', 'mfname.F', 'neibr2.F', 'nodlex.F', 'pres3e.F', - 'rcmelm.F', 'rfname.F', 'srfexx.F', 'vel3d1.F', 'vel3d2.F'}] - - @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test') - def test_roundtrip(self): - self._test_app(_prepare_roundtrip(self, Language.find('Fortran')), _roundtrip_fortran) - - -class Flash5Tests(unittest.TestCase): - - app_name = 'FLASH5' - - app_source_folder = APPS_ROOT.joinpath('flash5', 'source') - - paths = path_selection_tree(app_source_folder, { - # pathlib.Path('physics', 'Eos', 'EosMain', 'Helmholtz_starkiller', - # 'SpeciesBased'): 'actual_eos.F90', # access specifiers (i.e public/private) - pathlib.Path('physics', 'Hydro', 'HydroMain', 'unsplit'): [ - 'hy_getFaceFlux.F90', - # 'hy_getRiemannState.F90', # need to preprocess 1 macro - 'hy_TVDslope.F90', - 'hy_upwindTransverseFlux.F90', pathlib.Path('MHD', 'hy_eigenVector.F90')], - pathlib.Path('physics', 'sourceTerms', 'Burn'): { - 'BurnMain': { - 'nuclearBurn': [ - 'Burn.F90', 'bn_burner.F90', 'bn_azbar.F90', 'bn_screen4.F90', 'bn_sneutx.F90', - 'bn_mcord.F90'], - pathlib.Path('nuclearBurn', 'Aprox13'): [ - 'bn_mapNetworkToSpecies.F90', 'bn_networkTable.F90', 'bn_networkRates.F90', - 'bn_networkScreen.F90', 'bn_network.F90', 'bn_networkSparseJakob.F90', - 
'bn_networkSparsePointers.F90', 'bn_networkDenseJakob.F90', 'bn_gift.F90']}, - 'BurnIntegrate': ['bn_netIntegrate.F90', 'bn_baderMa28.F90', 'bn_rosenMa28.F90']}, - pathlib.Path('Simulation'): 'Simulation_init.F90' - }) - paths.append(app_source_folder.parent.joinpath('lib', 'ma28', 'source', 'Ma28.F90')) - - @unittest.skipUnless(os.environ.get('TEST_LONG'), 'skipping long test') - @execute_on_examples(paths) - def test_roundtrip(self, input_path): - reader = CodeReader() - fortran_code = reader.read_file(input_path) - results_path = pathlib.Path(APPS_RESULTS_ROOT, 'flash5') - results_path.mkdir(exist_ok=True) - basic_check_fortran_code(self, input_path, fortran_code, results=results_path, suffix=None) - parser = Parser.find(Language.find('Fortran'))() - fortran_ast = parser.parse(fortran_code, input_path) - basic_check_fortran_ast(self, input_path, fortran_ast, results=results_path) - ast_generalizer = AstGeneralizer.find(Language.find('Fortran'))() - syntax = ast_generalizer.generalize(fortran_ast) - basic_check_python_ast(self, input_path, syntax, results=results_path) - unparser = Unparser.find(Language.find('Fortran'))() - code = unparser.unparse(syntax) - basic_check_fortran_code(self, input_path, code, results=results_path) - - def test_partial_inline_burn(self): - _ = self.app_source_folder.joinpath( - 'physics', 'sourceTerms', 'Burn', 'BurnMain', 'nuclearBurn') - inlined_path = _.joinpath('Aprox13', 'bn_mapNetworkToSpecies.F90') - target_path = _.joinpath('Burn.F90') - - reader = CodeReader() - inlined_code = reader.read_file(inlined_path) - target_code = reader.read_file(target_path) - - parser = Parser.find(Language.find('Fortran'))() - inlined_fortran_ast = parser.parse(inlined_code, inlined_path) - # inlined_fortran_ast = inlined_fortran_ast.find('.//subroutine') - target_fortran_ast = parser.parse(target_code, target_path) - - ast_generalizer = AstGeneralizer.find(Language.find('Fortran'))() - inlined_syntax = ast_generalizer.generalize(inlined_fortran_ast) - inlined_function = inlined_syntax.body[-1] - # TODO: implement object finding to find function - target_syntax = ast_generalizer.generalize(target_fortran_ast) - target_function = target_syntax.body[-1] - # TODO: implement object finding to find function - - # import horast - # print(horast.unparse(inlined_function)) - # print(horast.unparse(target_function)) - # import ipdb; ipdb.set_trace() - - # import static_typing - inlined_syntax = inline_syntax( - target_function, inlined_function, - # globals_={'NSPECIES': 13, 'st': static_typing, **globals()}, - verbose=True) - annotation = horast_nodes.OpenAccPragma('parallel loop') - annotate_loop_syntax(inlined_syntax, annotation) - - unparser = Unparser.find(Language.find('Fortran'))() - transformed_code = unparser.unparse(inlined_syntax) - - results_path = pathlib.Path(APPS_RESULTS_ROOT, 'flash5-inlined') - results_path.mkdir(exist_ok=True) - CodeWriter().write_file(transformed_code, results_path.joinpath('Burn.inlined_some.F90')) - - -@unittest.skipUnless(os.environ.get('TEST_MIRANDA'), 'skipping tests on MIRANDA code') -class MirandaIOTests(AppTests): - - app_name = 'miranda_io' - - app_source_folder = APPS_ROOT.joinpath('miranda_io') - - paths = [app_source_folder.joinpath('miranda_io.f90')] - - def test_roundtrip_miranda_io(self): - self._test_app(_prepare_roundtrip(self, Language.find('Fortran')), _roundtrip_fortran) diff --git a/test/test_c.py b/test/test_c.py index cb2c3b9..f7616c2 100644 --- a/test/test_c.py +++ b/test/test_c.py @@ -6,17 +6,28 @@ import timing import 
typed_astunparse -from transpyle.general.code_reader import CodeReader -from transpyle.c.parser import C99Parser -from transpyle.c.ast_generalizer import CAstGeneralizer - -from .common import basic_check_c_ast, basic_check_python_ast, execute_on_language_examples +try: + from transpyle.c.ast_generalizer import CAstGeneralizer +except ImportError: + pass +try: + from transpyle.c.parser import C99Parser +except ImportError: + pass +from transpyle.general import AstGeneralizer, CodeReader, Parser + +from .common import basic_check_python_ast, execute_on_language_examples +try: + from .tools_c import basic_check_c_ast +except ImportError: + pass _LOG = logging.getLogger(__name__) _TIME = timing.get_timing_group(__name__) +@unittest.skipIf(Parser.find('C') is None, 'skipping due to missing C language support') class ParserTests(unittest.TestCase): @execute_on_language_examples('c11') @@ -30,6 +41,7 @@ def test_parse_examples(self, input_path): _LOG.info('parsed "%s" in %fs', input_path, timer.elapsed) +@unittest.skipIf(AstGeneralizer.find('C') is None, 'skipping due to missing C language support') class AstGeneralizerTests(unittest.TestCase): @execute_on_language_examples('c11') diff --git a/test/test_cpp.py b/test/test_cpp.py index ff2c243..3c05738 100644 --- a/test/test_cpp.py +++ b/test/test_cpp.py @@ -11,13 +11,29 @@ import timing import typed_astunparse -from transpyle.general.code_reader import CodeReader -from transpyle.general.binder import Binder -from transpyle.cpp.parser import CppParser -from transpyle.cpp.ast_generalizer import CppAstGeneralizer -from transpyle.cpp.unparser import Cpp14Unparser -from transpyle.cpp.compiler import CppSwigCompiler -from transpyle.cpp.compiler_interface import GppInterface +from transpyle.general import AstGeneralizer, Binder, CodeReader, Compiler, Parser, Unparser +from transpyle.general.exc import ExternalToolError + +try: + from transpyle.cpp.parser import CppParser +except (ImportError, ExternalToolError): + pass +try: + from transpyle.cpp.ast_generalizer import CppAstGeneralizer +except (ImportError, ExternalToolError): + pass +try: + from transpyle.cpp.unparser import Cpp14Unparser +except (ImportError, ExternalToolError): + pass +try: + from transpyle.cpp.compiler import CppSwigCompiler +except (ImportError, ExternalToolError): + pass +try: + from transpyle.cpp.compiler_interface import GppInterface +except (ImportError, ExternalToolError): + pass from .common import \ PERFORMANCE_RESULTS_ROOT, EXAMPLES_ROOT, EXAMPLES_ROOTS, \ @@ -29,6 +45,7 @@ _TIME = timing.get_timing_group(__name__) +@unittest.skipIf(Parser.find('C++') is None, 'skipping due to missing C++ language support') class ParserTests(unittest.TestCase): @execute_on_language_examples('cpp14') @@ -53,6 +70,7 @@ def test_try_parse_invalid(self): _LOG.debug('%s', err.exception) +@unittest.skipIf(AstGeneralizer.find('C++') is None, 'skipping due to missing C++ language support') class AstGeneralizerTests(unittest.TestCase): @execute_on_language_examples('cpp14') @@ -71,6 +89,7 @@ def test_generalize_examples(self, input_path): _LOG.debug('%s', typed_astunparse.unparse(syntax)) +@unittest.skipIf(Unparser.find('C++') is None, 'skipping due to missing C++ language support') class UnparserTests(unittest.TestCase): @execute_on_language_examples('cpp14') @@ -97,6 +116,7 @@ def test_unparse_examples(self, input_path): _LOG.info('unparsed "%s" in %fs', input_path, timer.elapsed) +@unittest.skipIf(Compiler.find('C++') is None, 'skipping due to missing C++ language support') class 
CompilerTests(unittest.TestCase): def test_cpp_paths_exist(self): diff --git a/test/test_dependencies.py b/test/test_dependencies.py index f961cb7..f41e636 100644 --- a/test/test_dependencies.py +++ b/test/test_dependencies.py @@ -6,25 +6,6 @@ class Tests(unittest.TestCase): - def test_cython(self): - import cython - - def test_numpy(self): - import numpy as np - self.assertIsNotNone(np.zeros((10, 10), dtype=int)) - - @unittest.skip('not ready yet') - def test_pyopencl(self): - import pyopencl - - def test_gfortran(self): - gfortran_path = shutil.which('gfortran') - self.assertIsNotNone(gfortran_path) - - def test_swig(self): - swig_path = shutil.which('swig') - self.assertIsNotNone(swig_path) - def test_typed_ast(self): from typed_ast import ast3 self.assertGreaterEqual(ast3.LATEST_MINOR_VERSION, 6) diff --git a/test/test_fortran.py b/test/test_fortran.py index 822adac..6b411b9 100644 --- a/test/test_fortran.py +++ b/test/test_fortran.py @@ -10,13 +10,28 @@ import numpy as np import timing -from transpyle.general.code_reader import CodeReader -from transpyle.general.binder import Binder -from transpyle.fortran.parser import FortranParser -from transpyle.fortran.ast_generalizer import FortranAstGeneralizer -from transpyle.fortran.unparser import Fortran77Unparser -from transpyle.fortran.compiler import F2PyCompiler -from transpyle.fortran.compiler_interface import GfortranInterface, PgifortranInterface +from transpyle.general import AstGeneralizer, Binder, CodeReader, Compiler, Parser, Unparser +from transpyle.general.exc import ExternalToolError +try: + from transpyle.fortran.parser import FortranParser +except (ImportError, ExternalToolError): + pass +try: + from transpyle.fortran.ast_generalizer import FortranAstGeneralizer +except (ImportError, ExternalToolError): + pass +try: + from transpyle.fortran.unparser import Fortran77Unparser +except (ImportError, ExternalToolError): + pass +try: + from transpyle.fortran.compiler import F2PyCompiler +except (ImportError, ExternalToolError): + pass +try: + from transpyle.fortran.compiler_interface import GfortranInterface, PgifortranInterface +except (ImportError, ExternalToolError): + pass from .common import \ random_data, EXAMPLES_ROOT, EXAMPLES_ROOTS, PERFORMANCE_RESULTS_ROOT, \ @@ -32,6 +47,7 @@ MB = 1024 * KB +@unittest.skipIf(Parser.find('Fortran') is None, 'skipping due to missing Fortran language support') class ParserTests(unittest.TestCase): @execute_on_language_examples('f77', 'f95') @@ -51,6 +67,8 @@ def test_try_parse_invalid(self): _LOG.debug('%s', err.exception) +@unittest.skipIf( + AstGeneralizer.find('Fortran') is None, 'skipping due to missing Fortran language support') class AstGeneralizerTests(unittest.TestCase): @execute_on_language_examples('f77', 'f95') @@ -65,6 +83,8 @@ def test_generalize_examples(self, input_path): _LOG.info('generalized "%s" in %fs', input_path, timer.elapsed) +@unittest.skipIf( + Unparser.find('Fortran') is None, 'skipping due to missing Fortran language support') class UnparserTests(unittest.TestCase): @execute_on_language_fundamentals('f77', 'f95') @@ -82,6 +102,8 @@ def test_unparse_fundamentals(self, input_path): _LOG.info('unparsed "%s" in %fs', input_path, timer.elapsed) +@unittest.skipIf( + Compiler.find('Fortran') is None, 'skipping due to missing Fortran language support') class CompilerTests(unittest.TestCase): @execute_on_language_examples('f77', 'f95', predicate_not=accelerated) diff --git a/test/test_packaging.py b/test/test_packaging.py new file mode 100644 index 0000000..dd14160 --- 
/dev/null +++ b/test/test_packaging.py @@ -0,0 +1,7 @@ +"""Tests for packaging.""" + +import boilerplates.packaging_tests + + +class Tests(boilerplates.packaging_tests.PackagingTests): + pass diff --git a/test/test_script.py b/test/test_script.py index debb4c8..5370e68 100644 --- a/test/test_script.py +++ b/test/test_script.py @@ -4,7 +4,7 @@ import io import unittest -from .test_setup import run_module +from boilerplates.packaging_tests import run_module class Tests(unittest.TestCase): diff --git a/test/test_setup.py b/test/test_setup.py deleted file mode 100644 index 52625af..0000000 --- a/test/test_setup.py +++ /dev/null @@ -1,351 +0,0 @@ -"""Tests for setup scripts.""" - -import importlib -import itertools -import os -import pathlib -import runpy -import subprocess -import sys -import tempfile -import types -import typing as t -import unittest - -__updated__ = '2019-06-04' - - -def run_program(*args, glob: bool = False): - """Run subprocess with given args. Use path globbing for each arg that contains an asterisk.""" - if glob: - cwd = pathlib.Path.cwd() - args = tuple(itertools.chain.from_iterable( - list(str(_.relative_to(cwd)) for _ in cwd.glob(arg)) if '*' in arg else [arg] - for arg in args)) - process = subprocess.Popen(args) - process.wait() - if process.returncode != 0: - raise AssertionError('execution of {} returned {}'.format(args, process.returncode)) - return process - - -def run_pip(*args, **kwargs): - python_exec_name = pathlib.Path(sys.executable).name - pip_exec_name = python_exec_name.replace('python', 'pip') - run_program(pip_exec_name, *args, **kwargs) - - -def run_module(name: str, *args, run_name: str = '__main__') -> None: - backup_sys_argv = sys.argv - sys.argv = [name + '.py'] + list(args) - runpy.run_module(name, run_name=run_name) - sys.argv = backup_sys_argv - - -def import_module(name: str = 'setup') -> types.ModuleType: - setup_module = importlib.import_module(name) - return setup_module - - -def import_module_member(module_name: str, member_name: str) -> t.Any: - module = import_module(module_name) - return getattr(module, member_name) - - -CLASSIFIERS_LICENSES = ( - 'License :: OSI Approved :: Python License (CNRI Python License)', - 'License :: OSI Approved :: Python Software Foundation License', - 'License :: Other/Proprietary License', - 'License :: Public Domain') - -CLASSIFIERS_PYTHON_VERSIONS = tuple("""Programming Language :: Python -Programming Language :: Python :: 2 -Programming Language :: Python :: 2.2 -Programming Language :: Python :: 2.7 -Programming Language :: Python :: 2 :: Only -Programming Language :: Python :: 3 -Programming Language :: Python :: 3.0 -Programming Language :: Python :: 3.5 -Programming Language :: Python :: 3 :: Only""".splitlines()) - -CLASSIFIERS_PYTHON_IMPLEMENTATIONS = tuple("""Programming Language :: Python :: Implementation -Programming Language :: Python :: Implementation :: CPython -Programming Language :: Python :: Implementation :: Jython -Programming Language :: Python :: Implementation :: PyPy -Programming Language :: Python :: Implementation :: Stackless""".splitlines()) - -CLASSIFIERS_VARIOUS = ( - 'Framework :: IPython', - 'Topic :: Scientific/Engineering', - 'Topic :: Sociology', - 'Topic :: Security :: Cryptography', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Topic :: Software Development :: Version Control :: Git', - 'Topic :: System', - 'Topic :: Utilities') - -CLASSIFIERS_LICENSES_TUPLES = tuple((_,) for _ in CLASSIFIERS_LICENSES) + ((),) - 
-CLASSIFIERS_PYTHON_VERSIONS_COMBINATIONS = tuple((_,) for _ in CLASSIFIERS_PYTHON_VERSIONS) - -CLASSIFIERS_PYTHON_IMPLEMENTATIONS_TUPLES = tuple((_,) for _ in CLASSIFIERS_PYTHON_IMPLEMENTATIONS) - -CLASSIFIERS_VARIOUS_COMBINATIONS = tuple(itertools.combinations( - CLASSIFIERS_VARIOUS, len(CLASSIFIERS_VARIOUS) - 1)) + (CLASSIFIERS_VARIOUS,) - -ALL_CLASSIFIERS_VARIANTS = [ - licenses + versions + implementations + various - for licenses in CLASSIFIERS_LICENSES_TUPLES - for versions in CLASSIFIERS_PYTHON_VERSIONS_COMBINATIONS - for implementations in CLASSIFIERS_PYTHON_IMPLEMENTATIONS_TUPLES - for various in CLASSIFIERS_VARIOUS_COMBINATIONS] - -LINK_EXAMPLES = [ - (None, 'setup.py', True), ('this file', 'setup.py', True), (None, 'test/test_setup.py', True), - (None, 'http://site.com', False), (None, '../something/else', False), (None, 'no.thing', False), - (None, '/my/abs/path', False)] - - -def get_package_folder_name(): - """Attempt to guess the built package name.""" - cwd = pathlib.Path.cwd() - directories = [ - path for path in cwd.iterdir() if pathlib.Path(cwd, path).is_dir() - and pathlib.Path(cwd, path, '__init__.py').is_file() and path.name != 'test'] - assert len(directories) == 1, directories - return directories[0].name - - -class UnitTests(unittest.TestCase): - """Test basic functionalities of the setup boilerplate.""" - - def test_find_version(self): - find_version = import_module_member('setup_boilerplate', 'find_version') - result = find_version(get_package_folder_name()) - self.assertIsInstance(result, str) - - def test_find_packages(self): - find_packages = import_module_member('setup_boilerplate', 'find_packages') - results = find_packages() - self.assertIsInstance(results, list) - for result in results: - self.assertIsInstance(result, str) - - def test_requirements(self): - parse_requirements = import_module_member('setup_boilerplate', 'parse_requirements') - results = parse_requirements() - self.assertIsInstance(results, list) - self.assertTrue(all(isinstance(result, str) for result in results), msg=results) - - def test_requirements_empty(self): - parse_requirements = import_module_member('setup_boilerplate', 'parse_requirements') - reqs_file = tempfile.NamedTemporaryFile('w', delete=False) - reqs_file.close() - results = parse_requirements(reqs_file.name) - self.assertIsInstance(results, list) - self.assertEqual(len(results), 0) - os.remove(reqs_file.name) - - def test_requirements_comments(self): - parse_requirements = import_module_member('setup_boilerplate', 'parse_requirements') - reqs = ['# comment', 'numpy', '', '# another comment', 'scipy', '', '# one more comment'] - reqs_file = tempfile.NamedTemporaryFile('w', delete=False) - for req in reqs: - print(req, file=reqs_file) - reqs_file.close() - results = parse_requirements(reqs_file.name) - self.assertIsInstance(results, list) - self.assertGreater(len(results), 0) - self.assertLess(len(results), len(reqs)) - os.remove(reqs_file.name) - - def test_python_versions(self): - find_required_python_version = import_module_member( - 'setup_boilerplate', 'find_required_python_version') - for variant in ALL_CLASSIFIERS_VARIANTS: - with self.subTest(variant=variant): - result = find_required_python_version(variant) - if result is not None: - self.assertIsInstance(result, str) - - def test_python_versions_combined(self): - find_required_python_version = import_module_member( - 'setup_boilerplate', 'find_required_python_version') - classifiers = [ - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: 
Python :: 3.5'] - req = find_required_python_version(classifiers) - self.assertEqual(req, '>=3.5') - - def test_python_versions_reversed(self): - find_required_python_version = import_module_member( - 'setup_boilerplate', 'find_required_python_version') - classifiers = [ - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6'] - req = find_required_python_version(classifiers) - self.assertEqual(req, '>=3.4') - req = find_required_python_version(reversed(classifiers)) - self.assertEqual(req, '>=3.4') - - def test_python_versions_none(self): - find_required_python_version = import_module_member( - 'setup_boilerplate', 'find_required_python_version') - result = find_required_python_version([]) - self.assertIsNone(result) - - def test_python_versions_many_only(self): - find_required_python_version = import_module_member( - 'setup_boilerplate', 'find_required_python_version') - classifiers = [ - 'Programming Language :: Python :: 2 :: Only', - 'Programming Language :: Python :: 3 :: Only'] - with self.assertRaises(ValueError): - find_required_python_version(classifiers) - - def test_python_versions_conflict(self): - find_required_python_version = import_module_member( - 'setup_boilerplate', 'find_required_python_version') - classifier_variants = [ - ['Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3 :: Only'], - ['Programming Language :: Python :: 2 :: Only', - 'Programming Language :: Python :: 3.0']] - for classifiers in classifier_variants: - with self.assertRaises(ValueError): - find_required_python_version(classifiers) - - -class PackageTests(unittest.TestCase): - - """Test methods of Package class.""" - - def test_try_fields(self): - package = import_module_member('setup_boilerplate', 'Package') - - class Package(package): # pylint: disable=too-few-public-methods - name = 'package name' - description = 'package description' - self.assertEqual(Package.try_fields('name', 'description'), 'package name') - self.assertEqual(Package.try_fields('bad_field', 'description'), 'package description') - with self.assertRaises(AttributeError): - self.assertIsNone(Package.try_fields()) - with self.assertRaises(AttributeError): - Package.try_fields('bad_field', 'another_bad_field') - - def test_parse_readme(self): - package = import_module_member('setup_boilerplate', 'Package') - - class Package(package): # pylint: disable=too-few-public-methods - name = 'package name' - description = 'package description' - version = '1.2.3.4' - url = 'https://github.com/example' - - with tempfile.NamedTemporaryFile('w', suffix='.md', delete=False) as temp_file: - temp_file.write('test test test') - result, content_type = Package.parse_readme(temp_file.name) - os.remove(temp_file.name) - self.assertIsInstance(result, str) - self.assertIsInstance(content_type, str) - - prefix = 'https://github.com/example/blob/v1.2.3.4/' - for name, link, done in LINK_EXAMPLES: - name = '' if name is None else name + ' ' - text = 'Please see `{}<{}>`_ for details.'.format(name, link) - with tempfile.NamedTemporaryFile('w', suffix='.rst', delete=False) as temp_file: - temp_file.write(text) - result, content_type = Package.parse_readme(temp_file.name) - os.remove(temp_file.name) - self.assertIsInstance(result, str) - self.assertIsInstance(content_type, str) - if not done: - self.assertEqual(result, text) - continue - if name == '': - name = link + ' ' - self.assertIn('`{}<{}{}>`_'.format(name, prefix, link), result) - - def 
test_prepare(self): - package = import_module_member('setup_boilerplate', 'Package') - - version_ = '1.2.3.4.5.6.7' - long_description_ = 'long package description' - - class Package(package): # pylint: disable=too-few-public-methods, missing-docstring - name = 'package name' - version = version_ - description = 'package description' - long_description = long_description_ - packages = [] - install_requires = [] - python_requires = '' - - self.assertEqual(Package.version, version_) - self.assertEqual(Package.long_description, long_description_) - Package.prepare() - self.assertEqual(Package.version, version_) - self.assertEqual(Package.long_description, long_description_) - - Package.long_description = None - Package.packages = None - Package.install_requires = None - Package.python_requires = None - Package.prepare() - - Package.version = None - with self.assertRaises(FileNotFoundError): - Package.prepare() - - -@unittest.skipUnless(os.environ.get('TEST_PACKAGING') or os.environ.get('CI'), - 'skipping packaging tests for actual package') -class IntergrationTests(unittest.TestCase): - - """Test if the boilerplate can actually create a valid package.""" - - pkg_name = get_package_folder_name() - - def test_build_binary(self): - run_module('setup', 'bdist') - self.assertTrue(os.path.isdir('dist')) - - def test_build_wheel(self): - run_module('setup', 'bdist_wheel') - self.assertTrue(os.path.isdir('dist')) - - def test_build_source(self): - run_module('setup', 'sdist', '--formats=gztar,zip') - self.assertTrue(os.path.isdir('dist')) - - def test_install_code(self): - run_pip('install', '.') - run_pip('uninstall', '-y', self.pkg_name) - - def test_install_source_tar(self): - find_version = import_module_member('setup_boilerplate', 'find_version') - version = find_version(self.pkg_name) - run_pip('install', 'dist/*-{}.tar.gz'.format(version), glob=True) - run_pip('uninstall', '-y', self.pkg_name) - - def test_install_source_zip(self): - find_version = import_module_member('setup_boilerplate', 'find_version') - version = find_version(self.pkg_name) - run_pip('install', 'dist/*-{}.zip'.format(version), glob=True) - run_pip('uninstall', '-y', self.pkg_name) - - def test_install_wheel(self): - find_version = import_module_member('setup_boilerplate', 'find_version') - version = find_version(self.pkg_name) - run_pip('install', 'dist/*-{}-*.whl'.format(version), glob=True) - run_pip('uninstall', '-y', self.pkg_name) - - def test_pip_error(self): - with self.assertRaises(AssertionError): - run_pip('wrong_pip_command') - - def test_setup_do_nothing_or_error(self): - run_module('setup', 'wrong_setup_command', run_name='__not_main__') - with self.assertRaises(SystemExit): - run_module('setup', 'wrong_setup_command') diff --git a/test/tools_c.py b/test/tools_c.py new file mode 100644 index 0000000..8b3f556 --- /dev/null +++ b/test/tools_c.py @@ -0,0 +1,18 @@ +"""Tools for testing C language support.""" + +import io +import unittest + +import pycparser.c_ast + +from .common import basic_check_ast + + +def c_ast_dump(node: pycparser.c_ast.Node) -> str: + io_ = io.StringIO() + node.show(io_, attrnames=True, nodenames=True, showcoord=True) + return io_.getvalue() + + +def basic_check_c_ast(case: unittest.TestCase, path, c_tree, **kwargs): + basic_check_ast(case, path, c_tree, pycparser.c_ast.FileAST, '.yaml', c_ast_dump, **kwargs) diff --git a/test_requirements.txt b/test_requirements.txt deleted file mode 100644 index 7e52136..0000000 --- a/test_requirements.txt +++ /dev/null @@ -1,12 +0,0 @@ -cython -docutils 
~= 0.15.1 -numba -numpy -open-fortran-parser ~= 0.6.0 -pcpp -pip >= 10.0 -pycparser -pygments >= 2.4.2 -timing -wheel --rrequirements.txt diff --git a/transpyle/__init__.py b/transpyle/__init__.py index c7d085d..636850f 100644 --- a/transpyle/__init__.py +++ b/transpyle/__init__.py @@ -3,6 +3,7 @@ import logging from .configuration import configure +from .general.exc import ExternalToolError configure() @@ -17,7 +18,7 @@ try: from .cpp import * -except ImportError: +except (ImportError, ExternalToolError): _LOG.warning("C++ unavailable") # try: @@ -27,7 +28,7 @@ try: from .fortran import * -except ImportError: +except (ImportError, ExternalToolError): _LOG.warning("Fortran unavailable") # try: diff --git a/transpyle/cpp/compiler.py b/transpyle/cpp/compiler.py index 624c739..07ffa55 100644 --- a/transpyle/cpp/compiler.py +++ b/transpyle/cpp/compiler.py @@ -9,10 +9,12 @@ import typing as t import argunparse +import version_query from ..general import \ temporarily_change_dir, run_tool, \ - Language, CodeReader, Parser, AstGeneralizer, Unparser, Compiler + ExternalTool, Language, CodeReader, Parser, AstGeneralizer, Unparser, Compiler +from ..general.exc import ExternalToolVersionError from .compiler_interface import GppInterface, ClangppInterface SWIG_INTERFACE_TEMPLATE = '''/* File: {module_name}.i */ @@ -90,6 +92,23 @@ _LOG = logging.getLogger(__name__) +class Swig(ExternalTool): + """Define requirements for SWIG.""" + + path = pathlib.Path('swig') + _version_arg = '-version' + + @classmethod + def _version_output_filter(cls, output: str) -> str: + for output_line in output.splitlines(): + if output_line.startswith('SWIG Version '): + return output_line.replace('SWIG Version ', '') + raise ExternalToolVersionError(f'could not extract version from output: {output}') + + +Swig.assert_version_at_least(version_query.Version(4, 0)) + + class SwigCompiler(Compiler): # TODO: create SWIG compiler interface similarily to F2PY interface diff --git a/transpyle/cpp/parser.py b/transpyle/cpp/parser.py index 84c078b..07e44da 100644 --- a/transpyle/cpp/parser.py +++ b/transpyle/cpp/parser.py @@ -7,13 +7,33 @@ import xml.etree.ElementTree as ET import argunparse +import version_query -from ..general import Parser +from ..general import ExternalTool, Parser +from ..general.exc import ExternalToolVersionError from ..general.tools import run_tool _LOG = logging.getLogger(__name__) -CASTXML_PATH = pathlib.Path('castxml') + +class CastXml(ExternalTool): + """Define how to execute CastXML tool. 
+ + https://github.com/CastXML/CastXML + """ + + path = pathlib.Path('castxml') + _version_arg = '--version' + + @classmethod + def _version_output_filter(cls, output: str) -> str: + for output_line in output.splitlines(): + if output_line.startswith('castxml version '): + return output_line.replace('castxml version ', '') + raise ExternalToolVersionError(f'could not extract version from output: {output}') + + +CastXml.assert_version_at_least(version_query.Version(0, 4)) def run_castxml(input_path: pathlib.Path, output_path: pathlib.Path, gcc: bool = False): @@ -29,7 +49,7 @@ def run_castxml(input_path: pathlib.Path, output_path: pathlib.Path, gcc: bool = elif platform.system() == 'Darwin': kwargs['castxml-cc-gnu'] = 'clang++' kwargs['o'] = str(output_path) - return run_tool(CASTXML_PATH, args, kwargs, + return run_tool(CastXml.path, args, kwargs, argunparser=argunparse.ArgumentUnparser(opt_value=' ')) diff --git a/transpyle/fortran/compiler_interface.py b/transpyle/fortran/compiler_interface.py index 5141092..a281862 100644 --- a/transpyle/fortran/compiler_interface.py +++ b/transpyle/fortran/compiler_interface.py @@ -5,8 +5,10 @@ import argunparse import numpy.f2py +import version_query -from ..general import call_tool, CompilerInterface +from ..general import call_tool, CompilerInterface, ExternalTool +from ..general.exc import ExternalToolVersionError _LOG = logging.getLogger(__name__) @@ -33,6 +35,23 @@ class GfortranInterface(CompilerInterface): } +class Gfortran(ExternalTool): + """Define requirements for GNU Fortran compiler.""" + + path = GfortranInterface._executables[''] + _version_arg = '--version' + + @classmethod + def _version_output_filter(cls, output: str) -> str: + for output_line in output.splitlines(): + if output_line.startswith('GNU Fortran '): + return output_line.split(' ')[-1] + raise ExternalToolVersionError(f'could not extract version from output: {output}') + + +Gfortran.assert_version_at_least(version_query.Version(10, 0)) + + class PgifortranInterface(CompilerInterface): """PGI Fortran compiler interface.""" diff --git a/transpyle/general/__init__.py b/transpyle/general/__init__.py index a56eaf0..7c3ca50 100644 --- a/transpyle/general/__init__.py +++ b/transpyle/general/__init__.py @@ -1,6 +1,7 @@ """Language-agnostic modules and base classes for language-specific modules in transpyle.""" from .tools import temporarily_change_dir, redirect_stdout_and_stderr, run_tool, call_tool +from .external_tool import ExternalTool from .language import Language @@ -19,6 +20,7 @@ from .transpiler import Transpiler, AutoTranspiler __all__ = ['temporarily_change_dir', 'redirect_stdout_and_stderr', 'run_tool', 'call_tool', + 'ExternalTool', 'Language', 'CodeReader', 'Parser', 'AstGeneralizer', 'IdentityAstGeneralizer', 'XmlAstGeneralizer', 'GeneralizingAutoParser', diff --git a/transpyle/general/exc.py b/transpyle/general/exc.py index c383872..1882828 100644 --- a/transpyle/general/exc.py +++ b/transpyle/general/exc.py @@ -1,5 +1,16 @@ """Non-standard exception types used in transpyle.""" +class ExternalToolError(Exception): + """Indicates an issue with an external tool.""" + + +class ExternalToolMissingError(ExternalToolError, FileNotFoundError): + """Raised when an external tool is not found.""" + + +class ExternalToolVersionError(ExternalToolError, AssertionError): + """Raised when an external tool doesn't satisfy the version requirements.""" + class ContinueIteration(StopIteration): diff --git a/transpyle/general/external_tool.py b/transpyle/general/external_tool.py new 
file mode 100644 index 0000000..90822f1 --- /dev/null +++ b/transpyle/general/external_tool.py @@ -0,0 +1,55 @@ +"""Evaluate external tool and .""" + +import pathlib +import shutil + +import version_query + +from .exc import ExternalToolMissingError, ExternalToolVersionError +from .tools import run_tool + + +class ExternalTool: + """Generic tool definition. + + When inheriting, the following has to be defined: + * path: path to the tool + * _version_arg: argument to get the version of the tool when executing it + * _version_output_filter: function to filter the output of the version command + """ + + path: pathlib.Path + _version_arg: str + _version: version_query.Version | None = None + + @classmethod + def exists(cls) -> bool: + path = shutil.which(cls.path.as_posix()) + return path is not None + + @classmethod + def assert_exists(cls) -> None: + """Assert that the external tool exists.""" + if not cls.exists(): + raise ExternalToolMissingError(f'{cls.path} not found') + + @classmethod + def version(cls) -> version_query.Version: + """Determine the version of the external tool.""" + if cls._version is None: + result = run_tool(cls.path, [cls._version_arg]) + version_str = cls._version_output_filter(result.stdout) + cls._version = version_query.Version.from_str(version_str) + return cls._version + + @classmethod + def _version_output_filter(cls, output: str) -> str: + raise NotImplementedError('this method needs to be implemented') + + @classmethod + def assert_version_at_least(cls, version: version_query.Version) -> None: + """Assert that the external tool is at least the given version.""" + cls.assert_exists() + if cls.version() < version: + raise ExternalToolVersionError( + f'{cls.path} version {cls.version} does not satisfy the requirement >= {version}') diff --git a/transpyle/main.py b/transpyle/main.py index 242bb6f..e09a385 100644 --- a/transpyle/main.py +++ b/transpyle/main.py @@ -11,7 +11,7 @@ from .general import Parser, AstGeneralizer, Unparser, Compiler, Binder PROG_NAME = 'transpyle' -COPYRIGHT_NOTICE = 'Copyright 2017-2019 Mateusz Bysiek https://mbdevpl.github.io/,' \ +COPYRIGHT_NOTICE = 'Copyright 2017-2025 Mateusz Bysiek https://mbdevpl.github.io/,' \ ' Apache License 2.0' STEP_DESCRIPTIONS = { 'parsing':