diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..c8a325df --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,15 @@ +# This file allows automatically setting reviewers for pull requests. +# Each line is a file pattern followed by one or more owners. +# The last match takes precedence over previous ones. +# Do not edit unless specifically mandated to do so. + +# Experts on specific backends +src/hdf5 @fleuryl-ai +src/mdsplus @GabrieleManduchi +src/uda @deepakmaroo + +# Global/fallback and technical modifications. +* @prasad-sawantdesai @olivhoenen + +# Modifications to CODEOWNERS and action workflows +.github/ @SimonPinches @olivhoenen diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index b6e859eb..4430d290 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -65,10 +65,74 @@ jobs: UDA_REF: "2.9.3" FMT_REF: "11.1.4" - # # - os: macos-13 - # # python: 311 - # # platform_id: macosx_x86_64 + - os: ubuntu-24.04 + triplet: x64-linux + python: 314 + cibw_platform: linux + cibw_archs: x86_64 + platform_id: manylinux_x86_64 + manylinux_image: quay.io/pypa/manylinux_2_28_x86_64 + AL_BACKEND_HDF5: AL_BACKEND_HDF5=ON + AL_BACKEND_MDSPLUS: AL_BACKEND_MDSPLUS=OFF + AL_BACKEND_UDA: AL_BACKEND_UDA=ON + UDA_REF: "2.9.3" + FMT_REF: "11.1.4" + + - os: macos-14 + triplet: arm64-osx + python: 310 + platform_id: macosx_arm64 + cibw_platform: macos + cibw_archs: arm64 + AL_BACKEND_HDF5: AL_BACKEND_HDF5=ON + AL_BACKEND_MDSPLUS: AL_BACKEND_MDSPLUS=OFF + AL_BACKEND_UDA: AL_BACKEND_UDA=OFF + UDA_REF: "2.9.3" + - os: macos-14 + triplet: arm64-osx + python: 311 + platform_id: macosx_arm64 + cibw_platform: macos + cibw_archs: arm64 + AL_BACKEND_HDF5: AL_BACKEND_HDF5=ON + AL_BACKEND_MDSPLUS: AL_BACKEND_MDSPLUS=OFF + AL_BACKEND_UDA: AL_BACKEND_UDA=OFF + UDA_REF: "2.9.3" + + - os: macos-14 + triplet: arm64-osx + python: 312 + platform_id: macosx_arm64 + cibw_platform: macos + cibw_archs: arm64 + AL_BACKEND_HDF5: AL_BACKEND_HDF5=ON + AL_BACKEND_MDSPLUS: AL_BACKEND_MDSPLUS=OFF + AL_BACKEND_UDA: AL_BACKEND_UDA=OFF + UDA_REF: "2.9.3" + + - os: macos-14 + triplet: arm64-osx + python: 313 + platform_id: macosx_arm64 + cibw_platform: macos + cibw_archs: arm64 + AL_BACKEND_HDF5: AL_BACKEND_HDF5=ON + AL_BACKEND_MDSPLUS: AL_BACKEND_MDSPLUS=OFF + AL_BACKEND_UDA: AL_BACKEND_UDA=OFF + UDA_REF: "2.9.3" + + - os: macos-14 + triplet: arm64-osx + python: 314 + platform_id: macosx_arm64 + cibw_platform: macos + cibw_archs: arm64 + AL_BACKEND_HDF5: AL_BACKEND_HDF5=ON + AL_BACKEND_MDSPLUS: AL_BACKEND_MDSPLUS=OFF + AL_BACKEND_UDA: AL_BACKEND_UDA=OFF + UDA_REF: "2.9.3" + - os: windows-2022 triplet: x64-windows python: 310 @@ -121,6 +185,19 @@ jobs: UDA_REF: "2.9.3" FMT_REF: "11.1.4" + - os: windows-2022 + triplet: x64-windows + python: 314 + platform_id: win_amd64 + cibw_platform: windows + cibw_archs: AMD64 + manylinux_image: windows + AL_BACKEND_HDF5: AL_BACKEND_HDF5=ON + AL_BACKEND_MDSPLUS: AL_BACKEND_MDSPLUS=OFF + AL_BACKEND_UDA: AL_BACKEND_UDA=OFF + UDA_REF: "2.9.3" + FMT_REF: "11.1.4" + steps: - uses: actions/checkout@v4 with: @@ -145,7 +222,7 @@ jobs: - name: Install cibuildwheel for windows if: startsWith(matrix.os, 'windows') run: | - python -m pip install cibuildwheel==3.0.0 + python -m pip install cibuildwheel==3.1.0 - name: Restore cibuildwheel cache id: cache @@ -181,6 +258,7 @@ jobs: CIBW_PLATFORM: ${{ matrix.cibw_platform }} CIBW_ARCHS: ${{ matrix.cibw_archs }} CIBW_BUILD_VERBOSITY: 1 + CIBW_SKIP: "cp31?t-*" # Skip free-threading builds
(not supported yet) CIBW_CONFIG_SETTINGS: > cmake.define.${{ matrix.AL_BACKEND_HDF5 }} cmake.define.${{ matrix.AL_BACKEND_MDSPLUS }} cmake.define.${{ matrix.AL_BACKEND_UDA }} @@ -203,13 +281,14 @@ jobs: path: C:\vcpkg\installed key: ${{ matrix.os }}-${{ env.cache-name }}-a - - uses: pypa/cibuildwheel@v3.0.0 + - uses: pypa/cibuildwheel@v3.1.0 if: startsWith(matrix.os, 'ubuntu-') env: CIBW_BUILD: cp${{ matrix.python }}-${{ matrix.platform_id }} CIBW_PLATFORM: ${{ matrix.cibw_platform }} CIBW_ARCHS: ${{ matrix.cibw_archs }} CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux_image }} + CIBW_SKIP: "cp31?t-*" # Skip free-threading builds (not supported yet) # Install system libraries # NOTE: manylinux_2_28 is AlmaLinux 8 based, e.g. use yum/dnf @@ -255,6 +334,45 @@ jobs: output-dir: wheelhouse config-file: "{package}/pyproject.toml" + - uses: pypa/cibuildwheel@v3.1.0 + if: startsWith(matrix.os, 'macos-') + env: + CIBW_BUILD: cp${{ matrix.python }}-${{ matrix.platform_id }} + CIBW_PLATFORM: macos + CIBW_ARCHS: ${{ matrix.cibw_archs }} + CIBW_SKIP: "cp31?t-*" # Skip free-threading builds (not supported yet) + + CIBW_CONFIG_SETTINGS: > + cmake.define.${{ matrix.AL_BACKEND_HDF5 }} + cmake.define.${{ matrix.AL_BACKEND_MDSPLUS }} + cmake.define.${{ matrix.AL_BACKEND_UDA }} + + # Dependency installation into /tmp + CIBW_BEFORE_ALL_MACOS: > + brew update >&2; + brew install cmake pkg-config boost hdf5 libomp ninja fmt spdlog libxml2 openssl capnp libmemcached >&2; + git clone --depth 1 --branch ${{ matrix.UDA_REF }} https://github.com/ukaea/UDA.git >&2 && + cd UDA >&2; + cmake -G Ninja -B build . \ -DBUILD_SHARED_LIBS=ON \ -DSSLAUTHENTICATION=ON \ -DCLIENT_ONLY=ON \ -DENABLE_CAPNP=ON \ -DMACOSX_DEPLOYMENT_TARGET=14.0 \ -DCMAKE_INSTALL_PREFIX=/tmp/uda-install >&2 && + cmake --build build --target install -j >&2; + + # Where to find the dependencies + CIBW_ENVIRONMENT_MACOS: > + MACOSX_DEPLOYMENT_TARGET=14.0 + CMAKE_PREFIX_PATH="/tmp/uda-install:/opt/homebrew:/usr/local" + PKG_CONFIG_PATH="/tmp/uda-install/lib/pkgconfig:/opt/homebrew/lib/pkgconfig:/usr/local/lib/pkgconfig" + + with: + package-dir: .
+ output-dir: wheelhouse + config-file: "{package}/pyproject.toml" + - uses: actions/upload-artifact@v4 with: name: cibw-wheels-cp${{ matrix.python }}-${{ matrix.platform_id }} diff --git a/CMakeLists.txt b/CMakeLists.txt index 7be9f9d4..e35f6e6e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,6 +1,10 @@ # CMake build configuration for Access Layer core cmake_minimum_required(VERSION 3.21) +if(POLICY CMP0144) + cmake_policy(SET CMP0144 NEW) +endif() + if(${CMAKE_VERSION} VERSION_GREATER_EQUAL 3.30) cmake_policy(SET CMP0167 NEW) # Use system BoostConfig instead of cmake FindBoost endif() @@ -69,12 +73,56 @@ if(NOT PROJECT_VERSION_TWEAK EQUAL 0) message("Building a development version of the Access Layer core") endif() +if(APPLE) + # Disable MDSPlus: + message(STATUS "Disabling MDSPlus backend on macOS") + set(AL_BACKEND_MDSPLUS OFF CACHE BOOL "MDSPlus backend" FORCE) + # Disable UDA: + message(STATUS "Disabling UDA backend on macOS") + set(AL_BACKEND_UDA OFF CACHE BOOL "UDA backend" FORCE) +endif() + # Dependencies # ############################################################################## if(WIN32) - find_package(PThreads4W CONFIG REQUIRED) + # Ensure vcpkg paths are in CMAKE_PREFIX_PATH for finding packages + if(DEFINED VCPKG_INSTALLED_DIR AND DEFINED VCPKG_TARGET_TRIPLET) + list(APPEND CMAKE_PREFIX_PATH "${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}") + message(STATUS "al-core: Added CMAKE_PREFIX_PATH: ${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}") + else() + # Try to auto-detect vcpkg installed directory from build path + if(CMAKE_CURRENT_BINARY_DIR MATCHES "(.*/build)/") + set(_BUILD_DIR "${CMAKE_MATCH_1}") + set(_VCPKG_PATH "${_BUILD_DIR}/vcpkg_installed/x64-windows") + if(EXISTS "${_VCPKG_PATH}") + list(APPEND CMAKE_PREFIX_PATH "${_VCPKG_PATH}") + message(STATUS "al-core: Auto-detected vcpkg path: ${_VCPKG_PATH}") + + # Set PKG_CONFIG_EXECUTABLE for vcpkg's pkgconf + if(EXISTS "${_VCPKG_PATH}/tools/pkgconf/pkgconf.exe") + set(PKG_CONFIG_EXECUTABLE "${_VCPKG_PATH}/tools/pkgconf/pkgconf.exe" CACHE FILEPATH "pkg-config executable") + message(STATUS "al-core: Set PKG_CONFIG_EXECUTABLE to ${PKG_CONFIG_EXECUTABLE}") + endif() + endif() + endif() + endif() + + message(STATUS "al-core: CMAKE_PREFIX_PATH = ${CMAKE_PREFIX_PATH}") + + # Add cmake module path for FindPThreads4W.cmake fallback + list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/common/cmake") + + # Try CONFIG mode first, fall back to MODULE mode + find_package(PThreads4W CONFIG QUIET) + if(NOT PThreads4W_FOUND) + message(STATUS "PThreads4W CONFIG not found, trying MODULE mode") + find_package(PThreads4W MODULE REQUIRED) + else() + message(STATUS "Found PThreads4W via CONFIG mode") + endif() + find_package(dlfcn-win32 CONFIG REQUIRED) endif() @@ -108,6 +156,7 @@ set(PUBLIC_HEADER_FILES include/readback_plugin_feature.h include/uri_parser.h include/data_interpolation.h + include/fix_include_windows.h # al_defs.h is generated in the binary folder with configure_file: ${CMAKE_CURRENT_BINARY_DIR}/include/al_defs.h) diff --git a/common/al_env.sh.in b/common/al_env.sh.in index 22878aac..1d363128 100644 --- a/common/al_env.sh.in +++ b/common/al_env.sh.in @@ -13,7 +13,7 @@ if [ -d "${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages" ]; then export PYTHONPATH="${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages:$PYTHONPATH" fi if [ -d "${CMAKE_INSTALL_PREFIX}/models/mdsplus" ]; then - export ids_path="${CMAKE_INSTALL_PREFIX}/models/mdsplus" + export 
MDSPLUS_MODELS_PATH="${CMAKE_INSTALL_PREFIX}/models/mdsplus" fi if [ -f "${CMAKE_INSTALL_PREFIX}/include/IDSDef.xml" ]; then export IDSDEF_PATH="${CMAKE_INSTALL_PREFIX}/include/IDSDef.xml" diff --git a/common/cmake/ALBuildDataDictionary.cmake b/common/cmake/ALBuildDataDictionary.cmake index 6aff7f7a..7961b623 100644 --- a/common/cmake/ALBuildDataDictionary.cmake +++ b/common/cmake/ALBuildDataDictionary.cmake @@ -11,18 +11,95 @@ if( AL_DOCS_ONLY ) endif() # Find Python for the xsltproc.py program -find_package(Python REQUIRED COMPONENTS Interpreter Development.Module) -# Find LibXslt for the xsltproc program -find_package( LibXslt QUIET ) -if( NOT LIBXSLT_XSLTPROC_EXECUTABLE ) - message( FATAL_ERROR "Could not find xsltproc" ) +if(WIN32) + if(NOT Python3_FOUND AND NOT PYTHON_EXECUTABLE) + # Check if Python is in PATH + find_program(PYTHON_EXECUTABLE NAMES python3.exe python.exe python3 python DOC "Python interpreter") + if(NOT PYTHON_EXECUTABLE) + message(FATAL_ERROR "Could not find Python. Please ensure Python is installed and in PATH.") + endif() + else() + set(PYTHON_EXECUTABLE ${Python3_EXECUTABLE}) + endif() +else() + find_package(Python REQUIRED COMPONENTS Interpreter Development.Module) + set(PYTHON_EXECUTABLE ${Python_EXECUTABLE}) +endif() + +message(STATUS "Found Python: ${PYTHON_EXECUTABLE}") + +# Set up Python venv paths for saxonche (used for all XSLT transformations) +if(WIN32) + set(_VENV_PYTHON "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/Scripts/python.exe") + set(_VENV_PIP "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/Scripts/pip.exe") +else() + set(_VENV_PYTHON "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/bin/python") + set(_VENV_PIP "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/bin/pip") endif() if( NOT AL_DOWNLOAD_DEPENDENCIES AND NOT AL_DEVELOPMENT_LAYOUT ) # The DD easybuild module should be loaded, use that module: - # Use idsinfo idspath command to get the path to IDSDef.xml or data_dictionary.xml + # Create Python venv first and install imas_data_dictionary + if(NOT EXISTS "${_VENV_PYTHON}") + execute_process( + COMMAND ${PYTHON_EXECUTABLE} -m venv dd_build_env + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + RESULT_VARIABLE _VENV_EXITCODE + OUTPUT_VARIABLE _VENV_OUTPUT + ERROR_VARIABLE _VENV_ERROR + ) + + if(_VENV_EXITCODE) + message(STATUS "venv stdout: ${_VENV_OUTPUT}") + message(STATUS "venv stderr: ${_VENV_ERROR}") + message(FATAL_ERROR "Failed to create venv (exit code: ${_VENV_EXITCODE}). Ensure Python has venv module installed: python -m venv --help") + endif() + + if(DEFINED DD_VERSION) + execute_process( + COMMAND ${_VENV_PIP} install imas_data_dictionary==${DD_VERSION} + RESULT_VARIABLE _PIP_EXITCODE + OUTPUT_VARIABLE _PIP_OUTPUT + ERROR_VARIABLE _PIP_ERROR + ) + else() + execute_process( + COMMAND ${_VENV_PIP} install imas_data_dictionary + RESULT_VARIABLE _PIP_EXITCODE + OUTPUT_VARIABLE _PIP_OUTPUT + ERROR_VARIABLE _PIP_ERROR + ) + endif() + + if(_PIP_EXITCODE) + message(STATUS "imas_data_dictionary pip output: ${_PIP_OUTPUT}") + message(STATUS "imas_data_dictionary pip error: ${_PIP_ERROR}") + message(FATAL_ERROR "Failed to install imas_data_dictionary dependency (exit code: ${_PIP_EXITCODE}). 
Check network connectivity and Python wheel compatibility.") + endif() + + execute_process( + COMMAND ${_VENV_PIP} install saxonche + RESULT_VARIABLE _PIP_EXITCODE + OUTPUT_VARIABLE _PIP_OUTPUT + ERROR_VARIABLE _PIP_ERROR + ) + + if(_PIP_EXITCODE) + message(STATUS "saxonche pip output: ${_PIP_OUTPUT}") + message(STATUS "saxonche pip error: ${_PIP_ERROR}") + message(FATAL_ERROR "Failed to install saxonche dependency (exit code: ${_PIP_EXITCODE}). Check network connectivity and Python wheel compatibility.") + endif() + endif() +# Set up idsinfo command path +if(WIN32) + set(_IDSINFO_COMMAND "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/Scripts/idsinfo.exe") +else() + set(_IDSINFO_COMMAND "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/bin/idsinfo") +endif() + + # Use idsinfo idspath command from venv to get the path to IDSDef.xml or data_dictionary.xml execute_process( - COMMAND idsinfo idspath + COMMAND ${_IDSINFO_COMMAND} idspath OUTPUT_VARIABLE IDSDEF OUTPUT_STRIP_TRAILING_WHITESPACE RESULT_VARIABLE _IDSINFO_EXITCODE @@ -58,37 +135,115 @@ if( NOT AL_DOWNLOAD_DEPENDENCIES AND NOT AL_DEVELOPMENT_LAYOUT ) if( NOT DD_IDENTIFIER_FILES ) message( WARNING "No identifier XML files found in Data Dictionary at: ${IDSDEF}" ) endif() -else() - # Build the DD from source: - include(FetchContent) - - if( AL_DOWNLOAD_DEPENDENCIES ) - # Download the Data Dictionary from the ITER git: - FetchContent_Declare( - data-dictionary - GIT_REPOSITORY ${DD_GIT_REPOSITORY} - GIT_TAG ${DD_VERSION} + + # When using pre-installed DD, we still need venv for extracting IDS names and version + # Create Python venv and install saxonche if not already done + if(NOT EXISTS "${_VENV_PYTHON}") + execute_process( + COMMAND ${PYTHON_EXECUTABLE} -m venv dd_build_env + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + RESULT_VARIABLE _VENV_EXITCODE + OUTPUT_VARIABLE _VENV_OUTPUT + ERROR_VARIABLE _VENV_ERROR ) - else() - # Look in ../data-dictionary for the data dictionary - if( NOT( AL_PARENT_FOLDER ) ) - set( AL_PARENT_FOLDER ${CMAKE_CURRENT_SOURCE_DIR}/.. ) - endif() - set( DD_SOURCE_DIRECTORY ${AL_PARENT_FOLDER}/data-dictionary ) - if( NOT IS_DIRECTORY ${DD_SOURCE_DIRECTORY} ) - message( FATAL_ERROR - "${DD_SOURCE_DIRECTORY} does not exist. Please clone the " - "data-dictionary repository or set AL_DOWNLOAD_DEPENDENCIES=ON." - ) + + if(_VENV_EXITCODE) + message(STATUS "venv stdout: ${_VENV_OUTPUT}") + message(STATUS "venv stderr: ${_VENV_ERROR}") + message(FATAL_ERROR "Failed to create venv (exit code: ${_VENV_EXITCODE}). Ensure Python has venv module installed: python -m venv --help") endif() - FetchContent_Declare( - data-dictionary - SOURCE_DIR ${DD_SOURCE_DIRECTORY} + execute_process( + COMMAND ${_VENV_PIP} install saxonche + RESULT_VARIABLE _PIP_EXITCODE + OUTPUT_VARIABLE _PIP_OUTPUT + ERROR_VARIABLE _PIP_ERROR ) - set( DD_SOURCE_DIRECTORY ) # unset temporary var + + if(_PIP_EXITCODE) + message(STATUS "saxonche pip output: ${_PIP_OUTPUT}") + message(STATUS "saxonche pip error: ${_PIP_ERROR}") + message(FATAL_ERROR "Failed to install saxonche dependency (exit code: ${_PIP_EXITCODE}). 
Check network connectivity and Python wheel compatibility.") + endif() endif() - FetchContent_MakeAvailable( data-dictionary ) +else() + if(WIN32) + # Build the DD from source using direct git commands: + if( AL_DOWNLOAD_DEPENDENCIES ) + # Download the Data Dictionary from the ITER git: + set( data-dictionary_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/_deps/data-dictionary-src" ) + if( NOT EXISTS "${data-dictionary_SOURCE_DIR}/.git" ) + message( STATUS "Cloning data-dictionary from ${DD_GIT_REPOSITORY}" ) + execute_process( + COMMAND git clone "${DD_GIT_REPOSITORY}" "${data-dictionary_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CLONE_RESULT + ERROR_VARIABLE _GIT_CLONE_ERROR + ) + if( _GIT_CLONE_RESULT ) + message( FATAL_ERROR "Failed to clone data-dictionary: ${_GIT_CLONE_ERROR}" ) + endif() + endif() + # Checkout the specified version + execute_process( + COMMAND git fetch origin + WORKING_DIRECTORY "${data-dictionary_SOURCE_DIR}" + RESULT_VARIABLE _GIT_FETCH_RESULT + ) + execute_process( + COMMAND git checkout "${DD_VERSION}" + WORKING_DIRECTORY "${data-dictionary_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CHECKOUT_RESULT + ERROR_VARIABLE _GIT_CHECKOUT_ERROR + ) + if( _GIT_CHECKOUT_RESULT ) + message( FATAL_ERROR "Failed to checkout ${DD_VERSION}: ${_GIT_CHECKOUT_ERROR}" ) + endif() + else() + # Look in ../data-dictionary for the data dictionary + if( NOT( AL_PARENT_FOLDER ) ) + set( AL_PARENT_FOLDER ${CMAKE_CURRENT_SOURCE_DIR}/.. ) + endif() + set( data-dictionary_SOURCE_DIR ${AL_PARENT_FOLDER}/data-dictionary ) + if( NOT IS_DIRECTORY ${data-dictionary_SOURCE_DIR} ) + message( FATAL_ERROR + "${data-dictionary_SOURCE_DIR} does not exist. Please clone the " + "data-dictionary repository or set AL_DOWNLOAD_DEPENDENCIES=ON." + ) + endif() + endif() + else() + # Build the DD from source: + include(FetchContent) + + if( AL_DOWNLOAD_DEPENDENCIES ) + # Download the Data Dictionary from the ITER git: + FetchContent_Declare( + data-dictionary + GIT_REPOSITORY ${DD_GIT_REPOSITORY} + GIT_TAG ${DD_VERSION} + ) + else() + # Look in ../data-dictionary for the data dictionary + if( NOT( AL_PARENT_FOLDER ) ) + set( AL_PARENT_FOLDER ${CMAKE_CURRENT_SOURCE_DIR}/.. ) + endif() + set( DD_SOURCE_DIRECTORY ${AL_PARENT_FOLDER}/data-dictionary ) + if( NOT IS_DIRECTORY ${DD_SOURCE_DIRECTORY} ) + message( FATAL_ERROR + "${DD_SOURCE_DIRECTORY} does not exist. Please clone the " + "data-dictionary repository or set AL_DOWNLOAD_DEPENDENCIES=ON." 
+ ) + endif() + + FetchContent_Declare( + data-dictionary + SOURCE_DIR ${DD_SOURCE_DIRECTORY} + ) + set( DD_SOURCE_DIRECTORY ) # unset temporary var + endif() + FetchContent_MakeAvailable( data-dictionary ) + endif() + # get version of the data dictionary execute_process( @@ -108,33 +263,52 @@ else() endif() # We need the IDSDef.xml at configure time, ensure it is built - execute_process( - COMMAND ${Python_EXECUTABLE} -m venv dd_build_env - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - ) - - execute_process( - COMMAND ${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/bin/pip install saxonche - RESULT_VARIABLE _PIP_EXITCODE - ) - - if(_PIP_EXITCODE) - message(FATAL_ERROR "Failed to install saxonche dependency") + # Create Python venv and install saxonche if not already done + if(NOT EXISTS "${_VENV_PYTHON}") + execute_process( + COMMAND ${PYTHON_EXECUTABLE} -m venv dd_build_env + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + RESULT_VARIABLE _VENV_EXITCODE + OUTPUT_VARIABLE _VENV_OUTPUT + ERROR_VARIABLE _VENV_ERROR + ) + + if(_VENV_EXITCODE) + message(STATUS "venv stdout: ${_VENV_OUTPUT}") + message(STATUS "venv stderr: ${_VENV_ERROR}") + message(FATAL_ERROR "Failed to create venv (exit code: ${_VENV_EXITCODE}). Ensure Python has venv module installed: python -m venv --help") + endif() + + execute_process( + COMMAND ${_VENV_PIP} install saxonche + RESULT_VARIABLE _PIP_EXITCODE + OUTPUT_VARIABLE _PIP_OUTPUT + ERROR_VARIABLE _PIP_ERROR + ) + + if(_PIP_EXITCODE) + message(STATUS "saxonche pip output: ${_PIP_OUTPUT}") + message(STATUS "saxonche pip error: ${_PIP_ERROR}") + message(FATAL_ERROR "Failed to install saxonche dependency (exit code: ${_PIP_EXITCODE}). Check network connectivity and Python wheel compatibility.") + endif() endif() execute_process( - COMMAND ${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/bin/python "${al-core_SOURCE_DIR}/xsltproc.py" + COMMAND ${_VENV_PYTHON} "${al-common_SOURCE_DIR}/xsltproc.py" -xsl "dd_data_dictionary.xml.xsl" -o "IDSDef.xml" -s "dd_data_dictionary.xml.xsd" DD_GIT_DESCRIBE=${DD_GIT_DESCRIBE} WORKING_DIRECTORY ${data-dictionary_SOURCE_DIR} RESULT_VARIABLE _MAKE_DD_EXITCODE + OUTPUT_VARIABLE _MAKE_DD_OUTPUT + ERROR_VARIABLE _MAKE_DD_ERROR ) if( _MAKE_DD_EXITCODE ) - # make did not succeed: - message( FATAL_ERROR "Error while building the Data Dictionary. See output on previous lines." ) + message(STATUS "xsltproc.py output: ${_MAKE_DD_OUTPUT}") + message(STATUS "xsltproc.py error: ${_MAKE_DD_ERROR}") + message(FATAL_ERROR "Error while building the Data Dictionary (exit code: ${_MAKE_DD_EXITCODE}). Check paths and Saxon-HE configuration.") endif() # Populate IDSDEF filename @@ -149,19 +323,74 @@ else() endif() # Find out which IDSs exist and populate IDS_NAMES +# Ensure saxonche is installed before using xsltproc.py +# Check if saxonche is available in the venv +execute_process( + COMMAND ${_VENV_PYTHON} -c "import saxonche" + RESULT_VARIABLE _SAXONCHE_CHECK + OUTPUT_QUIET + ERROR_QUIET +) + +if(_SAXONCHE_CHECK) + message(STATUS "Installing saxonche in venv...") + execute_process( + COMMAND ${_VENV_PIP} install saxonche + RESULT_VARIABLE _PIP_EXITCODE + OUTPUT_VARIABLE _PIP_OUTPUT + ERROR_VARIABLE _PIP_ERROR + ) + + if(_PIP_EXITCODE) + message(STATUS "saxonche pip output: ${_PIP_OUTPUT}") + message(STATUS "saxonche pip error: ${_PIP_ERROR}") + message(FATAL_ERROR "Failed to install saxonche dependency (exit code: ${_PIP_EXITCODE}). 
Check network connectivity and Python wheel compatibility.") + endif() +endif() + set( list_idss_file ${al-common_SOURCE_DIR}/list_idss.xsl ) set( CMAKE_CONFIGURE_DEPENDS ${CMAKE_CONFIGURE_DEPENDS};${list_idss_file};${IDSDEF} ) +set( ids_names_tmpfile "${CMAKE_CURRENT_BINARY_DIR}/ids_names_tmp.txt" ) execute_process( COMMAND - ${LIBXSLT_XSLTPROC_EXECUTABLE} ${list_idss_file} ${IDSDEF} - OUTPUT_VARIABLE IDS_NAMES + ${_VENV_PYTHON} "${al-common_SOURCE_DIR}/xsltproc.py" + -xsl ${list_idss_file} + -s ${IDSDEF} + -o ${ids_names_tmpfile} + RESULT_VARIABLE _XSLT_RESULT + ERROR_VARIABLE _XSLT_ERROR ) +if(_XSLT_RESULT) + message(FATAL_ERROR "Failed to extract IDS names: ${_XSLT_ERROR}") +endif() +if(EXISTS ${ids_names_tmpfile}) + file(READ ${ids_names_tmpfile} IDS_NAMES) + string(STRIP "${IDS_NAMES}" IDS_NAMES) + file(REMOVE ${ids_names_tmpfile}) +else() + message(FATAL_ERROR "IDS names output file not created") +endif() set( list_idss_file ) # unset temporary var # DD version set( dd_version_file ${al-common_SOURCE_DIR}/dd_version.xsl ) +set( dd_version_tmpfile "${CMAKE_CURRENT_BINARY_DIR}/dd_version_tmp.txt" ) execute_process( COMMAND - ${LIBXSLT_XSLTPROC_EXECUTABLE} ${dd_version_file} ${IDSDEF} - OUTPUT_VARIABLE DD_VERSION + ${_VENV_PYTHON} "${al-common_SOURCE_DIR}/xsltproc.py" + -xsl ${dd_version_file} + -s ${IDSDEF} + -o ${dd_version_tmpfile} + RESULT_VARIABLE _XSLT_RESULT + ERROR_VARIABLE _XSLT_ERROR ) -string( REGEX REPLACE "[+-]" "_" DD_SAFE_VERSION ${DD_VERSION} ) +if(_XSLT_RESULT) + message(FATAL_ERROR "Failed to extract DD version: ${_XSLT_ERROR}") +endif() +if(EXISTS ${dd_version_tmpfile}) + file(READ ${dd_version_tmpfile} DD_VERSION) + string(STRIP "${DD_VERSION}" DD_VERSION) + file(REMOVE ${dd_version_tmpfile}) +else() + message(FATAL_ERROR "DD version output file not created") +endif() +string( REGEX REPLACE "[+-]" "_" DD_SAFE_VERSION "${DD_VERSION}" ) set( dd_version_file ) # unset temporary var diff --git a/common/cmake/ALCommonConfig.cmake b/common/cmake/ALCommonConfig.cmake index badee05b..2431a5d9 100644 --- a/common/cmake/ALCommonConfig.cmake +++ b/common/cmake/ALCommonConfig.cmake @@ -13,8 +13,8 @@ option( AL_PLUGINS "Enable plugin framework for tests and examples" OFF ) option( AL_HLI_DOCS "Build the Sphinx-based High Level Interface documentation" OFF ) option( AL_DOCS_ONLY "Don't build anything, except the Sphinx-based High Level Interface documentation" OFF ) -# Find Saxon XSLT processor -find_package( SaxonHE REQUIRED ) +# Saxon XSLT processor has been replaced with Python saxonche +# No longer need to find SaxonHE - saxonche is installed automatically via pip in virtual environments if( NOT AL_DOWNLOAD_DEPENDENCIES ) if( DEFINED ENV{AL_COMMON_PATH} ) diff --git a/common/cmake/ALCore.cmake b/common/cmake/ALCore.cmake index 9492263e..21c6bc74 100644 --- a/common/cmake/ALCore.cmake +++ b/common/cmake/ALCore.cmake @@ -11,69 +11,176 @@ if( NOT AL_DOWNLOAD_DEPENDENCIES AND NOT AL_DEVELOPMENT_LAYOUT ) # Stop processing return() endif() - -include(FetchContent) - -if( AL_DOWNLOAD_DEPENDENCIES ) - # Download the AL core from the ITER git: - FetchContent_Declare( - al-core - GIT_REPOSITORY ${AL_CORE_GIT_REPOSITORY} - GIT_TAG ${AL_CORE_VERSION} - ) -else() - # Look in ../al-core - set( AL_SOURCE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/../al-core ) - if( NOT IS_DIRECTORY ${AL_SOURCE_DIRECTORY} ) - # Repository used to be called "al-lowlevel", check this directory as well for - # backwards compatibility: - set( AL_SOURCE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/../al-lowlevel ) - if( NOT 
IS_DIRECTORY ${AL_SOURCE_DIRECTORY} ) - message( FATAL_ERROR - "${AL_SOURCE_DIRECTORY} does not exist. Please clone the " - "al-core repository or set AL_DOWNLOAD_DEPENDENCIES=ON." +if(WIN32) + if( AL_DOWNLOAD_DEPENDENCIES ) + # Download the AL core from the ITER git using direct git commands: + set( al-core_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/_deps/al-core-src" ) + if( NOT EXISTS "${al-core_SOURCE_DIR}/.git" ) + message( STATUS "Cloning al-core from ${AL_CORE_GIT_REPOSITORY}" ) + execute_process( + COMMAND git clone "${AL_CORE_GIT_REPOSITORY}" "${al-core_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CLONE_RESULT + ERROR_VARIABLE _GIT_CLONE_ERROR ) + if( _GIT_CLONE_RESULT ) + message( FATAL_ERROR "Failed to clone al-core: ${_GIT_CLONE_ERROR}" ) + endif() + endif() + # Checkout the specified version + execute_process( + COMMAND git fetch origin + WORKING_DIRECTORY "${al-core_SOURCE_DIR}" + RESULT_VARIABLE _GIT_FETCH_RESULT + ) + execute_process( + COMMAND git checkout "${AL_CORE_VERSION}" + WORKING_DIRECTORY "${al-core_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CHECKOUT_RESULT + ERROR_VARIABLE _GIT_CHECKOUT_ERROR + ) + if( _GIT_CHECKOUT_RESULT ) + message( FATAL_ERROR "Failed to checkout ${AL_CORE_VERSION}: ${_GIT_CHECKOUT_ERROR}" ) + endif() + else() + # Look in ../al-core + set( al-core_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../al-core ) + if( NOT IS_DIRECTORY ${al-core_SOURCE_DIR} ) + # Repository used to be called "al-lowlevel", check this directory as well for + # backwards compatibility: + set( al-core_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../al-lowlevel ) + if( NOT IS_DIRECTORY ${al-core_SOURCE_DIR} ) + message( FATAL_ERROR + "${al-core_SOURCE_DIR} does not exist. Please clone the " + "al-core repository or set AL_DOWNLOAD_DEPENDENCIES=ON." + ) + endif() endif() endif() +else() + include(FetchContent) + + if( AL_DOWNLOAD_DEPENDENCIES ) + # Download the AL core from the ITER git: + FetchContent_Declare( + al-core + GIT_REPOSITORY ${AL_CORE_GIT_REPOSITORY} + GIT_TAG ${AL_CORE_VERSION} + ) + else() + # Look in ../al-core + set( AL_SOURCE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/../al-core ) + if( NOT IS_DIRECTORY ${AL_SOURCE_DIRECTORY} ) + # Repository used to be called "al-lowlevel", check this directory as well for + # backwards compatibility: + set( AL_SOURCE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/../al-lowlevel ) + if( NOT IS_DIRECTORY ${AL_SOURCE_DIRECTORY} ) + message( FATAL_ERROR + "${AL_SOURCE_DIRECTORY} does not exist. Please clone the " + "al-core repository or set AL_DOWNLOAD_DEPENDENCIES=ON." 
+ ) + endif() + endif() - FetchContent_Declare( - al-core - SOURCE_DIR ${AL_SOURCE_DIRECTORY} - ) - set( AL_SOURCE_DIRECTORY ) # unset temporary var + FetchContent_Declare( + al-core + SOURCE_DIR ${AL_SOURCE_DIRECTORY} + ) + set( AL_SOURCE_DIRECTORY ) # unset temporary var + endif() endif() + # Don't load the AL core when only building documentation if( NOT AL_DOCS_ONLY ) - FetchContent_MakeAvailable( al-core ) + # Ensure vcpkg packages are found in the subdirectory + if(WIN32) + # On Windows, ensure vcpkg packages are available to the subdirectory + if(DEFINED VCPKG_INSTALLED_DIR AND DEFINED VCPKG_TARGET_TRIPLET) + # Add vcpkg installed directory to CMAKE_PREFIX_PATH for the subdirectory + set(CMAKE_PREFIX_PATH "${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET};${CMAKE_PREFIX_PATH}") + # Pass vcpkg variables to subdirectory by setting them in parent scope + set(VCPKG_INSTALLED_DIR "${VCPKG_INSTALLED_DIR}" CACHE STRING "vcpkg installed dir" FORCE) + set(VCPKG_TARGET_TRIPLET "${VCPKG_TARGET_TRIPLET}" CACHE STRING "vcpkg triplet" FORCE) + message(STATUS "ALCore: Passing vcpkg paths to al-core subdirectory") + message(STATUS " VCPKG_INSTALLED_DIR: ${VCPKG_INSTALLED_DIR}") + message(STATUS " VCPKG_TARGET_TRIPLET: ${VCPKG_TARGET_TRIPLET}") + message(STATUS " CMAKE_PREFIX_PATH: ${CMAKE_PREFIX_PATH}") + endif() + add_subdirectory( ${al-core_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/_deps/al-core-build ) + else() + FetchContent_MakeAvailable( al-core ) + endif() get_target_property( AL_CORE_VERSION al VERSION ) endif() if( ${AL_PLUGINS} ) - if( ${AL_DOWNLOAD_DEPENDENCIES} ) - # Download the AL plugins from the ITER git: - FetchContent_Declare( - al-plugins - GIT_REPOSITORY ${AL_PLUGINS_GIT_REPOSITORY} - GIT_TAG ${AL_PLUGINS_VERSION} - ) - else() - # Look in ../plugins - set( PLUGINS_SOURCE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/../al-plugins ) - if( NOT IS_DIRECTORY ${PLUGINS_SOURCE_DIRECTORY} ) - message( FATAL_ERROR - "${PLUGINS_SOURCE_DIRECTORY} does not exist. Please clone the " - "al-plugins repository or set AL_DOWNLOAD_DEPENDENCIES=ON." + if(WIN32) + if( ${AL_DOWNLOAD_DEPENDENCIES} ) + # Download the AL plugins from the ITER git using direct git commands: + set( al-plugins_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/_deps/al-plugins-src" ) + if( NOT EXISTS "${al-plugins_SOURCE_DIR}/.git" ) + message( STATUS "Cloning al-plugins from ${AL_PLUGINS_GIT_REPOSITORY}" ) + execute_process( + COMMAND git clone "${AL_PLUGINS_GIT_REPOSITORY}" "${al-plugins_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CLONE_RESULT + ERROR_VARIABLE _GIT_CLONE_ERROR + ) + if( _GIT_CLONE_RESULT ) + message( FATAL_ERROR "Failed to clone al-plugins: ${_GIT_CLONE_ERROR}" ) + endif() + endif() + # Checkout the specified version + execute_process( + COMMAND git fetch origin + WORKING_DIRECTORY "${al-plugins_SOURCE_DIR}" + RESULT_VARIABLE _GIT_FETCH_RESULT ) + execute_process( + COMMAND git checkout "${AL_PLUGINS_VERSION}" + WORKING_DIRECTORY "${al-plugins_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CHECKOUT_RESULT + ERROR_VARIABLE _GIT_CHECKOUT_ERROR + ) + if( _GIT_CHECKOUT_RESULT ) + message( FATAL_ERROR "Failed to checkout ${AL_PLUGINS_VERSION}: ${_GIT_CHECKOUT_ERROR}" ) + endif() + else() + # Look in ../plugins + set( al-plugins_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../al-plugins ) + if( NOT IS_DIRECTORY ${al-plugins_SOURCE_DIR} ) + message( FATAL_ERROR + "${al-plugins_SOURCE_DIR} does not exist. Please clone the " + "al-plugins repository or set AL_DOWNLOAD_DEPENDENCIES=ON." 
+ ) + endif() endif() - FetchContent_Declare( - al-plugins - SOURCE_DIR ${PLUGINS_SOURCE_DIRECTORY} - ) - set( PLUGINS_SOURCE_DIRECTORY ) # unset temporary var + else() + if( ${AL_DOWNLOAD_DEPENDENCIES} ) + # Download the AL plugins from the ITER git: + FetchContent_Declare( + al-plugins + GIT_REPOSITORY ${AL_PLUGINS_GIT_REPOSITORY} + GIT_TAG ${AL_PLUGINS_VERSION} + ) + else() + # Look in ../plugins + set( PLUGINS_SOURCE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/../al-plugins ) + if( NOT IS_DIRECTORY ${PLUGINS_SOURCE_DIRECTORY} ) + message( FATAL_ERROR + "${PLUGINS_SOURCE_DIRECTORY} does not exist. Please clone the " + "al-plugins repository or set AL_DOWNLOAD_DEPENDENCIES=ON." + ) + endif() + + FetchContent_Declare( + al-plugins + SOURCE_DIR ${PLUGINS_SOURCE_DIRECTORY} + ) + set( PLUGINS_SOURCE_DIRECTORY ) # unset temporary var + endif() + FetchContent_MakeAvailable( al-plugins ) endif() - FetchContent_MakeAvailable( al-plugins ) endif() if( AL_HLI_DOCS ) diff --git a/common/cmake/ALSetCompilerFlags.cmake b/common/cmake/ALSetCompilerFlags.cmake index 4bd88c3b..b821ffce 100644 --- a/common/cmake/ALSetCompilerFlags.cmake +++ b/common/cmake/ALSetCompilerFlags.cmake @@ -34,8 +34,8 @@ endif() if(NOT DEFINED CMAKE_CXX_STANDARD) set( CMAKE_CXX_STANDARD 17 ) endif() -if( CMAKE_CXX_COMPILER_ID STREQUAL "Intel" ) - # icpc options +if( CMAKE_CXX_COMPILER_ID STREQUAL "Intel" OR CMAKE_CXX_COMPILER_ID STREQUAL "IntelLLVM" ) + # icpc/icpx options string( APPEND CMAKE_CXX_FLAGS # " -Wall" ) diff --git a/common/cmake/FindPThreads4W.cmake b/common/cmake/FindPThreads4W.cmake new file mode 100644 index 00000000..4de1783b --- /dev/null +++ b/common/cmake/FindPThreads4W.cmake @@ -0,0 +1,70 @@ +# FindPThreads4W.cmake - Minimal version +# Find the PThreads-Win32 library + +message(STATUS "FindPThreads4W: CMAKE_PREFIX_PATH = ${CMAKE_PREFIX_PATH}") +message(STATUS "FindPThreads4W: VCPKG_INSTALLED_DIR = ${VCPKG_INSTALLED_DIR}") +message(STATUS "FindPThreads4W: VCPKG_TARGET_TRIPLET = ${VCPKG_TARGET_TRIPLET}") +message(STATUS "FindPThreads4W: CMAKE_CURRENT_SOURCE_DIR = ${CMAKE_CURRENT_SOURCE_DIR}") +message(STATUS "FindPThreads4W: CMAKE_CURRENT_BINARY_DIR = ${CMAKE_CURRENT_BINARY_DIR}") + +# Try to determine vcpkg installed directory from build directory structure +set(_POSSIBLE_VCPKG_PATHS "") +if(CMAKE_CURRENT_BINARY_DIR MATCHES "(.*/build)/") + set(_BUILD_DIR "${CMAKE_MATCH_1}") + list(APPEND _POSSIBLE_VCPKG_PATHS + "${_BUILD_DIR}/vcpkg_installed/x64-windows" + "${_BUILD_DIR}/vcpkg_installed/x86-windows" + ) + message(STATUS "FindPThreads4W: Detected build dir: ${_BUILD_DIR}") +endif() + +# Find include directory and library +find_path(PThreads4W_INCLUDE_DIR + NAMES pthread.h + HINTS + ${PThreads4W_DIR} + ${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET} + ${CMAKE_PREFIX_PATH} + ${_POSSIBLE_VCPKG_PATHS} + PATH_SUFFIXES include +) + +find_library(PThreads4W_LIBRARY + NAMES pthreadVC3 pthreadVCE3 pthreadVSE3 pthread pthreads + HINTS + ${PThreads4W_DIR} + ${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET} + ${CMAKE_PREFIX_PATH} + ${_POSSIBLE_VCPKG_PATHS} + PATH_SUFFIXES lib +) + +message(STATUS "FindPThreads4W: PThreads4W_INCLUDE_DIR = ${PThreads4W_INCLUDE_DIR}") +message(STATUS "FindPThreads4W: PThreads4W_LIBRARY = ${PThreads4W_LIBRARY}") + +# Use standard CMake handling +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(PThreads4W + REQUIRED_VARS PThreads4W_INCLUDE_DIR PThreads4W_LIBRARY +) + +if(PThreads4W_FOUND) + set(PThreads4W_INCLUDE_DIRS ${PThreads4W_INCLUDE_DIR}) + set(PThreads4W_LIBRARIES 
${PThreads4W_LIBRARY}) + + # Create imported target + if(NOT TARGET PThreads4W::PThreads4W) + add_library(PThreads4W::PThreads4W UNKNOWN IMPORTED) + set_target_properties(PThreads4W::PThreads4W PROPERTIES + IMPORTED_LOCATION "${PThreads4W_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES "${PThreads4W_INCLUDE_DIR}" + ) + if(WIN32) + set_target_properties(PThreads4W::PThreads4W PROPERTIES + INTERFACE_LINK_LIBRARIES "ws2_32" + ) + endif(WIN32) + endif(NOT TARGET PThreads4W::PThreads4W) +endif(PThreads4W_FOUND) + +mark_as_advanced(PThreads4W_INCLUDE_DIR PThreads4W_LIBRARY) diff --git a/common/cmake/FindSaxonHE.cmake b/common/cmake/FindSaxonHE.cmake deleted file mode 100644 index af2b5d2f..00000000 --- a/common/cmake/FindSaxonHE.cmake +++ /dev/null @@ -1,56 +0,0 @@ -# Find Saxon-HE XSLT processor -# -# Sets the following variables -# - SaxonHE_FOUND -# - SaxonHE_CLASSPATH -# - SaxonHE_VERSION - -find_package( Java COMPONENTS Runtime ) -include( FindPackageHandleStandardArgs ) - -macro( TestSaxon CLASSPATH ) - execute_process( - COMMAND ${Java_JAVA_EXECUTABLE} -cp "${CLASSPATH}" net.sf.saxon.Transform -? - ERROR_VARIABLE _Saxon_OUTPUT - RESULT_VARIABLE _Saxon_RESULT - OUTPUT_QUIET - ) - if( _Saxon_RESULT EQUAL 0 ) - set( SaxonHE_CLASSPATH "${CLASSPATH}" CACHE STRING "Java classpath containing Saxon-HE" FORCE ) - if( _Saxon_OUTPUT MATCHES "Saxon(-HE)? ([^ ]*) from" ) - set( SaxonHE_VERSION ${CMAKE_MATCH_2} ) - else() - set( SaxonHE_VERSION "Unknown" ) - endif() - endif() -endmacro() - -if( Java_FOUND AND NOT SaxonHE_CLASSPATH ) - # Check if saxon is already on the classpath - TestSaxon( "$ENV{CLASSPATH}" ) - if( NOT SaxonHE_CLASSPATH ) - # Try to find Saxon in /usr/share/java: - find_file( SaxonHE_JAR - NAMES Saxon-HE.jar saxon-he.jar - PATHS - /usr/share/java/ - /usr/local/share/java/ - ) - if( SaxonHE_JAR ) - TestSaxon( "${SaxonHE_JAR}" ) - endif() - endif() -endif() - -if( SaxonHE_CLASSPATH ) - # Saxon found (or classpath set by user) - if( NOT SaxonHE_VERSION ) - TestSaxon( "${SaxonHE_CLASSPATH}" ) - endif() -endif() - -find_package_handle_standard_args( - SaxonHE - REQUIRED_VARS SaxonHE_CLASSPATH SaxonHE_VERSION - VERSION_VAR SaxonHE_VERSION -) diff --git a/common/doc_common/building_installing.rst b/common/doc_common/building_installing.rst index 07b2fe67..a5571fb3 100644 --- a/common/doc_common/building_installing.rst +++ b/common/doc_common/building_installing.rst @@ -7,6 +7,11 @@ Documentation for developers wishing to contribute to the Access Layer can be fo the :ref:`Access Layer development guide`. Please refer to that guide if you wish to set up a development environment. +.. note:: + + For Windows-specific installation instructions, please refer to the + :doc:`Windows Installation Guide `. + .. _`build prerequisites`: @@ -404,3 +409,4 @@ Troubleshooting **Problem:** ``Target Boost::log already has an imported location`` This problem is known to occur with the ``2020b`` toolchain on SDCC. Add the CMake configuration option ``-D Boost_NO_BOOST_CMAKE=ON`` to work around the problem. + diff --git a/common/doc_common/imas_uri.rst b/common/doc_common/imas_uri.rst index 891e76ac..30acbf6d 100644 --- a/common/doc_common/imas_uri.rst +++ b/common/doc_common/imas_uri.rst @@ -248,6 +248,6 @@ The :ref:`UDA backend` also recognizes these backend-specific query keys. and downloading IDS files to the local ``local_cache`` directory. ``local_cache`` - UDA ``local_cache`` is set to ``tmp/path_in_uri`` by default. This is used along with ``fetch=1`` in the query. 
+ UDA ``local_cache`` is set to ``tmp/uda-cache-of-$USER/path_in_uri`` by default. This is used along with ``fetch=1`` in the query. Set ``local_cache=/path/to/local/cache/directory`` and the download directory will be ``local_cache/path_in_uri``. ``local_cache`` specifies the path to the local cache directory where IDSs will be downloaded. diff --git a/common/doc_common/plugins_examples.rst b/common/doc_common/plugins_examples.rst index 281b5ce4..03bbf340 100644 --- a/common/doc_common/plugins_examples.rst +++ b/common/doc_common/plugins_examples.rst @@ -1,6 +1,11 @@ Plugins examples ================ +.. note:: + + The plugin examples referenced in this documentation are maintained in the al-plugins repository. + Please refer to: https://git.iter.org/projects/IMAS/repos/al-plugins/browse for the complete source code. + The ``debug`` plugin -------------------- @@ -9,8 +14,7 @@ In this first example, we want to display the value of the field ``ids_properties/version_put/access_layer`` for a given IDS during the execution of a ``get()`` operation. -The debug plugin is a C++ class named ``Debug_plugin``. `Figure 10`_ shows -the header code: +The debug plugin is a C++ class named ``Debug_plugin``. The header code shows: - The Debug_plugin class inherits from the access_layer_plugin plugin interface @@ -20,12 +24,11 @@ the header code: - The private attributes shot, dataobjectname and occurrence will be initialized during the initialization of the plugin -.. literalinclude:: ./plugins/debug_plugin.h - :caption: **Figure 10:** Header of the Debug_plugin class - :name: Figure 10 - :language: C++ +.. note:: + + The complete source code for the debug_plugin.h header is available in the al-plugins repository. -`Figure 10a`_ shows the plugin implementation code: +The plugin implementation code includes: - Plugin initialization occurs in the ``begin_global_action(...)`` function. However, no initialization is required in this example. @@ -42,19 +45,16 @@ the header code: ``getReadbackName(path, index)`` returns an empty string, meaning that the ``debug`` plugin does not define any *readback* plugin. +.. note:: -.. literalinclude:: ./plugins/debug_plugin.cpp - :caption: **Figure 10a:** C++ plugin implementation code - :name: Figure 10a - :language: C++ + The complete source code for the debug_plugin.cpp implementation is available in the al-plugins repository. Plugin compilation: creating a shared library ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -`Figure 11`_ shows the Makefile for compiling the plugin. This makefile -will also compile the C++ HLI code ``test_debug_plugin.cpp``, which uses -this plugin (`Figure 13`_). +The Makefile for compiling the plugin will also compile the C++ HLI code ``test_debug_plugin.cpp``, which uses +this plugin. Executing the Makefile generates the shared library ``debug_plugin.so`` and creates an executable ``test_debug_plugin`` for the HLI test @@ -336,25 +336,22 @@ Simplifying plugin code: introducing the ``AL_reader_helper_plugin`` class In this section, we show how to simplify the ``debug`` plugin code presented previously. For this purpose, we introduce the new helper class ``AL_reader_helper_plugin`` whose part of the header (provenance -feature operations have been removed for clarity) is shown in `Figure 15`_. -`Figure 16`_ depicts its implementation code. +feature operations have been removed for clarity) is shown in Figure 15. +Figure 16 depicts its implementation code. -.. 
literalinclude:: ./plugins/al_reader_helper_plugin.h - :caption: **Figure 15:** the ``AL_reader_helper_plugin`` class header - :name: Figure 15 - :language: C++ +.. note:: + + The complete source code for the al_reader_helper_plugin.h header is available in the al-plugins repository. By inheriting the helper class, we obtain the header of the ``Debug_plugin`` -class depicted in `Figure 17`_. The header code declares only the +class depicted in Figure 17. The header code declares only the ``read_data(..)`` function (from the plugin interface) whose implementation is overridden in the simplified implementation code of the ``Debug_plugin`` -class (`Figure 18`_). +class (Figure 18). +.. note:: -.. literalinclude:: ./plugins/al_reader_helper_plugin.cpp - :caption: **Figure 16:** the ``AL_reader_helper_plugin`` class implementation - :name: Figure 16 - :language: C++ + The complete source code for the al_reader_helper_plugin.cpp implementation is available in the al-plugins repository. .. code-block:: C++ :caption: **Figure 17:** the simplified ``Debug_plugin`` class header @@ -425,20 +422,13 @@ The requirement of IMAS-3121 ITER JIRA ticket is to fill in the ``ids_properties/creation_date`` node during a ``put()`` operation with the current date in the form YYYY-MM-DD. -.. literalinclude:: ./plugins/creation_date_plugin.cpp - :caption: **Figure 25:** Creation_date_plugin class implementation - :name: Figure 25 - :language: C++ +.. note:: -The ``Creation_date_plugin`` class whose implementation is depicted in -`Figure 25`_ implements this feature. `Figure 26`_ displays the header file -content. Provenance feature operations have been removed for clarity in -these files. + The complete source code for the creation_date_plugin.cpp implementation is available in the al-plugins repository. -.. literalinclude:: ./plugins/creation_date_plugin.cpp - :caption: **Figure 26:** Creation_date_plugin class header - :name: Figure 26 - :language: C++ +The ``Creation_date_plugin`` class implements this feature. The header file +content is also available in the al-plugins repository. Provenance feature operations have been removed for clarity in +these files. .. code-block:: python :caption: **Figure 27:** python client of the ``creation_date`` plugin @@ -789,21 +779,15 @@ Building a partial ``get()`` operation Skipping the read of an array of structure ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. literalinclude:: ./plugins/partial_get_plugin.cpp - :caption: **Figure 31:** the PartialGetPlugin class implementation - :name: Figure 31 - :language: C++ +.. note:: -.. literalinclude:: ./plugins/partial_get_plugin.h - :caption: **Figure 32:** the PartialGetPlugin header class - :name: Figure 32 - :language: C++ + The complete source code for the PartialGetPlugin class implementation (partial_get_plugin.cpp) + and header (partial_get_plugin.h) is available in the al-plugins repository. In a first use-case, the user wants to access only few attributes of the ``equilibrium`` IDS for many shots. In order to speed up reading, he decides to skip the loading of the ``grids_ggd`` array of structures (AOS). -The use of the ``PartialGetPlugin`` class implementation (`Figure 31`_) with -its header (`Figure 32`_) provides an efficient solution. During the ``get()`` +The ``PartialGetPlugin`` class provides an efficient solution. 
During the ``get()`` operation, the plugin intercepts the HLI call to the function ``al_begin_arraystruct_action(...)`` for the ``grids_ggd`` AOS and sets its size (using the arraySize pointer) to 0 after displaying a warning to the diff --git a/common/xsltproc.py b/common/xsltproc.py new file mode 100644 index 00000000..009b7ad4 --- /dev/null +++ b/common/xsltproc.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# simple net.sf.saxon.Transform CLI replacement via saxonche Python bindings +# example invocation: +# ./xsltproc.py -xsl IDSDef2MDSpreTree.xsl -s IDSDef.xml -o output.xml DD_GIT_DESCRIBE=1 AL_GIT_DESCRIBE=1 + +import argparse +import logging + +import saxonche + + +def parse_arguments() -> tuple: + """Parse arguments, similar to net.sf.saxon.Transform...""" + + parser = argparse.ArgumentParser( + prog="xsltproc.py", + description="Imitates Saxon-HE's net.sf.saxon.Transform.", + epilog="Additional arguments in format key=value will be set as xml parameters", + ) + parser.add_argument( + "-xsl", + "--stylesheet_file", + type=str, + required=True, + help="XSL style sheet file", + ) + parser.add_argument( + "-s", + "--source_file", + type=str, + required=True, + help="source XML document", + ) + parser.add_argument( + "-o", + "--output_file", + type=str, + required=True, + help="transformed output XML document", + ) + + args, other_args = parser.parse_known_args() + # Convert list of strings "key=value" into dict(key=value, ...); split only on the first "=" + other_kwargs = {k: v for k, v in map(lambda x: x.split("=", 1), other_args)} + return (args, other_kwargs) + + +def saxon_xsltproc( + source_file: str, stylesheet_file: str, output_file: str, **kwargs +) -> None: + with saxonche.PySaxonProcessor(license=False) as proc: + xsltproc = proc.new_xslt30_processor() + for key, value in kwargs.items(): + string_value = proc.make_string_value(value) + xsltproc.set_parameter(key, string_value) + xsltproc.transform_to_file( + source_file=source_file, + stylesheet_file=stylesheet_file, + output_file=output_file, + ) + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + args, other_kwargs = parse_arguments() + saxon_xsltproc( + source_file=args.source_file, + stylesheet_file=args.stylesheet_file, + output_file=args.output_file, + **other_kwargs, + ) diff --git a/docs/requirements.txt b/docs/requirements.txt index e512142f..d700e884 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,5 +1,5 @@ sphinx-immaterial>=0.12.0 sphinx-tabs>=3.4.0 sphinx-design>=0.5.0 -sphinx>=5.0 +sphinx>=5.0,<9.0 numpy>=1.20.0 diff --git a/docs/source/user_guide/backends_guide.rst b/docs/source/user_guide/backends_guide.rst index f20840a4..f918a7ea 100644 --- a/docs/source/user_guide/backends_guide.rst +++ b/docs/source/user_guide/backends_guide.rst @@ -217,6 +217,6 @@ The :ref:`UDA backend` also recognizes these backend-specific query keys. and downloading IDS files to the local ``local_cache`` directory. ``local_cache`` - UDA ``local_cache`` is set to ``tmp/path_in_uri`` by default. This is used along with ``fetch=1`` in the query. + UDA ``local_cache`` is set to ``tmp/uda-cache-of-$USER/path_in_uri`` by default. This is used along with ``fetch=1`` in the query. Set ``local_cache=/path/to/local/cache/directory`` and the download directory will be ``local_cache/path_in_uri``. ``local_cache`` specifies the path to the local cache directory where IDSs will be downloaded.
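The new ``common/xsltproc.py`` helper above can also be driven from Python instead of its command line. A minimal usage sketch, mirroring the example invocation in the module docstring (the file names come from that docstring and are placeholders; trailing keyword arguments become XSLT stylesheet parameters and should be strings, since they are wrapped with ``make_string_value``):

.. code-block:: python

    # Call the saxon_xsltproc() helper from common/xsltproc.py directly.
    # File names mirror the docstring example; they are not shipped by this change.
    from xsltproc import saxon_xsltproc

    saxon_xsltproc(
        source_file="IDSDef.xml",                 # source XML document
        stylesheet_file="IDSDef2MDSpreTree.xsl",  # XSL style sheet
        output_file="output.xml",                 # transformed output
        DD_GIT_DESCRIBE="1",                      # extra kwargs become XSLT parameters
        AL_GIT_DESCRIBE="1",
    )

This is equivalent to the CLI form ``./xsltproc.py -xsl IDSDef2MDSpreTree.xsl -s IDSDef.xml -o output.xml DD_GIT_DESCRIBE=1 AL_GIT_DESCRIBE=1``.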
diff --git a/docs/source/user_guide/configuration.rst b/docs/source/user_guide/configuration.rst index 185a5243..97186c06 100644 --- a/docs/source/user_guide/configuration.rst +++ b/docs/source/user_guide/configuration.rst @@ -46,6 +46,15 @@ Environment variables controlling IMAS-Core behaviour in the Python end-user API. +``IMAS_LOCAL_HOSTS`` + If you have a UDA server on a site where users have direct access to the IMAS data files, + the IMAS-Core backend can decide to use direct file access (via the relevant backend, e.g. HDF5) + instead of going via the UDA server (which has performance overheads). If you want to use this + feature, you need to set the list of UDA server hostnames (cf. ``UDA_HOST`` [#uda_uri]_) in the + ``IMAS_LOCAL_HOSTS`` environment variable. You can specify several servers, separated by a + semicolon ``;``. + + Environment variables controlling access layer plugins ------------------------------------------------------ @@ -102,6 +111,10 @@ Backend specific environment variables Specify the path to storing temporary data. If it is not set, the default location `/dev/shm/` or the current working directory will be chosen. +``MDSPLUS_MODELS_PATH`` + Specify the path where the MDSplus model files are stored for a given + version of the Data Dictionary (previously set by the ``ids_path`` variable, which + is now handled internally by the backend). UDA client configuration to reach the server at ITER diff --git a/docs/source/user_guide/installation.rst b/docs/source/user_guide/installation.rst index 3b72bdd0..dc2f77bd 100644 --- a/docs/source/user_guide/installation.rst +++ b/docs/source/user_guide/installation.rst @@ -17,11 +17,7 @@ Requirements - **Python 3.8+** - **Linux** (fully supported) -- **macOS and Windows** (experimental - in testing) -.. note:: - macOS and Windows support is still being tested. Linux is the primary supported platform. - Please report any issues on `GitHub `_. Binary wheels are provided for all platforms, so you don't need to compile anything. @@ -51,6 +47,13 @@ To build the IMAS-Core you need: - Boost C++ libraries (1.66 or newer) - PkgConfig +On Windows: +- **Visual Studio 2022** with: + - Desktop Development with C++ + - C++ Make Tools for Windows +- **CMake** (included with Visual Studio) + + The following dependencies are only required for some of the components: - Backends .. [#uda_install] When installing UDA, make sure you have `Cap'n'Proto `__ installed in your system - and add its support by adding the CMake switch `-DENABLE_CAPNP=ON` when configuring UDA. - - + and add its support by adding the CMake switch `-DENABLE_CAPNP=ON` when configuring UDA. Standard environments: .. md-tab-set:: @@ -89,6 +90,23 @@ Standard environments: details. - MATLAB, which is not freely available. + .. md-tab-item:: Windows with Visual Studio + + First, set up vcpkg: + + .. code-block:: bash + + git clone https://github.com/microsoft/vcpkg.git + cd vcpkg # VCPKG_INSTALLATION_PATH + bootstrap-vcpkg.bat + + Then run these commands in PowerShell before building: + + ..
code-block:: powershell + + $env:PATH += ";C:\Program Files\Microsoft Visual Studio\2022\Community\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake\bin\" + $env:PATH += ";C:\Program Files\Microsoft Visual Studio\2022\Community\VC\Tools\MSVC\14.44.35207\bin\HostX86\x86" + $env:PATH += ";" Building and installing IMAS-Core --------------------------------- @@ -107,18 +125,38 @@ First you need to clone the repository of the IMAS-Core you want to build: git clone git@github.com:iterorganization/IMAS-Core.git -cmake configuration +CMake configuration ~~~~~~~~~~~~~~~~~~~ Once you have cloned the repository, navigate your shell to the folder and run cmake. You can pass configuration options with ``-D OPTION=VALUE``. See below list for an overview of configuration options. +On Linux +'''''''' + .. code-block:: bash cd IMAS-Core cmake -B build -D CMAKE_INSTALL_PREFIX=$HOME/install -D OPTION1=VALUE1 -D OPTION2=VALUE2 [...] +On Windows +'''''''''' + +**Debug Build:** + +.. code-block:: bash + + cmake -Bbuild -S . -DVCPKG=ON -DCMAKE_INSTALL_PREFIX="" -DCMAKE_TOOLCHAIN_FILE="/scripts/buildsystems/vcpkg.cmake" -DAL_DOWNLOAD_DEPENDENCIES=ON cmake -B build -DCMAKE_INSTALL_PREFIX="$(pwd)/test-install/" -DAL_BACKEND_HDF5=ON -DAL_BACKEND_MDSPLUS=ON -DAL_BACKEND_UDA=ON -DAL_BUILD_MDSPLUS_MODELS=ON -DAL_PYTHON_BINDINGS=no-build-isolation -DAL_DOWNLOAD_DEPENDENCIES=ON -DDD_GIT_REPOSITORY=https://github.com/iterorganization/IMAS-Data-Dictionary.git -DDD_VERSION=main -DBoost_NO_BOOST_CMAKE=ON -DCMAKE_CXX_STANDARD=17 -DCMAKE_C_COMPILER=gcc -DCMAKE_CXX_COMPILER=g++ + +**Release Build:** + +.. code-block:: bash + + cmake -Bbuild -S . -DCMAKE_BUILD_TYPE=Release -DVCPKG=ON -DCMAKE_INSTALL_PREFIX="" -DCMAKE_TOOLCHAIN_FILE="/scripts/buildsystems/vcpkg.cmake" -DAL_DOWNLOAD_DEPENDENCIES=ON cmake -B build -DCMAKE_INSTALL_PREFIX="$(pwd)/test-install/" -DAL_BACKEND_HDF5=ON -DAL_BACKEND_MDSPLUS=ON -DAL_BACKEND_UDA=ON -DAL_BUILD_MDSPLUS_MODELS=ON -DAL_PYTHON_BINDINGS=no-build-isolation -DAL_DOWNLOAD_DEPENDENCIES=ON -DDD_GIT_REPOSITORY=https://github.com/iterorganization/IMAS-Data-Dictionary.git -DDD_VERSION=main -DBoost_NO_BOOST_CMAKE=ON -DCMAKE_CXX_STANDARD=17 -DCMAKE_C_COMPILER=gcc -DCMAKE_CXX_COMPILER=g++ + + + .. 
note:: CMake will automatically fetch dependencies from required repositories diff --git a/include/fix_include_windows.h b/include/fix_include_windows.h index 01692d40..db160e10 100644 --- a/include/fix_include_windows.h +++ b/include/fix_include_windows.h @@ -16,6 +16,9 @@ To fix std::min() after any #include "microsoft-mega-api.h" // use the Standard C++ std::min() and std::max() and ensure to #include #include + +#ifdef __cplusplus #include using std::max; -using std::min; \ No newline at end of file +using std::min; +#endif \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index d6e383c8..9a6d0ee5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,6 +61,15 @@ VCPKG_KEEP_ENV_VARS = "VCPKG_ROOT;CMAKE_PREFIX_PATH;CMAKE_TOOLCHAIN_FILE;CMAKE_P before-build = "bash ./ci/wheels/cibw_before_build_win.sh" repair-wheel-command = "bash ./ci/wheels/repair_windows.sh {wheel} {dest_dir}" +[tool.cibuildwheel.macos.environment] +CMAKE_PREFIX_PATH = "/opt/homebrew;/usr/local" +PKG_CONFIG_PATH = "/opt/homebrew/lib/pkgconfig:/usr/local/lib/pkgconfig" +MACOSX_DEPLOYMENT_TARGET = "14.0" + +[tool.cibuildwheel.macos] +archs = ["arm64"] +repair-wheel-command = "delocate-wheel -w {dest_dir} {wheel}" + [tool.scikit-build.cmake.define] BUILD_SHARED_LIBS = "ON" DOCS_ONLY = { env = "DOCS_ONLY", default = "OFF" } diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 0f213273..528e33f6 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -7,8 +7,16 @@ python_add_library(_al_lowlevel MODULE ${_al_lowlevel_source} WITH_SOABI) python_add_library(al_defs MODULE ${al_defs_source} WITH_SOABI) target_link_libraries(_al_lowlevel PRIVATE Python::NumPy al) target_link_libraries(al_defs PRIVATE Python::NumPy al) -set_target_properties(al PROPERTIES INSTALL_RPATH $ORIGIN) -set_target_properties(_al_lowlevel al_defs PROPERTIES INSTALL_RPATH $ORIGIN/../imas_core.libs) + +# Handling RPATH in macOS: +if(APPLE) + set_target_properties(al PROPERTIES INSTALL_RPATH "@loader_path") + set_target_properties(_al_lowlevel al_defs PROPERTIES INSTALL_RPATH "@loader_path/../imas_core.libs") +else() + set_target_properties(al PROPERTIES INSTALL_RPATH $ORIGIN) + set_target_properties(_al_lowlevel al_defs PROPERTIES INSTALL_RPATH $ORIGIN/../imas_core.libs) +endif() + install(FILES $ DESTINATION imas_core.libs) install(FILES $ DESTINATION imas_core.libs) install(FILES $ DESTINATION imas_core.libs) @@ -18,6 +26,62 @@ else() set(LIBRARY_DIRS) endif() +# Add vcpkg directories on Windows +if(WIN32) + message(STATUS "Searching for vcpkg directories...") + message(STATUS " CMAKE_BINARY_DIR: ${CMAKE_BINARY_DIR}") + message(STATUS " CMAKE_TOOLCHAIN_FILE: ${CMAKE_TOOLCHAIN_FILE}") + message(STATUS " VCPKG_INSTALLED_DIR: ${VCPKG_INSTALLED_DIR}") + message(STATUS " VCPKG_TARGET_TRIPLET: ${VCPKG_TARGET_TRIPLET}") + + # Method 1: Try VCPKG_INSTALLED_DIR variable + if(DEFINED VCPKG_INSTALLED_DIR AND DEFINED VCPKG_TARGET_TRIPLET) + if(EXISTS "${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/bin") + list(APPEND LIBRARY_DIRS "${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/bin") + list(APPEND LIBRARY_DIRS "${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/debug/bin") + message(STATUS " Found via VCPKG_INSTALLED_DIR: ${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}") + endif() + endif() + + # Method 2: Try extracting from CMAKE_TOOLCHAIN_FILE + if(DEFINED CMAKE_TOOLCHAIN_FILE) + get_filename_component(VCPKG_ROOT "${CMAKE_TOOLCHAIN_FILE}" DIRECTORY) + get_filename_component(VCPKG_ROOT "${VCPKG_ROOT}" DIRECTORY) + 
get_filename_component(VCPKG_ROOT "${VCPKG_ROOT}" DIRECTORY) + if(EXISTS "${VCPKG_ROOT}/installed/x64-windows/bin") + list(APPEND LIBRARY_DIRS "${VCPKG_ROOT}/installed/x64-windows/bin") + list(APPEND LIBRARY_DIRS "${VCPKG_ROOT}/installed/x64-windows/debug/bin") + message(STATUS " Found via CMAKE_TOOLCHAIN_FILE: ${VCPKG_ROOT}/installed/x64-windows") + endif() + endif() + + # Method 3: Search parent directories for vcpkg_installed (FetchContent/ExternalProject builds) + set(SEARCH_DIR "${CMAKE_BINARY_DIR}") + foreach(i RANGE 6) + get_filename_component(SEARCH_DIR "${SEARCH_DIR}" DIRECTORY) + if(EXISTS "${SEARCH_DIR}/vcpkg_installed/x64-windows/bin") + list(APPEND LIBRARY_DIRS "${SEARCH_DIR}/vcpkg_installed/x64-windows/bin") + list(APPEND LIBRARY_DIRS "${SEARCH_DIR}/vcpkg_installed/x64-windows/debug/bin") + message(STATUS " Found via parent search: ${SEARCH_DIR}/vcpkg_installed") + break() + endif() + endforeach() + + # Remove duplicates + if(LIBRARY_DIRS) + list(REMOVE_DUPLICATES LIBRARY_DIRS) + endif() + + # Ensure we have at least some directories (install-time fallback) + if(NOT LIBRARY_DIRS) + message(WARNING "Could not find vcpkg directories at configure time. Adding PATH as fallback.") + # This will be resolved at install time from system PATH + list(APPEND LIBRARY_DIRS "$ENV{PATH}") + endif() + + message(STATUS "Python wheel LIBRARY_DIRS: ${LIBRARY_DIRS}") +endif() + # Build POST_EXCLUDE_REGEXES list based on platform set(POST_EXCLUDE_PATTERNS ".*system32/.*\\.dll" # Windows system DLLs @@ -48,4 +112,4 @@ install( "ext-ms-" # Windows extension DLLs POST_EXCLUDE_REGEXES ${POST_EXCLUDE_PATTERNS} ) -install(TARGETS _al_lowlevel al_defs DESTINATION imas_core) \ No newline at end of file +install(TARGETS _al_lowlevel al_defs DESTINATION imas_core) diff --git a/skbuild.cmake b/skbuild.cmake index 21668734..ab217342 100644 --- a/skbuild.cmake +++ b/skbuild.cmake @@ -51,6 +51,7 @@ set_target_properties( al-python-bindings PROPERTIES DIST_FOLDER ${CMAKE_CURRENT_BINARY_DIR}/dist/) install(CODE "execute_process(COMMAND ${Python_EXECUTABLE} -m pip install imas_core +--no-index --prefix=${CMAKE_INSTALL_PREFIX} --find-links ${CMAKE_CURRENT_BINARY_DIR}/dist/ )" diff --git a/src/ascii_backend.cpp b/src/ascii_backend.cpp index 34735eac..40d943ed 100644 --- a/src/ascii_backend.cpp +++ b/src/ascii_backend.cpp @@ -218,6 +218,8 @@ void AsciiBackend::beginAction(OperationContext *ctx) this->pulsefile.open(this->fname, std::ios::in); if (this->pulsefile.fail()) throw ALBackendException("Failed to open file "+this->fname+" in read mode",LOG); + this->curcontent.str(std::string()); + this->curcontent.clear(); this->curcontent << this->pulsefile.rdbuf(); this->curcontent_map.clear(); while (std::getline(this->curcontent, this->curline)) { @@ -486,6 +488,7 @@ int AsciiBackend::readData(Context *ctx, return 0; } + this->curcontent.clear(); this->curcontent.seekg((*seekpos).second); this->curline = pathname; diff --git a/src/ascii_backend.h b/src/ascii_backend.h index ef3df61c..fe884e5c 100644 --- a/src/ascii_backend.h +++ b/src/ascii_backend.h @@ -92,15 +92,15 @@ class IMAS_CORE_LIBRARY_API AsciiBackend : public Backend void get_occurrences(Context* ctx, const char* ids_name, int** occurrences_list, int* size) override; - bool supportsTimeDataInterpolation() { + bool supportsTimeDataInterpolation() override { return false; } - void initDataInterpolationComponent() { + void initDataInterpolationComponent() override { throw ALBackendException("ASCII backend does not support time range and time slices 
operations",LOG); } - bool supportsTimeRangeOperation() { + bool supportsTimeRangeOperation() override { return false; } diff --git a/src/flexbuffers_backend.cpp b/src/flexbuffers_backend.cpp index 5240109f..32f71811 100644 --- a/src/flexbuffers_backend.cpp +++ b/src/flexbuffers_backend.cpp @@ -1,6 +1,7 @@ #include "flexbuffers_backend.h" #include +#include #include #define ENDIAN_MARKER_VALUE uint32_t(0x01020304) @@ -166,7 +167,9 @@ void FlexbuffersBackend::beginAction(OperationContext* ctx) { _push_element_map(root.AsVector()); // Check that we have the same endian-ness as the machine that // serialized - uint32_t endian_marker = *reinterpret_cast(_cur_vector.top()[0].AsBlob().data()); + uint32_t endian_marker; + auto blob = _cur_vector.top()[0].AsBlob(); + memcpy(&endian_marker, blob.data(), sizeof(endian_marker)); if (endian_marker != ENDIAN_MARKER_VALUE) { std::stringstream ss; ss << "Error when deserializing data: expected endian marker 0x"; diff --git a/src/hdf5/CMakeLists.txt b/src/hdf5/CMakeLists.txt index dc9c043b..f9a19479 100644 --- a/src/hdf5/CMakeLists.txt +++ b/src/hdf5/CMakeLists.txt @@ -1,6 +1,13 @@ # CMake configuration for the HDF5 backend - -find_package( HDF5 COMPONENTS C HL REQUIRED ) +if(WIN32) + # Try modern CONFIG mode first (vcpkg), fallback to legacy FindHDF5 module + find_package( hdf5 CONFIG QUIET ) + if( NOT hdf5_FOUND ) + find_package( HDF5 COMPONENTS C HL REQUIRED ) + endif() +else() + find_package( HDF5 COMPONENTS C HL REQUIRED ) +endif() target_sources( al PRIVATE hdf5_backend.cpp @@ -14,8 +21,29 @@ target_sources( al PRIVATE hdf5_backend_factory.cpp ) target_compile_definitions( al PRIVATE -DHDF5 ) - -target_include_directories( al PRIVATE ${HDF5_C_INCLUDE_DIRS} ${CMAKE_CURRENT_SOURCE_DIR} ) -target_link_libraries( al PRIVATE ${HDF5_C_LIBRARIES} ) -target_compile_definitions( al PRIVATE ${HDF5_C_DEFINITIONS} ) +if(WIN32) + # Use modern CMake targets if available (vcpkg), otherwise legacy variables + if( TARGET hdf5::hdf5-shared ) + target_link_libraries( al PRIVATE hdf5::hdf5-shared ) + if( TARGET hdf5::hdf5_hl-shared ) + target_link_libraries( al PRIVATE hdf5::hdf5_hl-shared ) + endif() + elseif( TARGET hdf5::hdf5-static ) + target_link_libraries( al PRIVATE hdf5::hdf5-static ) + if( TARGET hdf5::hdf5_hl-static ) + target_link_libraries( al PRIVATE hdf5::hdf5_hl-static ) + endif() + else() + # Legacy FindHDF5 module + target_include_directories( al PRIVATE ${HDF5_C_INCLUDE_DIRS} ) + target_link_libraries( al PRIVATE ${HDF5_C_LIBRARIES} ) + target_compile_definitions( al PRIVATE ${HDF5_C_DEFINITIONS} ) + endif() + + target_include_directories( al PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ) +else() + target_include_directories( al PRIVATE ${HDF5_C_INCLUDE_DIRS} ${CMAKE_CURRENT_SOURCE_DIR} ) + target_link_libraries( al PRIVATE ${HDF5_C_LIBRARIES} ) + target_compile_definitions( al PRIVATE ${HDF5_C_DEFINITIONS} ) +endif() diff --git a/src/hdf5/hdf5_backend.h b/src/hdf5/hdf5_backend.h index 21ce3511..26bdc001 100644 --- a/src/hdf5/hdf5_backend.h +++ b/src/hdf5/hdf5_backend.h @@ -132,14 +132,14 @@ class HDF5Backend:public Backend { void get_occurrences(Context* ctx, const char* ids_name, int** occurrences_list, int* size) override; - bool supportsTimeDataInterpolation() { + bool supportsTimeDataInterpolation() override { return true; } - void initDataInterpolationComponent() { + void initDataInterpolationComponent() override { } - bool supportsTimeRangeOperation() { + bool supportsTimeRangeOperation() override { return true; } diff --git 
a/src/hdf5/hdf5_dataset_handler.cpp b/src/hdf5/hdf5_dataset_handler.cpp index c437c9af..f65c774b 100644 --- a/src/hdf5/hdf5_dataset_handler.cpp +++ b/src/hdf5/hdf5_dataset_handler.cpp @@ -1168,35 +1168,43 @@ void HDF5DataSetHandler::readUsingHyperslabs(const std::vector < int > &current_a int size[H5S_MAX_RANK]; hsSelectionReader.getSize(size, slice_mode, is_dynamic); int strings_count = size[0]; - std::vector<char *> t(strings_count, NULL); - status = H5Dread(dataset_id, hsSelectionReader.dtype_id, hsSelectionReader.memspace, hsSelectionReader.dataspace, H5P_DEFAULT, (char **) &t[0]); - - std::vector<std::string> strs(strings_count); - int maxlength = 0; - for (int i = 0; i < strings_count; i++) { - if (t[i] != NULL) - strs[i] = std::string(t[i]); - else { - strs[i] = std::string(""); - continue; - } - //printf("strs[%d]=%s\n", i, strs[i].c_str()); - if ((int) strs[i].length() > maxlength) - maxlength = strs[i].length(); + if (strings_count <= 0) { + // No strings to read - allocate an empty result + *data = (void*)malloc(1); + ((char*)*data)[0] = '\0'; + status = 0; + } else { + std::vector<char *> t(strings_count, NULL); + status = + H5Dread(dataset_id, hsSelectionReader.dtype_id, + hsSelectionReader.memspace, hsSelectionReader.dataspace, + H5P_DEFAULT, (char**)&t[0]); + + std::vector<std::string> strs(strings_count); + int maxlength = 0; + for (int i = 0; i < strings_count; i++) { + if (t[i] != NULL) + strs[i] = std::string(t[i]); + else { + strs[i] = std::string(""); + continue; + } + // printf("strs[%d]=%s\n", i, strs[i].c_str()); + if ((int)strs[i].length() > maxlength) maxlength = strs[i].length(); + } + // allocate 1 additional char so all strings are definitely + // null-terminated: + *data = (void*)malloc(sizeof(char) * (strings_count * maxlength + 1)); + char* p = (char*)*data; + memset(p, 0, strings_count * maxlength + 1); + for (int i = 0; i < strings_count; i++) { + char* q = const_cast<char *>(strs[i].data()); + memcpy(p + i * maxlength, q, strs[i].length()); + } + for (int i = 0; i < strings_count; i++) free(t[i]); + size[1] = maxlength; + hsSelectionReader.setSize(size, 2); } - // allocate 1 additional char so all strings are definitely null-terminated: - *data = (void*) malloc(sizeof(char) * (strings_count * maxlength + 1)); - char* p = (char *) *data; - memset(p, 0, strings_count * maxlength + 1); - for(int i=0; i < strings_count; i++) - { - char* q = const_cast<char *> (strs[i].data()); - memcpy(p + i * maxlength, q, strs[i].length()); - } - for (int i = 0; i < strings_count; i++) - free(t[i]); - size[1] = maxlength; - hsSelectionReader.setSize(size, 2); } if (status < 0) {
diff --git a/src/hdf5/hdf5_utils.cpp b/src/hdf5/hdf5_utils.cpp index 3739b61e..c76352a9 100644 --- a/src/hdf5/hdf5_utils.cpp +++ b/src/hdf5/hdf5_utils.cpp @@ -5,6 +5,7 @@ #include #include #include +#include <cerrno> #include using namespace boost::filesystem; @@ -178,7 +179,10 @@ void HDF5Utils::deleteIDSFile(const std::string &filePath) { void HDF5Utils::deleteMasterFile(const std::string &filePath, hid_t *file_id, std::unordered_map < std::string, hid_t > &opened_IDS_files, std::string &files_directory, std::string &relative_file_path) { if (exists(filePath.c_str())) { - openMasterFile(file_id, filePath); + openMasterFile(file_id, filePath, true); + if (*file_id == -1) { + return; + } initExternalLinks(file_id, opened_IDS_files, files_directory, relative_file_path); deleteIDSFiles(opened_IDS_files, files_directory, relative_file_path); closeMasterFile(file_id); @@ -237,7 +241,7 @@ void HDF5Utils::createIDSFile(OperationContext * ctx, std::string &IDSpulseFile, } -void
HDF5Utils::openIDSFile(OperationContext * ctx, std::string &IDSpulseFile, hid_t *IDS_file_id, bool try_read_only) { +void HDF5Utils::openIDSFile(OperationContext * ctx, std::string &IDSpulseFile, hid_t *IDS_file_id, bool try_read_only, std::string backend_version) { if (!exists(IDSpulseFile.c_str())) return; *IDS_file_id = H5Fopen(IDSpulseFile.c_str(), H5F_ACC_RDWR, H5P_DEFAULT); @@ -254,15 +258,26 @@ void HDF5Utils::openIDSFile(OperationContext * ctx, std::string &IDSpulseFile, h } } else { - char error_message[200]; - sprintf(error_message, "Unable to open external file in Read-Write mode for IDS: %s. It might indicate that the file is being currently handled by a writing concurrent process.\n", ctx->getDataobjectName().c_str()); - throw ALBackendException(error_message, LOG); + if (errno == EAGAIN || errno == EACCES || errno == EBUSY) { + char error_message[200]; + sprintf(error_message, "Unable to open external file in Read-Write mode for IDS: %s. It might indicate that the file is being currently handled by a writing concurrent process.\n", ctx->getDataobjectName().c_str()); + throw ALBackendException(error_message, LOG); + } + else { + //H5Eprint(H5E_DEFAULT, stderr); + remove(IDSpulseFile.c_str()); + *IDS_file_id = -2; + + createIDSFile(ctx, IDSpulseFile, backend_version, IDS_file_id); + + } + } } } -void HDF5Utils::openMasterFile(hid_t *file_id, const std::string &filePath) { //open master file +void HDF5Utils::openMasterFile(hid_t *file_id, const std::string &filePath, bool for_deletion) { //open master file if (*file_id != -1) return; if (!exists(filePath)) { @@ -276,13 +291,18 @@ void HDF5Utils::openMasterFile(hid_t *file_id, const std::string &filePath) { // if (*file_id < 0) { //have a try now in read only access *file_id = H5Fopen(filePath.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT); if (*file_id < 0) { - std::string message("Unable to open HDF5 master file: "); - message += filePath; - throw ALBackendException(message, LOG); + if (for_deletion) { + remove(filePath.c_str()); + printf("WARNING: The HDF5 master file '%s' was corrupted and re-created, but linked \ + IDS h5 files may remain and would need to be removed manually unless replaced by subsequent write operations.\n", filePath.c_str()); + *file_id = -1; + return; + } else { + std::string message("Unable to open HDF5 master file: "); + message += filePath; + throw ALBackendException(message, LOG); + } } - else { - //printf("master file read successfully with file_id=%d\n", *file_id); - } } } @@ -375,17 +395,8 @@ herr_t file_info(hid_t loc_id, const char *IDS_link_name, const H5L_info_t * lin std::string IDSpulseFile = hdf5_utils.getIDSPulseFilePath(od->files_directory, od->relative_file_path, std::string(IDS_link_name)); if (exists(IDSpulseFile.c_str())) { hid_t IDS_file_id = H5Fopen(IDSpulseFile.c_str(), H5F_ACC_RDWR, H5P_DEFAULT); - if (IDS_file_id < 0) { - std::string message("Unable to open external file: "); - message += IDSpulseFile; - throw ALBackendException(message, LOG); - - /*if (!od->mode) { - if (H5Lexists(IDS_file_id, IDS_link_name, H5P_DEFAULT) > 0) - H5Ldelete(IDS_file_id, IDS_link_name, H5P_DEFAULT); - }*/ - } - hdf5_utils.closeIDSFile(IDS_file_id, IDS_link_name); //closing the IDS file + if (IDS_file_id >= 0) + hdf5_utils.closeIDSFile(IDS_file_id, IDS_link_name); //closing the IDS file } od->link_names[od->count] = (char *) malloc(100); strcpy(od->link_names[od->count], IDS_link_name); @@ -755,5 +766,3 @@ void HDF5Utils::readOptions(uri::Uri uri, bool *compression_enabled, bool *readB *write_cache = (size_t) 
(atof(value.c_str()) * 1024 * 1024); } } - - diff --git a/src/hdf5/hdf5_utils.h b/src/hdf5/hdf5_utils.h index 9d072606..a7d621a5 100644 --- a/src/hdf5/hdf5_utils.h +++ b/src/hdf5/hdf5_utils.h @@ -39,7 +39,7 @@ class HDF5Utils { std::string getPulseFilePath(DataEntryContext * ctx, int mode, int strategy, std::string & files_directory, std::string & relative_file_path); void deleteIDSFiles(std::unordered_map < std::string, hid_t > &opened_IDS_files, std::string & files_directory, std::string & relative_file_path); void createMasterFile(DataEntryContext * ctx, std::string &filePath, hid_t *file_id, std::string &backend_version); - void openMasterFile(hid_t *file_id, const std::string &filePath); + void openMasterFile(hid_t *file_id, const std::string &filePath, bool for_deletion = false); void initExternalLinks(hid_t *file_id, std::unordered_map < std::string, hid_t > &opened_IDS_files, std::string &files_directory, std::string &relative_file_path); public: @@ -58,7 +58,7 @@ class HDF5Utils { bool pulseFileExists(const std::string &IDS_pulse_file); void closeMasterFile(hid_t *file_id); void removeLinkFromMasterPulseFile(hid_t &file_id, const std::string &link_name); - void openIDSFile(OperationContext * ctx, std::string &IDSpulseFile, hid_t *IDS_file_id, bool try_read_only); + void openIDSFile(OperationContext * ctx, std::string &IDSpulseFile, hid_t *IDS_file_id, bool try_read_only, std::string backend_version = ""); void closeIDSFile(hid_t pulse_file_id, const std::string &external_link_name) ; void createIDSFile(OperationContext * ctx, std::string &IDSpulseFile, std::string &backend_version, hid_t *IDS_file_id); void removeLinkFromIDSPulseFile(hid_t &IDS_file_id, const std::string &IDS_link_name); diff --git a/src/hdf5/hdf5_writer.cpp b/src/hdf5/hdf5_writer.cpp index 7aefa3b6..57d8f702 100644 --- a/src/hdf5/hdf5_writer.cpp +++ b/src/hdf5/hdf5_writer.cpp @@ -138,7 +138,7 @@ void HDF5Writer::create_IDS_group(OperationContext * ctx, hid_t file_id, std::un hdf5_utils.createIDSFile(ctx, IDSpulseFile, backend_version, &IDS_file_id); } else { - hdf5_utils.openIDSFile(ctx, IDSpulseFile, &IDS_file_id, false); + hdf5_utils.openIDSFile(ctx, IDSpulseFile, &IDS_file_id, false, backend_version); } opened_IDS_files[IDS_link_name] = IDS_file_id; @@ -150,7 +150,7 @@ void HDF5Writer::create_IDS_group(OperationContext * ctx, hid_t file_id, std::un hdf5_utils.createIDSFile(ctx, IDSpulseFile, backend_version, &IDS_file_id); } else { - hdf5_utils.openIDSFile(ctx, IDSpulseFile, &IDS_file_id, false); + hdf5_utils.openIDSFile(ctx, IDSpulseFile, &IDS_file_id, false, backend_version); } opened_IDS_files[IDS_link_name] = IDS_file_id; @@ -751,7 +751,11 @@ void HDF5Writer::createOrUpdateAOSShapesDataSet(ArraystructContext * ctx, hid_t dataSetHandler->setSliceMode(ctx); timedAOS_shape = readTimedAOSShape(ctx, loc_id, arrctx_indices); //printf("modified timedAOS_shape=%d\n", timedAOS_shape); + if (aos_shapes.size() == 0) { + throw ALBackendException("HDF5Backend: unexpected AOS shapes (size=0).", LOG); + } aos_shapes[timed_AOS_index] = timedAOS_shape - slices_extension; + /*for (size_t i = 0; i < aos_shapes.size(); i++) { printf("tensorized_path=%s, aos_shapes[%d]=%d\n", tensorized_path.c_str(), i, aos_shapes[i]); }*/ diff --git a/src/mdsplus/mdsplus_backend.cpp b/src/mdsplus/mdsplus_backend.cpp index a6bdbfcc..a0c73f1b 100644 --- a/src/mdsplus/mdsplus_backend.cpp +++ b/src/mdsplus/mdsplus_backend.cpp @@ -1428,7 +1428,7 @@ static char *getPathInfo(MDSplus::Data *data, MDSplus::TreeNode *refNode) std::string 
translatedBaseStr(translatedBase); if(originalIdsPath == "") { - char *origPath = getenv(szPath); + char *origPath = getenv("MDSPLUS_MODELS_PATH"); if(origPath) originalIdsPath = origPath; } @@ -1495,7 +1495,7 @@ void MDSplusBackend::resetIdsPath(std::string strTree) { if(originalIdsPath == "") //Do it only once in case it is defined { - char *origPath = getenv(szPath); + char *origPath = getenv("MDSPLUS_MODELS_PATH"); if(origPath) originalIdsPath = origPath; } diff --git a/src/memory_backend.h b/src/memory_backend.h index 97c3d24d..e74bcb92 100644 --- a/src/memory_backend.h +++ b/src/memory_backend.h @@ -674,15 +674,15 @@ class IMAS_CORE_LIBRARY_API MemoryBackend:public Backend void get_occurrences(Context* ctx, const char* ids_name, int** occurrences_list, int* size) override; - bool supportsTimeDataInterpolation() { + bool supportsTimeDataInterpolation() override { return false; } - void initDataInterpolationComponent() { + void initDataInterpolationComponent() override { throw ALBackendException("Memory backend does not support time range and time slices operations",LOG); } - bool supportsTimeRangeOperation() { + bool supportsTimeRangeOperation() override { return false; } diff --git a/src/no_backend.h b/src/no_backend.h index d3f8d3ac..8b46e6b2 100644 --- a/src/no_backend.h +++ b/src/no_backend.h @@ -29,14 +29,14 @@ class IMAS_CORE_LIBRARY_API NoBackend : public Backend ~NoBackend() {}; void openPulse(DataEntryContext *ctx, - int mode); + int mode) override; void closePulse(DataEntryContext *ctx, - int mode); + int mode) override; - void beginAction(OperationContext *ctx); + void beginAction(OperationContext *ctx) override; - void endAction(Context *ctx); + void endAction(Context *ctx) override; void writeData(Context *ctx, std::string fieldname, @@ -44,7 +44,7 @@ class IMAS_CORE_LIBRARY_API NoBackend : public Backend void* data, int datatype, int dim, - int* size); + int* size) override; int readData(Context *ctx, std::string fieldname, @@ -52,26 +52,26 @@ class IMAS_CORE_LIBRARY_API NoBackend : public Backend void** data, int* datatype, int* dim, - int* size); + int* size) override; void deleteData(OperationContext *ctx, - std::string path); + std::string path) override; void beginArraystructAction(ArraystructContext *ctx, - int *size); + int *size) override; - std::pair getVersion(DataEntryContext *ctx); + std::pair getVersion(DataEntryContext *ctx) override; void get_occurrences(Context* ctx, const char* ids_name, int** occurrences_list, int* size) override; - bool supportsTimeDataInterpolation() { + bool supportsTimeDataInterpolation() override { return false; } - void initDataInterpolationComponent() { + void initDataInterpolationComponent() override { } - bool supportsTimeRangeOperation() { + bool supportsTimeRangeOperation() override { return false; } diff --git a/src/uda/uda_backend.cpp b/src/uda/uda_backend.cpp index 02fabbaa..b1ccb81b 100644 --- a/src/uda/uda_backend.cpp +++ b/src/uda/uda_backend.cpp @@ -421,7 +421,10 @@ void UDABackend::fetch_files(const uri::Uri& uri) } remote_path_ = std::filesystem::path{ maybe_path.value()}; - auto cache_path = maybe_local_cache ? std::filesystem::path{maybe_local_cache.value()} : std::filesystem::temp_directory_path(); + + // Determine cache path: explicit parameter, or default to /tmp/uda-cache-of-$USER/path_in_uri for isolation + const char* user = std::getenv("USER"); + auto cache_path = maybe_local_cache ? std::filesystem::path{maybe_local_cache.value()} : user ? 
std::filesystem::temp_directory_path() / ("uda-cache-of-" + std::string{user}) : std::filesystem::temp_directory_path() / "uda-cache"; local_path_ = cache_path / remote_path_.relative_path(); std::string backend = maybe_backend.value(); diff --git a/src/uda/uda_backend.h b/src/uda/uda_backend.h index faa32f4b..0869aa3c 100644 --- a/src/uda/uda_backend.h +++ b/src/uda/uda_backend.h @@ -184,13 +184,13 @@ class IMAS_CORE_LIBRARY_API UDABackend : public Backend void get_occurrences(Context* ctx, const char* ids_name, int** occurrences_list, int* size) override; - bool supportsTimeDataInterpolation(); + bool supportsTimeDataInterpolation() override; // Do nothing, UDA plugin will need to initDataInterpolationComponent on data backend when it knows which backend // is being used, i.e. when a URI is given. - void initDataInterpolationComponent() {} + void initDataInterpolationComponent() override {} - bool supportsTimeRangeOperation() { + bool supportsTimeRangeOperation() override { return this->supportsTimeDataInterpolation(); }
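
A note on the flexbuffers_backend.cpp hunk: dereferencing a reinterpret_cast'ed pointer into a serialized byte buffer breaks strict aliasing and can fault on alignment-sensitive targets, which is why the patch switches to memcpy. The self-contained sketch below illustrates the same pattern; it is not the patched code, and the names read_marker and raw_blob are illustrative only.

.. code-block:: cpp

    #include <cstdint>
    #include <cstring>
    #include <iostream>

    constexpr uint32_t ENDIAN_MARKER_VALUE = 0x01020304u;

    // memcpy into a local uint32_t is the well-defined replacement for
    // *reinterpret_cast<const uint32_t*>(raw_blob); compilers typically
    // lower it to a single load anyway.
    uint32_t read_marker(const uint8_t* raw_blob) {
        uint32_t marker;
        std::memcpy(&marker, raw_blob, sizeof(marker));
        return marker;
    }

    int main() {
        // 0x01020304 as stored by a little-endian writer
        const uint8_t serialized[] = {0x04, 0x03, 0x02, 0x01};
        if (read_marker(serialized) != ENDIAN_MARKER_VALUE) {
            std::cerr << "endian marker mismatch between writer and reader\n";
            return 1;
        }
        return 0;
    }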
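A note on the readUsingHyperslabs() hunk in src/hdf5/hdf5_dataset_handler.cpp: the variable-length strings returned by H5Dread are repacked into one row-major buffer of strings_count cells of maxlength bytes, zero-padded, with a single trailing NUL guarding the last cell. Below is a minimal sketch of just that packing step, assuming the caller releases the buffer with free(); the helper name pack_fixed_width is hypothetical and does not appear in the patch.

.. code-block:: cpp

    #include <cstdlib>
    #include <cstring>
    #include <string>
    #include <vector>

    // Pack strings into strings_count * maxlength zero-padded cells plus one
    // trailing NUL, mirroring the hunk above. A string of exactly maxlength
    // characters fills its cell completely, so consumers must treat the
    // result as fixed-width records, not as independent C strings.
    char* pack_fixed_width(const std::vector<std::string>& strs, int& maxlength) {
        maxlength = 0;
        for (const std::string& s : strs)
            if ((int)s.length() > maxlength) maxlength = (int)s.length();
        size_t total = strs.size() * (size_t)maxlength + 1;
        char* p = (char*)std::malloc(total);
        std::memset(p, 0, total);
        for (size_t i = 0; i < strs.size(); i++)
            std::memcpy(p + i * (size_t)maxlength, strs[i].data(), strs[i].length());
        return p;
    }

    int main() {
        std::vector<std::string> strs = {"alpha", "b", ""};
        int maxlength = 0;
        char* packed = pack_fixed_width(strs, maxlength);  // 3 cells of 5 bytes + 1 NUL
        std::free(packed);
        return maxlength == 5 ? 0 : 1;
    }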