diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3ffc5ee4..340c5c7e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,61 +4,32 @@ name: Build XmippCore # Specify when the Action should be triggered: when a pull request is opened against the 'devel' or 'master' branch on: pull_request: - branches: [ devel, master ] + workflow_dispatch: -# A workflow run is made up of one or more jobs that can run sequentially or in parallel jobs: - # This workflow contains a single job called "build" build: - # The type of runner that the job will run on runs-on: ubuntu-22.04 - - # Steps represent a sequence of tasks that will be executed as part of the job - # Disabling shallow clone is recommended for improving relevancy of reporting steps: -# - name: Extract branch name -# shell: bash -# run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT -# id: extract_branch - - - name: Install dependencies run: | sudo apt-get update - sudo apt-get install -y scons libfftw3-dev libopenmpi-dev openmpi-bin libhdf5-dev python3-numpy python3-dev libtiff5-dev unzip libopencv-dev + sudo apt-get install -y libfftw3-dev libopenmpi-dev openmpi-bin libhdf5-dev python3-numpy python3-dev libtiff5-dev unzip - name: Export CI variables run: echo "BUILD_DIR=CIBuild" >> $GITHUB_ENV # Using CCache to speed C/C++ compilation - - uses: hendrikmuhs/ccache-action@v1.2 - - # Installing CUDA -# - uses: Jimver/cuda-toolkit@v0.2.11 -# id: cuda-toolkit -# with: -# cuda: '11.8.0' -# method: network -# sub-packages: '["nvcc", "toolkit"]' + - uses: hendrikmuhs/ccache-action@main # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v4 + - uses: actions/checkout@main with: fetch-depth: 0 - - name: Fetching Xmipp - run: ./scripts/ci_build - - - name: Getting xmipp.conf - run: ./xmipp config #all br=${{ steps.extract_branch.outputs.branch }} noAsk - working-directory: ${{ env.BUILD_DIR }} - env: - CIBuild: 1 - BUILD_TESTS: True - # If we got here, Xmipp can be build with this xmippCore # Build only the core to be able to run the static code analysis - name: Compile XmippCore run: | - cp ${BUILD_DIR}/xmipp.conf ./ - python3 $(which scons) . -j4 + cmake -S . 
-B build/ -DXMIPP_VERSIONS_FILE=build/versions.txt + cmake --build build/ -j 4 + diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..5b652065 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,44 @@ +name: Release new version +on: + workflow_dispatch: + +jobs: + generate-new-release: + environment: + name: release-approval + + runs-on: ubuntu-latest + steps: + + - name: Checkout repository + uses: actions/checkout@main + + - name: Retrieve tag name, release name & changelog + run: | + echo "TAG_NAME=$(python ./scripts/version.py)" >> $GITHUB_OUTPUT + awk '/## Release/{if (p) exit; p=1} p' CHANGELOG.md | tail -n +2 > latest_changelog.md + echo "RELEASE_NAME=$(python ./scripts/version.py --keep-format)" >> $GITHUB_OUTPUT + id: variables + + - name: Generate tag + id: tag_version + uses: mathieudutour/github-tag-action@v6.2 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + custom_tag: ${{ steps.variables.outputs.TAG_NAME }} + + - name: Update major version tag + run: | + VERSION=${{ steps.variables.outputs.TAG_NAME }} + MAJOR=${VERSION%%.*} + git config --global user.name 'GitHub Actions' + git config --global user.email 'action@github.com' + git tag -fa "${MAJOR}" -m 'Update major version tag with $VERSION' + git push origin "${MAJOR}" --force + + - name: Create a GitHub release + uses: ncipollo/release-action@main + with: + tag: ${{ steps.tag_version.outputs.new_tag }} + name: ${{ steps.variables.outputs.RELEASE_NAME }} + bodyFile: latest_changelog.md diff --git a/.gitignore b/.gitignore index 49b7bbaf..d50ba771 100644 --- a/.gitignore +++ b/.gitignore @@ -26,6 +26,7 @@ #### Python *.pyc +__pycache__ #### Eclipse and so on .project @@ -34,9 +35,6 @@ .classpath .idea -# Scons auxiliary files -.sconsign.dblite - # Other gmon.out *~ @@ -55,4 +53,6 @@ gmon.out /Debug/ # VS Code -.vscode \ No newline at end of file +.vscode + +build/ \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 5156c0d6..0fcc51e6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,14 @@ -## devel +## Release 4.0.0 + - Details about the changes on [Xmipp repository](https://github.com/I2PC/xmipp/blob/devel/CHANGELOG.md) -## ms_fix_pixelsize -11/August/2023: Fixed pixel size not being properly stored +## Release 3.25.06.0 - Rhea + - Details about the changes on [Xmipp repository](https://github.com/I2PC/xmipp/blob/devel/CHANGELOG.md) + +## Release 3.24.12.0 - Poseidon + - Details about the changes on [Xmipp repository](https://github.com/I2PC/xmipp/blob/devel/CHANGELOG.md) -## ms_transpose_MRC -11/August/2023: MRC files with axis order other than default is now supported +## Release 3.24.06 - Oceanus + - Details about the changes on [Xmipp repository](https://github.com/I2PC/xmipp/blob/devel/CHANGELOG.md) -## co_MRCFileFormat -22/July/2021: .ali, .preali, .rec added to the set of MRC file +## Release 3.23.11 - Nereus + - Details about the changes on [Xmipp repository](https://github.com/I2PC/xmipp/blob/devel/CHANGELOG.md) diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 00000000..1b0b7667 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,105 @@ +#*************************************************************************** +# Authors: Oier Lauzirika Zarrabeitia (oierlauzi@bizkaia.eu) +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your 
option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA +# 02111-1307 USA +# +# All comments concerning this program package may be sent to the +# e-mail address 'xmipp@cnb.csic.es' +# *************************************************************************** + +cmake_minimum_required(VERSION 3.16) + +# Define the project +project( + XmippCore + VERSION 3.24.06 + LANGUAGES C CXX +) + +include(GNUInstallDirs) + +list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules) + +# Find dependencies +find_package(HDF5 1.8 COMPONENTS C CXX REQUIRED) +if(XMIPP_VERSIONS_FILE) + file(APPEND ${XMIPP_VERSIONS_FILE} "HDF5=${HDF5_VERSION}\n") +endif() + +find_package(TIFF REQUIRED) + +find_package(JPEG REQUIRED) +if(XMIPP_VERSIONS_FILE) + file(APPEND ${XMIPP_VERSIONS_FILE} "JPEG=${JPEG_VERSION}\n") +endif() + +find_package(SQLite3 REQUIRED) +if(XMIPP_VERSIONS_FILE) + file(APPEND ${XMIPP_VERSIONS_FILE} "SQLite3=${SQLite3_VERSION}\n") +endif() + +find_package(Threads REQUIRED) + +find_package(FFTW REQUIRED) + +# Register all source and header files +file(GLOB_RECURSE + SOURCES + ${CMAKE_CURRENT_SOURCE_DIR}/core/*.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/core/*.cc + ${CMAKE_CURRENT_SOURCE_DIR}/core/*.c +) +file(GLOB_RECURSE + HEADERS + ${PROJECT_SOURCE_DIR}/include/*.h + ${PROJECT_SOURCE_DIR}/include/*.hpp +) + +# Create the shared library +add_library(${PROJECT_NAME} SHARED ${SOURCES}) +set_target_properties( + ${PROJECT_NAME} PROPERTIES + CXX_STANDARD 17 +) +target_include_directories( + ${PROJECT_NAME} + PUBLIC + ${CMAKE_CURRENT_SOURCE_DIR} + ${HDF5_INCLUDE_DIRS} +) +target_precompile_headers( + ${PROJECT_NAME} + PUBLIC ${HEADERS} +) +target_link_libraries( + ${PROJECT_NAME} + PUBLIC + TIFF::TIFF + SQLite::SQLite3 + JPEG::JPEG + Threads::Threads + FFTW::Double + FFTW::Float + FFTW::DoubleThreads + FFTW::FloatThreads + ${HDF5_LIBRARIES} +) + +# Install library's binary files and headers +install( + TARGETS ${PROJECT_NAME} + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} +) diff --git a/README.md b/README.md index 7b470c24..37971564 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # xmippCore -[![CI](https://github.com/I2PC/xmippCore/actions/workflows/main.yml/badge.svg)](https://github.com/I2PC/xmippCore/actions/workflows/main.yml) +[![CI](https://github.com/I2PC/xmippCore/actions/workflows/build.yml/badge.svg)](https://github.com/I2PC/xmippCore/actions/workflows/build.yml) **>>> To install Xmipp, please visit [this](https://github.com/I2PC/xmipp#xmipp) <<<** diff --git a/SConscript b/SConscript deleted file mode 100644 index cb1f7a25..00000000 --- a/SConscript +++ /dev/null @@ -1,148 +0,0 @@ -#!/usr/bin/env python3 - -# ************************************************************************** -# * -# * Authors: Carlos Oscar Sorzano (coss@cnb.csic.es) -# * -# * Unidad de Bioinformatica of Centro Nacional de Biotecnologia, CSIC -# * -# * This program is free software; you can redistribute it and/or modify -# * it under the terms of the GNU General Public License as published by -# * the Free Software Foundation; either version 2 of the License, or -# * (at your option) any later version. 
-# * -# * This program is distributed in the hope that it will be useful, -# * but WITHOUT ANY WARRANTY; without even the implied warranty of -# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# * GNU General Public License for more details. -# * -# * You should have received a copy of the GNU General Public License -# * along with this program; if not, write to the Free Software -# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA -# * 02111-1307 USA -# * -# * All comments concerning this program package may be sent to the -# * e-mail address 'ifoche@cnb.csic.es' -# * -# ************************************************************************** - -import os -from os.path import join -from glob import glob -from datetime import datetime -import sysconfig -import sys - -PYTHON_LIB = os.environ.get("PYTHON_LIB") - -Import('env') - - -AddOption('--no-opencv', dest='opencv', action='store_false', default=True, - help='Avoid compilation of opencv programs') -AddOption('--no-scipy', dest='scipy', action='store_false', default=True, - help='Avoid compilation with scipy support') - - - -# Define some variables used by Scons. Note that some of -# the variables will be passed by Scipion in the environment (env). - -env['CUDA_SDK_PATH'] = os.environ.get('CUDA_SDK_PATH', '') -env['CUDA_LIB_PATH'] = os.environ.get('CUDA_LIB_PATH', '') - -get = lambda x: os.environ.get(x, '0').lower() in ['true', 'yes', 'y', '1'] - -gtest = get('GTEST') -debug = get('DEBUG') - -# Read some flags -CYGWIN = env['PLATFORM'] == 'cygwin' -MACOSX = env['PLATFORM'] == 'darwin' -MINGW = env['PLATFORM'] == 'win32' - -XMIPP_PATH = Dir('.').abspath -XMIPP_BUNDLE = Dir('..').abspath - - -# *********************************************************************** -# * Xmipp C++ Libraries * -# *********************************************************************** - -# Create a shortcut and customized function -# to add the Xmipp CPP libraries -def addLib(name, **kwargs): - # Install all libraries in scipion/software/lib - # COSS kwargs['installDir'] = '#software/lib' - # Add always the xmipp path as -I for include and also xmipp/libraries - incs = kwargs.get('incs', []) - kwargs['incs'] = incs - - deps = kwargs.get('deps', []) - kwargs['deps'] = deps - - # Add libraries in libs as deps if not present - libs = kwargs.get('libs', []) - for lib in libs: - deps.append(lib) - - # If pattern not provided use *.cpp as default - patterns = kwargs.get('patterns', '*.cpp') - kwargs['patterns'] = patterns - lib = env.AddCppLibrary(name, **kwargs) - - env.Alias('xmipp-libs', lib) - - return lib - - -# Gtest -#addLib('XmippGtest', -# dirs=['external'], -# patterns=['gtest/*.cc'], -# default=gtest, -# libs=['pthread'] -# ) - -def getHdf5Name(libdirs): - for dir in libdirs: - if os.path.exists(os.path.join(dir.strip(),"libhdf5.so")): - return "hdf5" - elif os.path.exists(os.path.join(dir.strip(),"libhdf5_serial.so")): - return "hdf5_serial" - return "hdf5" - -# Data -addLib('XmippCore', - patterns=['*.cpp','*.c','bilib/*.cc','alglib/*.cpp', 'utils/*.cpp'], - dirs=['core'] * 5, # one relative path for each pattern - libs=['fftw3', 'fftw3_threads', - getHdf5Name(env['EXTERNAL_LIBDIRS']),'hdf5_cpp', - 'tiff', - 'jpeg', - 'sqlite3', - 'pthread']) - -# Python binding -def remove_prefix(text, prefix): - return text[text.startswith(prefix) and len(prefix):] -env['PYTHONINCFLAGS'] = os.environ.get('PYTHONINCFLAGS', '').split() -if len(env["PYTHONINCFLAGS"])>0: - python_incdirs = [remove_prefix(os.path.expandvars(x),"-I") for x in 
env["PYTHONINCFLAGS"]] -else: - python_incdirs = [] - -addLib('xmippCore.so', - dirs=['bindings'], - patterns=['python/*.cpp'], - incs=python_incdirs, - libs=[PYTHON_LIB, 'XmippCore'], - prefix='', target='xmippCore') - - -# *********************************************************************** -# * Xmipp Scripts * -# *********************************************************************** - -XmippAlias = env.Alias('xmipp', ['xmipp-libs']) -Return('XmippAlias') diff --git a/SConstruct b/SConstruct deleted file mode 100644 index 7a6f584d..00000000 --- a/SConstruct +++ /dev/null @@ -1,395 +0,0 @@ -#!/usr/bin/env python3 - -# ************************************************************************** -# * -# * Authors: I. Foche Perez (ifoche@cnb.csic.es) -# * J. Burguet Castell (jburguet@cnb.csic.es) -# * -# * Unidad de Bioinformatica of Centro Nacional de Biotecnologia, CSIC -# * -# * This program is free software; you can redistribute it and/or modify -# * it under the terms of the GNU General Public License as published by -# * the Free Software Foundation; either version 2 of the License, or -# * (at your option) any later version. -# * -# * This program is distributed in the hope that it will be useful, -# * but WITHOUT ANY WARRANTY; without even the implied warranty of -# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# * GNU General Public License for more details. -# * -# * You should have received a copy of the GNU General Public License -# * along with this program; if not, write to the Free Software -# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA -# * 02111-1307 USA -# * -# * All comments concerning this program package may be sent to the -# * e-mail address 'ifoche@cnb.csic.es' -# * -# ************************************************************************** - -# Builders and pseudobuilders used be SConscript to install things. - -EnsurePythonVersion(3, 3) - -import os -import sys -import shutil -from os.path import join -from SCons import Node, Script -try: - from itertools import izip -except: - izip = zip - -from glob import glob -import fnmatch -import platform -import SCons.SConf -from configparser import ConfigParser, ParsingError - -MACOSX = (platform.system() == 'Darwin') -WINDOWS = (platform.system() == 'Windows') -LINUX = (platform.system() == 'Linux') - - -# Create the environment the whole build will use. -env = Environment(ENV=os.environ, - BUILDERS=Environment()['BUILDERS'], - tools=['Make', 'AutoConfig'], - toolpath=[join('install', 'scons-tools')]) -# TODO: BUILDERS var added from the tricky creation of a new environment. -# If not, they lose default builders like "Program", which are needed later -# (by CheckLib and so on). See http://www.scons.org/doc/2.0.1/HTML/scons-user/x3516.html -# See how to change it into a cleaner way (not doing BUILDERS=Environment()['BUILDERS']!) - -AddOption('--verbose', dest='verbose', action='store_true', - help='Show full message of compilation lines') -# Message from autoconf and make, so we don't see all its verbosity. -if not GetOption('verbose'): - env['AUTOCONFIGCOMSTR'] = "Configuring $TARGET from $SOURCES" - env['MAKECOMSTR'] = "Compiling & installing $TARGET from $SOURCES " - - -def targetInBuild(env, targetName): - return targetName in map(str, BUILD_TARGETS) - - -# Add the path to dynamic libraries so the linker can find them. 
- -if LINUX: - env.AppendUnique(LIBPATH=os.environ.get('LD_LIBRARY_PATH', '')) -elif MACOSX: - env.AppendUnique(LIBPATH=os.environ.get('DYLD_FALLBACK_LIBRARY_PATH', '')) -elif WINDOWS: - print("OS not tested yet") - sys.exit(1) -else: - print("Unknown system: %s\nPlease tell the developers." % platform.system()) - - -# Python and SCons versions are fixed -# env.EnsurePythonVersion(2,7) -# env.EnsureSConsVersion(2,3,2) -# TODO: see after all is clean and crispy if we can avoid fixing the versions. -# We can specify a range of valid version after we check it works with them. - - -# ************************************************************************ -# * * -# * Auxiliar functions * -# * * -# ************************************************************************ - - -def appendUnique(elist, element): - 'Add element to a list only if it doesnt previously exist' - if element not in elist: - if not isinstance(element, str): - elist.extend(element) - else: - elist.append(element) - - -# ************************************************************************ -# * * -# * Extra options * -# * * -# ************************************************************************ -cf = ConfigParser() -cf.optionxform = str # keep case (stackoverflow.com/questions/1611799) -try: - configFile = "../../xmipp.conf" - if not os.path.isfile(configFile): # in case of the CI build - configFile = "xmipp.conf" # config file will be directly in the folder - cf.read_file(open(configFile)) -except OSError: - sys.exit("Config file not found.") -except ParsingError: - sys.exit("%s\nError while parsing the config file." % sys.exc_info()[1]) -if not 'BUILD' in cf.sections(): - print("Cannot find section BUILD in the config file.") -os.environ.update(dict(cf.items('BUILD'))) - -env['CPPPATH'] = os.environ.get('CPPPATH', []) -env['CC'] = os.environ.get('CC') -env['CXX'] = os.environ.get('CXX') -env['LINKERFORPROGRAMS'] = os.environ.get('LINKERFORPROGRAMS') -env['CCFLAGS'] = os.environ.get('CCFLAGS', '').split() -cxxFlags = os.environ.get('CXXFLAGS', '') -if os.environ.get('DEBUG', '0') == 'True': #FIXME, use 1, true, yes... 
- cxxFlags += ' -g' -else: - if cxxFlags.find("-O")==-1: - cxxFlags += (" -O3" if 'TRAVIS' not in os.environ else " -O0") #don't optimize on Travis, as it slows down the build -env['CXXFLAGS'] = cxxFlags.split() -os.environ['CXXFLAGS'] = cxxFlags # FIXME use only env or os.environ in the rest of the code -env['LINKFLAGS'] = os.environ.get('LINKFLAGS', '').split() - - -xmippPath = Dir('.').abspath -env['PACKAGE'] = {'NAME': 'xmippCore', - 'SCONSCRIPT': xmippPath - } - - -# ************************************************************************ -# * * -# * Pseudobuilders * -# * * -# ************************************************************************ - -def remove_prefix(text, prefix): - return text[text.startswith(prefix) and len(prefix):] - -env['INCDIRFLAGS'] = os.environ.get('INCDIRFLAGS', '').split() -env['LIBDIRFLAGS'] = os.environ.get('LIBDIRFLAGS', '').split() - -if len(env["INCDIRFLAGS"])>0: - external_incdirs = [remove_prefix(os.path.expandvars(x),"-I") for x in env["INCDIRFLAGS"]] -else: - external_incdirs = [] - -if len(env["LIBDIRFLAGS"])>0: - external_libdirs = [remove_prefix(os.path.expandvars(x),"-L") for x in env["LIBDIRFLAGS"]] -else: - external_libdirs = [] - -env['EXTERNAL_INCDIRS'] = external_incdirs -env['EXTERNAL_LIBDIRS'] = external_libdirs - -def addCppLibrary(env, name, dirs=[], tars=[], untarTargets=['configure'], patterns=[], incs=[], - libs=[], prefix=None, suffix=None, installDir=None, libpath=['lib'], deps=[], - mpi=False, cuda=False, default=True, target=None): - """Add self-made and compiled shared library to the compilation process - - This pseudobuilder access given directory, compiles it - and installs it. It also tells SCons about it dependencies. - - If default=False, the library will not be built unless the option - --with- is used. - - Returns the final targets, the ones that Make will create. - """ - _libs = list(libs) - _libpath = list(libpath)+external_libdirs - _incs = list(incs)+external_incdirs - lastTarget = deps - prefix = 'lib' if prefix is None else prefix - suffix = '.so' if suffix is None else suffix - - basedir = 'lib' - targetName = join(basedir, target if target else prefix + name) - sources = [] - - for d, p in izip(dirs, patterns): - sources += glob(join(env['PACKAGE']['SCONSCRIPT'], d, p)) - - if not sources and env.TargetInBuild(name): - Exit('No sources found for Library: %s. Exiting!!!' % name) - - env2 = Environment() - env2['ENV']['PATH'] = env['ENV']['PATH'] - env2['CXX'] = env['CXX'] - - env2['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 - env2.SetOption('warn', 'no-duplicate-environment') - env2.Tool('compilation_db') - xmipp_path = os.path.dirname(os.path.dirname(os.getcwd())) - cdb = env2.CompilationDatabase(os.path.join(xmipp_path, 'compile_commands_2.json')) - Alias('cdb', cdb) - BUILD_TARGETS.append('cdb') - - mpiArgs = {} - if mpi: - _libpath.append(env['MPI_LIBDIR']) - _libs.append(env['MPI_LIB']) - _incs.append(env['MPI_INCLUDE']) - - mpiArgs = {'CC': env['MPI_CC'], - 'CXX': env['MPI_CXX'], - 'LINK': env['MPI_LINKERFORPROGRAMS']} -# conf = Configure(env, custom_tests = {'CheckMPI': CheckMPI}) -# if not conf.CheckMPI(env['MPI_INCLUDE'], env['MPI_LIBDIR'], -# env['MPI_LIB'], env['MPI_CC'], env['MPI_CXX'], -# env['MPI_LINKERFORPROGRAMS'], False): -# print >> sys.stderr, 'ERROR: MPI is not properly working. Exiting...' 
-# Exit(1) -# env = conf.Finish() - env2.PrependENVPath('PATH', env['MPI_BINDIR']) - - - _incs.append(env['CPPPATH']) - - library = env2.SharedLibrary( - target=targetName, - #source=lastTarget, - source=sources, - CPPPATH=_incs, - LIBPATH=_libpath, - LIBS=_libs, - SHLIBPREFIX=prefix, - SHLIBSUFFIX=suffix, - CXXFLAGS=env['CXXFLAGS']+env['INCDIRFLAGS'], - LINKFLAGS=env['LINKFLAGS']+env['LIBDIRFLAGS'], - **mpiArgs - ) - SideEffect('dummy', library) - env.Depends(library, sources) - - if installDir: - install = env.Install(installDir, library) - SideEffect('dummy', install) - lastTarget = install - else: - lastTarget = library - env.Default(lastTarget) - - for dep in deps: - env.Depends(sources, dep) - - env.Alias(name, lastTarget) - - return lastTarget - -def symLink(env, target, source): - #As the link will be in bin/ directory we need to move up - sources = source - current = Dir('.').path+'/' - import SCons - if isinstance(target, SCons.Node.NodeList) or isinstance(target, list): - link = target[0].path - else: - link = target - if isinstance(link, str) and link.startswith(current): - link = link.split(current)[1] - if isinstance(sources, SCons.Node.NodeList) or isinstance(sources, list): - sources = source[0].path - if isinstance(sources, str) and sources.startswith(current): - sources = sources.split(current)[1] - - sources = os.path.relpath(sources, os.path.split(link)[0]) - #if os.path.lexists(link): - # os.remove(link) - #print 'Linking to %s from %s' % (sources, link) - #os.symlink(sources, link) - result = env.Command(Entry(link), - Entry(source), - Action('rm -rf %s && ln -v -s %s %s' % (Entry(link).abspath, sources, - Entry(link).abspath), - 'Creating a link from %s to %s' % (link, sources))) - env.Default(result) - return result - - -def Cmd(cmd): - print(cmd) - os.system(cmd) - - -def AddMatchingFiles(params, directory, files): - """ Callback, adds all matching files in dir - params[0] = pattern - params[1] = blacklist - params[2] = sources - """ - for filename in fnmatch.filter(files, params[0]): - if filename not in params[1]: - params[2].append(join(directory, filename)) - - -def Glob(path, pattern, blacklist=[]): - """ Custom made globbing, walking into all subdirectories from path. 
""" - sources = [] - for root, dirs, files in os.walk(path): - for file in fnmatch.filter(files, pattern): - if file not in blacklist: - sources.append(join(root, file)) - return sources - - -def CreateFileList(path, pattern, filename, root='', root2=''): - fOut = open(filename, 'w+') - files = [f.replace(root, root2) + '\n' for f in Glob(path, pattern, [])] - fOut.writelines(files) - fOut.close() - - -def compilerConfig(env): - """Check the good state of the C and C++ compilers and return the proper env.""" - - conf = Configure(env) - # ---- check for environment variables - if 'CC' in os.environ: - conf.env.Replace(CC=os.environ['CC']) - else: - conf.env.Replace(CC='gcc') - print(">> Using C compiler: " + conf.env.get('CC')) - - if 'CFLAGS' in os.environ: - conf.env.Replace(CFLAGS=os.environ['CFLAGS']) - print(">> Using custom C build flags") - - if 'CXX' in os.environ: - conf.env.Replace(CXX=os.environ['CXX']) - else: - conf.env.Replace(CXX='g++') - print(">> Using C++ compiler: " + conf.env.get('CXX')) - - if 'CXXFLAGS' in os.environ: - conf.env.Append(CPPFLAGS=os.environ['CXXFLAGS']) - print(">> Appending custom C++ build flags : " + os.environ['CXXFLAGS']) - - if 'LDFLAGS' in os.environ: - conf.env.Append(LINKFLAGS=os.environ['LDFLAGS']) - print(">> Appending custom link flags : " + os.environ['LDFLAGS']) - - conf.CheckCC() - conf.CheckCXX() - env = conf.Finish() - return env - - -def libraryTest(env, name, lang='c'): - """Check the existence of a concrete C/C++ library.""" - env2 = Environment(LIBS=env.get('LIBS','')) - conf = Configure(env2) - conf.CheckLib(name, language=lang) - env2 = conf.Finish() - # conf.Finish() returns the environment it used, and we may want to use it, - # like: return conf.Finish() but we don't do that so we keep our env clean :) - -# Add methods so SConscript can call them. -env.AddMethod(compilerConfig, 'CompilerConfig') -env.AddMethod(addCppLibrary, 'AddCppLibrary') -env.AddMethod(symLink, 'SymLink') -env.AddMethod(targetInBuild, 'TargetInBuild') - -# Run SConscript -env.SConscript('SConscript', exports='env') - -# Add original help (the one that we would have if we didn't use -# Help() before). But remove the "usage:" part (first line). -phelp = SCons.Script.Main.OptionsParser.format_help().split('\n') -Help('\n'.join(phelp[1:])) -# This is kind of a hack, because the #@!^ scons doesn't give you easy -# access to the original help message. diff --git a/cmake/modules/FindFFTW.cmake b/cmake/modules/FindFFTW.cmake new file mode 100644 index 00000000..e39bcadf --- /dev/null +++ b/cmake/modules/FindFFTW.cmake @@ -0,0 +1,419 @@ +# - Find the FFTW library +# +# Original version of this file: +# Copyright (c) 2015, Wenzel Jakob +# https://github.com/wjakob/layerlab/blob/master/cmake/FindFFTW.cmake, commit 4d58bfdc28891b4f9373dfe46239dda5a0b561c6 +# Modifications: +# Copyright (c) 2017, Patrick Bos +# +# Usage: +# find_package(FFTW [REQUIRED] [QUIET] [COMPONENTS component1 ... componentX] ) +# +# It sets the following variables: +# FFTW_FOUND ... true if fftw is found on the system +# FFTW_[component]_LIB_FOUND ... true if the component is found on the system (see components below) +# FFTW_LIBRARIES ... full paths to all found fftw libraries +# FFTW_[component]_LIB ... full path to one of the components (see below) +# FFTW_INCLUDE_DIRS ... fftw include directory paths +# +# The following variables will be checked by the function +# FFTW_USE_STATIC_LIBS ... if true, only static libraries are found, otherwise both static and shared. +# FFTW_ROOT ... 
if set, the libraries are exclusively searched +# under this path +# +# This package supports the following components: +# FLOAT_LIB +# DOUBLE_LIB +# LONGDOUBLE_LIB +# FLOAT_THREADS_LIB +# DOUBLE_THREADS_LIB +# LONGDOUBLE_THREADS_LIB +# FLOAT_OPENMP_LIB +# DOUBLE_OPENMP_LIB +# LONGDOUBLE_OPENMP_LIB +# + +# TODO (maybe): extend with ExternalProject download + build option +# TODO: put on conda-forge + + +if( NOT FFTW_ROOT AND DEFINED ENV{FFTWDIR} ) + set( FFTW_ROOT $ENV{FFTWDIR} ) +endif() + +# Check if we can use PkgConfig +find_package(PkgConfig) + +#Determine from PKG +if( PKG_CONFIG_FOUND AND NOT FFTW_ROOT ) + pkg_check_modules( PKG_FFTW QUIET "fftw3" ) +endif() + +#Check whether to search static or dynamic libs +set( CMAKE_FIND_LIBRARY_SUFFIXES_SAV ${CMAKE_FIND_LIBRARY_SUFFIXES} ) + +if( ${FFTW_USE_STATIC_LIBS} ) + set( CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_STATIC_LIBRARY_SUFFIX} ) +else() + set( CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_SAV} ) +endif() + +if( FFTW_ROOT ) + # find libs + + find_library( + FFTW_DOUBLE_LIB + NAMES "fftw3" libfftw3-3 + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_DOUBLE_THREADS_LIB + NAMES "fftw3_threads" + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_DOUBLE_OPENMP_LIB + NAMES "fftw3_omp" + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_DOUBLE_MPI_LIB + NAMES "fftw3_mpi" + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_FLOAT_LIB + NAMES "fftw3f" libfftw3f-3 + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_FLOAT_THREADS_LIB + NAMES "fftw3f_threads" + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_FLOAT_OPENMP_LIB + NAMES "fftw3f_omp" + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_FLOAT_MPI_LIB + NAMES "fftw3f_mpi" + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_LONGDOUBLE_LIB + NAMES "fftw3l" libfftw3l-3 + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_LONGDOUBLE_THREADS_LIB + NAMES "fftw3l_threads" + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_LONGDOUBLE_OPENMP_LIB + NAMES "fftw3l_omp" + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + find_library( + FFTW_LONGDOUBLE_MPI_LIB + NAMES "fftw3l_mpi" + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "lib" "lib64" + NO_DEFAULT_PATH + ) + + #find includes + find_path(FFTW_INCLUDE_DIRS + NAMES "fftw3.h" + PATHS ${FFTW_ROOT} + PATH_SUFFIXES "include" + NO_DEFAULT_PATH + ) + +else() + + find_library( + FFTW_DOUBLE_LIB + NAMES "fftw3" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library( + FFTW_DOUBLE_THREADS_LIB + NAMES "fftw3_threads" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library( + FFTW_DOUBLE_OPENMP_LIB + NAMES "fftw3_omp" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library( + FFTW_DOUBLE_MPI_LIB + NAMES "fftw3_mpi" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library( + FFTW_FLOAT_LIB + NAMES "fftw3f" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library( + FFTW_FLOAT_THREADS_LIB + NAMES "fftw3f_threads" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library( + 
FFTW_FLOAT_OPENMP_LIB + NAMES "fftw3f_omp" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library( + FFTW_FLOAT_MPI_LIB + NAMES "fftw3f_mpi" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library( + FFTW_LONGDOUBLE_LIB + NAMES "fftw3l" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library( + FFTW_LONGDOUBLE_THREADS_LIB + NAMES "fftw3l_threads" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library(FFTW_LONGDOUBLE_OPENMP_LIB + NAMES "fftw3l_omp" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_library(FFTW_LONGDOUBLE_MPI_LIB + NAMES "fftw3l_mpi" + PATHS ${PKG_FFTW_LIBRARY_DIRS} ${LIB_INSTALL_DIR} + ) + + find_path(FFTW_INCLUDE_DIRS + NAMES "fftw3.h" + PATHS ${PKG_FFTW_INCLUDE_DIRS} ${INCLUDE_INSTALL_DIR} + ) + +endif( FFTW_ROOT ) + +#--------------------------------------- components + +if (FFTW_DOUBLE_LIB) + set(FFTW_DOUBLE_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_DOUBLE_LIB}) + add_library(FFTW::Double INTERFACE IMPORTED) + set_target_properties(FFTW::Double + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_DOUBLE_LIB}" + ) +else() + set(FFTW_DOUBLE_LIB_FOUND FALSE) +endif() + +if (FFTW_FLOAT_LIB) + set(FFTW_FLOAT_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_FLOAT_LIB}) + add_library(FFTW::Float INTERFACE IMPORTED) + set_target_properties(FFTW::Float + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_FLOAT_LIB}" + ) +else() + set(FFTW_FLOAT_LIB_FOUND FALSE) +endif() + +if (FFTW_LONGDOUBLE_LIB) + set(FFTW_LONGDOUBLE_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_LONGDOUBLE_LIB}) + add_library(FFTW::LongDouble INTERFACE IMPORTED) + set_target_properties(FFTW::LongDouble + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_LONGDOUBLE_LIB}" + ) +else() + set(FFTW_LONGDOUBLE_LIB_FOUND FALSE) +endif() + +if (FFTW_DOUBLE_THREADS_LIB) + set(FFTW_DOUBLE_THREADS_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_DOUBLE_THREADS_LIB}) + add_library(FFTW::DoubleThreads INTERFACE IMPORTED) + set_target_properties(FFTW::DoubleThreads + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_DOUBLE_THREADS_LIB}" + ) +else() + set(FFTW_DOUBLE_THREADS_LIB_FOUND FALSE) +endif() + +if (FFTW_FLOAT_THREADS_LIB) + set(FFTW_FLOAT_THREADS_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_FLOAT_THREADS_LIB}) + add_library(FFTW::FloatThreads INTERFACE IMPORTED) + set_target_properties(FFTW::FloatThreads + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_FLOAT_THREADS_LIB}" + ) +else() + set(FFTW_FLOAT_THREADS_LIB_FOUND FALSE) +endif() + +if (FFTW_LONGDOUBLE_THREADS_LIB) + set(FFTW_LONGDOUBLE_THREADS_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_LONGDOUBLE_THREADS_LIB}) + add_library(FFTW::LongDoubleThreads INTERFACE IMPORTED) + set_target_properties(FFTW::LongDoubleThreads + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_LONGDOUBLE_THREADS_LIB}" + ) +else() + set(FFTW_LONGDOUBLE_THREADS_LIB_FOUND FALSE) +endif() + +if (FFTW_DOUBLE_OPENMP_LIB) + set(FFTW_DOUBLE_OPENMP_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_DOUBLE_OPENMP_LIB}) + add_library(FFTW::DoubleOpenMP INTERFACE IMPORTED) + set_target_properties(FFTW::DoubleOpenMP + 
PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_DOUBLE_OPENMP_LIB}" + ) +else() + set(FFTW_DOUBLE_OPENMP_LIB_FOUND FALSE) +endif() + +if (FFTW_FLOAT_OPENMP_LIB) + set(FFTW_FLOAT_OPENMP_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_FLOAT_OPENMP_LIB}) + add_library(FFTW::FloatOpenMP INTERFACE IMPORTED) + set_target_properties(FFTW::FloatOpenMP + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_FLOAT_OPENMP_LIB}" + ) +else() + set(FFTW_FLOAT_OPENMP_LIB_FOUND FALSE) +endif() + +if (FFTW_LONGDOUBLE_OPENMP_LIB) + set(FFTW_LONGDOUBLE_OPENMP_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_LONGDOUBLE_OPENMP_LIB}) + add_library(FFTW::LongDoubleOpenMP INTERFACE IMPORTED) + set_target_properties(FFTW::LongDoubleOpenMP + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_LONGDOUBLE_OPENMP_LIB}" + ) +else() + set(FFTW_LONGDOUBLE_OPENMP_LIB_FOUND FALSE) +endif() + +if (FFTW_DOUBLE_MPI_LIB) + set(FFTW_DOUBLE_MPI_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_DOUBLE_MPI_LIB}) + add_library(FFTW::DoubleMPI INTERFACE IMPORTED) + set_target_properties(FFTW::DoubleMPI + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_DOUBLE_MPI_LIB}" + ) +else() + set(FFTW_DOUBLE_MPI_LIB_FOUND FALSE) +endif() + +if (FFTW_FLOAT_MPI_LIB) + set(FFTW_FLOAT_MPI_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_FLOAT_MPI_LIB}) + add_library(FFTW::FloatMPI INTERFACE IMPORTED) + set_target_properties(FFTW::FloatMPI + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_FLOAT_MPI_LIB}" + ) +else() + set(FFTW_FLOAT_MPI_LIB_FOUND FALSE) +endif() + +if (FFTW_LONGDOUBLE_MPI_LIB) + set(FFTW_LONGDOUBLE_MPI_LIB_FOUND TRUE) + set(FFTW_LIBRARIES ${FFTW_LIBRARIES} ${FFTW_LONGDOUBLE_MPI_LIB}) + add_library(FFTW::LongDoubleMPI INTERFACE IMPORTED) + set_target_properties(FFTW::LongDoubleMPI + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES "${FFTW_LONGDOUBLE_MPI_LIB}" + ) +else() + set(FFTW_LONGDOUBLE_MPI_LIB_FOUND FALSE) +endif() + +#--------------------------------------- end components + +set( CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_SAV} ) + +include(FindPackageHandleStandardArgs) + +find_package_handle_standard_args(FFTW + REQUIRED_VARS FFTW_INCLUDE_DIRS + HANDLE_COMPONENTS + ) + +mark_as_advanced( + FFTW_INCLUDE_DIRS + FFTW_LIBRARIES + FFTW_FLOAT_LIB + FFTW_DOUBLE_LIB + FFTW_LONGDOUBLE_LIB + FFTW_FLOAT_THREADS_LIB + FFTW_DOUBLE_THREADS_LIB + FFTW_LONGDOUBLE_THREADS_LIB + FFTW_FLOAT_OPENMP_LIB + FFTW_DOUBLE_OPENMP_LIB + FFTW_LONGDOUBLE_OPENMP_LIB + FFTW_FLOAT_MPI_LIB + FFTW_DOUBLE_MPI_LIB + FFTW_LONGDOUBLE_MPI_LIB + ) \ No newline at end of file diff --git a/core/argsparser.cpp b/core/argsparser.cpp index 0404f4c3..254022d4 100644 --- a/core/argsparser.cpp +++ b/core/argsparser.cpp @@ -623,7 +623,7 @@ bool ParamDef::parse() pOpt->name = pOpt->token.lexeme; pOpt->parseArgumentList(); pOpt->parseCommentList(pOpt->comments); - pOpt->parseParamList(TOK_REQUIRES, prog, pOpt->requires, false); + pOpt->parseParamList(TOK_REQUIRES, prog, pOpt->requirements, false); pArg->subParams.push_back(pOpt); } @@ -634,7 +634,7 @@ bool ParamDef::parse() parseParamList(TOK_ALIAS, prog, aliases, true); //REQUIRES section - parseParamList(TOK_REQUIRES, prog, requires, false); + parseParamList(TOK_REQUIRES, 
prog, requirements, false); return true; } @@ -700,12 +700,12 @@ bool ParamDef::checkRequires(std::stringstream & errors, ProgramDef * prog) { ParamDef * param; bool correct = true; - for (size_t i = 0; i < requires.size(); ++i) + for (size_t i = 0; i < requirements.size(); ++i) { - param = prog->findParam(requires[i]); + param = prog->findParam(requirements[i]); if (param->counter < 1) { - errors << "Parameter " << name << " requires " << requires[i] << std::endl; + errors << "Parameter " << name << " requirements " << requirements[i] << std::endl; correct = false; } } @@ -725,7 +725,7 @@ void ParamDef::check(std::stringstream & errors) if (counter == 1) { - //Check requires restrictions + //Check requirements restrictions checkRequires(errors, prog); //Check the number of arguments diff --git a/core/argsparser.h b/core/argsparser.h index f88f651d..b8c8cbe3 100644 --- a/core/argsparser.h +++ b/core/argsparser.h @@ -202,7 +202,7 @@ class ParamDef: public ASTNode CommentList comments; StringVector aliases; - StringVector requires; + StringVector requirements; //Empty constructor ParamDef(ArgLexer *lexer, ASTNode * parent); diff --git a/core/argsprinter.cpp b/core/argsprinter.cpp index cf033d17..303763b9 100644 --- a/core/argsprinter.cpp +++ b/core/argsprinter.cpp @@ -100,13 +100,13 @@ void ConsolePrinter::printSection(const SectionDef §ion, int v) } } -void ConsolePrinter::printRequiresList(StringVector requires) +void ConsolePrinter::printRequiresList(StringVector requirements) { - if (!requires.empty()) + if (!requirements.empty()) { - *pOut << " ( requires "; - for (size_t i = 0; i < requires.size(); ++i) - *pOut << requires[i] << " "; + *pOut << " ( requirements "; + for (size_t i = 0; i < requirements.size(); ++i) + *pOut << requirements[i] << " "; *pOut << ")"; } } @@ -138,7 +138,7 @@ void ConsolePrinter::printParam(const ParamDef ¶m, int v) if (!param.notOptional) *pOut << "]"; - printRequiresList(param.requires); + printRequiresList(param.requirements); *pOut << std::endl; printCommentList(param.comments, v); @@ -156,7 +156,7 @@ void ConsolePrinter::printParam(const ParamDef ¶m, int v) *pOut << " "; printArgument(*(arg.subParams[j]->arguments[k])); } - printRequiresList(arg.subParams[j]->requires); + printRequiresList(arg.subParams[j]->requirements); *pOut << std::endl; printCommentList(arg.subParams[j]->comments, v); @@ -386,13 +386,13 @@ void WikiPrinter::printSection(const SectionDef §ion, int v) } } -void WikiPrinter::printRequiresList(StringVector requires) +void WikiPrinter::printRequiresList(StringVector requirements) { - if (!requires.empty()) + if (!requirements.empty()) { - *pOut << " ( requires "; - for (size_t i = 0; i < requires.size(); ++i) - *pOut << requires[i] << " "; + *pOut << " ( requirements "; + for (size_t i = 0; i < requirements.size(); ++i) + *pOut << requirements[i] << " "; *pOut << ")"; } } @@ -421,7 +421,7 @@ void WikiPrinter::printParam(const ParamDef ¶m, int v) printArgument(*param.arguments[i], v); } *pOut << " %ENDCOLOR%="; - printRequiresList(param.requires); + printRequiresList(param.requirements); *pOut <<": " ; printCommentList(param.comments, v); @@ -443,7 +443,7 @@ void WikiPrinter::printParam(const ParamDef ¶m, int v) printArgument(*(arg.subParams[j]->arguments[k]), v); } *pOut << " %ENDCOLOR%" << std::endl; - printRequiresList(arg.subParams[j]->requires); + printRequiresList(arg.subParams[j]->requirements); // *pOut << std::endl; printCommentList(arg.subParams[j]->comments, v); diff --git a/core/argsprinter.h b/core/argsprinter.h index 
bd1f7041..2652c3f1 100644 --- a/core/argsprinter.h +++ b/core/argsprinter.h @@ -53,7 +53,7 @@ class ConsolePrinter: public Printer { protected: std::ostream * pOut; - void printRequiresList(StringVector requires); + void printRequiresList(StringVector requirements); public: bool color; /**Constructor */ @@ -88,7 +88,7 @@ class WikiPrinter: public Printer { protected: std::ostream * pOut; - void printRequiresList(StringVector requires); + void printRequiresList(StringVector requirements); public: /**Constructor */ WikiPrinter(std::ostream &out=std::cout); diff --git a/core/metadata_label.h b/core/metadata_label.h index 0c8b4099..690ebbe8 100644 --- a/core/metadata_label.h +++ b/core/metadata_label.h @@ -451,12 +451,15 @@ enum MDLabel MDL_SPH_TSNE_COEFF2D, ///tsne coefficients in 2D MDL_STDDEV, ///> data.doubleValue; + { + std::string tmp; + is >> tmp; + + char* end = nullptr; + data.doubleValue = std::strtod(tmp.c_str(), &end); + + if (end != tmp.c_str() + tmp.size()) + { + // Set failure flag + is.setstate(std::ios::failbit); + } + } break; case LABEL_STRING: { diff --git a/core/metadata_static.h b/core/metadata_static.h index a07a9abc..814436e1 100644 --- a/core/metadata_static.h +++ b/core/metadata_static.h @@ -674,14 +674,18 @@ class MDLabelStaticInit MDL::addLabel(MDL_SPH_TSNE_COEFF2D, LABEL_VECTOR_DOUBLE, "sphTsne2D"); MDL::addLabel(MDL_STDDEV, LABEL_DOUBLE, "stddev"); MDL::addLabel(MDL_STAR_COMMENT, LABEL_STRING, "starComment"); + MDL::addLabel(MDL_SUBTOMOID, LABEL_SIZET, "subtomogramId"); MDL::addLabel(MDL_SUBTRACTION_R2, LABEL_DOUBLE, "R2subtraction"); + MDL::addLabel(MDL_SUBTRACTION_B, LABEL_DOUBLE, "Bsubtraction"); MDL::addLabel(MDL_SUBTRACTION_BETA0, LABEL_DOUBLE, "B0subtraction"); MDL::addLabel(MDL_SUBTRACTION_BETA1, LABEL_DOUBLE, "B1subtraction"); MDL::addLabel(MDL_SUM, LABEL_DOUBLE, "sum"); MDL::addLabel(MDL_SUMWEIGHT, LABEL_DOUBLE, "sumWeight"); MDL::addLabel(MDL_SYMNO, LABEL_INT, "symNo"); - MDL::addLabel(MDL_TOMOGRAM_VOLUME, LABEL_STRING, "tomogramVolume", TAGLABEL_IMAGE); + + MDL::addLabel(MDL_TILTPARTICLEID, LABEL_SIZET, "tiltParticleId"); + MDL::addLabel(MDL_TOMOGRAM_VOLUME, LABEL_STRING, "tomogramVolume", TAGLABEL_IMAGE); MDL::addLabel(MDL_TOMOGRAMMD, LABEL_STRING, "tomogramMetadata", TAGLABEL_METADATA); MDL::addLabel(MDL_TSID, LABEL_STRING, "tiltSeriesId"); diff --git a/core/metadata_vec.cpp b/core/metadata_vec.cpp index 111d310c..86338b76 100644 --- a/core/metadata_vec.cpp +++ b/core/metadata_vec.cpp @@ -820,7 +820,10 @@ void MetaDataVec::split(size_t parts, std::vector &results, const M REPORT_ERROR(ERR_MD, "MetaDataDb::split: Couldn't split a metadata in more parts than its size"); MetaDataVec sorted; - sorted.sort(*this, sortLabel); + if (sortLabel == MDL_UNDEFINED) + sorted = *this; + else + sorted.sort(*this, sortLabel); results.clear(); results.resize(parts); diff --git a/core/rwEER.cpp b/core/rwEER.cpp index 1f893520..65a2bb91 100644 --- a/core/rwEER.cpp +++ b/core/rwEER.cpp @@ -573,7 +573,6 @@ int ImageBase::readEER(size_t select_img) { size_t found = filename.find_first_of("#"); FileName infolist = filename.substr(found + 1); filename = filename.substr(0, found); - infolist.toLowercase(); splitString(infolist, ",", info, false); if (info.size() < 3) @@ -603,11 +602,12 @@ int ImageBase::readEER(size_t select_img) { REPORT_ERROR(ERR_PARAM_INCORRECT, "Incorrect output size. Valid sizes are: 4K, 8K."); } - _zDim = _nDim = 1; + _zDim = 1; + _nDim = select_img > 0 ? 
1 : fractioning; setDimensions(_xDim, _yDim, _zDim, _nDim); mdaBase->coreAllocateReuse(); - if(info[2] == "uint8") + if(info[2] == "uint8") datatype = DT_UChar; else if (info[2] == "uint16") datatype = DT_UShort; @@ -625,13 +625,41 @@ int ImageBase::readEER(size_t select_img) { return 0; EERRenderer renderer; - renderer.read(hFile->fileName, upsampling); + renderer.read(dataFName, upsampling); - MultidimArray buffer(_yDim, _xDim); - const auto step = renderer.getNFrames() / fractioning; - const auto first = (select_img-1)*step; - const auto last = first + step - 1; - renderer.renderFrames(first, last, buffer); + MultidimArray buffer; + const auto nEerFrames = renderer.getNFrames(); + const auto step = nEerFrames / fractioning; + if (select_img > 0) + { + // Render single frame + if (select_img > fractioning) + { + REPORT_ERROR(ERR_LOGIC_ERROR, "Requested frame greater than the fractioning"); + } + else + { + + buffer.resizeNoCopy(_yDim, _xDim); + const auto first = (select_img-1)*step; + const auto last = first + step - 1; + renderer.renderFrames(first, last, buffer); + } + } + else + { + // Render the whole movie + buffer.resizeNoCopy(fractioning, 1, _yDim, _xDim); + MultidimArray frameAlias; + for(size_t i = 0; i < fractioning; ++i) + { + frameAlias.aliasImageInStack(buffer, i); + const auto first = i*step; + const auto last = first + step - 1; + renderer.renderFrames(first, last, frameAlias); + } + } + setPage2T( 0UL, reinterpret_cast(MULTIDIM_ARRAY(buffer)), DT_Int, diff --git a/core/rwMRC.cpp b/core/rwMRC.cpp index 8a774520..eeb37f7a 100644 --- a/core/rwMRC.cpp +++ b/core/rwMRC.cpp @@ -217,7 +217,7 @@ int ImageBase::readMRC(size_t start_img, size_t batch_size, bool isStack /* = fa switch ( header->mode ) { case 0: - datatype = DT_UChar; + datatype = DT_SChar; break; case 1: datatype = DT_Short; diff --git a/core/utils/memory_utils.h b/core/utils/memory_utils.h index 5533cff9..5dfd097f 100644 --- a/core/utils/memory_utils.h +++ b/core/utils/memory_utils.h @@ -71,7 +71,9 @@ namespace memoryUtils inline T* page_aligned_alloc(size_t elems, bool initToZero) { size_t bytes = elems * sizeof(T); auto p = (T*)page_aligned_alloc(bytes); - madvise(p, bytes, MADV_HUGEPAGE); + #ifdef MADV_HUGEPAGE + madvise(p, bytes, MADV_HUGEPAGE); // Not available in all platforms + #endif if (initToZero) { memset(p, 0, bytes); } diff --git a/core/xmipp_filename.cpp b/core/xmipp_filename.cpp index 4a44f4c8..7bc06eb0 100644 --- a/core/xmipp_filename.cpp +++ b/core/xmipp_filename.cpp @@ -257,9 +257,9 @@ bool FileName::hasImageExtension() const { String ext = getFileFormat(); return (ext=="img" || ext=="hed" || ext=="inf" || ext=="raw" || ext=="mrc" || - ext=="map" || ext=="spi" || ext=="xmp" || ext=="tif" || ext=="dm3" || - ext=="spe" || ext=="em" || ext=="pif" || ext=="ser" || ext=="stk" || - ext=="mrcs"|| ext=="jpg" || ext=="dm4"); + ext=="map" || ext=="spi" || ext=="xmp" || ext=="tif" || ext=="gain" || + ext=="dm3" || ext=="spe" || ext=="em" || ext=="pif" || ext=="ser" || + ext=="stk" || ext=="mrcs"|| ext=="jpg" || ext=="dm4"); } // Has image extension ..................................................... 
diff --git a/core/xmipp_image_base.cpp b/core/xmipp_image_base.cpp index 0de4f276..790cf234 100644 --- a/core/xmipp_image_base.cpp +++ b/core/xmipp_image_base.cpp @@ -617,7 +617,7 @@ ImageFHandler* ImageBase::openFile(const FileName &name, int mode) const } - if (ext_name.contains("tif")) + if (ext_name.contains("tif") || ext_name.contains("gain")) { TIFFSetWarningHandler(NULL); // Switch off warning messages if ((hFile->tif = TIFFOpen(fileName.c_str(), wmChar.c_str())) == NULL) @@ -731,7 +731,7 @@ void ImageBase::closeFile(ImageFHandler* hFile) const } - if (ext_name.contains("tif")) + if (ext_name.contains("tif") || ext_name.contains("gain")) { TIFFClose(tif); /* Since when creating a TIFF file without adding an image the file is 8 bytes @@ -1080,7 +1080,7 @@ void ImageBase::_write(const FileName &name, ImageFHandler* hFile, size_t select writeTIA(select_img,false,mode); else if (ext_name.contains("raw") || ext_name.contains("inf")) writeINF(select_img,false,mode,imParam,castMode); - else if (ext_name.contains("tif")) + else if (ext_name.contains("tif") || ext_name.contains("gain")) writeTIFF(select_img,isStack,mode,imParam,castMode); else if (ext_name.contains("spe")) writeSPE(select_img,isStack,mode); diff --git a/core/xmipp_image_extension.cpp b/core/xmipp_image_extension.cpp index 377f4dae..3dab919c 100644 --- a/core/xmipp_image_extension.cpp +++ b/core/xmipp_image_extension.cpp @@ -79,7 +79,7 @@ bool checkImageFileSize(const FileName &name, const ImageInfo &imgInfo, bool err dataFname = name.removeLastExtension().addExtension("img"); else if (ext.contains("inf")) dataFname = name.removeLastExtension(); - else if (ext.contains("tif") || ext.contains("jpg") || ext.contains("hdf") || ext.contains("h5")) + else if (ext.contains("tif") || ext.contains("gain") || ext.contains("jpg") || ext.contains("hdf") || ext.contains("h5")) return true; else dataFname = name; diff --git a/install/scons-tools/AutoConfig.py b/install/scons-tools/AutoConfig.py deleted file mode 100644 index 99bd57c0..00000000 --- a/install/scons-tools/AutoConfig.py +++ /dev/null @@ -1,87 +0,0 @@ -# AutoConfig Builder: Runs ./configure inside a directory. -# -# Parameters: -# AutoConfigParams -- Sequence of parameter strings to include on the -# configure command line. -# Default: [] -# AutoConfigTarget -- File that configure will create. -# Default: "config.h" -# AutoConfigSource -- File that configure depends on. -# Default: "Makefile.in" -# AutoConfigStdOut -- File where the output will be written to. 
-# Default: None - -import sys -from os.path import join, dirname -import subprocess - - -def parms(target, source, env): - """Assemble various AutoConfig parameters.""" - - workdir = dirname(str(source[0])) - params = env.get('AutoConfigParams', []) - if not isinstance(params, list): - print('AutoConfigParams must be a sequence') - sys.exit(1) - targetfile = env.get('AutoConfigTarget', 'config.h') - sourcefile = env.get('AutoConfigSource', 'Makefile.in') - out = env.get('AutoConfigStdOut') - return (workdir, params, targetfile, sourcefile, out) - - -def message(target, source, env): - """Return a pretty AutoConfig message.""" - - dirx, params, targetfile, sourcefile, out = parms(target, source, env) - - if 'AUTOCONFIGCOMSTR' in env: - msg = env.subst(env['AUTOCONFIGCOMSTR'], - target=target, source=source, raw=1) - return '%s > %s' % (msg, out) - - return 'cd %s && ./configure %s' % (dirx, ' '.join(params)) - - -def emitter(target, source, env): - """Remap the source & target to path/$AutoConfigSource and path/$AutoConfigTarget.""" - - dirx, params, targetfile, sourcefile, out = parms(target, source, env) - - # NOTE: Using source[0] instead of target[0] for the target's path! - # If there's only one . in the source[0] value, then Scons strips off the - # extension when it determines the target[0] value. For example, - # AutoConfig('foo.blah') - # sets - # source[0] = 'foo.blah' - # target[0] = 'foo' - # (SCons does NOT do this if source[0] has more than one . ) - # Since this emitter expects the incoming source[0] value to be a directory - # name, we can use it here for the rewritten target[0]. - - return ([ join(str(source[0]), targetfile) ], - [ join(str(source[0]), sourcefile) ]) - - -def builder(target, source, env): - """Run ./configure in a directory.""" - - dirx, params, targetfile, sourcefile, out = parms(target, source, env) - - if 'AUTOCONFIGCOMSTR' in env and out is not None: - fout = open(out, 'w+') - else: - fout = None - - return subprocess.call(['./configure'] + params, cwd=dirx, - stdout=fout, stderr=fout) - - -def generate(env, **kwargs): - env['BUILDERS']['AutoConfig'] = env.Builder( - action=env.Action(builder, message), - emitter=emitter, single_source=True) - - -def exists(env): - return True diff --git a/install/scons-tools/Make.py b/install/scons-tools/Make.py deleted file mode 100644 index 3b4652b4..00000000 --- a/install/scons-tools/Make.py +++ /dev/null @@ -1,156 +0,0 @@ -# Make Builder: Runs make. -# -# Parameters: -# MakePath -- SCons Dir node representing the directory in which to run make. REQUIRED. -# MakeCmd -- The 'make' executable to run. -# Default: make -# MakeEnv -- Dictionary of variables to set in the make execution environment. -# Default: none -# MakeOpts -- Options to pass on the make command line. -# Default: none -# MakeOneThread -- Don't pass any -j option to make. 
-# Default: False -# MakeTargets -- String of space-seperated targets to pass to make -# Default: "" - -import sys -import os -import subprocess - -from SCons.Script import GetOption - - -def parms(target, source, env): - """Assemble various Make parameters.""" - - if 'MakePath' not in env: - print("Make builder requires MakePath variable") - sys.exit(1) - - make_path = env.subst(str(env['MakePath'])) - - make_cmd = 'make' - if 'MakeCmd' in env: - make_cmd = env.subst(env['MakeCmd']) - elif 'MAKE' in env: - make_cmd = env.subst(env['MAKE']) - - make_env = None - if env.get('CROSS_BUILD'): - make_env = env['CROSS_ENV'] - if 'MakeEnv' in env: - if make_env == None: - make_env = {} - else: - # We're appending to an existing dictionary, so create a copy - # instead of appending to the original env['CROSS_ENV'] - make_env = env['CROSS_ENV'][:] - for (k,v) in env['MakeEnv'].items(): - make_env[k] = v - - make_opts = None - if 'MakeOpts' in env: - make_opts = env.subst(env['MakeOpts']) - - make_jobs = GetOption('num_jobs') - if 'MakeOneThread' in env and env['MakeOneThread']: - make_jobs = 1 - - make_targets = None - if 'MakeTargets' in env: - make_targets = env.subst(env['MakeTargets']) - - out = env.get('MakeStdOut') - - return (make_path, make_env, make_targets, make_cmd, make_jobs, make_opts, out) - - -def message(target, source, env): - """Return a pretty Make message""" - - (make_path, - make_env, - make_targets, - make_cmd, - make_jobs, - make_opts, - out) = parms(target, source, env) - - myenv = env.Clone() - # Want to use MakeTargets in the MAKECOMSTR, but make it pretty first. - if 'MakeTargets' in myenv: - myenv['MakeTargets'] += ' ' - else: - myenv['MakeTargets'] = '' - - if 'MAKECOMSTR' in myenv: - return myenv.subst(myenv['MAKECOMSTR'], - target=target, source=source, raw=1) + " > %s " % out - - msg = 'cd ' + make_path + ' &&' - if make_env != None: - for k, v in make_env.items(): - msg += ' ' + k + '=' + v - msg += ' ' + make_cmd - if make_jobs > 1: - msg += ' -j %d' % make_jobs - if make_opts != None: - msg += ' ' + ' '.join(make_opts) - if make_targets != None: - msg += ' ' + make_targets - return msg - - -def builder(target, source, env): - """Run make in a directory.""" - - (make_path, - make_env, - make_targets, - make_cmd, - make_jobs, - make_opts, - out) = parms(target, source, env) - - # Make sure there's a directory to run make in - if len(make_path) == 0: - print('No path specified') - if not os.path.exists(make_path): - print('Path %s not found' % make_path) - - # Build up the command and its arguments in a list - fullcmd = [ make_cmd ] - - if make_jobs > 1: - fullcmd += [ '-j', str(make_jobs) ] - - if make_opts: - fullcmd += make_opts - - if make_targets: - fullcmd += make_targets.split() - - # Capture the make command's output, unless we're verbose - if out is not None: - fout = open(out, 'w+') - else: - fout = None - - # Make! 
- make = subprocess.Popen(fullcmd, cwd=make_path, - stdout=fout, stderr=fout, - env=make_env) - - # Some subprocesses don't terminate unless we communicate with them - output = make.communicate()[0] - return make.returncode - - -def generate(env, **kwargs): - env['BUILDERS']['Make'] = env.Builder(action=env.Action(builder, message)) - - -def exists(env): - if env.WhereIs(env.subst('$MAKE')) != None: - return True - return False diff --git a/scripts/version.py b/scripts/version.py new file mode 100644 index 00000000..98e49a98 --- /dev/null +++ b/scripts/version.py @@ -0,0 +1,43 @@ +import argparse, os + +def __getReleaseName(path: str, lineStart: str) -> str: + """ + ### This function returns the first line of the provided file that starts with the given string. + + #### Params: + - path (str): Path to the file to be parsed. + - lineStart (str): Start of the line to detect. + + #### Returns: + - (str): First line found that starts with the given string. + """ + with open(path) as changelogFile: + for line in changelogFile.readlines(): + if line.startswith(lineStart): + return line[:-1] if line.endswith("\n") else line + return '' + +if __name__ == "__main__": + # Generate and parse args + parser = argparse.ArgumentParser(prog="version") + parser.add_argument("-t", "--type", choices=['full', 'number', 'name'], default='full', + help='Type of version to show.\n' + '\'number\' only shows the version number.\n' + '\'name\' only shows the release name.\n' + '\'full\' shows the full release name, including number and name.') + parser.add_argument("-k", "--keep-format", action="store_true", help='Keep the original spacing of the release name.') + args = parser.parse_args() + + # Change directory to this script's directory + os.chdir(os.path.dirname(os.path.abspath(__file__))) + + # Get release full name + releaseLine = __getReleaseName("../CHANGELOG.md", "##") + fullName = releaseLine.replace("## Release ", "") + + # Print name depending on the provided arguments + resultName = fullName if args.keep_format else fullName.replace(" ", "") + if args.type != 'full': + parts = fullName.split("-") + resultName = parts[0].strip() if args.type == 'number' else parts[1].strip() + print(resultName) \ No newline at end of file
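
For context, below is a minimal sketch of the parsing that scripts/version.py applies to the first "## Release" heading of CHANGELOG.md, and of the two values the release workflow captures from it (TAG_NAME from the default invocation, RELEASE_NAME from --keep-format). The sample heading is one of the CHANGELOG entries added above, used here purely for illustration; the variable names are local to the sketch and not part of the script.

```python
# Minimal sketch, assuming it mirrors the logic of scripts/version.py (not a verbatim copy).
# version.py actually reads the first "##" line of CHANGELOG.md; this uses a sample heading.
sample_heading = "## Release 3.25.06.0 - Rhea"

full_name = sample_heading.replace("## Release ", "")   # "3.25.06.0 - Rhea"

tag_name = full_name.replace(" ", "")                   # default output, e.g. "3.25.06.0-Rhea" -> used as custom_tag
release_name = full_name                                # --keep-format output -> used as the GitHub release name
parts = full_name.split("-")
number, name = parts[0].strip(), parts[1].strip()       # what --type number / --type name would select

print(tag_name)
print(release_name)
print(number, name)
```

The release workflow then derives the major tag from TAG_NAME (everything before the first dot, via ${VERSION%%.*}) and force-pushes it, so a heading such as "## Release 3.25.06.0 - Rhea" would produce tag "3.25.06.0-Rhea" and update the "3" tag.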