diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 4990f2586e..9d171f387a 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,3 +1,4 @@ # Clang-format whole repo d8f14fdddb5ca0fbb32d8e2bf5ac2960d6ac5ce6 ed2117e6d6826a98b6988e2f18c0c34e408563b6 +0ca96d048e7a26f6ca31354e2e834a0b82951940 diff --git a/CMakeLists.txt b/CMakeLists.txt index 479b13b668..b6e81009c7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -15,40 +15,46 @@ if(${CMAKE_VERSION} VERSION_GREATER_EQUAL 3.22) cmake_policy(SET CMP0127 NEW) endif() -if ("${CMAKE_CURRENT_BINARY_DIR}" STREQUAL "${CMAKE_CURRENT_SOURCE_DIR}") - option(BOUT_ALLOW_INSOURCE_BUILD "Whether BOUT++ should really allow to build in source." OFF) - if (NOT ${BOUT_ALLOW_INSOURCE_BUILD}) - message(FATAL_ERROR "BOUT++ does not recommend in source builds. Try building out of source, e.g. with `cmake -S . -B build` or set -DBOUT_ALLOW_INSOURCE_BUILD=ON - but things may break!") +if("${CMAKE_CURRENT_BINARY_DIR}" STREQUAL "${CMAKE_CURRENT_SOURCE_DIR}") + option(BOUT_ALLOW_INSOURCE_BUILD + "Whether BOUT++ should really allow to build in source." OFF) + if(NOT ${BOUT_ALLOW_INSOURCE_BUILD}) + message( + FATAL_ERROR + "BOUT++ does not recommend in source builds. Try building out of source, e.g. with `cmake -S . -B build` or set -DBOUT_ALLOW_INSOURCE_BUILD=ON - but things may break!" 
+ ) endif() endif() -# CMake currently doesn't support proper semver -# Set the version here, strip any extra tags to use in `project` -# We try to use git to get a full description, inspired by setuptools_scm +# CMake currently doesn't support proper semver Set the version here, strip any +# extra tags to use in `project` We try to use git to get a full description, +# inspired by setuptools_scm set(_bout_previous_version "5.2.0") set(_bout_next_version "5.2.1") execute_process( COMMAND "git" describe --tags --match=v${_bout_previous_version} - COMMAND sed -e s/${_bout_previous_version}-/${_bout_next_version}.dev/ -e s/-/+/ - RESULTS_VARIABLE error_codes + COMMAND sed -e s/${_bout_previous_version}-/${_bout_next_version}.dev/ -e + s/-/+/ RESULTS_VARIABLE error_codes OUTPUT_VARIABLE BOUT_FULL_VERSION - OUTPUT_STRIP_TRAILING_WHITESPACE - ) + OUTPUT_STRIP_TRAILING_WHITESPACE) foreach(error_code ${error_codes}) - if (NOT ${error_code} STREQUAL 0) + if(NOT ${error_code} STREQUAL 0) set(BOUT_FULL_VERSION ${_bout_next_version}) endif() endforeach() # Remove leading "v" string(REGEX REPLACE "^v(.*)" "\\1" BOUT_FULL_VERSION ${BOUT_FULL_VERSION}) # Remove trailing tag -string(REGEX REPLACE "^([0-9]+\.[0-9]+\.[0-9]+)\..*" "\\1" BOUT_CMAKE_ACCEPTABLE_VERSION ${BOUT_FULL_VERSION}) +string(REGEX REPLACE "^([0-9]+\.[0-9]+\.[0-9]+)\..*" "\\1" + BOUT_CMAKE_ACCEPTABLE_VERSION ${BOUT_FULL_VERSION}) # Get the trailing tag -string(REGEX REPLACE "^[0-9]+\.[0-9]+\.[0-9]+\.(.*)" "\\1" BOUT_VERSION_TAG ${BOUT_FULL_VERSION}) +string(REGEX REPLACE "^[0-9]+\.[0-9]+\.[0-9]+\.(.*)" "\\1" BOUT_VERSION_TAG + ${BOUT_FULL_VERSION}) message(STATUS "Configuring BOUT++ version ${BOUT_FULL_VERSION}") -project(BOUT++ +project( + BOUT++ DESCRIPTION "Fluid PDE solver framework" VERSION ${BOUT_CMAKE_ACCEPTABLE_VERSION} LANGUAGES CXX) @@ -58,13 +64,18 @@ include(CMakeDependentOption) option(BUILD_SHARED_LIBS "Build shared libs" ON) # Override default -option(INSTALL_GTEST "Enable installation of 
googletest. (Projects embedding googletest may want to turn this OFF.)" OFF) +option( + INSTALL_GTEST + "Enable installation of googletest. (Projects embedding googletest may want to turn this OFF.)" + OFF) set(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake" ${CMAKE_MODULE_PATH}) include(BOUT++functions) -option(BOUT_UPDATE_GIT_SUBMODULE "Check submodules are up-to-date during build" ON) -# Adapted from https://cliutils.gitlab.io/modern-cmake/chapters/projects/submodule.html +option(BOUT_UPDATE_GIT_SUBMODULE "Check submodules are up-to-date during build" + ON) +# Adapted from +# https://cliutils.gitlab.io/modern-cmake/chapters/projects/submodule.html # Update submodules as needed function(bout_update_submodules) if(NOT BOUT_UPDATE_GIT_SUBMODULE) @@ -73,330 +84,348 @@ function(bout_update_submodules) find_package(Git QUIET) if(GIT_FOUND AND EXISTS "${PROJECT_SOURCE_DIR}/.git") message(STATUS "Submodule update") - execute_process(COMMAND ${GIT_EXECUTABLE} -c submodule.recurse=false submodule update --init --recursive + execute_process( + COMMAND ${GIT_EXECUTABLE} -c submodule.recurse=false submodule update + --init --recursive WORKING_DIRECTORY ${PROJECT_SOURCE_DIR} RESULT_VARIABLE GIT_SUBMOD_RESULT) if(NOT GIT_SUBMOD_RESULT EQUAL "0") - message(FATAL_ERROR "git submodule update --init failed with ${GIT_SUBMOD_RESULT}, please checkout submodules") + message( + FATAL_ERROR + "git submodule update --init failed with ${GIT_SUBMOD_RESULT}, please checkout submodules" + ) endif() endif() endfunction() set(BOUT_SOURCES - ./include/bout/adios_object.hxx - ./include/bout/array.hxx - ./include/bout/assert.hxx - ./include/bout/boundary_factory.hxx - ./include/bout/boundary_op.hxx - ./include/bout/boundary_region.hxx - ./include/bout/boundary_standard.hxx - ./include/bout/bout.hxx - ./include/bout/bout_enum_class.hxx - ./include/bout/bout_types.hxx - ./include/bout/build_config.hxx - ./include/bout/boutcomm.hxx - ./include/bout/boutexception.hxx - 
./include/bout/caliper_wrapper.hxx - ./include/bout/constants.hxx - ./include/bout/coordinates.hxx - ./include/bout/coordinates_accessor.hxx - ./include/bout/cyclic_reduction.hxx - ./include/bout/dcomplex.hxx - ./include/bout/deriv_store.hxx - ./include/bout/derivs.hxx - ./include/bout/difops.hxx - ./include/bout/expr.hxx - ./include/bout/fft.hxx - ./include/bout/field.hxx - ./include/bout/field2d.hxx - ./include/bout/field3d.hxx - ./include/bout/field_accessor.hxx - ./include/bout/field_data.hxx - ./include/bout/field_factory.hxx - ./include/bout/fieldgroup.hxx - ./include/bout/fieldperp.hxx - ./include/bout/fv_ops.hxx - ./include/bout/generic_factory.hxx - ./include/bout/globalfield.hxx - ./include/bout/globalindexer.hxx - ./include/bout/globals.hxx - ./include/bout/griddata.hxx - ./include/bout/gyro_average.hxx - ./include/bout/hypre_interface.hxx - ./include/bout/index_derivs.hxx - ./include/bout/index_derivs_interface.hxx - ./include/bout/initialprofiles.hxx - ./include/bout/interpolation.hxx - ./include/bout/interpolation_xz.hxx - ./include/bout/interpolation_z.hxx - ./include/bout/invert/laplacexy.hxx - ./include/bout/invert/laplacexz.hxx - ./include/bout/invert_laplace.hxx - ./include/bout/invert_parderiv.hxx - ./include/bout/invert_pardiv.hxx - ./include/bout/invertable_operator.hxx - ./include/bout/lapack_routines.hxx - ./include/bout/macro_for_each.hxx - ./include/bout/mask.hxx - ./include/bout/mesh.hxx - ./include/bout/monitor.hxx - ./include/bout/mpi_wrapper.hxx - ./include/bout/msg_stack.hxx - ./include/bout/multiostream.hxx - ./include/bout/openmpwrap.hxx - ./include/bout/operatorstencil.hxx - ./include/bout/options.hxx - ./include/bout/options_io.hxx - ./include/bout/optionsreader.hxx - ./include/bout/output.hxx - ./include/bout/output_bout_types.hxx - ./include/bout/parallel_boundary_op.hxx - ./include/bout/parallel_boundary_region.hxx - ./include/bout/paralleltransform.hxx - ./include/bout/petsc_interface.hxx - ./include/bout/petsclib.hxx - 
./include/bout/physicsmodel.hxx - ./include/bout/rajalib.hxx - ./include/bout/region.hxx - ./include/bout/rkscheme.hxx - ./include/bout/rvec.hxx - ./include/bout/scorepwrapper.hxx - ./include/bout/single_index_ops.hxx - ./include/bout/slepclib.hxx - ./include/bout/smoothing.hxx - ./include/bout/snb.hxx - ./include/bout/solver.hxx - ./include/bout/solverfactory.hxx - ./include/bout/sourcex.hxx - ./include/bout/stencils.hxx - ./include/bout/sundials_backports.hxx - ./include/bout/surfaceiter.hxx - ./include/bout/sys/expressionparser.hxx - ./include/bout/sys/generator_context.hxx - ./include/bout/sys/gettext.hxx - ./include/bout/sys/range.hxx - ./include/bout/sys/timer.hxx - ./include/bout/sys/type_name.hxx - ./include/bout/sys/uncopyable.hxx - ./include/bout/sys/uuid.h - ./include/bout/sys/variant.hxx - ./include/bout/template_combinations.hxx - ./include/bout/traits.hxx - ./include/bout/unused.hxx - ./include/bout/utils.hxx - ./include/bout/vecops.hxx - ./include/bout/vector2d.hxx - ./include/bout/vector3d.hxx - ./include/bout/where.hxx - ./src/bout++.cxx - ./src/bout++-time.hxx - ./src/field/field.cxx - ./src/field/field2d.cxx - ./src/field/field3d.cxx - ./src/field/field_data.cxx - ./src/field/field_factory.cxx - ./src/field/fieldgenerators.cxx - ./src/field/fieldgenerators.hxx - ./src/field/fieldgroup.cxx - ./src/field/fieldperp.cxx - ./src/field/generated_fieldops.cxx - ./src/field/globalfield.cxx - ./src/field/initialprofiles.cxx - ./src/field/vecops.cxx - ./src/field/vector2d.cxx - ./src/field/vector3d.cxx - ./src/field/where.cxx - ./src/invert/fft_fftw.cxx - ./src/invert/lapack_routines.cxx - ./src/invert/laplace/impls/cyclic/cyclic_laplace.cxx - ./src/invert/laplace/impls/cyclic/cyclic_laplace.hxx - ./src/invert/laplace/impls/iterative_parallel_tri/iterative_parallel_tri.cxx - ./src/invert/laplace/impls/iterative_parallel_tri/iterative_parallel_tri.hxx - ./src/invert/laplace/impls/multigrid/multigrid_alg.cxx - 
./src/invert/laplace/impls/multigrid/multigrid_laplace.cxx - ./src/invert/laplace/impls/multigrid/multigrid_laplace.hxx - ./src/invert/laplace/impls/multigrid/multigrid_solver.cxx - ./src/invert/laplace/impls/naulin/naulin_laplace.cxx - ./src/invert/laplace/impls/naulin/naulin_laplace.hxx - ./src/invert/laplace/impls/pcr/pcr.cxx - ./src/invert/laplace/impls/pcr/pcr.hxx - ./src/invert/laplace/impls/pcr_thomas/pcr_thomas.cxx - ./src/invert/laplace/impls/pcr_thomas/pcr_thomas.hxx - ./src/invert/laplace/impls/petsc/petsc_laplace.cxx - ./src/invert/laplace/impls/petsc/petsc_laplace.hxx - ./src/invert/laplace/impls/petsc3damg/petsc3damg.cxx - ./src/invert/laplace/impls/petsc3damg/petsc3damg.hxx - ./src/invert/laplace/impls/serial_band/serial_band.cxx - ./src/invert/laplace/impls/serial_band/serial_band.hxx - ./src/invert/laplace/impls/serial_tri/serial_tri.cxx - ./src/invert/laplace/impls/serial_tri/serial_tri.hxx - ./src/invert/laplace/impls/spt/spt.cxx - ./src/invert/laplace/impls/spt/spt.hxx - ./src/invert/laplace/impls/hypre3d/hypre3d_laplace.cxx - ./src/invert/laplace/impls/hypre3d/hypre3d_laplace.hxx - ./src/invert/laplace/invert_laplace.cxx - ./src/invert/laplacexy/impls/hypre/laplacexy-hypre.cxx - ./src/invert/laplacexy/impls/hypre/laplacexy-hypre.hxx - ./src/invert/laplacexy/impls/petsc/laplacexy-petsc.cxx - ./src/invert/laplacexy/impls/petsc/laplacexy-petsc.hxx - ./src/invert/laplacexy/impls/petsc2/laplacexy-petsc2.cxx - ./src/invert/laplacexy/impls/petsc2/laplacexy-petsc2.hxx - ./src/invert/laplacexy/laplacexy.cxx - ./src/invert/laplacexz/impls/cyclic/laplacexz-cyclic.cxx - ./src/invert/laplacexz/impls/cyclic/laplacexz-cyclic.hxx - ./src/invert/laplacexz/impls/petsc/laplacexz-petsc.cxx - ./src/invert/laplacexz/impls/petsc/laplacexz-petsc.hxx - ./src/invert/laplacexz/laplacexz.cxx - ./src/invert/parderiv/impls/cyclic/cyclic.cxx - ./src/invert/parderiv/impls/cyclic/cyclic.hxx - ./src/invert/parderiv/invert_parderiv.cxx - 
./src/invert/pardiv/impls/cyclic/pardiv_cyclic.cxx - ./src/invert/pardiv/impls/cyclic/pardiv_cyclic.hxx - ./src/invert/pardiv/invert_pardiv.cxx - ./src/mesh/boundary_factory.cxx - ./src/mesh/boundary_region.cxx - ./src/mesh/boundary_standard.cxx - ./src/mesh/coordinates.cxx - ./src/mesh/coordinates_accessor.cxx - ./src/mesh/data/gridfromfile.cxx - ./src/mesh/data/gridfromoptions.cxx - ./src/mesh/difops.cxx - ./src/mesh/fv_ops.cxx - ./src/mesh/impls/bout/boutmesh.cxx - ./src/mesh/impls/bout/boutmesh.hxx - ./src/mesh/index_derivs.cxx - ./src/mesh/interpolation_xz.cxx - ./src/mesh/interpolation/bilinear_xz.cxx - ./src/mesh/interpolation/hermite_spline_xz.cxx - ./src/mesh/interpolation/hermite_spline_z.cxx - ./src/mesh/interpolation/interpolation_z.cxx - ./src/mesh/interpolation/lagrange_4pt_xz.cxx - ./src/mesh/interpolation/monotonic_hermite_spline_xz.cxx - ./src/mesh/invert3x3.hxx - ./src/mesh/mesh.cxx - ./src/mesh/parallel/fci.cxx - ./src/mesh/parallel/fci.hxx - ./src/mesh/parallel/identity.cxx - ./src/mesh/parallel/shiftedmetric.cxx - ./src/mesh/parallel/shiftedmetricinterp.cxx - ./src/mesh/parallel/shiftedmetricinterp.hxx - ./src/mesh/parallel_boundary_op.cxx - ./src/mesh/parallel_boundary_region.cxx - ./src/mesh/surfaceiter.cxx - ./src/physics/gyro_average.cxx - ./src/physics/physicsmodel.cxx - ./src/physics/smoothing.cxx - ./src/physics/snb.cxx - ./src/physics/sourcex.cxx - ./src/solver/impls/adams_bashforth/adams_bashforth.cxx - ./src/solver/impls/adams_bashforth/adams_bashforth.hxx - ./src/solver/impls/arkode/arkode.cxx - ./src/solver/impls/arkode/arkode.hxx - ./src/solver/impls/cvode/cvode.cxx - ./src/solver/impls/cvode/cvode.hxx - ./src/solver/impls/euler/euler.cxx - ./src/solver/impls/euler/euler.hxx - ./src/solver/impls/ida/ida.cxx - ./src/solver/impls/ida/ida.hxx - ./src/solver/impls/imex-bdf2/imex-bdf2.cxx - ./src/solver/impls/imex-bdf2/imex-bdf2.hxx - ./src/solver/impls/petsc/petsc.cxx - ./src/solver/impls/petsc/petsc.hxx - 
./src/solver/impls/power/power.cxx - ./src/solver/impls/power/power.hxx - ./src/solver/impls/pvode/pvode.cxx - ./src/solver/impls/pvode/pvode.hxx - ./src/solver/impls/rk3-ssp/rk3-ssp.cxx - ./src/solver/impls/rk3-ssp/rk3-ssp.hxx - ./src/solver/impls/rk4/rk4.cxx - ./src/solver/impls/rk4/rk4.hxx - ./src/solver/impls/rkgeneric/impls/cashkarp/cashkarp.cxx - ./src/solver/impls/rkgeneric/impls/cashkarp/cashkarp.hxx - ./src/solver/impls/rkgeneric/impls/rk4simple/rk4simple.cxx - ./src/solver/impls/rkgeneric/impls/rk4simple/rk4simple.hxx - ./src/solver/impls/rkgeneric/impls/rkf34/rkf34.cxx - ./src/solver/impls/rkgeneric/impls/rkf34/rkf34.hxx - ./src/solver/impls/rkgeneric/impls/rkf45/rkf45.cxx - ./src/solver/impls/rkgeneric/impls/rkf45/rkf45.hxx - ./src/solver/impls/rkgeneric/rkgeneric.cxx - ./src/solver/impls/rkgeneric/rkgeneric.hxx - ./src/solver/impls/rkgeneric/rkscheme.cxx - ./src/solver/impls/slepc/slepc.cxx - ./src/solver/impls/slepc/slepc.hxx - ./src/solver/impls/snes/snes.cxx - ./src/solver/impls/snes/snes.hxx - ./src/solver/impls/split-rk/split-rk.cxx - ./src/solver/impls/split-rk/split-rk.hxx - ./src/solver/solver.cxx - ./src/sys/adios_object.cxx - ./src/sys/bout_types.cxx - ./src/sys/boutcomm.cxx - ./src/sys/boutexception.cxx - ./src/sys/derivs.cxx - ./src/sys/expressionparser.cxx - ./src/sys/generator_context.cxx - ./include/bout/hyprelib.hxx - ./src/sys/hyprelib.cxx - ./src/sys/msg_stack.cxx - ./src/sys/options.cxx - ./src/sys/options/optionparser.hxx - ./src/sys/options/options_ini.cxx - ./src/sys/options/options_ini.hxx - ./src/sys/options/options_io.cxx - ./src/sys/options/options_netcdf.cxx - ./src/sys/options/options_netcdf.hxx - ./src/sys/options/options_adios.cxx - ./src/sys/options/options_adios.hxx - ./src/sys/optionsreader.cxx - ./src/sys/output.cxx - ./src/sys/petsclib.cxx - ./src/sys/range.cxx - ./src/sys/slepclib.cxx - ./src/sys/timer.cxx - ./src/sys/type_name.cxx - ./src/sys/utils.cxx - ${CMAKE_CURRENT_BINARY_DIR}/include/bout/revision.hxx - 
${CMAKE_CURRENT_BINARY_DIR}/include/bout/version.hxx - ) - + ./include/bout/adios_object.hxx + ./include/bout/array.hxx + ./include/bout/assert.hxx + ./include/bout/boundary_factory.hxx + ./include/bout/boundary_op.hxx + ./include/bout/boundary_region.hxx + ./include/bout/boundary_standard.hxx + ./include/bout/bout.hxx + ./include/bout/bout_enum_class.hxx + ./include/bout/bout_types.hxx + ./include/bout/build_config.hxx + ./include/bout/boutcomm.hxx + ./include/bout/boutexception.hxx + ./include/bout/caliper_wrapper.hxx + ./include/bout/constants.hxx + ./include/bout/coordinates.hxx + ./include/bout/coordinates_accessor.hxx + ./include/bout/cyclic_reduction.hxx + ./include/bout/dcomplex.hxx + ./include/bout/deriv_store.hxx + ./include/bout/derivs.hxx + ./include/bout/difops.hxx + ./include/bout/expr.hxx + ./include/bout/fft.hxx + ./include/bout/field.hxx + ./include/bout/field2d.hxx + ./include/bout/field3d.hxx + ./include/bout/field_accessor.hxx + ./include/bout/field_data.hxx + ./include/bout/field_factory.hxx + ./include/bout/fieldgroup.hxx + ./include/bout/fieldperp.hxx + ./include/bout/fv_ops.hxx + ./include/bout/generic_factory.hxx + ./include/bout/globalfield.hxx + ./include/bout/globalindexer.hxx + ./include/bout/globals.hxx + ./include/bout/griddata.hxx + ./include/bout/gyro_average.hxx + ./include/bout/hypre_interface.hxx + ./include/bout/index_derivs.hxx + ./include/bout/index_derivs_interface.hxx + ./include/bout/initialprofiles.hxx + ./include/bout/interpolation.hxx + ./include/bout/interpolation_xz.hxx + ./include/bout/interpolation_z.hxx + ./include/bout/invert/laplacexy.hxx + ./include/bout/invert/laplacexz.hxx + ./include/bout/invert_laplace.hxx + ./include/bout/invert_parderiv.hxx + ./include/bout/invert_pardiv.hxx + ./include/bout/invertable_operator.hxx + ./include/bout/lapack_routines.hxx + ./include/bout/macro_for_each.hxx + ./include/bout/mask.hxx + ./include/bout/mesh.hxx + ./include/bout/monitor.hxx + ./include/bout/mpi_wrapper.hxx + 
./include/bout/msg_stack.hxx + ./include/bout/multiostream.hxx + ./include/bout/openmpwrap.hxx + ./include/bout/operatorstencil.hxx + ./include/bout/options.hxx + ./include/bout/options_io.hxx + ./include/bout/optionsreader.hxx + ./include/bout/output.hxx + ./include/bout/output_bout_types.hxx + ./include/bout/parallel_boundary_op.hxx + ./include/bout/parallel_boundary_region.hxx + ./include/bout/paralleltransform.hxx + ./include/bout/petsc_interface.hxx + ./include/bout/petsclib.hxx + ./include/bout/physicsmodel.hxx + ./include/bout/rajalib.hxx + ./include/bout/region.hxx + ./include/bout/rkscheme.hxx + ./include/bout/rvec.hxx + ./include/bout/scorepwrapper.hxx + ./include/bout/single_index_ops.hxx + ./include/bout/slepclib.hxx + ./include/bout/smoothing.hxx + ./include/bout/snb.hxx + ./include/bout/solver.hxx + ./include/bout/solverfactory.hxx + ./include/bout/sourcex.hxx + ./include/bout/stencils.hxx + ./include/bout/sundials_backports.hxx + ./include/bout/surfaceiter.hxx + ./include/bout/sys/expressionparser.hxx + ./include/bout/sys/generator_context.hxx + ./include/bout/sys/gettext.hxx + ./include/bout/sys/range.hxx + ./include/bout/sys/timer.hxx + ./include/bout/sys/type_name.hxx + ./include/bout/sys/uncopyable.hxx + ./include/bout/sys/uuid.h + ./include/bout/sys/variant.hxx + ./include/bout/template_combinations.hxx + ./include/bout/traits.hxx + ./include/bout/unused.hxx + ./include/bout/utils.hxx + ./include/bout/vecops.hxx + ./include/bout/vector2d.hxx + ./include/bout/vector3d.hxx + ./include/bout/where.hxx + ./src/bout++.cxx + ./src/bout++-time.hxx + ./src/field/field.cxx + ./src/field/field2d.cxx + ./src/field/field3d.cxx + ./src/field/field_data.cxx + ./src/field/field_factory.cxx + ./src/field/fieldgenerators.cxx + ./src/field/fieldgenerators.hxx + ./src/field/fieldgroup.cxx + ./src/field/fieldperp.cxx + ./src/field/generated_fieldops.cxx + ./src/field/globalfield.cxx + ./src/field/initialprofiles.cxx + ./src/field/vecops.cxx + 
./src/field/vector2d.cxx + ./src/field/vector3d.cxx + ./src/field/where.cxx + ./src/invert/fft_fftw.cxx + ./src/invert/lapack_routines.cxx + ./src/invert/laplace/impls/cyclic/cyclic_laplace.cxx + ./src/invert/laplace/impls/cyclic/cyclic_laplace.hxx + ./src/invert/laplace/impls/iterative_parallel_tri/iterative_parallel_tri.cxx + ./src/invert/laplace/impls/iterative_parallel_tri/iterative_parallel_tri.hxx + ./src/invert/laplace/impls/multigrid/multigrid_alg.cxx + ./src/invert/laplace/impls/multigrid/multigrid_laplace.cxx + ./src/invert/laplace/impls/multigrid/multigrid_laplace.hxx + ./src/invert/laplace/impls/multigrid/multigrid_solver.cxx + ./src/invert/laplace/impls/naulin/naulin_laplace.cxx + ./src/invert/laplace/impls/naulin/naulin_laplace.hxx + ./src/invert/laplace/impls/pcr/pcr.cxx + ./src/invert/laplace/impls/pcr/pcr.hxx + ./src/invert/laplace/impls/pcr_thomas/pcr_thomas.cxx + ./src/invert/laplace/impls/pcr_thomas/pcr_thomas.hxx + ./src/invert/laplace/impls/petsc/petsc_laplace.cxx + ./src/invert/laplace/impls/petsc/petsc_laplace.hxx + ./src/invert/laplace/impls/petsc3damg/petsc3damg.cxx + ./src/invert/laplace/impls/petsc3damg/petsc3damg.hxx + ./src/invert/laplace/impls/serial_band/serial_band.cxx + ./src/invert/laplace/impls/serial_band/serial_band.hxx + ./src/invert/laplace/impls/serial_tri/serial_tri.cxx + ./src/invert/laplace/impls/serial_tri/serial_tri.hxx + ./src/invert/laplace/impls/spt/spt.cxx + ./src/invert/laplace/impls/spt/spt.hxx + ./src/invert/laplace/impls/hypre3d/hypre3d_laplace.cxx + ./src/invert/laplace/impls/hypre3d/hypre3d_laplace.hxx + ./src/invert/laplace/invert_laplace.cxx + ./src/invert/laplacexy/impls/hypre/laplacexy-hypre.cxx + ./src/invert/laplacexy/impls/hypre/laplacexy-hypre.hxx + ./src/invert/laplacexy/impls/petsc/laplacexy-petsc.cxx + ./src/invert/laplacexy/impls/petsc/laplacexy-petsc.hxx + ./src/invert/laplacexy/impls/petsc2/laplacexy-petsc2.cxx + ./src/invert/laplacexy/impls/petsc2/laplacexy-petsc2.hxx + 
./src/invert/laplacexy/laplacexy.cxx + ./src/invert/laplacexz/impls/cyclic/laplacexz-cyclic.cxx + ./src/invert/laplacexz/impls/cyclic/laplacexz-cyclic.hxx + ./src/invert/laplacexz/impls/petsc/laplacexz-petsc.cxx + ./src/invert/laplacexz/impls/petsc/laplacexz-petsc.hxx + ./src/invert/laplacexz/laplacexz.cxx + ./src/invert/parderiv/impls/cyclic/cyclic.cxx + ./src/invert/parderiv/impls/cyclic/cyclic.hxx + ./src/invert/parderiv/invert_parderiv.cxx + ./src/invert/pardiv/impls/cyclic/pardiv_cyclic.cxx + ./src/invert/pardiv/impls/cyclic/pardiv_cyclic.hxx + ./src/invert/pardiv/invert_pardiv.cxx + ./src/mesh/boundary_factory.cxx + ./src/mesh/boundary_region.cxx + ./src/mesh/boundary_standard.cxx + ./src/mesh/coordinates.cxx + ./src/mesh/coordinates_accessor.cxx + ./src/mesh/data/gridfromfile.cxx + ./src/mesh/data/gridfromoptions.cxx + ./src/mesh/difops.cxx + ./src/mesh/fv_ops.cxx + ./src/mesh/impls/bout/boutmesh.cxx + ./src/mesh/impls/bout/boutmesh.hxx + ./src/mesh/index_derivs.cxx + ./src/mesh/interpolation_xz.cxx + ./src/mesh/interpolation/bilinear_xz.cxx + ./src/mesh/interpolation/hermite_spline_xz.cxx + ./src/mesh/interpolation/hermite_spline_z.cxx + ./src/mesh/interpolation/interpolation_z.cxx + ./src/mesh/interpolation/lagrange_4pt_xz.cxx + ./src/mesh/interpolation/monotonic_hermite_spline_xz.cxx + ./src/mesh/invert3x3.hxx + ./src/mesh/mesh.cxx + ./src/mesh/parallel/fci.cxx + ./src/mesh/parallel/fci.hxx + ./src/mesh/parallel/identity.cxx + ./src/mesh/parallel/shiftedmetric.cxx + ./src/mesh/parallel/shiftedmetricinterp.cxx + ./src/mesh/parallel/shiftedmetricinterp.hxx + ./src/mesh/parallel_boundary_op.cxx + ./src/mesh/parallel_boundary_region.cxx + ./src/mesh/surfaceiter.cxx + ./src/physics/gyro_average.cxx + ./src/physics/physicsmodel.cxx + ./src/physics/smoothing.cxx + ./src/physics/snb.cxx + ./src/physics/sourcex.cxx + ./src/solver/impls/adams_bashforth/adams_bashforth.cxx + ./src/solver/impls/adams_bashforth/adams_bashforth.hxx + 
./src/solver/impls/arkode/arkode.cxx + ./src/solver/impls/arkode/arkode.hxx + ./src/solver/impls/cvode/cvode.cxx + ./src/solver/impls/cvode/cvode.hxx + ./src/solver/impls/euler/euler.cxx + ./src/solver/impls/euler/euler.hxx + ./src/solver/impls/ida/ida.cxx + ./src/solver/impls/ida/ida.hxx + ./src/solver/impls/imex-bdf2/imex-bdf2.cxx + ./src/solver/impls/imex-bdf2/imex-bdf2.hxx + ./src/solver/impls/petsc/petsc.cxx + ./src/solver/impls/petsc/petsc.hxx + ./src/solver/impls/power/power.cxx + ./src/solver/impls/power/power.hxx + ./src/solver/impls/pvode/pvode.cxx + ./src/solver/impls/pvode/pvode.hxx + ./src/solver/impls/rk3-ssp/rk3-ssp.cxx + ./src/solver/impls/rk3-ssp/rk3-ssp.hxx + ./src/solver/impls/rk4/rk4.cxx + ./src/solver/impls/rk4/rk4.hxx + ./src/solver/impls/rkgeneric/impls/cashkarp/cashkarp.cxx + ./src/solver/impls/rkgeneric/impls/cashkarp/cashkarp.hxx + ./src/solver/impls/rkgeneric/impls/rk4simple/rk4simple.cxx + ./src/solver/impls/rkgeneric/impls/rk4simple/rk4simple.hxx + ./src/solver/impls/rkgeneric/impls/rkf34/rkf34.cxx + ./src/solver/impls/rkgeneric/impls/rkf34/rkf34.hxx + ./src/solver/impls/rkgeneric/impls/rkf45/rkf45.cxx + ./src/solver/impls/rkgeneric/impls/rkf45/rkf45.hxx + ./src/solver/impls/rkgeneric/rkgeneric.cxx + ./src/solver/impls/rkgeneric/rkgeneric.hxx + ./src/solver/impls/rkgeneric/rkscheme.cxx + ./src/solver/impls/slepc/slepc.cxx + ./src/solver/impls/slepc/slepc.hxx + ./src/solver/impls/snes/snes.cxx + ./src/solver/impls/snes/snes.hxx + ./src/solver/impls/split-rk/split-rk.cxx + ./src/solver/impls/split-rk/split-rk.hxx + ./src/solver/solver.cxx + ./src/sys/adios_object.cxx + ./src/sys/bout_types.cxx + ./src/sys/boutcomm.cxx + ./src/sys/boutexception.cxx + ./src/sys/derivs.cxx + ./src/sys/expressionparser.cxx + ./src/sys/generator_context.cxx + ./include/bout/hyprelib.hxx + ./src/sys/hyprelib.cxx + ./src/sys/msg_stack.cxx + ./src/sys/options.cxx + ./src/sys/options/optionparser.hxx + ./src/sys/options/options_ini.cxx + 
./src/sys/options/options_ini.hxx + ./src/sys/options/options_io.cxx + ./src/sys/options/options_netcdf.cxx + ./src/sys/options/options_netcdf.hxx + ./src/sys/options/options_adios.cxx + ./src/sys/options/options_adios.hxx + ./src/sys/optionsreader.cxx + ./src/sys/output.cxx + ./src/sys/petsclib.cxx + ./src/sys/range.cxx + ./src/sys/slepclib.cxx + ./src/sys/timer.cxx + ./src/sys/type_name.cxx + ./src/sys/utils.cxx + ${CMAKE_CURRENT_BINARY_DIR}/include/bout/revision.hxx + ${CMAKE_CURRENT_BINARY_DIR}/include/bout/version.hxx) find_package(Python3) find_package(ClangFormat) -if (Python3_FOUND AND ClangFormat_FOUND) +if(Python3_FOUND AND ClangFormat_FOUND) set(BOUT_GENERATE_FIELDOPS_DEFAULT ON) else() set(BOUT_GENERATE_FIELDOPS_DEFAULT OFF) endif() -execute_process(COMMAND ${Python3_EXECUTABLE} -c "import importlib.util ; import sys; sys.exit(importlib.util.find_spec(\"zoidberg\") is None)" +execute_process( + COMMAND + ${Python3_EXECUTABLE} -c + "import importlib.util ; import sys; sys.exit(importlib.util.find_spec(\"zoidberg\") is None)" RESULT_VARIABLE zoidberg_FOUND) -if (zoidberg_FOUND EQUAL 0) +if(zoidberg_FOUND EQUAL 0) set(zoidberg_FOUND ON) else() set(zoidberg_FOUND OFF) endif() -option(BOUT_GENERATE_FIELDOPS "Automatically re-generate the Field arithmetic operators from the Python templates. \ +option( + BOUT_GENERATE_FIELDOPS + "Automatically re-generate the Field arithmetic operators from the Python templates. \ Requires Python3, clang-format, and Jinja2. Turn this OFF to skip generating them if, for example, \ -you are unable to install the Jinja2 Python module. This is only important for BOUT++ developers." ${BOUT_GENERATE_FIELDOPS_DEFAULT}) +you are unable to install the Jinja2 Python module. This is only important for BOUT++ developers." 
+ ${BOUT_GENERATE_FIELDOPS_DEFAULT}) -if (BOUT_GENERATE_FIELDOPS) - if (NOT Python3_FOUND) - message(FATAL_ERROR "python not found, but you have requested to generate code!") +if(BOUT_GENERATE_FIELDOPS) + if(NOT Python3_FOUND) + message( + FATAL_ERROR "python not found, but you have requested to generate code!") endif() - if (NOT ClangFormat_FOUND) - message(FATAL_ERROR "clang-format not found, but you have requested to generate code!") + if(NOT ClangFormat_FOUND) + message( + FATAL_ERROR + "clang-format not found, but you have requested to generate code!") endif() - add_custom_command( OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/src/field/generated_fieldops.cxx - COMMAND ${Python3_EXECUTABLE} gen_fieldops.py --filename generated_fieldops.cxx.tmp + add_custom_command( + OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/src/field/generated_fieldops.cxx + COMMAND ${Python3_EXECUTABLE} gen_fieldops.py --filename + generated_fieldops.cxx.tmp COMMAND ${ClangFormat_BIN} generated_fieldops.cxx.tmp -i - COMMAND ${CMAKE_COMMAND} -E rename generated_fieldops.cxx.tmp generated_fieldops.cxx - DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/src/field/gen_fieldops.jinja ${CMAKE_CURRENT_SOURCE_DIR}/src/field/gen_fieldops.py + COMMAND ${CMAKE_COMMAND} -E rename generated_fieldops.cxx.tmp + generated_fieldops.cxx + DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/src/field/gen_fieldops.jinja + ${CMAKE_CURRENT_SOURCE_DIR}/src/field/gen_fieldops.py WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/src/field/ - COMMENT "Generating source code" ) + COMMENT "Generating source code") else() - message(AUTHOR_WARNING "'src/field/generated_fieldops.cxx' will not be \ + message( + AUTHOR_WARNING + "'src/field/generated_fieldops.cxx' will not be \ regenerated when you make changes to either \ 'src/field/gen_fieldops.py' or 'src/field/gen_fieldops.jinja'. \ This is because either Python3 or clang-format is missing \ @@ -411,30 +440,35 @@ include(GNUInstallDirs) # use, i.e. 
don't skip the full RPATH for the build tree set(CMAKE_SKIP_BUILD_RPATH FALSE) -# when building, don't use the install RPATH already -# (but later on when installing) +# when building, don't use the install RPATH already (but later on when +# installing) set(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE) set(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") -# add the automatically determined parts of the RPATH -# which point to directories outside the build tree to the install RPATH +# add the automatically determined parts of the RPATH which point to directories +# outside the build tree to the install RPATH set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) - -execute_process(COMMAND ${Python3_EXECUTABLE} -c "import site ; print('/'.join(site.getusersitepackages().split('/')[-2:]))" +execute_process( + COMMAND + ${Python3_EXECUTABLE} -c + "import site ; print('/'.join(site.getusersitepackages().split('/')[-2:]))" RESULT_VARIABLE PYTHON_WORKING OUTPUT_VARIABLE PYTHON_SITEPATH_SUFFIX - OUTPUT_STRIP_TRAILING_WHITESPACE -) -set(CMAKE_INSTALL_PYTHON_SITEARCH lib/${PYTHON_SITEPATH_SUFFIX} CACHE STRING "Location to install python arch-specific modules") + OUTPUT_STRIP_TRAILING_WHITESPACE) +set(CMAKE_INSTALL_PYTHON_SITEARCH + lib/${PYTHON_SITEPATH_SUFFIX} + CACHE STRING "Location to install python arch-specific modules") set(ON_OFF_AUTO ON OFF AUTO) -set(BOUT_ENABLE_PYTHON AUTO CACHE STRING "Build the Python interface") +set(BOUT_ENABLE_PYTHON + AUTO + CACHE STRING "Build the Python interface") set_property(CACHE BOUT_ENABLE_PYTHON PROPERTY STRINGS ${ON_OFF_AUTO}) -if (NOT BOUT_ENABLE_PYTHON IN_LIST ON_OFF_AUTO) +if(NOT BOUT_ENABLE_PYTHON IN_LIST ON_OFF_AUTO) message(FATAL_ERROR "BOUT_ENABLE_PYTHON must be one of ${ON_OFF_AUTO}") endif() -if (BOUT_ENABLE_PYTHON OR BOUT_ENABLE_PYTHON STREQUAL "AUTO") +if(BOUT_ENABLE_PYTHON OR BOUT_ENABLE_PYTHON STREQUAL "AUTO") add_subdirectory(tools/pylib/_boutpp_build) else() set(BOUT_ENABLE_PYTHON OFF) @@ -444,33 +478,32 @@ 
set(BOUT_USE_PYTHON ${BOUT_ENABLE_PYTHON}) # Ensure that the compile date/time is up-to-date when any of the sources change add_custom_command( OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/bout++-time.cxx - COMMAND ${CMAKE_COMMAND} -P "${CMAKE_CURRENT_LIST_DIR}/cmake/GenerateDateTimeFile.cmake" + COMMAND ${CMAKE_COMMAND} -P + "${CMAKE_CURRENT_LIST_DIR}/cmake/GenerateDateTimeFile.cmake" DEPENDS ${BOUT_SOURCES} - MAIN_DEPENDENCY "${CMAKE_CURRENT_LIST_DIR}/cmake/GenerateDateTimeFile.cmake" - ) + MAIN_DEPENDENCY "${CMAKE_CURRENT_LIST_DIR}/cmake/GenerateDateTimeFile.cmake") - -add_library(bout++ - ${BOUT_SOURCES} - ${CMAKE_CURRENT_BINARY_DIR}/bout++-time.cxx - ) +add_library(bout++ ${BOUT_SOURCES} ${CMAKE_CURRENT_BINARY_DIR}/bout++-time.cxx) add_library(bout++::bout++ ALIAS bout++) target_link_libraries(bout++ PUBLIC MPI::MPI_CXX) -target_include_directories(bout++ PUBLIC - $ - $ - $ - ) +target_include_directories( + bout++ + PUBLIC $ + $ + $) set(BOUT_LIB_PATH "${CMAKE_CURRENT_BINARY_DIR}/lib") -set_target_properties(bout++ PROPERTIES - LIBRARY_OUTPUT_DIRECTORY "${BOUT_LIB_PATH}" - ARCHIVE_OUTPUT_DIRECTORY "${BOUT_LIB_PATH}" - SOVERSION 5.2.0) +set_target_properties( + bout++ + PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${BOUT_LIB_PATH}" + ARCHIVE_OUTPUT_DIRECTORY "${BOUT_LIB_PATH}" + SOVERSION 5.2.0) # Set some variables for the bout-config script set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS} -L\$BOUT_LIB_PATH -lbout++") set(BOUT_INCLUDE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/include") -set(CONFIG_CFLAGS "${CONFIG_CFLAGS} -I\${BOUT_INCLUDE_PATH} -I${CMAKE_CURRENT_BINARY_DIR}/include ${CMAKE_CXX_FLAGS} -std=c++17") +set(CONFIG_CFLAGS + "${CONFIG_CFLAGS} -I\${BOUT_INCLUDE_PATH} -I${CMAKE_CURRENT_BINARY_DIR}/include ${CMAKE_CXX_FLAGS} -std=c++17" +) target_compile_features(bout++ PUBLIC cxx_std_17) set_target_properties(bout++ PROPERTIES CXX_EXTENSIONS OFF) @@ -485,9 +518,9 @@ include(cmake/SetupBOUTThirdParty.cmake) include(cmake/Sanitizers.cmake) enable_sanitizers(bout++) 
-################################################## -# Components of the version number -# Pre-release identifier (BOUT_VERSION_TAG) set above +# ############################################################################## +# Components of the version number. Pre-release identifier (BOUT_VERSION_TAG) set +# above set(BOUT_VERSION ${BOUT_FULL_VERSION}) set(BOUT_VERSION_MAJOR ${PROJECT_VERSION_MAJOR}) set(BOUT_VERSION_MINOR ${PROJECT_VERSION_MINOR}) @@ -502,60 +535,70 @@ endif() message(STATUS "Git revision: ${BOUT_REVISION}") # Build the file containing the version information -configure_file( - "${PROJECT_SOURCE_DIR}/include/bout/version.hxx.in" - "${PROJECT_BINARY_DIR}/include/bout/version.hxx") -# Build the file containing just the commit hash -# This will be rebuilt on every commit! -configure_file( - "${PROJECT_SOURCE_DIR}/include/bout/revision.hxx.in" - "${PROJECT_BINARY_DIR}/include/bout/revision.hxx") +configure_file("${PROJECT_SOURCE_DIR}/include/bout/version.hxx.in" + "${PROJECT_BINARY_DIR}/include/bout/version.hxx") +# Build the file containing just the commit hash. This will be rebuilt on every +# commit! 
+configure_file("${PROJECT_SOURCE_DIR}/include/bout/revision.hxx.in" + "${PROJECT_BINARY_DIR}/include/bout/revision.hxx") -################################################## +# ############################################################################## option(BOUT_ENABLE_WARNINGS "Enable compiler warnings" ON) -if (BOUT_ENABLE_WARNINGS) - target_compile_options(bout++ PRIVATE - $<$>: - $<$,$,$>: - -Wall -Wextra > > - $<$: - /W4 > - $<$:-Xcompiler=-Wall -Xcompiler=-Wextra > - ) - - include(EnableCXXWarningIfSupport) - # Note we explicitly turn off -Wcast-function-type as PETSc *requires* - # we cast a function to the wrong type in MatFDColoringSetFunction - target_enable_cxx_warning_if_supported(bout++ - FLAGS -Wnull-dereference -Wno-cast-function-type - ) +if(BOUT_ENABLE_WARNINGS) + target_compile_options( + bout++ + PRIVATE + $<$>: + $<$,$,$>: + -Wall + -Wextra + > + > + $<$: + /W4 + > + $<$:-Xcompiler=-Wall + -Xcompiler=-Wextra + >) + + include(EnableCXXWarningIfSupport) + # Note we explicitly turn off -Wcast-function-type as PETSc *requires* we cast + # a function to the wrong type in MatFDColoringSetFunction + target_enable_cxx_warning_if_supported(bout++ FLAGS -Wnull-dereference + -Wno-cast-function-type) endif() # Compile time features set(CHECK_LEVELS 0 1 2 3 4) -set(CHECK 2 CACHE STRING "Set run-time checking level") +set(CHECK + 2 + CACHE STRING "Set run-time checking level") set_property(CACHE CHECK PROPERTY STRINGS ${CHECK_LEVELS}) -if (NOT CHECK IN_LIST CHECK_LEVELS) +if(NOT CHECK IN_LIST CHECK_LEVELS) message(FATAL_ERROR "CHECK must be one of ${CHECK_LEVELS}") endif() message(STATUS "Runtime checking level: CHECK=${CHECK}") target_compile_definitions(bout++ PUBLIC "CHECK=${CHECK}") set(BOUT_CHECK_LEVEL ${CHECK}) -if (CHECK GREATER 1) +if(CHECK GREATER 1) set(bout_use_msgstack_default ON) else() set(bout_use_msgstack_default OFF) endif() -set(BOUT_ENABLE_MSGSTACK ${bout_use_msgstack_default} CACHE BOOL "Enable debug message stack") 
+set(BOUT_ENABLE_MSGSTACK + ${bout_use_msgstack_default} + CACHE BOOL "Enable debug message stack") message(STATUS "Message stack: BOUT_USE_MSGSTACK=${BOUT_ENABLE_MSGSTACK}") set(BOUT_USE_MSGSTACK ${BOUT_ENABLE_MSGSTACK}) cmake_dependent_option(BOUT_ENABLE_OUTPUT_DEBUG "Enable extra debug output" OFF - "CHECK LESS 3" ON) -message(STATUS "Extra debug output: BOUT_USE_OUTPUT_DEBUG=${BOUT_ENABLE_OUTPUT_DEBUG}") + "CHECK LESS 3" ON) +message( + STATUS "Extra debug output: BOUT_USE_OUTPUT_DEBUG=${BOUT_ENABLE_OUTPUT_DEBUG}" +) set(BOUT_USE_OUTPUT_DEBUG ${BOUT_ENABLE_OUTPUT_DEBUG}) option(BOUT_ENABLE_SIGNAL "SegFault handling" ON) @@ -571,7 +614,10 @@ message(STATUS "Field name tracking: BOUT_USE_TRACK=${BOUT_ENABLE_TRACK}") set(BOUT_USE_TRACK ${BOUT_ENABLE_TRACK}) option(BOUT_ENABLE_SIGFPE "Signalling floating point exceptions" OFF) -message(STATUS "Signalling floating point exceptions: BOUT_USE_SIGFPE=${BOUT_ENABLE_SIGFPE}") +message( + STATUS + "Signalling floating point exceptions: BOUT_USE_SIGFPE=${BOUT_ENABLE_SIGFPE}" +) set(BOUT_USE_SIGFPE ${BOUT_ENABLE_SIGFPE}) option(BOUT_ENABLE_METRIC_3D "Enable 3D metric support" OFF) @@ -583,107 +629,106 @@ endif() set(BOUT_USE_METRIC_3D ${BOUT_ENABLE_METRIC_3D}) include(CheckCXXSourceCompiles) -check_cxx_source_compiles("int main() { const char* name = __PRETTY_FUNCTION__; }" - HAS_PRETTY_FUNCTION) +check_cxx_source_compiles( + "int main() { const char* name = __PRETTY_FUNCTION__; }" HAS_PRETTY_FUNCTION) set(BOUT_HAS_PRETTY_FUNCTION ${HAS_PRETTY_FUNCTION}) -# Locations of the various Python modules, including the generated boutconfig module -set(BOUT_PYTHONPATH "${CMAKE_CURRENT_BINARY_DIR}/tools/pylib:${CMAKE_CURRENT_SOURCE_DIR}/tools/pylib") -# Variables for boutconfig module -- note that these will contain -# generator expressions and CMake targets, and not generally be very -# useful +# Locations of the various Python modules, including the generated boutconfig +# module +set(BOUT_PYTHONPATH + 
"${CMAKE_CURRENT_BINARY_DIR}/tools/pylib:${CMAKE_CURRENT_SOURCE_DIR}/tools/pylib" +) +# Variables for boutconfig module -- note that these will contain generator +# expressions and CMake targets, and not generally be very useful get_target_property(BOUT_LIBS bout++ INTERFACE_LINK_LIBRARIES) get_target_property(BOUT_CFLAGS bout++ INTERFACE_INCLUDE_DIRECTORIES) -# We want to compile the actual flags used into the library so we can -# see them at runtime. This needs a few steps: +# We want to compile the actual flags used into the library so we can see them +# at runtime. This needs a few steps: -# 1. Get the macro definitions. They come as a ;-separated list and -# without the -D. We also need to also stick a -D on the front of -# the first item +# 1. Get the macro definitions. They come as a ;-separated list and without the +# -D. We also need to also stick a -D on the front of the first item +get_property( + BOUT_COMPILE_DEFINITIONS TARGET bout++ PROPERTY COMPILE_DEFINITIONS) string(REPLACE ";" " -D" BOUT_COMPILE_DEFINITIONS "${BOUT_COMPILE_DEFINITIONS}") string(CONCAT BOUT_COMPILE_DEFINITIONS " -D" "${BOUT_COMPILE_DEFINITIONS}") -# 2. Get the compiler options. Again, they come as a ;-separated -# list. Note that they don't include optimisation or debug flags: -# they're in the CMAKE_CXX_FLAGS* variables -get_property(BOUT_COMPILE_OPTIONS +# 2. Get the compiler options. Again, they come as a ;-separated list. Note that +# they don't include optimisation or debug flags: they're in the +# CMAKE_CXX_FLAGS* variables +get_property( + BOUT_COMPILE_OPTIONS TARGET bout++ PROPERTY COMPILE_OPTIONS) string(REPLACE ";" " " BOUT_COMPILE_OPTIONS "${BOUT_COMPILE_OPTIONS}") -# 3. The optimisation and/or debug flags are in the CMAKE_CXX_FLAGS* -# variables. We need both the common flags as well as those for the -# build type actually being used. Note: this might behave weirdly -# on Windows. 
Might need to expand CMAKE_CONFIGURATION_TYPES -# instead? +# 3. The optimisation and/or debug flags are in the CMAKE_CXX_FLAGS* variables. +# We need both the common flags as well as those for the build type actually +# being used. Note: this might behave weirdly on Windows. Might need to expand +# CMAKE_CONFIGURATION_TYPES instead? include(BuildType) # Here CMAKE_BUILD_TYPE is always set string(TOUPPER "${CMAKE_BUILD_TYPE}" CMAKE_BUILD_TYPE_UPPER) -string(CONCAT BOUT_COMPILE_BUILD_FLAGS - " " - "${CMAKE_CXX_FLAGS}" - "${CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE_UPPER}}") - -# 4. Now we join all the flags from the first three steps together -string(CONCAT BOUT_FLAGS_STRING - "${BOUT_COMPILE_OPTIONS}" - "${BOUT_COMPILE_DEFINITIONS}" - "${BOUT_COMPILE_BUILD_FLAGS}") - -# 5. Finally actually add the flags as a define +string(CONCAT BOUT_COMPILE_BUILD_FLAGS " " "${CMAKE_CXX_FLAGS}" + "${CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE_UPPER}}") + +# 4. Now we join all the flags from the first three steps together +string(CONCAT BOUT_FLAGS_STRING "${BOUT_COMPILE_OPTIONS}" + "${BOUT_COMPILE_DEFINITIONS}" "${BOUT_COMPILE_BUILD_FLAGS}") + +# 5. 
Finally actually add the flags as a define target_compile_definitions(bout++ - PRIVATE BOUT_FLAGS_STRING=${BOUT_FLAGS_STRING}) + PRIVATE BOUT_FLAGS_STRING=${BOUT_FLAGS_STRING}) -################################################## +# ############################################################################## # Tests # Are we building BOUT++ directly, or as part of another project -string(COMPARE EQUAL - "${PROJECT_NAME}" "${CMAKE_PROJECT_NAME}" - PROJECT_IS_TOP_LEVEL -) +string(COMPARE EQUAL "${PROJECT_NAME}" "${CMAKE_PROJECT_NAME}" + PROJECT_IS_TOP_LEVEL) option(BOUT_TESTS "Build the tests" ${PROJECT_IS_TOP_LEVEL}) -option(BOUT_ENABLE_ALL_TESTS "Enable running all of the tests, rather then the standard selection of fast tests" OFF) +option( + BOUT_ENABLE_ALL_TESTS + "Enable running all of the tests, rather then the standard selection of fast tests" + OFF) if(BOUT_TESTS) enable_testing() - # Targets for just building the tests - # Tests need to add themselves as dependencies to these targets + # Targets for just building the tests Tests need to add themselves as + # dependencies to these targets add_custom_target(build-check-unit-tests) add_custom_target(build-check-integrated-tests) add_custom_target(build-check-mms-tests) # Build all the tests add_custom_target(build-check) - add_dependencies(build-check build-check-unit-tests build-check-integrated-tests build-check-mms-tests) + add_dependencies(build-check build-check-unit-tests + build-check-integrated-tests build-check-mms-tests) add_subdirectory(tests/unit EXCLUDE_FROM_ALL) add_subdirectory(tests/integrated EXCLUDE_FROM_ALL) add_subdirectory(tests/MMS EXCLUDE_FROM_ALL) # Targets for running the tests - if (BOUT_ENABLE_UNIT_TESTS) - add_custom_target(check-unit-tests - COMMAND ctest -R serial_tests --output-on-failure) + if(BOUT_ENABLE_UNIT_TESTS) + add_custom_target(check-unit-tests COMMAND ctest -R serial_tests + --output-on-failure) add_dependencies(check-unit-tests build-check-unit-tests) endif() - 
add_custom_target(check-integrated-tests - COMMAND ctest -R "test-" --output-on-failure) + add_custom_target(check-integrated-tests COMMAND ctest -R "test-" + --output-on-failure) add_dependencies(check-integrated-tests build-check-integrated-tests) - add_custom_target(check-mms-tests - COMMAND ctest -R "MMS-" --output-on-failure) + add_custom_target(check-mms-tests COMMAND ctest -R "MMS-" --output-on-failure) add_dependencies(check-mms-tests build-check-mms-tests) # Run all the tests add_custom_target(check) add_dependencies(check check-integrated-tests check-mms-tests) - if (BOUT_ENABLE_UNIT_TESTS) + if(BOUT_ENABLE_UNIT_TESTS) add_dependencies(check check-unit-tests) endif() @@ -695,64 +740,68 @@ if(BOUT_BUILD_EXAMPLES) add_subdirectory(examples EXCLUDE_FROM_ALL) endif() - -################################################## +# ############################################################################## # L10N: localisation - include translations find_package(Gettext) -if (GETTEXT_FOUND) - #add_custom_target(mofiles ALL) +if(GETTEXT_FOUND) + # add_custom_target(mofiles ALL) set(bout_langs es de fr zh_CN zh_TW) foreach(_lang IN LISTS bout_langs) set(_gmoFile ${CMAKE_CURRENT_BINARY_DIR}/locale/${_lang}/libbout.gmo) set(_poFile ${CMAKE_CURRENT_SOURCE_DIR}/locale/${_lang}/libbout.po) - add_custom_command(OUTPUT ${_gmoFile} _mo_file_${_lang} - COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/locale/${_lang}/ + add_custom_command( + OUTPUT ${_gmoFile} _mo_file_${_lang} + COMMAND ${CMAKE_COMMAND} -E make_directory + ${CMAKE_CURRENT_BINARY_DIR}/locale/${_lang}/ COMMAND ${GETTEXT_MSGFMT_EXECUTABLE} -o ${_gmoFile} ${_poFile} WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - DEPENDS ${_poFile} - ) + DEPENDS ${_poFile}) list(APPEND _gmoFiles ${_gmoFile}) - install(FILES ${_gmoFile} DESTINATION ${CMAKE_INSTALL_LOCALEDIR}/${_lang}/LC_MESSAGES/ RENAME libbout.mo) + install( + FILES ${_gmoFile} + DESTINATION ${CMAKE_INSTALL_LOCALEDIR}/${_lang}/LC_MESSAGES/ 
+ RENAME libbout.mo) endforeach() - add_custom_target(mofiles ALL - DEPENDS ${_gmoFiles}) + add_custom_target(mofiles ALL DEPENDS ${_gmoFiles}) endif() - -################################################## +# ############################################################################## # Documentation option(BOUT_BUILD_DOCS "Build the documentation" OFF) -if (BOUT_BUILD_DOCS) +if(BOUT_BUILD_DOCS) add_subdirectory(manual) endif() - -add_custom_target(dist - COMMAND ${Python3_EXECUTABLE} ${CMAKE_SOURCE_DIR}/tools/pylib/_boutpp_build/backend.py dist - # there is no cmake equivalent to `mv` - so only works on systems that are not inentionally non-POSIX complient +add_custom_target( + dist + COMMAND ${Python3_EXECUTABLE} + ${CMAKE_SOURCE_DIR}/tools/pylib/_boutpp_build/backend.py dist + # there is no cmake equivalent to `mv` - so only works on systems that are not + # inentionally non-POSIX complient COMMAND mv BOUT++-v${BOUT_FULL_VERSION}.tar.gz ${CMAKE_CURRENT_BINARY_DIR}/ - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} - ) -################################################## + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}) +# ############################################################################## # Generate the build config header -if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/include/bout/build_defines.hxx") +if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/include/bout/build_defines.hxx") # If we do in source builds, this is fine - if (NOT ${CMAKE_CURRENT_SOURCE_DIR} STREQUAL ${CMAKE_CURRENT_BINARY_DIR}) - message(FATAL_ERROR "Generated build_defines.hxx header already exists; please remove '${CMAKE_CURRENT_SOURCE_DIR}/include/bout/build_defines.hxx' before continuing") + if(NOT ${CMAKE_CURRENT_SOURCE_DIR} STREQUAL ${CMAKE_CURRENT_BINARY_DIR}) + message( + FATAL_ERROR + "Generated build_defines.hxx header already exists; please remove '${CMAKE_CURRENT_SOURCE_DIR}/include/bout/build_defines.hxx' before continuing" + ) endif() endif() configure_file(cmake_build_defines.hxx.in 
include/bout/build_defines.hxx) - -################################################## +# ############################################################################## # Generate the bout-config script # Set some variables to match autotools so we can use the same input file @@ -761,11 +810,13 @@ set(PYTHONCONFIGPATH "${BOUT_PYTHONPATH}") set(BOUT_HAS_LEGACY_NETCDF OFF) set(BOUT_HAS_PNETCDF OFF) -# For shared libraries we only need to know how to link against BOUT++, -# while for static builds we need the dependencies too -if (BUILD_SHARED_LIBS) +# For shared libraries we only need to know how to link against BOUT++, while +# for static builds we need the dependencies too +if(BUILD_SHARED_LIBS) # Include rpath linker flag so user doesn't need to set LD_LIBRARY_PATH - set(CONFIG_LDFLAGS "${CMAKE_SHARED_LIBRARY_RUNTIME_CXX_FLAG}\$BOUT_LIB_PATH -L\$BOUT_LIB_PATH -lbout++ -lfmt ${CONFIG_LDFLAGS_SHARED}") + set(CONFIG_LDFLAGS + "${CMAKE_SHARED_LIBRARY_RUNTIME_CXX_FLAG}\$BOUT_LIB_PATH -L\$BOUT_LIB_PATH -lbout++ -lfmt ${CONFIG_LDFLAGS_SHARED}" + ) else() set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS}") endif() @@ -774,9 +825,9 @@ set(ISINSTALLED "FALSE") set(_CONFIG_LDFLAGS) string(REPLACE " " ";" CONFIG_LDFLAGS_LIST ${CONFIG_LDFLAGS}) -foreach (flag ${CONFIG_LDFLAGS_LIST}) +foreach(flag ${CONFIG_LDFLAGS_LIST}) string(REGEX MATCH "^-.*$" isopt "${flag}") - if (isopt) + if(isopt) # message("${flag} is an option") set(_CONFIG_LDFLAGS "${_CONFIG_LDFLAGS} ${flag}") # All good @@ -786,146 +837,148 @@ foreach (flag ${CONFIG_LDFLAGS_LIST}) set(_CONFIG_LDFLAGS "${_CONFIG_LDFLAGS} ${flag}") else() string(FIND "${flag}" "::" hascolcol) - if (${hascolcol} EQUAL -1) - message("Fixing ${flag} to -l${flag}") - set(_CONFIG_LDFLAGS "${_CONFIG_LDFLAGS} -l${flag}") + if(${hascolcol} EQUAL -1) + message("Fixing ${flag} to -l${flag}") + set(_CONFIG_LDFLAGS "${_CONFIG_LDFLAGS} -l${flag}") else() - string(REGEX MATCH "[^:]*$" flag2 "${flag}") - message("Fixing ${flag} to -l${flag2}") - 
set(_CONFIG_LDFLAGS "${_CONFIG_LDFLAGS} -l${flag2}") + string(REGEX MATCH "[^:]*$" flag2 "${flag}") + message("Fixing ${flag} to -l${flag2}") + set(_CONFIG_LDFLAGS "${_CONFIG_LDFLAGS} -l${flag2}") endif() endif() endif() endforeach() -set( CONFIG_LDFLAGS ${_CONFIG_LDFLAGS}) +set(CONFIG_LDFLAGS ${_CONFIG_LDFLAGS}) # This version of the file allows the build directory to be used directly configure_file(bin/bout-config.in bin/bout-config @ONLY) -configure_file(tools/pylib/boutconfig/__init__.py.cin tools/pylib/boutconfig/__init__.py @ONLY) +configure_file(tools/pylib/boutconfig/__init__.py.cin + tools/pylib/boutconfig/__init__.py @ONLY) configure_file(bout++Config.cmake.in bout++Config.cmake @ONLY) -# We need to generate a separate version for installation, with the -# correct install paths. So first we need to replace the build -# directory library path with the installation path -string(REPLACE - "${CMAKE_BINARY_DIR}/lib" "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}" - CONFIG_LDFLAGS "${CONFIG_LDFLAGS}") +# We need to generate a separate version for installation, with the correct +# install paths. 
So first we need to replace the build directory library path +# with the installation path +string(REPLACE "${CMAKE_BINARY_DIR}/lib" + "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}" CONFIG_LDFLAGS + "${CONFIG_LDFLAGS}") set(BOUT_LIB_PATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") # Update mpark.variant and fmt include paths if we're building them -if (NOT BOUT_USE_SYSTEM_MPARK_VARIANT) - set(MPARK_VARIANT_INCLUDE_PATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}") +if(NOT BOUT_USE_SYSTEM_MPARK_VARIANT) + set(MPARK_VARIANT_INCLUDE_PATH + "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}") endif() -if (NOT BOUT_USE_SYSTEM_FMT) +if(NOT BOUT_USE_SYSTEM_FMT) set(FMT_INCLUDE_PATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}") endif() -if (NOT BOUT_USE_SYSTEM_CPPTRACE) - set(CPPTRACE_INCLUDE_PATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}") +if(NOT BOUT_USE_SYSTEM_CPPTRACE) + set(CPPTRACE_INCLUDE_PATH + "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}") endif() set(BOUT_INCLUDE_PATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}") # We don't need the build include path any more -string(REPLACE "-I${CMAKE_CURRENT_BINARY_DIR}/include" "" CONFIG_CFLAGS "${CONFIG_CFLAGS}") +string(REPLACE "-I${CMAKE_CURRENT_BINARY_DIR}/include" "" CONFIG_CFLAGS + "${CONFIG_CFLAGS}") set(PYTHONCONFIGPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_PYTHON_SITEARCH}") set(ISINSTALLED "TRUE") # This version now has the correct paths to use the final installation configure_file(bin/bout-config.in bin/bout-config-install @ONLY) -configure_file(tools/pylib/boutconfig/__init__.py.cin tools/pylib/boutconfig/__init__.py-install @ONLY) +configure_file(tools/pylib/boutconfig/__init__.py.cin + tools/pylib/boutconfig/__init__.py-install @ONLY) configure_file(bout++Config.cmake.in bout++Config.cmake-install @ONLY) -################################################## +# ############################################################################## # Installation 
-install(TARGETS bout++ +install( + TARGETS bout++ EXPORT bout++Targets LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}" ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}" RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" - INCLUDES DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}" - ) + INCLUDES + DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}") # Repo files -install(DIRECTORY include/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} - FILES_MATCHING PATTERN "*.hxx") +install( + DIRECTORY include/ + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} + FILES_MATCHING + PATTERN "*.hxx") # Generated headers install(DIRECTORY "${PROJECT_BINARY_DIR}/include/" - DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) # The various helper scripts -install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/bin/" - USE_SOURCE_PERMISSIONS +install( + DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/bin/" USE_SOURCE_PERMISSIONS DESTINATION "${CMAKE_INSTALL_BINDIR}" REGEX "bout-squashoutput" EXCLUDE REGEX "bout-config\.in" EXCLUDE - REGEX "bout-pylib-cmd-to-bin" EXCLUDE -) + REGEX "bout-pylib-cmd-to-bin" EXCLUDE) -# The installed version of bout-config needs renaming when we install -# it. Note this MUST be done after the installation of bin/, to make -# sure we clobber any versions of bout-config hanging around from an -# autotools build -install(PROGRAMS "${CMAKE_CURRENT_BINARY_DIR}/bin/bout-config-install" +# The installed version of bout-config needs renaming when we install it. 
Note +# this MUST be done after the installation of bin/, to make sure we clobber any +# versions of bout-config hanging around from an autotools build +install( + PROGRAMS "${CMAKE_CURRENT_BINARY_DIR}/bin/bout-config-install" DESTINATION "${CMAKE_INSTALL_BINDIR}" - RENAME "bout-config" - ) + RENAME "bout-config") install( FILES "${CMAKE_CURRENT_BINARY_DIR}/tools/pylib/boutconfig/__init__.py-install" DESTINATION "${CMAKE_INSTALL_PYTHON_SITEARCH}/boutconfig" - RENAME "__init__.py" - ) + RENAME "__init__.py") include(CMakePackageConfigHelpers) write_basic_package_version_file( bout++ConfigVersion.cmake VERSION ${PACKAGE_VERSION} - COMPATIBILITY SameMajorVersion - ) + COMPATIBILITY SameMajorVersion) -install(EXPORT bout++Targets +install( + EXPORT bout++Targets FILE bout++Targets.cmake NAMESPACE bout++:: - DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/bout++" - ) + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/bout++") # CMake configuration files install( - FILES - "${CMAKE_CURRENT_BINARY_DIR}/bout++ConfigVersion.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/BOUT++functions.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/CorrectWindowsPaths.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindClangFormat.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindFFTW.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindHYPRE.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindnetCDF.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindnetCDFCxx.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindPackageMultipass.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindLibuuid.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindPETSc.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindScoreP.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindSLEPc.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindSUNDIALS.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindSphinx.cmake" - "${CMAKE_CURRENT_SOURCE_DIR}/cmake/ResolveCompilerPaths.cmake" - DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/bout++" - ) + FILES 
"${CMAKE_CURRENT_BINARY_DIR}/bout++ConfigVersion.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/BOUT++functions.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/CorrectWindowsPaths.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindClangFormat.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindFFTW.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindHYPRE.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindnetCDF.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindnetCDFCxx.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindPackageMultipass.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindLibuuid.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindPETSc.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindScoreP.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindSLEPc.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindSUNDIALS.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FindSphinx.cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/cmake/ResolveCompilerPaths.cmake" + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/bout++") install( - FILES - "${CMAKE_CURRENT_BINARY_DIR}/bout++Config.cmake-install" + FILES "${CMAKE_CURRENT_BINARY_DIR}/bout++Config.cmake-install" DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/bout++" - RENAME "bout++Config.cmake" - ) + RENAME "bout++Config.cmake") -export(EXPORT bout++Targets +export( + EXPORT bout++Targets FILE "${CMAKE_CURRENT_BINARY_DIR}/bout++Targets.cmake" - NAMESPACE bout++:: - ) + NAMESPACE bout++::) export(PACKAGE bout) -################################################## +# ############################################################################## # Configure summary -message(" +message( + " -------------------------------- BOUT++ Configuration Summary -------------------------------- diff --git a/cmake/BOUT++functions.cmake b/cmake/BOUT++functions.cmake index 77279dfd4b..c0c3885711 100644 --- a/cmake/BOUT++functions.cmake +++ b/cmake/BOUT++functions.cmake @@ -2,29 +2,31 @@ # Copy FILENAME from source directory to build directory macro(bout_copy_file FILENAME) - configure_file( - 
${CMAKE_CURRENT_SOURCE_DIR}/${FILENAME} - ${CMAKE_CURRENT_BINARY_DIR}/${FILENAME} - COPYONLY) + configure_file(${CMAKE_CURRENT_SOURCE_DIR}/${FILENAME} + ${CMAKE_CURRENT_BINARY_DIR}/${FILENAME} COPYONLY) endmacro() -# Handle the REQUIRES and CONFLICTS arguments for models, examples, -# and tests. Returns from those functions if REQUIRES are not met, or -# if CONFLICTS are true +# Handle the REQUIRES and CONFLICTS arguments for models, examples, and tests. +# Returns from those functions if REQUIRES are not met, or if CONFLICTS are true macro(bout_handle_requires_conflicts TYPENAME TYPEVAR) set(multiValueArgs REQUIRES CONFLICTS) cmake_parse_arguments(BOUT_HANDLE_OPTIONS "" "" "${multiValueArgs}" ${ARGN}) - foreach (REQUIREMENT IN LISTS BOUT_HANDLE_OPTIONS_REQUIRES) - if (NOT ${REQUIREMENT}) - message(STATUS "Not building ${TYPENAME} ${TYPEVAR}, requirement not met: ${REQUIREMENT}") + foreach(REQUIREMENT IN LISTS BOUT_HANDLE_OPTIONS_REQUIRES) + if(NOT ${REQUIREMENT}) + message( + STATUS + "Not building ${TYPENAME} ${TYPEVAR}, requirement not met: ${REQUIREMENT}" + ) return() endif() endforeach() - foreach (CONFLICT IN LISTS BOUT_HANDLE_OPTIONS_CONFLICTS) - if (${CONFLICT}) - message(STATUS "Not building ${TYPENAME} ${TYPEVAR}, conflicts with: ${CONFLICT}") + foreach(CONFLICT IN LISTS BOUT_HANDLE_OPTIONS_CONFLICTS) + if(${CONFLICT}) + message( + STATUS + "Not building ${TYPENAME} ${TYPEVAR}, conflicts with: ${CONFLICT}") return() endif() endforeach() @@ -35,87 +37,80 @@ endmacro() # This is basically just a simple wrapper around 'add_executable' and # 'target_link_libraries'. 
# -# Arguments: -# - MODEL: Name of the executable -# - SOURCES: List of source files to compile -# - REQUIRES: list of variables that must be true to build model -# (note: use `CONFLICTS` to negate the variable, rather than `NOT -# VARIABLE`) -# - CONFLICTS: list of variables that must be false to enable test +# Arguments: - MODEL: Name of the executable - SOURCES: List of source files to +# compile - REQUIRES: list of variables that must be true to build model (note: +# use `CONFLICTS` to negate the variable, rather than `NOT VARIABLE`) - +# CONFLICTS: list of variables that must be false to enable test function(bout_add_model MODEL) set(multiValueArgs SOURCES REQUIRES CONFLICTS) cmake_parse_arguments(BOUT_MODEL_OPTIONS "" "" "${multiValueArgs}" ${ARGN}) - bout_handle_requires_conflicts("model" MODEL - REQUIRES ${BOUT_MODEL_OPTIONS_REQUIRES} - CONFLICTS ${BOUT_MODEL_OPTIONS_CONFLICTS} - ) + bout_handle_requires_conflicts( + "model" MODEL REQUIRES ${BOUT_MODEL_OPTIONS_REQUIRES} CONFLICTS + ${BOUT_MODEL_OPTIONS_CONFLICTS}) - if (NOT BOUT_MODEL_OPTIONS_SOURCES) - message(FATAL_ERROR "Required argument SOURCES missing from 'bout_add_model'") + if(NOT BOUT_MODEL_OPTIONS_SOURCES) + message( + FATAL_ERROR "Required argument SOURCES missing from 'bout_add_model'") endif() - if ("SOURCES" IN_LIST BOUT_MODEL_OPTIONS_KEYWORDS_MISSING_VALUES) + if("SOURCES" IN_LIST BOUT_MODEL_OPTIONS_KEYWORDS_MISSING_VALUES) message(FATAL_ERROR "SOURCES missing values from 'bout_add_model'") endif() add_executable(${MODEL} ${BOUT_MODEL_OPTIONS_SOURCES}) target_link_libraries(${MODEL} bout++::bout++) - target_include_directories(${MODEL} PRIVATE $) + target_include_directories( + ${MODEL} PRIVATE $) endfunction() - # Build a BOUT++ example # -# If called from a standalone project, just builds the example as a -# normal model. If called when building the BOUT++ library itself, -# also copy input files and optionally other files, like grid files, -# to the library build directory. 
+# If called from a standalone project, just builds the example as a normal +# model. If called when building the BOUT++ library itself, also copy input +# files and optionally other files, like grid files, to the library build +# directory. # -# Arguments: -# - EXAMPENAME: Name of the executable -# - SOURCES: List of source files to compile -# - DATA_DIRS: List of data directories to copy (default: 'data') -# - EXTRA_FILES: List of other files to copy -# - REQUIRES: list of variables that must be true to build example -# (note: use `CONFLICTS` to negate the variable, rather than `NOT -# VARIABLE`) -# - CONFLICTS: list of variables that must be false to enable test +# Arguments: - EXAMPENAME: Name of the executable - SOURCES: List of source +# files to compile - DATA_DIRS: List of data directories to copy (default: +# 'data') - EXTRA_FILES: List of other files to copy - REQUIRES: list of +# variables that must be true to build example (note: use `CONFLICTS` to negate +# the variable, rather than `NOT VARIABLE`) - CONFLICTS: list of variables that +# must be false to enable test function(bout_add_example EXAMPLENAME) set(multiValueArgs SOURCES REQUIRES CONFLICTS DATA_DIRS EXTRA_FILES) cmake_parse_arguments(BOUT_EXAMPLE_OPTIONS "" "" "${multiValueArgs}" ${ARGN}) - bout_handle_requires_conflicts("example" ${EXAMPLENAME} - REQUIRES ${BOUT_EXAMPLE_OPTIONS_REQUIRES} - CONFLICTS ${BOUT_EXAMPLE_OPTIONS_CONFLICTS} - ) + bout_handle_requires_conflicts( + "example" ${EXAMPLENAME} REQUIRES ${BOUT_EXAMPLE_OPTIONS_REQUIRES} + CONFLICTS ${BOUT_EXAMPLE_OPTIONS_CONFLICTS}) bout_add_model(${EXAMPLENAME} SOURCES ${BOUT_EXAMPLE_OPTIONS_SOURCES}) - # If this is a standalone project, we can stop here. Otherwise, we - # need to copy the various input files to the build directory + # If this is a standalone project, we can stop here. 
Otherwise, we need to + # copy the various input files to the build directory get_directory_property(HAS_PARENT PARENT_DIRECTORY) - if (NOT HAS_PARENT) + if(NOT HAS_PARENT) return() endif() # Copy the documentation if it exists - if (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/README.md) + if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/README.md) bout_copy_file(README.md) endif() # Copy the input file - if (NOT BOUT_EXAMPLE_OPTIONS_DATA_DIRS) + if(NOT BOUT_EXAMPLE_OPTIONS_DATA_DIRS) bout_copy_file(data/BOUT.inp) else() - foreach (DATA_DIR IN LISTS BOUT_EXAMPLE_OPTIONS_DATA_DIRS) + foreach(DATA_DIR IN LISTS BOUT_EXAMPLE_OPTIONS_DATA_DIRS) bout_copy_file(${DATA_DIR}/BOUT.inp) endforeach() endif() # Copy any other needed files - if (BOUT_EXAMPLE_OPTIONS_EXTRA_FILES) - foreach (FILE ${BOUT_EXAMPLE_OPTIONS_EXTRA_FILES}) + if(BOUT_EXAMPLE_OPTIONS_EXTRA_FILES) + foreach(FILE ${BOUT_EXAMPLE_OPTIONS_EXTRA_FILES}) bout_copy_file("${FILE}") endforeach() endif() @@ -125,75 +120,77 @@ function(bout_add_example EXAMPLENAME) add_dependencies(build-all-examples ${EXAMPLENAME}) endfunction() - -# Add a new integrated or MMS test. By default, the executable is -# named like the first source, stripped of its file extension. If no -# sources are given, then you probably at least want to set -# USE_RUNTEST +# Add a new integrated or MMS test. By default, the executable is named like the +# first source, stripped of its file extension. 
If no sources are given, then +# you probably at least want to set USE_RUNTEST # # Required arguments: # -# - BUILD_CHECK_TARGET: the specific build-check target that should -# depend on this test +# * BUILD_CHECK_TARGET: the specific build-check target that should depend on +# this test # -# - TESTNAME: name of the test +# * TESTNAME: name of the test # # Optional arguments: # -# - SOURCES: list of source files +# * SOURCES: list of source files # -# - USE_RUNTEST: if given, the test uses `./runtest` as the test -# command, otherwise it uses the executable +# * USE_RUNTEST: if given, the test uses `./runtest` as the test command, +# otherwise it uses the executable # -# - USE_DATA_BOUT_INP: if given, copy `data/BOUT.inp` +# * USE_DATA_BOUT_INP: if given, copy `data/BOUT.inp` # -# - EXTRA_FILES: any extra files that are required to run the test +# * EXTRA_FILES: any extra files that are required to run the test # -# - REQUIRES: list of variables that must be true to enable test -# (note: use `CONFLICTS` to negate the variable, rather than `NOT -# VARIABLE`) +# * REQUIRES: list of variables that must be true to enable test (note: use +# `CONFLICTS` to negate the variable, rather than `NOT VARIABLE`) # -# - CONFLICTS: list of variables that must be false to enable test +# * CONFLICTS: list of variables that must be false to enable test # -# - EXECUTABLE_NAME: name of the executable, if different from the -# first source name +# * EXECUTABLE_NAME: name of the executable, if different from the first source +# name # -# - EXTRA_DEPENDS: list of other targets that this test depends on +# * EXTRA_DEPENDS: list of other targets that this test depends on # function(bout_add_integrated_or_mms_test BUILD_CHECK_TARGET TESTNAME) set(options USE_RUNTEST USE_DATA_BOUT_INP) set(oneValueArgs EXECUTABLE_NAME PROCESSORS DOWNLOAD DOWNLOAD_NAME) - set(multiValueArgs SOURCES EXTRA_FILES REQUIRES CONFLICTS TESTARGS EXTRA_DEPENDS) - cmake_parse_arguments(BOUT_TEST_OPTIONS "${options}" 
"${oneValueArgs}" "${multiValueArgs}" ${ARGN}) - - bout_handle_requires_conflicts("test" ${TESTNAME} - REQUIRES ${BOUT_TEST_OPTIONS_REQUIRES} - CONFLICTS ${BOUT_TEST_OPTIONS_CONFLICTS} - ) - - if (BOUT_TEST_OPTIONS_SOURCES) - # We've got some sources, so compile them into an executable and - # link against BOUT++ + set(multiValueArgs SOURCES EXTRA_FILES REQUIRES CONFLICTS TESTARGS + EXTRA_DEPENDS) + cmake_parse_arguments(BOUT_TEST_OPTIONS "${options}" "${oneValueArgs}" + "${multiValueArgs}" ${ARGN}) + + bout_handle_requires_conflicts( + "test" ${TESTNAME} REQUIRES ${BOUT_TEST_OPTIONS_REQUIRES} CONFLICTS + ${BOUT_TEST_OPTIONS_CONFLICTS}) + + if(BOUT_TEST_OPTIONS_SOURCES) + # We've got some sources, so compile them into an executable and link + # against BOUT++ add_executable(${TESTNAME} ${BOUT_TEST_OPTIONS_SOURCES}) target_link_libraries(${TESTNAME} bout++) - target_include_directories(${TESTNAME} PRIVATE $) + target_include_directories( + ${TESTNAME} PRIVATE $) set_target_properties(${TESTNAME} PROPERTIES FOLDER tests/integrated) - # Set the name of the executable. We either take it as an option, - # or use the first source file, stripping the file suffix - if (BOUT_TEST_OPTIONS_EXECUTABLE_NAME) - set_target_properties(${TESTNAME} PROPERTIES OUTPUT_NAME ${BOUT_TEST_OPTIONS_EXECUTABLE_NAME}) + # Set the name of the executable. 
We either take it as an option, or use the + # first source file, stripping the file suffix + if(BOUT_TEST_OPTIONS_EXECUTABLE_NAME) + set_target_properties( + ${TESTNAME} PROPERTIES OUTPUT_NAME ${BOUT_TEST_OPTIONS_EXECUTABLE_NAME}) else() # If more than one source file, just get the first one list(LENGTH ${BOUT_TEST_OPTIONS_SOURCES} BOUT_SOURCES_LENGTH) - if (BOUT_SOURCES_LENGTH GREATER 0) + if(BOUT_SOURCES_LENGTH GREATER 0) list(GET ${BOUT_TEST_OPTIONS_SOURCES} 0 BOUT_TEST_FIRST_SOURCE) else() set(BOUT_TEST_FIRST_SOURCE ${BOUT_TEST_OPTIONS_SOURCES}) endif() # Strip the directory and file extension from the source file - get_filename_component(BOUT_TEST_EXECUTABLE_NAME ${BOUT_TEST_FIRST_SOURCE} NAME_WE) - set_target_properties(${TESTNAME} PROPERTIES OUTPUT_NAME ${BOUT_TEST_EXECUTABLE_NAME}) + get_filename_component(BOUT_TEST_EXECUTABLE_NAME + ${BOUT_TEST_FIRST_SOURCE} NAME_WE) + set_target_properties(${TESTNAME} PROPERTIES OUTPUT_NAME + ${BOUT_TEST_EXECUTABLE_NAME}) endif() # Add the test to the build-check-integrated-tests target @@ -202,54 +199,53 @@ function(bout_add_integrated_or_mms_test BUILD_CHECK_TARGET TESTNAME) add_custom_target(${TESTNAME}) endif() - if (BOUT_TEST_OPTIONS_DOWNLOAD) - if (NOT BOUT_TEST_OPTIONS_DOWNLOAD_NAME) + if(BOUT_TEST_OPTIONS_DOWNLOAD) + if(NOT BOUT_TEST_OPTIONS_DOWNLOAD_NAME) message(FATAL_ERROR "We need DOWNLOAD_NAME if we should DOWNLOAD!") endif() - set(output ) - add_custom_command(OUTPUT ${BOUT_TEST_OPTIONS_DOWNLOAD_NAME} - COMMAND wget ${BOUT_TEST_OPTIONS_DOWNLOAD} -O ${BOUT_TEST_OPTIONS_DOWNLOAD_NAME} + set(output) + add_custom_command( + OUTPUT ${BOUT_TEST_OPTIONS_DOWNLOAD_NAME} + COMMAND wget ${BOUT_TEST_OPTIONS_DOWNLOAD} -O + ${BOUT_TEST_OPTIONS_DOWNLOAD_NAME} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMENT "Downloading ${BOUT_TEST_OPTIONS_DOWNLOAD_NAME}" - ) - add_custom_target(download_test_data DEPENDS ${BOUT_TEST_OPTIONS_DOWNLOAD_NAME}) + COMMENT "Downloading ${BOUT_TEST_OPTIONS_DOWNLOAD_NAME}") + 
add_custom_target(download_test_data + DEPENDS ${BOUT_TEST_OPTIONS_DOWNLOAD_NAME}) add_dependencies(${TESTNAME} download_test_data) endif() - if (BOUT_TEST_OPTIONS_EXTRA_DEPENDS) + if(BOUT_TEST_OPTIONS_EXTRA_DEPENDS) add_dependencies(${TESTNAME} ${BOUT_TEST_OPTIONS_EXTRA_DEPENDS}) endif() - if (NOT BOUT_TEST_OPTIONS_PROCESSORS) + if(NOT BOUT_TEST_OPTIONS_PROCESSORS) set(BOUT_TEST_OPTIONS_PROCESSORS 1) endif() # Set the actual test command - if (BOUT_TEST_OPTIONS_USE_RUNTEST) - add_test(NAME ${TESTNAME} - COMMAND ./runtest ${BOUT_TEST_OPTIONS_TESTARGS} - ) - set_tests_properties(${TESTNAME} PROPERTIES - ENVIRONMENT PYTHONPATH=${BOUT_PYTHONPATH}:$ENV{PYTHONPATH} - ) + if(BOUT_TEST_OPTIONS_USE_RUNTEST) + add_test(NAME ${TESTNAME} COMMAND ./runtest ${BOUT_TEST_OPTIONS_TESTARGS}) + set_tests_properties( + ${TESTNAME} PROPERTIES ENVIRONMENT + PYTHONPATH=${BOUT_PYTHONPATH}:$ENV{PYTHONPATH}) bout_copy_file(runtest) else() add_test(NAME ${TESTNAME} COMMAND ${TESTNAME} ${BOUT_TEST_OPTIONS_TESTARGS}) endif() - set_tests_properties(${TESTNAME} PROPERTIES - PROCESSORS ${BOUT_TEST_OPTIONS_PROCESSORS} - PROCESSOR_AFFINITY ON - ) + set_tests_properties( + ${TESTNAME} PROPERTIES PROCESSORS ${BOUT_TEST_OPTIONS_PROCESSORS} + PROCESSOR_AFFINITY ON) # Copy the input file if needed - if (BOUT_TEST_OPTIONS_USE_DATA_BOUT_INP) + if(BOUT_TEST_OPTIONS_USE_DATA_BOUT_INP) bout_copy_file(data/BOUT.inp) endif() # Copy any other needed files - if (BOUT_TEST_OPTIONS_EXTRA_FILES) - foreach (FILE ${BOUT_TEST_OPTIONS_EXTRA_FILES}) + if(BOUT_TEST_OPTIONS_EXTRA_FILES) + foreach(FILE ${BOUT_TEST_OPTIONS_EXTRA_FILES}) bout_copy_file("${FILE}") endforeach() endif() @@ -257,7 +253,8 @@ endfunction() # Add a new integrated test. 
See `bout_add_integrated_or_mms_test` for arguments function(bout_add_integrated_test TESTNAME) - bout_add_integrated_or_mms_test(build-check-integrated-tests ${TESTNAME} ${ARGV}) + bout_add_integrated_or_mms_test(build-check-integrated-tests ${TESTNAME} + ${ARGV}) endfunction() # Add a new MMS test. See `bout_add_integrated_or_mms_test` for arguments @@ -265,27 +262,31 @@ function(bout_add_mms_test TESTNAME) bout_add_integrated_or_mms_test(build-check-mms-tests ${TESTNAME} ${ARGV}) endfunction() -# Add an alias for an imported target -# Workaround for CMAke < 3.18 -# Taken from https://github.com/conan-io/conan/issues/2125#issuecomment-351176653 +# Add an alias for an imported target Workaround for CMAke < 3.18 Taken from +# https://github.com/conan-io/conan/issues/2125#issuecomment-351176653 function(bout_add_library_alias dst src) add_library(${dst} INTERFACE IMPORTED) - foreach(name INTERFACE_LINK_LIBRARIES INTERFACE_INCLUDE_DIRECTORIES INTERFACE_COMPILE_DEFINITIONS INTERFACE_COMPILE_OPTIONS) - get_property(value TARGET ${src} PROPERTY ${name} ) + foreach(name INTERFACE_LINK_LIBRARIES INTERFACE_INCLUDE_DIRECTORIES + INTERFACE_COMPILE_DEFINITIONS INTERFACE_COMPILE_OPTIONS) + get_property( + value + TARGET ${src} + PROPERTY ${name}) set_property(TARGET ${dst} PROPERTY ${name} ${value}) endforeach() endfunction() - -# Call nx-config with an argument, and append the resulting path to a list -# Taken from https://github.com/LiamBindle/geos-chem/blob/feature/CMake/CMakeScripts/FindNetCDF.cmake +# Call nx-config with an argument, and append the resulting path to a list Taken +# from +# https://github.com/LiamBindle/geos-chem/blob/feature/CMake/CMakeScripts/FindNetCDF.cmake function(bout_inspect_netcdf_config VAR NX_CONFIG ARG) execute_process( COMMAND ${NX_CONFIG} ${ARG} OUTPUT_VARIABLE NX_CONFIG_OUTPUT - OUTPUT_STRIP_TRAILING_WHITESPACE - ) - if (NX_CONFIG_OUTPUT) - set(${VAR} ${NX_CONFIG_OUTPUT} PARENT_SCOPE) + OUTPUT_STRIP_TRAILING_WHITESPACE) + 
if(NX_CONFIG_OUTPUT) + set(${VAR} + ${NX_CONFIG_OUTPUT} + PARENT_SCOPE) endif() endfunction() diff --git a/cmake/BuildType.cmake b/cmake/BuildType.cmake index c4dd7ff73d..e5b6fc79e3 100644 --- a/cmake/BuildType.cmake +++ b/cmake/BuildType.cmake @@ -1,21 +1,25 @@ -# This file sets the default build type to Release, but allows the -# user to override using the usualy command line `-DCMAKE_BUILD_TYPE` -# option. Also respects multi-config generators like Visual Studio. +# This file sets the default build type to Release, but allows the user to +# override using the usualy command line `-DCMAKE_BUILD_TYPE` option. Also +# respects multi-config generators like Visual Studio. # # Use like: # -# include(BuildType) +# include(BuildType) # -# Taken from https://blog.kitware.com/cmake-and-the-default-build-type/ -# via the GS2 project https://bitbucket.org/gyrokinetics/utils/src/8.1-RC/cmake/BuildType.cmake +# Taken from https://blog.kitware.com/cmake-and-the-default-build-type/ via the +# GS2 project +# https://bitbucket.org/gyrokinetics/utils/src/8.1-RC/cmake/BuildType.cmake set(default_build_type "RelWithDebInfo") if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) - message(STATUS "Setting build type to '${default_build_type}' as none was specified.") - set(CMAKE_BUILD_TYPE "${default_build_type}" CACHE - STRING "Choose the type of build." FORCE) + message( + STATUS + "Setting build type to '${default_build_type}' as none was specified.") + set(CMAKE_BUILD_TYPE + "${default_build_type}" + CACHE STRING "Choose the type of build." 
FORCE) # Set the possible values of build type for cmake-gui - set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS - "Debug" "Release" "MinSizeRel" "RelWithDebInfo") + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" + "MinSizeRel" "RelWithDebInfo") endif() diff --git a/cmake/CorrectWindowsPaths.cmake b/cmake/CorrectWindowsPaths.cmake index 09bcdd67dc..00016ad920 100644 --- a/cmake/CorrectWindowsPaths.cmake +++ b/cmake/CorrectWindowsPaths.cmake @@ -1,14 +1,11 @@ # CorrectWindowsPaths - this module defines one macro # -# CONVERT_CYGWIN_PATH( PATH ) -# This uses the command cygpath (provided by cygwin) to convert -# unix-style paths into paths useable by cmake on windows - -macro (CONVERT_CYGWIN_PATH _path) - if (WIN32) - EXECUTE_PROCESS(COMMAND cygpath.exe -m ${${_path}} - OUTPUT_VARIABLE ${_path}) - string (STRIP ${${_path}} ${_path}) - endif (WIN32) -endmacro (CONVERT_CYGWIN_PATH) +# CONVERT_CYGWIN_PATH( PATH ) This uses the command cygpath (provided by cygwin) +# to convert unix-style paths into paths useable by cmake on windows +macro(CONVERT_CYGWIN_PATH _path) + if(WIN32) + execute_process(COMMAND cygpath.exe -m ${${_path}} OUTPUT_VARIABLE ${_path}) + string(STRIP ${${_path}} ${_path}) + endif(WIN32) +endmacro(CONVERT_CYGWIN_PATH) diff --git a/cmake/EnableCXXWarningIfSupport.cmake b/cmake/EnableCXXWarningIfSupport.cmake index 6d7a64265f..60f1788caf 100644 --- a/cmake/EnableCXXWarningIfSupport.cmake +++ b/cmake/EnableCXXWarningIfSupport.cmake @@ -5,40 +5,40 @@ function(target_enable_cxx_warning_if_supported TARGET) set(multiValueArgs FLAGS) cmake_parse_arguments(TARGET_ENABLE_WARNING "" "" "${multiValueArgs}" ${ARGN}) - foreach (WARNING_FLAG IN LISTS TARGET_ENABLE_WARNING_FLAGS) + foreach(WARNING_FLAG IN LISTS TARGET_ENABLE_WARNING_FLAGS) string(REPLACE "-" "_" WARNING_FLAG_STRIPPED ${WARNING_FLAG}) - # Note that gcc ignores unknown flags of the form "-Wno-warning" - # for backwards compatibility. 
Therefore we need to add the - # positive form as an additional flag which it will choke on (if - # it doesn't exist). See: https://gcc.gnu.org/wiki/FAQ#wnowarning + # Note that gcc ignores unknown flags of the form "-Wno-warning" for + # backwards compatibility. Therefore we need to add the positive form as an + # additional flag which it will choke on (if it doesn't exist). See: + # https://gcc.gnu.org/wiki/FAQ#wnowarning string(FIND ${WARNING_FLAG} "Wno-" NEGATIVE_FLAG_${WARNING_FLAG_STRIPPED}) - if (NEGATIVE_FLAG_${WARNING_FLAG_STRIPPED} EQUAL -1) + if(NEGATIVE_FLAG_${WARNING_FLAG_STRIPPED} EQUAL -1) set(IS_NEGATIVE_FLAG_${WARNING_FLAG_STRIPPED} FALSE) else() set(IS_NEGATIVE_FLAG_${WARNING_FLAG_STRIPPED} TRUE) endif() - if (IS_NEGATIVE_FLAG_${WARNING_FLAG_STRIPPED}) + if(IS_NEGATIVE_FLAG_${WARNING_FLAG_STRIPPED}) set(ORIGINAL_FLAG ${WARNING_FLAG}) string(REPLACE "no-" "" WARNING_FLAG ${WARNING_FLAG}) message(STATUS "Found negative flag: ${ORIGINAL_FLAG}\n" - " replaced with ${WARNING_FLAG}") + " replaced with ${WARNING_FLAG}") endif() check_cxx_compiler_flag(${WARNING_FLAG} HAS_FLAG_${WARNING_FLAG_STRIPPED}) - if (IS_NEGATIVE_FLAG_${WARNING_FLAG_STRIPPED}) + if(IS_NEGATIVE_FLAG_${WARNING_FLAG_STRIPPED}) set(WARNING_FLAG ${ORIGINAL_FLAG}) endif() - if (HAS_FLAG_${WARNING_FLAG_STRIPPED}) - message(STATUS "Warning flag is supported by compiler: ${WARNING_FLAG}") + if(HAS_FLAG_${WARNING_FLAG_STRIPPED}) + message(STATUS "Warning flag is supported by compiler: ${WARNING_FLAG}") - target_compile_options(${TARGET} PRIVATE - $<$>:${WARNING_FLAG} > - $<$:-Xcompiler=${WARNING_FLAG} > - ) + target_compile_options( + ${TARGET} + PRIVATE $<$>:${WARNING_FLAG} > + $<$:-Xcompiler=${WARNING_FLAG} >) else() message(STATUS "Warning flag not supported by compiler: ${WARNING_FLAG}") endif() diff --git a/cmake/FindBash.cmake b/cmake/FindBash.cmake index feb195f9f1..f5038ec838 100644 --- a/cmake/FindBash.cmake +++ b/cmake/FindBash.cmake @@ -7,31 +7,31 @@ # # :: # -# Bash_FOUND - true 
if Bash was found -# Bash_VERSION - Bash version -# Bash_EXECUTABLE - Path to bash executable +# Bash_FOUND - true if Bash was found Bash_VERSION - Bash version +# Bash_EXECUTABLE - Path to bash executable -find_program(Bash_EXECUTABLE - bash - ) +find_program(Bash_EXECUTABLE bash) mark_as_advanced(Bash_EXECUTABLE) -if (Bash_EXECUTABLE) - execute_process(COMMAND "${Bash_EXECUTABLE}" --version +if(Bash_EXECUTABLE) + execute_process( + COMMAND "${Bash_EXECUTABLE}" --version RESULT_VARIABLE _bash_runs OUTPUT_VARIABLE _bash_stdout - OUTPUT_STRIP_TRAILING_WHITESPACE - ) + OUTPUT_STRIP_TRAILING_WHITESPACE) if(_bash_stdout MATCHES "version ([0-9]+\\.[0-9]+\\.[0-9]+)") set(Bash_VERSION "${CMAKE_MATCH_1}") else() - message (WARNING "Failed to determine version of Bash interpreter (${Bash_EXECUTABLE})! Error:\n${_Bash_STDERR}") + message( + WARNING + "Failed to determine version of Bash interpreter (${Bash_EXECUTABLE})! Error:\n${_Bash_STDERR}" + ) endif() endif() include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(Bash +find_package_handle_standard_args( + Bash VERSION_VAR Bash_VERSION - REQUIRED_VARS Bash_EXECUTABLE - ) + REQUIRED_VARS Bash_EXECUTABLE) diff --git a/cmake/FindClangFormat.cmake b/cmake/FindClangFormat.cmake index c940002c3b..7f56401af0 100644 --- a/cmake/FindClangFormat.cmake +++ b/cmake/FindClangFormat.cmake @@ -1,29 +1,24 @@ # Find Clang format # -# Taken from https://github.com/ttroy50/cmake-examples commit 64bd54a -# This file is under MIT Licence +# Taken from https://github.com/ttroy50/cmake-examples commit 64bd54a This file +# is under MIT Licence -if (NOT ClangFormat_BIN_NAME) +if(NOT ClangFormat_BIN_NAME) set(ClangFormat_BIN_NAME clang-format) endif() # if custom path check there first -if (ClangFormat_ROOT_DIR) - find_program(ClangFormat_BIN - NAMES - ${ClangFormat_BIN_NAME} - PATHS - "${ClangFormat_ROOT_DIR}" +if(ClangFormat_ROOT_DIR) + find_program( + ClangFormat_BIN + NAMES ${ClangFormat_BIN_NAME} + PATHS 
"${ClangFormat_ROOT_DIR}" NO_DEFAULT_PATH) endif() find_program(ClangFormat_BIN NAMES ${ClangFormat_BIN_NAME}) include(FindPackageHandleStandardArgs) -find_package_handle_standard_args( - ClangFormat - DEFAULT_MSG - ClangFormat_BIN) +find_package_handle_standard_args(ClangFormat DEFAULT_MSG ClangFormat_BIN) -mark_as_advanced( - ClangFormat_BIN) +mark_as_advanced(ClangFormat_BIN) diff --git a/cmake/FindCython.cmake b/cmake/FindCython.cmake index 3b98cde89e..48ab54c92f 100644 --- a/cmake/FindCython.cmake +++ b/cmake/FindCython.cmake @@ -7,23 +7,22 @@ # # :: # -# CYTHON_FOUND - true if Cython was found -# CYTHON_VERSION - Cython version +# CYTHON_FOUND - true if Cython was found CYTHON_VERSION - Cython version -execute_process(COMMAND ${Python3_EXECUTABLE} -c "import cython ; print(cython.__version__)" +execute_process( + COMMAND ${Python3_EXECUTABLE} -c "import cython ; print(cython.__version__)" RESULT_VARIABLE _cython_runs OUTPUT_VARIABLE CYTHON_VERSION - OUTPUT_STRIP_TRAILING_WHITESPACE - ) + OUTPUT_STRIP_TRAILING_WHITESPACE) -if (${_cython_runs} EQUAL 0) +if(${_cython_runs} EQUAL 0) set(CYTHON_RUNS TRUE) else() set(CYTHON_RUNS FALSE) endif() include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(Cython +find_package_handle_standard_args( + Cython VERSION_VAR CYTHON_VERSION - REQUIRED_VARS CYTHON_RUNS - ) + REQUIRED_VARS CYTHON_RUNS) diff --git a/cmake/FindFFTW.cmake b/cmake/FindFFTW.cmake index e1940c687d..4936ebd3c2 100644 --- a/cmake/FindFFTW.cmake +++ b/cmake/FindFFTW.cmake @@ -3,93 +3,87 @@ # # Find the Fastest Fourier Transform in the West FFT library # -# This module uses the ``fftw-wisdom`` executable as a hint for the -# location of the FFTW library. It should be in your PATH. +# This module uses the ``fftw-wisdom`` executable as a hint for the location of +# the FFTW library. It should be in your PATH. 
# # This module will define the following variables: # # :: # -# FFTW_FOUND - true if FFTW was found -# FFTW_INCLUDE_DIRS - Location of the FFTW includes -# FFTW_LIBRARIES - Required libraries +# FFTW_FOUND - true if FFTW was found FFTW_INCLUDE_DIRS - Location of the FFTW +# includes FFTW_LIBRARIES - Required libraries # # This module will also export the ``FFTW::FFTW`` target. # # You can also set the following variables: # -# ``FFTW_ROOT`` -# Specify the path to the FFTW installation to use +# ``FFTW_ROOT`` Specify the path to the FFTW installation to use # -# ``FFTW_DEBUG`` -# Set to TRUE to get extra debugging output +# ``FFTW_DEBUG`` Set to TRUE to get extra debugging output -if (FFTW_INCLUDE_DIRS) +if(FFTW_INCLUDE_DIRS) # Already in cache, be silent - set (FFTW_FIND_QUIETLY TRUE) -endif (FFTW_INCLUDE_DIRS) + set(FFTW_FIND_QUIETLY TRUE) +endif(FFTW_INCLUDE_DIRS) -if (EXISTS ${FFTW_ROOT}) +if(EXISTS ${FFTW_ROOT}) # Make sure FFTW_ROOT is an absolute path by setting it as a 'FILEPATH' - set (FFTW_ROOT "" CACHE FILEPATH "Location of the FFTW library") + set(FFTW_ROOT + "" + CACHE FILEPATH "Location of the FFTW library") endif() -find_program(FFTW_WISDOM "fftw-wisdom" +find_program( + FFTW_WISDOM "fftw-wisdom" PATHS "${FFTW_ROOT}" PATH_SUFFIXES bin NO_DEFAULT_PATH - DOC "Path to fftw-wisdom executable" - ) + DOC "Path to fftw-wisdom executable") -find_program(FFTW_WISDOM "fftw-wisdom" - DOC "Path to fftw-wisdom executable" - ) -if (FFTW_DEBUG) +find_program(FFTW_WISDOM "fftw-wisdom" DOC "Path to fftw-wisdom executable") +if(FFTW_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " FFTW_WISDOM = ${FFTW_WISDOM}" - ) + " FFTW_WISDOM = ${FFTW_WISDOM}") endif() get_filename_component(FFTW_WISDOM_TMP "${FFTW_WISDOM}" DIRECTORY) get_filename_component(FFTW_HINT_DIR "${FFTW_WISDOM_TMP}" DIRECTORY) -find_path(FFTW_INCLUDE_DIRS +find_path( + FFTW_INCLUDE_DIRS NAMES fftw3.h DOC "FFTW include directory" HINTS "${FFTW_HINT_DIR}" - 
PATH_SUFFIXES "include" - ) -if (FFTW_DEBUG) + PATH_SUFFIXES "include") +if(FFTW_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " FFTW_INCLUDE_DIRS = ${FFTW_INCLUDE_DIRS}" - " FFTW_HINT_DIR = ${FFTW_HINT_DIR}" - ) + " FFTW_INCLUDE_DIRS = ${FFTW_INCLUDE_DIRS}" + " FFTW_HINT_DIR = ${FFTW_HINT_DIR}") endif() -find_library (FFTW_LIBRARIES +find_library( + FFTW_LIBRARIES NAMES fftw3 DOC "FFTW library location" HINTS "${FFTW_HINT_DIR}" - PATH_SUFFIXES "lib" "lib64" - ) -if (FFTW_DEBUG) + PATH_SUFFIXES "lib" "lib64") +if(FFTW_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " FFTW_LIBRARIES = ${FFTW_LIBRARIES}" - " FFTW_HINT_DIR = ${FFTW_HINT_DIR}" - ) + " FFTW_LIBRARIES = ${FFTW_LIBRARIES}" + " FFTW_HINT_DIR = ${FFTW_HINT_DIR}") endif() -# handle the QUIETLY and REQUIRED arguments and set FFTW_FOUND to TRUE if -# all listed variables are TRUE -include (FindPackageHandleStandardArgs) -find_package_handle_standard_args (FFTW DEFAULT_MSG FFTW_LIBRARIES FFTW_INCLUDE_DIRS) +# handle the QUIETLY and REQUIRED arguments and set FFTW_FOUND to TRUE if all +# listed variables are TRUE +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(FFTW DEFAULT_MSG FFTW_LIBRARIES + FFTW_INCLUDE_DIRS) -mark_as_advanced (FFTW_LIBRARIES FFTW_INCLUDE_DIRS) +mark_as_advanced(FFTW_LIBRARIES FFTW_INCLUDE_DIRS) -if (FFTW_FOUND AND NOT TARGET FFTW::FFTW) +if(FFTW_FOUND AND NOT TARGET FFTW::FFTW) add_library(FFTW::FFTW UNKNOWN IMPORTED) - set_target_properties(FFTW::FFTW PROPERTIES - IMPORTED_LOCATION "${FFTW_LIBRARIES}" - INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}" - ) + set_target_properties( + FFTW::FFTW PROPERTIES IMPORTED_LOCATION "${FFTW_LIBRARIES}" + INTERFACE_INCLUDE_DIRECTORIES "${FFTW_INCLUDE_DIRS}") endif() diff --git a/cmake/FindHYPRE.cmake b/cmake/FindHYPRE.cmake index 1b9a5ca6f9..951bff88b0 100644 --- a/cmake/FindHYPRE.cmake +++ b/cmake/FindHYPRE.cmake @@ -6,54 +6,55 @@ 
include(FindPackageHandleStandardArgs) find_package(HYPRE CONFIG QUIET) -if (HYPRE_FOUND) +if(HYPRE_FOUND) message(STATUS "Found HYPRE: ${HYPRE_VERSION}") return() endif() -find_path(HYPRE_INCLUDE_DIR +find_path( + HYPRE_INCLUDE_DIR NAMES HYPRE.h - DOC "HYPRE include directories" - REQUIRED - PATH_SUFFIXES include include/hypre -) + DOC "HYPRE include directories" REQUIRED + PATH_SUFFIXES include include/hypre) -find_library(HYPRE_LIBRARY +find_library( + HYPRE_LIBRARY NAMES HYPRE - DOC "HYPRE library" - REQUIRED - PATH_SUFFIXES lib64 lib - ) + DOC "HYPRE library" REQUIRED + PATH_SUFFIXES lib64 lib) -if (HYPRE_INCLUDE_DIR) +if(HYPRE_INCLUDE_DIR) file(READ "${HYPRE_INCLUDE_DIR}/HYPRE_config.h" HYPRE_CONFIG_FILE) - string(REGEX MATCH ".*#define HYPRE_RELEASE_VERSION \"([0-9]+)\\.([0-9]+)\\.([0-9]+)\".*" - _ "${HYPRE_CONFIG_FILE}") + string( + REGEX MATCH + ".*#define HYPRE_RELEASE_VERSION \"([0-9]+)\\.([0-9]+)\\.([0-9]+)\".*" + _ "${HYPRE_CONFIG_FILE}") set(HYPRE_VERSION_MAJOR ${CMAKE_MATCH_1}) set(HYPRE_VERSION_MINOR ${CMAKE_MATCH_2}) set(HYPRE_VERSION_PATCH ${CMAKE_MATCH_3}) - set(HYPRE_VERSION "${HYPRE_VERSION_MAJOR}.${HYPRE_VERSION_MINOR}.${HYPRE_VERSION_PATCH}") + set(HYPRE_VERSION + "${HYPRE_VERSION_MAJOR}.${HYPRE_VERSION_MINOR}.${HYPRE_VERSION_PATCH}") endif() -if (HYPRE_DEBUG) - message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ]" - " HYPRE_ROOT = ${HYPRE_ROOT}" - " HYPRE_INCLUDE_DIR = ${HYPRE_INCLUDE_DIR}" - " HYPRE_LIBRARY = ${HYPRE_LIBRARY}" - ) +if(HYPRE_DEBUG) + message( + STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ]" + " HYPRE_ROOT = ${HYPRE_ROOT}" + " HYPRE_INCLUDE_DIR = ${HYPRE_INCLUDE_DIR}" + " HYPRE_LIBRARY = ${HYPRE_LIBRARY}") endif() mark_as_advanced(HYPRE_INCLUDE_DIR HYPRE_LIBRARY) -find_package_handle_standard_args(HYPRE +find_package_handle_standard_args( + HYPRE REQUIRED_VARS HYPRE_LIBRARY HYPRE_INCLUDE_DIR - VERSION_VAR HYPRE_VERSION - ) + VERSION_VAR HYPRE_VERSION) -if (HYPRE_FOUND AND NOT TARGET 
HYPRE::HYPRE) +if(HYPRE_FOUND AND NOT TARGET HYPRE::HYPRE) add_library(HYPRE::HYPRE UNKNOWN IMPORTED) - set_target_properties(HYPRE::HYPRE PROPERTIES - IMPORTED_LOCATION "${HYPRE_LIBRARY}" - INTERFACE_INCLUDE_DIRECTORIES "${HYPRE_INCLUDE_DIR}" - ) + set_target_properties( + HYPRE::HYPRE + PROPERTIES IMPORTED_LOCATION "${HYPRE_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES "${HYPRE_INCLUDE_DIR}") endif() diff --git a/cmake/FindLibuuid.cmake b/cmake/FindLibuuid.cmake index 91880487a2..01346ac870 100644 --- a/cmake/FindLibuuid.cmake +++ b/cmake/FindLibuuid.cmake @@ -7,52 +7,49 @@ # # :: # -# Libuuid_FOUND - true if Libuuid was found -# Libuuid_INCLUDE_DIRS - Location of the Libuuid includes -# Libuuid_LIBRARIES - Required libraries +# Libuuid_FOUND - true if Libuuid was found Libuuid_INCLUDE_DIRS - Location of +# the Libuuid includes Libuuid_LIBRARIES - Required libraries # # This module will also export the ``Libuuid::libuuid`` target. # # You can also set the following variables: # -# ``Libuuid_ROOT`` -# Specify the path to the Libuuid installation to use +# ``Libuuid_ROOT`` Specify the path to the Libuuid installation to use # -# ``Libuuid_DEBUG`` -# Set to TRUE to get extra debugging output +# ``Libuuid_DEBUG`` Set to TRUE to get extra debugging output -include (FindPackageHandleStandardArgs) +include(FindPackageHandleStandardArgs) -if (WIN32) +if(WIN32) find_package_handle_standard_args(Libuuid DEFAULT_MSG) return() endif() -if (APPLE) +if(APPLE) find_library(CFLIB CoreFoundation) find_package_handle_standard_args(Libuuid DEFAULT_MSG CFLIB) mark_as_advanced(${CFLIB}) - if (Libuuid_FOUND AND NOT TARGET Libuuid::libuuid) + if(Libuuid_FOUND AND NOT TARGET Libuuid::libuuid) add_library(Libuuid::libuuid UNKNOWN IMPORTED) - set_target_properties(Libuuid::libuuid PROPERTIES - IMPORTED_LOCATION ${CFLIB} - ) + set_target_properties(Libuuid::libuuid PROPERTIES IMPORTED_LOCATION + ${CFLIB}) endif() return() -endif () +endif() find_path(Libuuid_INCLUDE_DIRS uuid/uuid.h) 
find_library(Libuuid_LIBRARIES uuid) -find_package_handle_standard_args(Libuuid DEFAULT_MSG Libuuid_LIBRARIES Libuuid_INCLUDE_DIRS) +find_package_handle_standard_args(Libuuid DEFAULT_MSG Libuuid_LIBRARIES + Libuuid_INCLUDE_DIRS) mark_as_advanced(Libuuid_LIBRARIES Libuuid_INCLUDE_DIRS) -if (Libuuid_FOUND AND NOT TARGET Libuuid::libuuid) +if(Libuuid_FOUND AND NOT TARGET Libuuid::libuuid) add_library(Libuuid::libuuid UNKNOWN IMPORTED) - set_target_properties(Libuuid::libuuid PROPERTIES - IMPORTED_LOCATION "${Libuuid_LIBRARIES}" - INTERFACE_INCLUDE_DIRECTORIES "${Libuuid_INCLUDE_DIRS}" - ) + set_target_properties( + Libuuid::libuuid + PROPERTIES IMPORTED_LOCATION "${Libuuid_LIBRARIES}" + INTERFACE_INCLUDE_DIRECTORIES "${Libuuid_INCLUDE_DIRS}") endif() diff --git a/cmake/FindNumpy.cmake b/cmake/FindNumpy.cmake index b6de6e3e35..79623aac41 100644 --- a/cmake/FindNumpy.cmake +++ b/cmake/FindNumpy.cmake @@ -7,50 +7,55 @@ # # :: # -# Numpy_FOUND -# Numpy_VERSION -# Numpy_INCLUDE_DIR - +# Numpy_FOUND Numpy_VERSION Numpy_INCLUDE_DIR find_package(Python3 3.6 COMPONENTS Interpreter Development) -if (NOT Python3_FOUND) - message(STATUS "Could not find numpy as python was not found. Maybe the developement package is missing?") +if(NOT Python3_FOUND) + message( + STATUS + "Could not find numpy as python was not found. Maybe the developement package is missing?" 
+ ) set(Numpy_FOUND ${Python3_FOUND}) return() endif() -if (NOT Numpy_FOUND) - execute_process(COMMAND ${Python3_EXECUTABLE} -c "import numpy ; print(numpy.__version__)" +if(NOT Numpy_FOUND) + execute_process( + COMMAND ${Python3_EXECUTABLE} -c "import numpy ; print(numpy.__version__)" OUTPUT_STRIP_TRAILING_WHITESPACE - OUTPUT_VARIABLE Numpy_VERSION - ) - execute_process(COMMAND ${Python3_EXECUTABLE} -c "import numpy ; print(numpy.get_include())" + OUTPUT_VARIABLE Numpy_VERSION) + execute_process( + COMMAND ${Python3_EXECUTABLE} -c "import numpy ; print(numpy.get_include())" OUTPUT_STRIP_TRAILING_WHITESPACE - OUTPUT_VARIABLE _numpy_include_dirs - ) + OUTPUT_VARIABLE _numpy_include_dirs) endif() -if (Numpy_DEBUG) - message(STATUS "Looking for numpy headers in: ${_numpy_include_dirs} ${Python3_INCLUDE_DIRS}") +if(Numpy_DEBUG) + message( + STATUS + "Looking for numpy headers in: ${_numpy_include_dirs} ${Python3_INCLUDE_DIRS}" + ) endif() -find_path(Numpy_INCLUDE_DIR - numpy/arrayobject.h +find_path( + Numpy_INCLUDE_DIR numpy/arrayobject.h PATHS "${_numpy_include_dirs}" "${Python3_INCLUDE_DIRS}" - PATH_SUFFIXES numpy/core/include - ) + PATH_SUFFIXES numpy/core/include) -if (NOT Numpy_INCLUDE_DIR) - message(STATUS "Numpy headers not found -- do you need to install the development package?") +if(NOT Numpy_INCLUDE_DIR) + message( + STATUS + "Numpy headers not found -- do you need to install the development package?" 
+ ) endif() set(Numpy_INCLUDE_DIRS ${Numpy_INCLUDE_DIR}) include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(Numpy +find_package_handle_standard_args( + Numpy VERSION_VAR Numpy_VERSION - REQUIRED_VARS Numpy_INCLUDE_DIR - ) + REQUIRED_VARS Numpy_INCLUDE_DIR) mark_as_advanced(Numpy_INCLUDE_DIR) diff --git a/cmake/FindPETSc.cmake b/cmake/FindPETSc.cmake index c93d464673..1381e56abe 100644 --- a/cmake/FindPETSc.cmake +++ b/cmake/FindPETSc.cmake @@ -1,50 +1,46 @@ -# - Try to find PETSc -# Once done this will define +# * Try to find PETSc Once done this will define # -# PETSC_FOUND - system has PETSc -# PETSC_INCLUDES - the PETSc include directories -# PETSC_LIBRARIES - Link these to use PETSc -# PETSC_COMPILER - Compiler used by PETSc, helpful to find a compatible MPI -# PETSC_DEFINITIONS - Compiler switches for using PETSc -# PETSC_MPIEXEC - Executable for running MPI programs -# PETSC_VERSION - Version string (MAJOR.MINOR.SUBMINOR) +# PETSC_FOUND - system has PETSc PETSC_INCLUDES - the PETSc include +# directories PETSC_LIBRARIES - Link these to use PETSc PETSC_COMPILER - +# Compiler used by PETSc, helpful to find a compatible MPI PETSC_DEFINITIONS - +# Compiler switches for using PETSc PETSC_MPIEXEC - Executable for running +# MPI programs PETSC_VERSION - Version string (MAJOR.MINOR.SUBMINOR) # -# Usage: -# find_package(PETSc COMPONENTS CXX) - required if build --with-clanguage=C++ --with-c-support=0 -# find_package(PETSc COMPONENTS C) - standard behavior of checking build using a C compiler -# find_package(PETSc) - same as above +# Usage: find_package(PETSc COMPONENTS CXX) - required if build +# --with-clanguage=C++ --with-c-support=0 find_package(PETSc COMPONENTS C) - +# standard behavior of checking build using a C compiler find_package(PETSc) - +# same as above # -# Setting these changes the behavior of the search -# PETSC_DIR - directory in which PETSc resides -# PETSC_ARCH - build architecture +# Setting these changes the behavior of the 
search PETSC_DIR - directory in +# which PETSc resides PETSC_ARCH - build architecture # # Redistribution and use is allowed according to the terms of the BSD license. # For details see the accompanying COPYING-CMAKE-SCRIPTS file. # -# Taken from https://github.com/jedbrown/cmake-modules/blob/master/FindPETSc.cmake +# Taken from +# https://github.com/jedbrown/cmake-modules/blob/master/FindPETSc.cmake find_package(MPI REQUIRED) -set(PETSC_VALID_COMPONENTS - C - CXX) +set(PETSC_VALID_COMPONENTS C CXX) if(NOT PETSc_FIND_COMPONENTS) - get_property (_enabled_langs GLOBAL PROPERTY ENABLED_LANGUAGES) - if ("C" IN_LIST _enabled_langs) + get_property(_enabled_langs GLOBAL PROPERTY ENABLED_LANGUAGES) + if("C" IN_LIST _enabled_langs) set(PETSC_LANGUAGE_BINDINGS "C") - else () + else() set(PETSC_LANGUAGE_BINDINGS "CXX") - endif () + endif() else() - # Right now, this is designed for compatability with the --with-clanguage option, so - # only allow one item in the components list. + # Right now, this is designed for compatability with the --with-clanguage + # option, so only allow one item in the components list. list(LENGTH ${PETSc_FIND_COMPONENTS} components_length) if(${components_length} GREATER 1) - message(FATAL_ERROR "Only one component for PETSc is allowed to be specified") + message( + FATAL_ERROR "Only one component for PETSc is allowed to be specified") endif() - # This is a stub for allowing multiple components should that time ever come. Perhaps - # to also test Fortran bindings? + # This is a stub for allowing multiple components should that time ever come. + # Perhaps to also test Fortran bindings? 
foreach(component ${PETSc_FIND_COMPONENTS}) list(FIND PETSC_VALID_COMPONENTS ${component} component_location) if(${component_location} EQUAL -1) @@ -61,103 +57,148 @@ if(NOT PETSC_DIR) endif() endif() -function (petsc_get_version) - if (EXISTS "${PETSC_DIR}/include/petscversion.h") - file (STRINGS "${PETSC_DIR}/include/petscversion.h" vstrings REGEX "#define PETSC_VERSION_(RELEASE|MAJOR|MINOR|SUBMINOR|PATCH) ") - foreach (line ${vstrings}) - string (REGEX REPLACE " +" ";" fields ${line}) # break line into three fields (the first is always "#define") - list (GET fields 1 var) - list (GET fields 2 val) - set (${var} ${val} PARENT_SCOPE) - set (${var} ${val}) # Also in local scope so we have access below - endforeach () - if (PETSC_VERSION_RELEASE) - if ($(PETSC_VERSION_PATCH) GREATER 0) - set (PETSC_VERSION "${PETSC_VERSION_MAJOR}.${PETSC_VERSION_MINOR}.${PETSC_VERSION_SUBMINOR}p${PETSC_VERSION_PATCH}" CACHE INTERNAL "PETSc version") - else () - set (PETSC_VERSION "${PETSC_VERSION_MAJOR}.${PETSC_VERSION_MINOR}.${PETSC_VERSION_SUBMINOR}" CACHE INTERNAL "PETSc version") - endif () - else () - # make dev version compare higher than any patch level of a released version - set (PETSC_VERSION "${PETSC_VERSION_MAJOR}.${PETSC_VERSION_MINOR}.${PETSC_VERSION_SUBMINOR}.99" CACHE INTERNAL "PETSc version") - endif () - else () - message (SEND_ERROR "PETSC_DIR can not be used, ${PETSC_DIR}/include/petscversion.h does not exist") - endif () -endfunction () +function(petsc_get_version) + if(EXISTS "${PETSC_DIR}/include/petscversion.h") + file(STRINGS "${PETSC_DIR}/include/petscversion.h" vstrings + REGEX "#define PETSC_VERSION_(RELEASE|MAJOR|MINOR|SUBMINOR|PATCH) ") + foreach(line ${vstrings}) + string(REGEX REPLACE " +" ";" fields ${line}) # break line into three + # fields (the first is + # always "#define") + list(GET fields 1 var) + list(GET fields 2 val) + set(${var} + ${val} + PARENT_SCOPE) + set(${var} ${val}) # Also in local scope so we have access below + endforeach() + 
if(PETSC_VERSION_RELEASE)
+    if(${PETSC_VERSION_PATCH} GREATER 0)
+      set(PETSC_VERSION
+          "${PETSC_VERSION_MAJOR}.${PETSC_VERSION_MINOR}.${PETSC_VERSION_SUBMINOR}p${PETSC_VERSION_PATCH}"
+          CACHE INTERNAL "PETSc version")
+    else()
+      set(PETSC_VERSION
+          "${PETSC_VERSION_MAJOR}.${PETSC_VERSION_MINOR}.${PETSC_VERSION_SUBMINOR}"
+          CACHE INTERNAL "PETSc version")
+    endif()
+  else()
+    # make dev version compare higher than any patch level of a released
+    # version
+    set(PETSC_VERSION
+        "${PETSC_VERSION_MAJOR}.${PETSC_VERSION_MINOR}.${PETSC_VERSION_SUBMINOR}.99"
+        CACHE INTERNAL "PETSc version")
+  endif()
+  else()
+    message(
+      SEND_ERROR
+        "PETSC_DIR can not be used, ${PETSC_DIR}/include/petscversion.h does not exist"
+    )
+  endif()
+endfunction()

# Debian uses versioned paths e.g /usr/lib/petscdir/3.5/
-file (GLOB DEB_PATHS "/usr/lib/petscdir/*")
+file(GLOB DEB_PATHS "/usr/lib/petscdir/*")

-find_path (PETSC_DIR include/petsc.h
+find_path(
+  PETSC_DIR include/petsc.h
  HINTS ENV PETSC_DIR
-  PATHS
-  /usr/lib/petsc
-  # Debian paths
-  ${DEB_PATHS}
-  # Arch Linux path
-  /opt/petsc/linux-c-opt
-  # MacPorts path
-  /opt/local/lib/petsc
-  $ENV{HOME}/petsc
+  PATHS /usr/lib/petsc
+        # Debian paths
+        ${DEB_PATHS}
+        # Arch Linux path
+        /opt/petsc/linux-c-opt
+        # MacPorts path
+        /opt/local/lib/petsc
+        $ENV{HOME}/petsc
  DOC "PETSc Directory")

-find_program (MAKE_EXECUTABLE NAMES make gmake)
-
-if (PETSC_DIR AND NOT PETSC_ARCH)
-  set (_petsc_arches
-    $ENV{PETSC_ARCH} # If set, use environment variable first
-    linux-gnu-c-debug linux-gnu-c-opt # Debian defaults
-    x86_64-unknown-linux-gnu i386-unknown-linux-gnu)
-  set (petscconf "NOTFOUND" CACHE FILEPATH "Cleared" FORCE)
-  foreach (arch ${_petsc_arches})
-    if (NOT PETSC_ARCH)
-      find_path (petscconf petscconf.h
+find_program(MAKE_EXECUTABLE NAMES make gmake)
+
+if(PETSC_DIR AND NOT PETSC_ARCH)
+  set(_petsc_arches
+      $ENV{PETSC_ARCH} # If set, use environment variable first
+      linux-gnu-c-debug linux-gnu-c-opt # Debian defaults
+      
x86_64-unknown-linux-gnu i386-unknown-linux-gnu) + set(petscconf + "NOTFOUND" + CACHE FILEPATH "Cleared" FORCE) + foreach(arch ${_petsc_arches}) + if(NOT PETSC_ARCH) + find_path( + petscconf petscconf.h HINTS ${PETSC_DIR} PATH_SUFFIXES ${arch}/include bmake/${arch} NO_DEFAULT_PATH) - if (petscconf) - set (PETSC_ARCH "${arch}" CACHE STRING "PETSc build architecture") - endif (petscconf) - endif (NOT PETSC_ARCH) - endforeach (arch) - set (petscconf "NOTFOUND" CACHE INTERNAL "Scratch variable" FORCE) -endif (PETSC_DIR AND NOT PETSC_ARCH) - -set (petsc_slaves LIBRARIES_SYS LIBRARIES_VEC LIBRARIES_MAT LIBRARIES_DM LIBRARIES_KSP LIBRARIES_SNES LIBRARIES_TS - INCLUDE_DIR INCLUDE_CONF) -include (FindPackageMultipass) -find_package_multipass (PETSc petsc_config_current - STATES DIR ARCH - DEPENDENTS INCLUDES LIBRARIES COMPILER MPIEXEC ${petsc_slaves}) - -# Determine whether the PETSc layout is old-style (through 2.3.3) or -# new-style (>= 3.0.0) -if (EXISTS "${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/petscvariables") # > 3.5 - set (petsc_conf_rules "${PETSC_DIR}/lib/petsc/conf/rules") - set (petsc_conf_variables "${PETSC_DIR}/lib/petsc/conf/variables") -elseif (EXISTS "${PETSC_DIR}/${PETSC_ARCH}/include/petscconf.h") # > 2.3.3 - set (petsc_conf_rules "${PETSC_DIR}/conf/rules") - set (petsc_conf_variables "${PETSC_DIR}/conf/variables") -elseif (EXISTS "${PETSC_DIR}/bmake/${PETSC_ARCH}/petscconf.h") # <= 2.3.3 - set (petsc_conf_rules "${PETSC_DIR}/bmake/common/rules") - set (petsc_conf_variables "${PETSC_DIR}/bmake/common/variables") -elseif (PETSC_DIR) - message (SEND_ERROR "The pair PETSC_DIR=${PETSC_DIR} PETSC_ARCH=${PETSC_ARCH} do not specify a valid PETSc installation") -endif () - -if (petsc_conf_rules AND petsc_conf_variables AND NOT petsc_config_current) + if(petscconf) + set(PETSC_ARCH + "${arch}" + CACHE STRING "PETSc build architecture") + endif(petscconf) + endif(NOT PETSC_ARCH) + endforeach(arch) + set(petscconf + "NOTFOUND" + CACHE INTERNAL "Scratch variable" 
FORCE) +endif(PETSC_DIR AND NOT PETSC_ARCH) + +set(petsc_slaves + LIBRARIES_SYS + LIBRARIES_VEC + LIBRARIES_MAT + LIBRARIES_DM + LIBRARIES_KSP + LIBRARIES_SNES + LIBRARIES_TS + INCLUDE_DIR + INCLUDE_CONF) +include(FindPackageMultipass) +find_package_multipass( + PETSc + petsc_config_current + STATES + DIR + ARCH + DEPENDENTS + INCLUDES + LIBRARIES + COMPILER + MPIEXEC + ${petsc_slaves}) + +# Determine whether the PETSc layout is old-style (through 2.3.3) or new-style +# (>= 3.0.0) +if(EXISTS "${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/petscvariables") # > 3.5 + set(petsc_conf_rules "${PETSC_DIR}/lib/petsc/conf/rules") + set(petsc_conf_variables "${PETSC_DIR}/lib/petsc/conf/variables") +elseif(EXISTS "${PETSC_DIR}/${PETSC_ARCH}/include/petscconf.h") # > 2.3.3 + set(petsc_conf_rules "${PETSC_DIR}/conf/rules") + set(petsc_conf_variables "${PETSC_DIR}/conf/variables") +elseif(EXISTS "${PETSC_DIR}/bmake/${PETSC_ARCH}/petscconf.h") # <= 2.3.3 + set(petsc_conf_rules "${PETSC_DIR}/bmake/common/rules") + set(petsc_conf_variables "${PETSC_DIR}/bmake/common/variables") +elseif(PETSC_DIR) + message( + SEND_ERROR + "The pair PETSC_DIR=${PETSC_DIR} PETSC_ARCH=${PETSC_ARCH} do not specify a valid PETSc installation" + ) +endif() + +if(petsc_conf_rules + AND petsc_conf_variables + AND NOT petsc_config_current) petsc_get_version() - # Put variables into environment since they are needed to get - # configuration (petscvariables) in the PETSc makefile - set (ENV{PETSC_DIR} "${PETSC_DIR}") - set (ENV{PETSC_ARCH} "${PETSC_ARCH}") + # Put variables into environment since they are needed to get configuration + # (petscvariables) in the PETSc makefile + set(ENV{PETSC_DIR} "${PETSC_DIR}") + set(ENV{PETSC_ARCH} "${PETSC_ARCH}") # A temporary makefile to probe the PETSc configuration - set (petsc_config_makefile "${PROJECT_BINARY_DIR}/Makefile.petsc") - file (WRITE "${petsc_config_makefile}" -"## This file was autogenerated by FindPETSc.cmake + set(petsc_config_makefile 
"${PROJECT_BINARY_DIR}/Makefile.petsc") + file( + WRITE "${petsc_config_makefile}" + "## This file was autogenerated by FindPETSc.cmake # PETSC_DIR = ${PETSC_DIR} # PETSC_ARCH = ${PETSC_ARCH} include ${petsc_conf_rules} @@ -166,113 +207,160 @@ show : \t-@echo -n \${\${VARIABLE}} ") - macro (PETSC_GET_VARIABLE name var) - set (${var} "NOTFOUND" CACHE INTERNAL "Cleared" FORCE) - execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_config_makefile} show VARIABLE=${name} + macro(PETSC_GET_VARIABLE name var) + set(${var} + "NOTFOUND" + CACHE INTERNAL "Cleared" FORCE) + execute_process( + COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f + ${petsc_config_makefile} show VARIABLE=${name} OUTPUT_VARIABLE ${var} RESULT_VARIABLE petsc_return) - endmacro (PETSC_GET_VARIABLE) - petsc_get_variable (PETSC_LIB_DIR petsc_lib_dir) - petsc_get_variable (PETSC_EXTERNAL_LIB_BASIC petsc_libs_external) - petsc_get_variable (PETSC_CCPPFLAGS petsc_cpp_line) - petsc_get_variable (PETSC_INCLUDE petsc_include) - petsc_get_variable (PCC petsc_cc) - petsc_get_variable (PCC_FLAGS petsc_cc_flags) - petsc_get_variable (MPIEXEC petsc_mpiexec) - # We are done with the temporary Makefile, calling PETSC_GET_VARIABLE after this point is invalid! - file (REMOVE ${petsc_config_makefile}) - - include (ResolveCompilerPaths) + endmacro(PETSC_GET_VARIABLE) + petsc_get_variable(PETSC_LIB_DIR petsc_lib_dir) + petsc_get_variable(PETSC_EXTERNAL_LIB_BASIC petsc_libs_external) + petsc_get_variable(PETSC_CCPPFLAGS petsc_cpp_line) + petsc_get_variable(PETSC_INCLUDE petsc_include) + petsc_get_variable(PCC petsc_cc) + petsc_get_variable(PCC_FLAGS petsc_cc_flags) + petsc_get_variable(MPIEXEC petsc_mpiexec) + # We are done with the temporary Makefile, calling PETSC_GET_VARIABLE after + # this point is invalid! 
+ file(REMOVE ${petsc_config_makefile}) + + include(ResolveCompilerPaths) # Extract include paths and libraries from compile command line - resolve_includes (petsc_includes_all "${petsc_cpp_line}") + resolve_includes(petsc_includes_all "${petsc_cpp_line}") - #on windows we need to make sure we're linking against the right - #runtime library - if (WIN32) - if (petsc_cc_flags MATCHES "-MT") + # on windows we need to make sure we're linking against the right runtime + # library + if(WIN32) + if(petsc_cc_flags MATCHES "-MT") set(using_md False) - foreach(flag_var - CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE - CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO - CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE - CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO) + foreach( + flag_var + CMAKE_C_FLAGS + CMAKE_C_FLAGS_DEBUG + CMAKE_C_FLAGS_RELEASE + CMAKE_C_FLAGS_MINSIZEREL + CMAKE_C_FLAGS_RELWITHDEBINFO + CMAKE_CXX_FLAGS + CMAKE_CXX_FLAGS_DEBUG + CMAKE_CXX_FLAGS_RELEASE + CMAKE_CXX_FLAGS_MINSIZEREL + CMAKE_CXX_FLAGS_RELWITHDEBINFO) if(${flag_var} MATCHES "/MD") set(using_md True) endif(${flag_var} MATCHES "/MD") endforeach(flag_var) if(${using_md} MATCHES "True") - message(WARNING "PETSc was built with /MT, but /MD is currently set. - See http://www.cmake.org/Wiki/CMake_FAQ#How_can_I_build_my_MSVC_application_with_a_static_runtime.3F") + message( + WARNING + "PETSc was built with /MT, but /MD is currently set. 
+ See http://www.cmake.org/Wiki/CMake_FAQ#How_can_I_build_my_MSVC_application_with_a_static_runtime.3F" + ) endif(${using_md} MATCHES "True") - endif (petsc_cc_flags MATCHES "-MT") - endif (WIN32) + endif(petsc_cc_flags MATCHES "-MT") + endif(WIN32) - include (CorrectWindowsPaths) + include(CorrectWindowsPaths) convert_cygwin_path(petsc_lib_dir) - message (STATUS "petsc_lib_dir ${petsc_lib_dir}") - - macro (PETSC_FIND_LIBRARY suffix name) - set (PETSC_LIBRARY_${suffix} "NOTFOUND" CACHE INTERNAL "Cleared" FORCE) # Clear any stale value, if we got here, we need to find it again - if (WIN32) - set (libname lib${name}) #windows expects "libfoo", linux expects "foo" - else (WIN32) - set (libname ${name}) - endif (WIN32) - find_library (PETSC_LIBRARY_${suffix} NAMES ${libname} HINTS ${petsc_lib_dir} NO_DEFAULT_PATH) - set (PETSC_LIBRARIES_${suffix} "${PETSC_LIBRARY_${suffix}}") - mark_as_advanced (PETSC_LIBRARY_${suffix}) - endmacro (PETSC_FIND_LIBRARY suffix name) - - # Look for petscvec first, if it doesn't exist, we must be using single-library - petsc_find_library (VEC petscvec) - if (PETSC_LIBRARY_VEC) - petsc_find_library (SYS "petscsys;petsc") # libpetscsys is called libpetsc prior to 3.1 (when single-library was introduced) - petsc_find_library (MAT petscmat) - petsc_find_library (DM petscdm) - petsc_find_library (KSP petscksp) - petsc_find_library (SNES petscsnes) - petsc_find_library (TS petscts) - macro (PETSC_JOIN libs deps) - list (APPEND PETSC_LIBRARIES_${libs} ${PETSC_LIBRARIES_${deps}}) - endmacro (PETSC_JOIN libs deps) - petsc_join (VEC SYS) - petsc_join (MAT VEC) - petsc_join (DM MAT) - petsc_join (KSP DM) - petsc_join (SNES KSP) - petsc_join (TS SNES) - petsc_join (ALL TS) - else () - set (PETSC_LIBRARY_VEC "NOTFOUND" CACHE INTERNAL "Cleared" FORCE) # There is no libpetscvec - petsc_find_library (SINGLE petsc) - # Debian 9/Ubuntu 16.04 uses _real and _complex extensions when using libraries in /usr/lib/petsc. 
- if (NOT PETSC_LIBRARY_SINGLE) - petsc_find_library (SINGLE petsc_real) + message(STATUS "petsc_lib_dir ${petsc_lib_dir}") + + macro(PETSC_FIND_LIBRARY suffix name) + set(PETSC_LIBRARY_${suffix} + "NOTFOUND" + CACHE INTERNAL "Cleared" FORCE) # Clear any stale value, if we got here, + # we need to find it again + if(WIN32) + set(libname lib${name}) # windows expects "libfoo", linux expects "foo" + else(WIN32) + set(libname ${name}) + endif(WIN32) + find_library( + PETSC_LIBRARY_${suffix} + NAMES ${libname} + HINTS ${petsc_lib_dir} + NO_DEFAULT_PATH) + set(PETSC_LIBRARIES_${suffix} "${PETSC_LIBRARY_${suffix}}") + mark_as_advanced(PETSC_LIBRARY_${suffix}) + endmacro( + PETSC_FIND_LIBRARY + suffix + name) + + # Look for petscvec first, if it doesn't exist, we must be using + # single-library + petsc_find_library(VEC petscvec) + if(PETSC_LIBRARY_VEC) + petsc_find_library( + SYS "petscsys;petsc") # libpetscsys is called libpetsc prior to 3.1 (when + # single-library was introduced) + petsc_find_library(MAT petscmat) + petsc_find_library(DM petscdm) + petsc_find_library(KSP petscksp) + petsc_find_library(SNES petscsnes) + petsc_find_library(TS petscts) + macro(PETSC_JOIN libs deps) + list(APPEND PETSC_LIBRARIES_${libs} ${PETSC_LIBRARIES_${deps}}) + endmacro( + PETSC_JOIN + libs + deps) + petsc_join(VEC SYS) + petsc_join(MAT VEC) + petsc_join(DM MAT) + petsc_join(KSP DM) + petsc_join(SNES KSP) + petsc_join(TS SNES) + petsc_join(ALL TS) + else() + set(PETSC_LIBRARY_VEC + "NOTFOUND" + CACHE INTERNAL "Cleared" FORCE) # There is no libpetscvec + petsc_find_library(SINGLE petsc) + # Debian 9/Ubuntu 16.04 uses _real and _complex extensions when using + # libraries in /usr/lib/petsc. 
+ if(NOT PETSC_LIBRARY_SINGLE) + petsc_find_library(SINGLE petsc_real) endif() - if (NOT PETSC_LIBRARY_SINGLE) - petsc_find_library (SINGLE petsc_complex) + if(NOT PETSC_LIBRARY_SINGLE) + petsc_find_library(SINGLE petsc_complex) endif() - foreach (pkg SYS VEC MAT DM KSP SNES TS ALL) - set (PETSC_LIBRARIES_${pkg} "${PETSC_LIBRARY_SINGLE}") - endforeach () - endif () - if (PETSC_LIBRARY_TS) - message (STATUS "Recognized PETSc install with separate libraries for each package") - else () - message (STATUS "Recognized PETSc install with single library for all packages") - endif () + foreach( + pkg + SYS + VEC + MAT + DM + KSP + SNES + TS + ALL) + set(PETSC_LIBRARIES_${pkg} "${PETSC_LIBRARY_SINGLE}") + endforeach() + endif() + if(PETSC_LIBRARY_TS) + message( + STATUS "Recognized PETSc install with separate libraries for each package" + ) + else() + message( + STATUS "Recognized PETSc install with single library for all packages") + endif() include(Check${PETSC_LANGUAGE_BINDINGS}SourceRuns) - macro (petsc_test_compiles includes libraries runs) - message(STATUS "PETSc test with : ${includes} ${libraries}" ) - if (PETSC_VERSION VERSION_GREATER 3.1) - set (_PETSC_TSDestroy "TSDestroy(&ts)") - else () - set (_PETSC_TSDestroy "TSDestroy(ts)") - endif () + macro(petsc_test_compiles includes libraries runs) + message(STATUS "PETSc test with : ${includes} ${libraries}") + if(PETSC_VERSION VERSION_GREATER 3.1) + set(_PETSC_TSDestroy "TSDestroy(&ts)") + else() + set(_PETSC_TSDestroy "TSDestroy(ts)") + endif() - set(_PETSC_TEST_SOURCE " + set(_PETSC_TEST_SOURCE + " static const char help[] = \"PETSc test program.\"; #include int main(int argc,char *argv[]) { @@ -287,115 +375,173 @@ int main(int argc,char *argv[]) { return 0; } ") - multipass_source_compiles ("${includes}" "${libraries}" "${_PETSC_TEST_SOURCE}" ${runs} "${PETSC_LANGUAGE_BINDINGS}") - if (${${runs}}) - set (PETSC_EXECUTABLE_COMPILES "YES" CACHE BOOL - "Can the system successfully run a PETSc executable? 
This variable can be manually set to \"YES\" to force CMake to accept a given PETSc configuration, but this will almost always result in a broken build. If you change PETSC_DIR, PETSC_ARCH, or PETSC_CURRENT you would have to reset this variable." FORCE) - endif (${${runs}}) - endmacro () - - - find_path (PETSC_INCLUDE_DIR petscts.h + multipass_source_compiles( + "${includes}" "${libraries}" "${_PETSC_TEST_SOURCE}" ${runs} + "${PETSC_LANGUAGE_BINDINGS}") + if(${${runs}}) + set(PETSC_EXECUTABLE_COMPILES + "YES" + CACHE + BOOL + "Can the system successfully run a PETSc executable? This variable can be manually set to \"YES\" to force CMake to accept a given PETSc configuration, but this will almost always result in a broken build. If you change PETSC_DIR, PETSC_ARCH, or PETSC_CURRENT you would have to reset this variable." + FORCE) + endif(${${runs}}) + endmacro() + + find_path( + PETSC_INCLUDE_DIR petscts.h HINTS "${PETSC_DIR}" PATH_SUFFIXES include NO_DEFAULT_PATH) - find_path (PETSC_INCLUDE_CONF petscconf.h + find_path( + PETSC_INCLUDE_CONF petscconf.h HINTS "${PETSC_DIR}" PATH_SUFFIXES "${PETSC_ARCH}/include" "bmake/${PETSC_ARCH}" NO_DEFAULT_PATH) - mark_as_advanced (PETSC_INCLUDE_DIR PETSC_INCLUDE_CONF) - set (petsc_includes_minimal ${PETSC_INCLUDE_CONF} ${PETSC_INCLUDE_DIR}) + mark_as_advanced(PETSC_INCLUDE_DIR PETSC_INCLUDE_CONF) + set(petsc_includes_minimal ${PETSC_INCLUDE_CONF} ${PETSC_INCLUDE_DIR}) - file (STRINGS "${PETSC_INCLUDE_CONF}/petscconf.h" PETSC_HAS_OPENMP REGEX "#define PETSC_HAVE_OPENMP 1") - if (PETSC_HAS_OPENMP) + file(STRINGS "${PETSC_INCLUDE_CONF}/petscconf.h" PETSC_HAS_OPENMP + REGEX "#define PETSC_HAVE_OPENMP 1") + if(PETSC_HAS_OPENMP) find_package(OpenMP REQUIRED) - set (petsc_openmp_library ";OpenMP::OpenMP_${PETSC_LANGUAGE_BINDINGS}") + set(petsc_openmp_library ";OpenMP::OpenMP_${PETSC_LANGUAGE_BINDINGS}") endif() - set (petsc_mpi_include_dirs "${MPI_${PETSC_LANGUAGE_BINDINGS}_INCLUDE_DIRS}") - #set (petsc_additional_libraries 
"MPI::MPI_${PETSC_LANGUAGE_BINDINGS}${petsc_openmp_library}") - - petsc_test_compiles ("${petsc_includes_minimal};${petsc_mpi_include_dirs}" - "${PETSC_LIBRARIES_TS};${petsc_additional_libraries}" - petsc_works_minimal) - if (petsc_works_minimal) - message (STATUS "Minimal PETSc includes and libraries work. This probably means we are building with shared libs.") - set (petsc_includes_needed "${petsc_includes_minimal}") - else (petsc_works_minimal) # Minimal includes fail, see if just adding full includes fixes it - petsc_test_compiles ("${petsc_includes_all};${petsc_mpi_include_dirs}" + set(petsc_mpi_include_dirs "${MPI_${PETSC_LANGUAGE_BINDINGS}_INCLUDE_DIRS}") + # set (petsc_additional_libraries + # "MPI::MPI_${PETSC_LANGUAGE_BINDINGS}${petsc_openmp_library}") + + petsc_test_compiles( + "${petsc_includes_minimal};${petsc_mpi_include_dirs}" + "${PETSC_LIBRARIES_TS};${petsc_additional_libraries}" petsc_works_minimal) + if(petsc_works_minimal) + message( + STATUS + "Minimal PETSc includes and libraries work. This probably means we are building with shared libs." + ) + set(petsc_includes_needed "${petsc_includes_minimal}") + else(petsc_works_minimal) # Minimal includes fail, see if just adding full + # includes fixes it + petsc_test_compiles( + "${petsc_includes_all};${petsc_mpi_include_dirs}" "${PETSC_LIBRARIES_TS};${petsc_additional_libraries}" petsc_works_allincludes) - if (petsc_works_allincludes) # It does, we just need all the includes ( - message (STATUS "PETSc requires extra include paths, but links correctly with only interface libraries. 
This is an unexpected configuration (but it seems to work fine).") - set (petsc_includes_needed ${petsc_includes_all}) - else (petsc_works_allincludes) # We are going to need to link the external libs explicitly - resolve_libraries (petsc_libraries_external "${petsc_libs_external}") - foreach (pkg SYS VEC MAT DM KSP SNES TS ALL) - list (APPEND PETSC_LIBRARIES_${pkg} ${petsc_libraries_external}) - endforeach (pkg) - petsc_test_compiles ("${petsc_includes_minimal};${petsc_mpi_include_dirs}" + if(petsc_works_allincludes) # It does, we just need all the includes ( + message( + STATUS + "PETSc requires extra include paths, but links correctly with only interface libraries. This is an unexpected configuration (but it seems to work fine)." + ) + set(petsc_includes_needed ${petsc_includes_all}) + else(petsc_works_allincludes) # We are going to need to link the external + # libs explicitly + resolve_libraries(petsc_libraries_external "${petsc_libs_external}") + foreach( + pkg + SYS + VEC + MAT + DM + KSP + SNES + TS + ALL) + list(APPEND PETSC_LIBRARIES_${pkg} ${petsc_libraries_external}) + endforeach(pkg) + petsc_test_compiles( + "${petsc_includes_minimal};${petsc_mpi_include_dirs}" "${PETSC_LIBRARIES_TS};${petsc_additional_libraries}" petsc_works_alllibraries) - if (petsc_works_alllibraries) - message (STATUS "PETSc only need minimal includes, but requires explicit linking to all dependencies. This is expected when PETSc is built with static libraries.") - set (petsc_includes_needed ${petsc_includes_minimal}) - else (petsc_works_alllibraries) + if(petsc_works_alllibraries) + message( + STATUS + "PETSc only need minimal includes, but requires explicit linking to all dependencies. This is expected when PETSc is built with static libraries." 
+ ) + set(petsc_includes_needed ${petsc_includes_minimal}) + else(petsc_works_alllibraries) # It looks like we really need everything, should have listened to Matt - set (petsc_includes_needed ${petsc_includes_all}) - petsc_test_compiles ("${petsc_includes_all};${petsc_mpi_include_dirs}" - "${PETSC_LIBRARIES_TS};${petsc_additional_libraries}" - petsc_works_all) - if (petsc_works_all) # We fail anyways - message (STATUS "PETSc requires extra include paths and explicit linking to all dependencies. This probably means you have static libraries and something unexpected in PETSc headers.") - else (petsc_works_all) # We fail anyways - message (STATUS "PETSc could not be used, maybe the install is broken.") - endif (petsc_works_all) - endif (petsc_works_alllibraries) - endif (petsc_works_allincludes) - endif (petsc_works_minimal) - - # We do an out-of-source build so __FILE__ will be an absolute path, hence __INSDIR__ is superfluous - if (${PETSC_VERSION} VERSION_LESS 3.1) - set (PETSC_DEFINITIONS "-D__SDIR__=\"\"" CACHE STRING "PETSc definitions" FORCE) - else () - set (PETSC_DEFINITIONS "-D__INSDIR__=" CACHE STRING "PETSc definitions" FORCE) - endif () + set(petsc_includes_needed ${petsc_includes_all}) + petsc_test_compiles( + "${petsc_includes_all};${petsc_mpi_include_dirs}" + "${PETSC_LIBRARIES_TS};${petsc_additional_libraries}" petsc_works_all) + if(petsc_works_all) # We fail anyways + message( + STATUS + "PETSc requires extra include paths and explicit linking to all dependencies. This probably means you have static libraries and something unexpected in PETSc headers." 
+ ) + else(petsc_works_all) # We fail anyways + message( + STATUS "PETSc could not be used, maybe the install is broken.") + endif(petsc_works_all) + endif(petsc_works_alllibraries) + endif(petsc_works_allincludes) + endif(petsc_works_minimal) + + # We do an out-of-source build so __FILE__ will be an absolute path, hence + # __INSDIR__ is superfluous + if(${PETSC_VERSION} VERSION_LESS 3.1) + set(PETSC_DEFINITIONS + "-D__SDIR__=\"\"" + CACHE STRING "PETSc definitions" FORCE) + else() + set(PETSC_DEFINITIONS + "-D__INSDIR__=" + CACHE STRING "PETSc definitions" FORCE) + endif() # Sometimes this can be used to assist FindMPI.cmake - set (PETSC_MPIEXEC ${petsc_mpiexec} CACHE FILEPATH "Executable for running PETSc MPI programs" FORCE) - set (PETSC_INCLUDES ${petsc_includes_needed} CACHE STRING "PETSc include path" FORCE) - set (PETSC_LIBRARIES ${PETSC_LIBRARIES_ALL} CACHE STRING "PETSc libraries" FORCE) - set (PETSC_COMPILER ${petsc_cc} CACHE FILEPATH "PETSc compiler" FORCE) -endif () + set(PETSC_MPIEXEC + ${petsc_mpiexec} + CACHE FILEPATH "Executable for running PETSc MPI programs" FORCE) + set(PETSC_INCLUDES + ${petsc_includes_needed} + CACHE STRING "PETSc include path" FORCE) + set(PETSC_LIBRARIES + ${PETSC_LIBRARIES_ALL} + CACHE STRING "PETSc libraries" FORCE) + set(PETSC_COMPILER + ${petsc_cc} + CACHE FILEPATH "PETSc compiler" FORCE) +endif() -if (NOT PETSC_INCLUDES AND NOT TARGET PETSc::PETSc) +if(NOT PETSC_INCLUDES AND NOT TARGET PETSc::PETSc) find_package(PkgConfig) - if (PkgConfig_FOUND) + if(PkgConfig_FOUND) pkg_search_module(PkgPETSC PETSc>3.4.0 petsc>3.4.0) - set (PETSC_LIBRARIES ${PkgPETSC_LINK_LIBRARIES} CACHE STRING "PETSc libraries" FORCE) - set (PETSC_INCLUDES ${PkgPETSC_INCLUDE_DIRS} CACHE STRING "PETSc include path" FORCE) - set (PETSC_EXECUTABLE_COMPILES "YES" CACHE BOOL - "Can the system successfully run a PETSc executable? 
This variable can be manually set to \"YES\" to force CMake to accept a given PETSc configuration, but this will almost always result in a broken build. If you change PETSC_DIR, PETSC_ARCH, or PETSC_CURRENT you would have to reset this variable." FORCE) + set(PETSC_LIBRARIES + ${PkgPETSC_LINK_LIBRARIES} + CACHE STRING "PETSc libraries" FORCE) + set(PETSC_INCLUDES + ${PkgPETSC_INCLUDE_DIRS} + CACHE STRING "PETSc include path" FORCE) + set(PETSC_EXECUTABLE_COMPILES + "YES" + CACHE + BOOL + "Can the system successfully run a PETSc executable? This variable can be manually set to \"YES\" to force CMake to accept a given PETSc configuration, but this will almost always result in a broken build. If you change PETSC_DIR, PETSC_ARCH, or PETSC_CURRENT you would have to reset this variable." + FORCE) endif() endif() -# Note that we have forced values for all these choices. If you -# change these, you are telling the system to trust you that they -# work. It is likely that you will end up with a broken build. -mark_as_advanced (PETSC_INCLUDES PETSC_LIBRARIES PETSC_COMPILER PETSC_DEFINITIONS PETSC_MPIEXEC PETSC_EXECUTABLE_COMPILES) +# Note that we have forced values for all these choices. If you change these, +# you are telling the system to trust you that they work. It is likely that you +# will end up with a broken build. +mark_as_advanced(PETSC_INCLUDES PETSC_LIBRARIES PETSC_COMPILER + PETSC_DEFINITIONS PETSC_MPIEXEC PETSC_EXECUTABLE_COMPILES) -include (FindPackageHandleStandardArgs) -find_package_handle_standard_args (PETSc +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args( + PETSc REQUIRED_VARS PETSC_INCLUDES PETSC_LIBRARIES VERSION_VAR PETSC_VERSION - FAIL_MESSAGE "PETSc could not be found. Be sure to set PETSC_DIR and PETSC_ARCH.") + FAIL_MESSAGE + "PETSc could not be found. 
Be sure to set PETSC_DIR and PETSC_ARCH.") -if (PETSC_FOUND) - if (NOT TARGET PETSc::PETSc) +if(PETSC_FOUND) + if(NOT TARGET PETSc::PETSc) add_library(PETSc::PETSc UNKNOWN IMPORTED) list(GET PETSC_LIBRARIES 0 PETSC_LIBRARY) target_link_libraries(PETSc::PETSc INTERFACE "${PETSC_LIBRARIES}") - set_target_properties(PETSc::PETSc PROPERTIES - IMPORTED_LOCATION "${PETSC_LIBRARY}" - INTERFACE_INCLUDE_DIRECTORIES "${PETSC_INCLUDES}" - ) + set_target_properties( + PETSc::PETSc PROPERTIES IMPORTED_LOCATION "${PETSC_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES "${PETSC_INCLUDES}") endif() endif() diff --git a/cmake/FindPackageMultipass.cmake b/cmake/FindPackageMultipass.cmake index 99bbace448..4aa619495b 100644 --- a/cmake/FindPackageMultipass.cmake +++ b/cmake/FindPackageMultipass.cmake @@ -1,132 +1,140 @@ # PackageMultipass - this module defines two macros # -# FIND_PACKAGE_MULTIPASS (Name CURRENT -# STATES VAR0 VAR1 ... -# DEPENDENTS DEP0 DEP1 ...) +# FIND_PACKAGE_MULTIPASS (Name CURRENT STATES VAR0 VAR1 ... DEPENDENTS DEP0 DEP1 +# ...) # -# This function creates a cache entry _CURRENT which -# the user can set to "NO" to trigger a reconfiguration of the package. -# The first time this function is called, the values of -# _VAR0, ... are saved. If _CURRENT -# is false or if any STATE has changed since the last time -# FIND_PACKAGE_MULTIPASS() was called, then CURRENT will be set to "NO", -# otherwise CURRENT will be "YES". IF not CURRENT, then -# _DEP0, ... will be FORCED to NOTFOUND. -# Example: -# find_path (FOO_DIR include/foo.h) -# FIND_PACKAGE_MULTIPASS (Foo foo_current -# STATES DIR -# DEPENDENTS INCLUDES LIBRARIES) -# if (NOT foo_current) -# # Make temporary files, run programs, etc, to determine FOO_INCLUDES and FOO_LIBRARIES -# endif (NOT foo_current) +# This function creates a cache entry _CURRENT which the user +# can set to "NO" to trigger a reconfiguration of the package. The first time +# this function is called, the values of _VAR0, ... are saved. 
+# If _CURRENT is false or if any STATE has changed since the +# last time FIND_PACKAGE_MULTIPASS() was called, then CURRENT will be set to +# "NO", otherwise CURRENT will be "YES". IF not CURRENT, then +# _DEP0, ... will be FORCED to NOTFOUND. Example: find_path +# (FOO_DIR include/foo.h) FIND_PACKAGE_MULTIPASS (Foo foo_current STATES DIR +# DEPENDENTS INCLUDES LIBRARIES) if (NOT foo_current) # Make temporary files, +# run programs, etc, to determine FOO_INCLUDES and FOO_LIBRARIES endif (NOT +# foo_current) # -# MULTIPASS_SOURCE_RUNS (Name INCLUDES LIBRARIES SOURCE RUNS LANGUAGE) -# Always runs the given test, use this when you need to re-run tests -# because parent variables have made old cache entries stale. The LANGUAGE -# variable is either C or CXX indicating which compiler the test should -# use. -# MULTIPASS_C_SOURCE_RUNS (Name INCLUDES LIBRARIES SOURCE RUNS) -# DEPRECATED! This is only included for backwards compatability. Use -# the more general MULTIPASS_SOURCE_RUNS instead. -# Always runs the given test, use this when you need to re-run tests -# because parent variables have made old cache entries stale. +# MULTIPASS_SOURCE_RUNS (Name INCLUDES LIBRARIES SOURCE RUNS LANGUAGE) Always +# runs the given test, use this when you need to re-run tests because parent +# variables have made old cache entries stale. The LANGUAGE variable is either C +# or CXX indicating which compiler the test should use. MULTIPASS_C_SOURCE_RUNS +# (Name INCLUDES LIBRARIES SOURCE RUNS) DEPRECATED! This is only included for +# backwards compatability. Use the more general MULTIPASS_SOURCE_RUNS instead. +# Always runs the given test, use this when you need to re-run tests because +# parent variables have made old cache entries stale. 
include(CheckCXXSourceCompiles) -macro (FIND_PACKAGE_MULTIPASS _name _current) - string (TOUPPER ${_name} _NAME) - set (_args ${ARGV}) - list (REMOVE_AT _args 0 1) +macro(FIND_PACKAGE_MULTIPASS _name _current) + string(TOUPPER ${_name} _NAME) + set(_args ${ARGV}) + list(REMOVE_AT _args 0 1) - set (_states_current "YES") - list (GET _args 0 _cmd) - if (_cmd STREQUAL "STATES") - list (REMOVE_AT _args 0) - list (GET _args 0 _state) - while (_state AND NOT _state STREQUAL "DEPENDENTS") + set(_states_current "YES") + list(GET _args 0 _cmd) + if(_cmd STREQUAL "STATES") + list(REMOVE_AT _args 0) + list(GET _args 0 _state) + while(_state AND NOT _state STREQUAL "DEPENDENTS") # The name of the stored value for the given state - set (_stored_var PACKAGE_MULTIPASS_${_NAME}_${_state}) - if (NOT "${${_stored_var}}" STREQUAL "${${_NAME}_${_state}}") - set (_states_current "NO") - endif (NOT "${${_stored_var}}" STREQUAL "${${_NAME}_${_state}}") - set (${_stored_var} "${${_NAME}_${_state}}" CACHE INTERNAL "Stored state for ${_name}." FORCE) - list (REMOVE_AT _args 0) - list (GET _args 0 _state) - endwhile (_state AND NOT _state STREQUAL "DEPENDENTS") - endif (_cmd STREQUAL "STATES") + set(_stored_var PACKAGE_MULTIPASS_${_NAME}_${_state}) + if(NOT "${${_stored_var}}" STREQUAL "${${_NAME}_${_state}}") + set(_states_current "NO") + endif(NOT "${${_stored_var}}" STREQUAL "${${_NAME}_${_state}}") + set(${_stored_var} + "${${_NAME}_${_state}}" + CACHE INTERNAL "Stored state for ${_name}." FORCE) + list(REMOVE_AT _args 0) + list(GET _args 0 _state) + endwhile(_state AND NOT _state STREQUAL "DEPENDENTS") + endif(_cmd STREQUAL "STATES") - set (_stored ${_NAME}_CURRENT) - if (NOT ${_stored}) - set (${_stored} "YES" CACHE BOOL "Is the configuration for ${_name} current? Set to \"NO\" to reconfigure." 
FORCE) - set (_states_current "NO") - endif (NOT ${_stored}) + set(_stored ${_NAME}_CURRENT) + if(NOT ${_stored}) + set(${_stored} + "YES" + CACHE + BOOL + "Is the configuration for ${_name} current? Set to \"NO\" to reconfigure." + FORCE) + set(_states_current "NO") + endif(NOT ${_stored}) - set (${_current} ${_states_current}) - if (NOT ${_current} AND PACKAGE_MULTIPASS_${_name}_CALLED) - message (STATUS "Clearing ${_name} dependent variables") + set(${_current} ${_states_current}) + if(NOT ${_current} AND PACKAGE_MULTIPASS_${_name}_CALLED) + message(STATUS "Clearing ${_name} dependent variables") # Clear all the dependent variables so that the module can reset them - list (GET _args 0 _cmd) - if (_cmd STREQUAL "DEPENDENTS") - list (REMOVE_AT _args 0) - foreach (dep ${_args}) - set (${_NAME}_${dep} "NOTFOUND" CACHE INTERNAL "Cleared" FORCE) - endforeach (dep) - endif (_cmd STREQUAL "DEPENDENTS") - set (${_NAME}_FOUND "NOTFOUND" CACHE INTERNAL "Cleared" FORCE) - endif () - set (PACKAGE_MULTIPASS_${name}_CALLED YES CACHE INTERNAL "Private" FORCE) -endmacro (FIND_PACKAGE_MULTIPASS) - + list(GET _args 0 _cmd) + if(_cmd STREQUAL "DEPENDENTS") + list(REMOVE_AT _args 0) + foreach(dep ${_args}) + set(${_NAME}_${dep} + "NOTFOUND" + CACHE INTERNAL "Cleared" FORCE) + endforeach(dep) + endif(_cmd STREQUAL "DEPENDENTS") + set(${_NAME}_FOUND + "NOTFOUND" + CACHE INTERNAL "Cleared" FORCE) + endif() + set(PACKAGE_MULTIPASS_${name}_CALLED + YES + CACHE INTERNAL "Private" FORCE) +endmacro(FIND_PACKAGE_MULTIPASS) -macro (MULTIPASS_SOURCE_RUNS includes libraries source runs language) - include (Check${language}SourceRuns) +macro(MULTIPASS_SOURCE_RUNS includes libraries source runs language) + include(Check${language}SourceRuns) # This is a ridiculous hack. CHECK_${language}_SOURCE_* thinks that if the - # *name* of the return variable doesn't change, then the test does - # not need to be re-run. 
We keep an internal count which we - # increment to guarantee that every test name is unique. If we've - # gotten here, then the configuration has changed enough that the - # test *needs* to be rerun. - if (NOT MULTIPASS_TEST_COUNT) - set (MULTIPASS_TEST_COUNT 00) - endif (NOT MULTIPASS_TEST_COUNT) - math (EXPR _tmp "${MULTIPASS_TEST_COUNT} + 1") # Why can't I add to a cache variable? - set (MULTIPASS_TEST_COUNT ${_tmp} CACHE INTERNAL "Unique test ID") - set (testname MULTIPASS_TEST_${MULTIPASS_TEST_COUNT}_${runs}) - set (CMAKE_REQUIRED_INCLUDES ${includes}) - set (CMAKE_REQUIRED_LIBRARIES ${libraries}) + # *name* of the return variable doesn't change, then the test does not need to + # be re-run. We keep an internal count which we increment to guarantee that + # every test name is unique. If we've gotten here, then the configuration has + # changed enough that the test *needs* to be rerun. + if(NOT MULTIPASS_TEST_COUNT) + set(MULTIPASS_TEST_COUNT 00) + endif(NOT MULTIPASS_TEST_COUNT) + math(EXPR _tmp "${MULTIPASS_TEST_COUNT} + 1") # Why can't I add to a cache + # variable? 
+ set(MULTIPASS_TEST_COUNT + ${_tmp} + CACHE INTERNAL "Unique test ID") + set(testname MULTIPASS_TEST_${MULTIPASS_TEST_COUNT}_${runs}) + set(CMAKE_REQUIRED_INCLUDES ${includes}) + set(CMAKE_REQUIRED_LIBRARIES ${libraries}) if(${language} STREQUAL "C") - check_c_source_runs ("${source}" ${testname}) + check_c_source_runs("${source}" ${testname}) elseif(${language} STREQUAL "CXX") - check_cxx_source_runs ("${source}" ${testname}) + check_cxx_source_runs("${source}" ${testname}) endif() - set (${runs} "${${testname}}") -endmacro (MULTIPASS_SOURCE_RUNS) + set(${runs} "${${testname}}") +endmacro(MULTIPASS_SOURCE_RUNS) -macro (MULTIPASS_C_SOURCE_RUNS includes libraries source runs) +macro(MULTIPASS_C_SOURCE_RUNS includes libraries source runs) multipass_source_runs("${includes}" "${libraries}" "${source}" ${runs} "C") -endmacro (MULTIPASS_C_SOURCE_RUNS) +endmacro(MULTIPASS_C_SOURCE_RUNS) -macro (MULTIPASS_SOURCE_COMPILES includes libraries source runs language) - include (Check${language}SourceCompiles) +macro(MULTIPASS_SOURCE_COMPILES includes libraries source runs language) + include(Check${language}SourceCompiles) # This is a ridiculous hack. CHECK_${language}_SOURCE_* thinks that if the - # *name* of the return variable doesn't change, then the test does - # not need to be re-run. We keep an internal count which we - # increment to guarantee that every test name is unique. If we've - # gotten here, then the configuration has changed enough that the - # test *needs* to be rerun. - if (NOT MULTIPASS_TEST_COUNT) - set (MULTIPASS_TEST_COUNT 00) - endif (NOT MULTIPASS_TEST_COUNT) - math (EXPR _tmp "${MULTIPASS_TEST_COUNT} + 1") # Why can't I add to a cache variable? 
- set (MULTIPASS_TEST_COUNT ${_tmp} CACHE INTERNAL "Unique test ID") - set (testname MULTIPASS_TEST_${MULTIPASS_TEST_COUNT}_${runs}) - set (CMAKE_REQUIRED_INCLUDES ${includes}) - set (CMAKE_REQUIRED_LIBRARIES ${libraries}) + # *name* of the return variable doesn't change, then the test does not need to + # be re-run. We keep an internal count which we increment to guarantee that + # every test name is unique. If we've gotten here, then the configuration has + # changed enough that the test *needs* to be rerun. + if(NOT MULTIPASS_TEST_COUNT) + set(MULTIPASS_TEST_COUNT 00) + endif(NOT MULTIPASS_TEST_COUNT) + math(EXPR _tmp "${MULTIPASS_TEST_COUNT} + 1") # Why can't I add to a cache + # variable? + set(MULTIPASS_TEST_COUNT + ${_tmp} + CACHE INTERNAL "Unique test ID") + set(testname MULTIPASS_TEST_${MULTIPASS_TEST_COUNT}_${runs}) + set(CMAKE_REQUIRED_INCLUDES ${includes}) + set(CMAKE_REQUIRED_LIBRARIES ${libraries}) if(${language} STREQUAL "C") - check_c_source_compiles ("${source}" ${testname}) + check_c_source_compiles("${source}" ${testname}) elseif(${language} STREQUAL "CXX") - check_cxx_source_compiles ("${source}" ${testname}) + check_cxx_source_compiles("${source}" ${testname}) endif() - set (${runs} "${${testname}}") -endmacro () + set(${runs} "${${testname}}") +endmacro() diff --git a/cmake/FindSLEPc.cmake b/cmake/FindSLEPc.cmake index 3add8eba8b..d37b107d44 100644 --- a/cmake/FindSLEPc.cmake +++ b/cmake/FindSLEPc.cmake @@ -1,107 +1,106 @@ -# - Try to find SLEPC -# Once done this will define +# * Try to find SLEPC Once done this will define # -# SLEPC_FOUND - system has SLEPc -# SLEPC_INCLUDE_DIRS - include directories for SLEPc -# SLEPC_LIBRARIES - libraries for SLEPc -# SLEPC_DIR - directory where SLEPc is built -# SLEPC_VERSION - version of SLEPc -# SLEPC_VERSION_MAJOR - First number in SLEPC_VERSION -# SLEPC_VERSION_MINOR - Second number in SLEPC_VERSION -# SLEPC_VERSION_SUBMINOR - Third number in SLEPC_VERSION +# SLEPC_FOUND - system has SLEPc 
SLEPC_INCLUDE_DIRS - include directories +# for SLEPc SLEPC_LIBRARIES - libraries for SLEPc SLEPC_DIR - +# directory where SLEPc is built SLEPC_VERSION - version of SLEPc +# SLEPC_VERSION_MAJOR - First number in SLEPC_VERSION SLEPC_VERSION_MINOR - +# Second number in SLEPC_VERSION SLEPC_VERSION_SUBMINOR - Third number in +# SLEPC_VERSION # -# Assumes that PETSC_DIR and PETSC_ARCH has been set by -# already calling find_package(PETSc) +# Assumes that PETSC_DIR and PETSC_ARCH has been set by already calling +# find_package(PETSc) -#============================================================================= -# Copyright (C) 2010-2012 Garth N. Wells, Anders Logg and Johannes Ring -# All rights reserved. +# ============================================================================= +# Copyright (C) 2010-2012 Garth N. Wells, Anders Logg and Johannes Ring All +# rights reserved. # # Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: +# modification, are permitted provided that the following conditions are met: # -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. +# 1. Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -#============================================================================= +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+# ============================================================================= # -# Taken from https://github.com/live-clones/libadjoint/blob/master/cmake/modules/FindSLEPc.cmake +# Taken from +# https://github.com/live-clones/libadjoint/blob/master/cmake/modules/FindSLEPc.cmake find_package(PETSc REQUIRED) find_package(MPI REQUIRED) # Set debian_arches (PETSC_ARCH for Debian-style installations) -foreach (debian_arches linux kfreebsd) - if ("${CMAKE_BUILD_TYPE}" STREQUAL "Debug") - set(DEBIAN_FLAVORS ${debian_arches}-gnu-c-debug ${debian_arches}-gnu-c-opt ${DEBIAN_FLAVORS}) +foreach(debian_arches linux kfreebsd) + if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug") + set(DEBIAN_FLAVORS ${debian_arches}-gnu-c-debug ${debian_arches}-gnu-c-opt + ${DEBIAN_FLAVORS}) else() - set(DEBIAN_FLAVORS ${debian_arches}-gnu-c-opt ${debian_arches}-gnu-c-debug ${DEBIAN_FLAVORS}) + set(DEBIAN_FLAVORS ${debian_arches}-gnu-c-opt ${debian_arches}-gnu-c-debug + ${DEBIAN_FLAVORS}) endif() endforeach() # List of possible locations for SLEPC_DIR set(slepc_dir_locations "") list(APPEND slepc_dir_locations "/usr/lib/slepc") -list(APPEND slepc_dir_locations "/opt/local/lib/petsc") # Macports +list(APPEND slepc_dir_locations "/opt/local/lib/petsc") # Macports list(APPEND slepc_dir_locations "/usr/local/lib/slepc") list(APPEND slepc_dir_locations "$ENV{HOME}/slepc") # Try to figure out SLEPC_DIR by finding slepc.h -find_path(SLEPC_DIR include/slepc.h +find_path( + SLEPC_DIR include/slepc.h HINTS ${SLEPC_DIR} $ENV{SLEPC_DIR} PATHS ${slepc_dir_locations} DOC "SLEPc directory") # Report result of search for SLEPC_DIR -if (DEFINED SLEPC_DIR) +if(DEFINED SLEPC_DIR) message(STATUS "SLEPC_DIR is ${SLEPC_DIR}") else() message(STATUS "SLEPC_DIR is empty") endif() # Get variables from SLEPc configuration -if (SLEPC_DIR) +if(SLEPC_DIR) - find_library(SLEPC_LIBRARY + find_library( + SLEPC_LIBRARY NAMES slepc - HINTS - ${SLEPC_DIR}/lib - $ENV{SLEPC_DIR}/lib - ${SLEPC_DIR}/${PETSC_ARCH}/lib - 
$ENV{SLEPC_DIR}/$ENV{PETSC_ARCH}/lib + HINTS ${SLEPC_DIR}/lib $ENV{SLEPC_DIR}/lib ${SLEPC_DIR}/${PETSC_ARCH}/lib + $ENV{SLEPC_DIR}/$ENV{PETSC_ARCH}/lib NO_DEFAULT_PATH DOC "The SLEPc library") - find_library(SLEPC_LIBRARY + find_library( + SLEPC_LIBRARY NAMES slepc DOC "The SLEPc library") mark_as_advanced(SLEPC_LIBRARY) # Find SLEPc config file - find_file(SLEPC_CONFIG_FILE NAMES slepc_common PATHS - ${SLEPC_DIR}/lib/slepc/conf - ${SLEPC_DIR}/lib/slepc-conf ${SLEPC_DIR}/conf) + find_file( + SLEPC_CONFIG_FILE + NAMES slepc_common + PATHS ${SLEPC_DIR}/lib/slepc/conf ${SLEPC_DIR}/lib/slepc-conf + ${SLEPC_DIR}/conf) # Create a temporary Makefile to probe the SLEPc configuration set(slepc_config_makefile ${PROJECT_BINARY_DIR}/Makefile.slepc) - file(WRITE ${slepc_config_makefile} -"# This file was autogenerated by FindSLEPc.cmake + file( + WRITE ${slepc_config_makefile} + "# This file was autogenerated by FindSLEPc.cmake SLEPC_DIR = ${SLEPC_DIR} PETSC_ARCH = ${PETSC_ARCH} PETSC_DIR = ${PETSC_DIR} @@ -112,8 +111,12 @@ show : # Define macro for getting SLEPc variables from Makefile macro(SLEPC_GET_VARIABLE var name) - set(${var} "NOTFOUND" CACHE INTERNAL "Cleared" FORCE) - execute_process(COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${slepc_config_makefile} show VARIABLE=${name} + set(${var} + "NOTFOUND" + CACHE INTERNAL "Cleared" FORCE) + execute_process( + COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f + ${slepc_config_makefile} show VARIABLE=${name} OUTPUT_VARIABLE ${var} RESULT_VARIABLE slepc_return) endmacro() @@ -131,15 +134,19 @@ show : resolve_libraries(SLEPC_EXTERNAL_LIBRARIES "${SLEPC_EXTERNAL_LIB}") # Add variables to CMake cache and mark as advanced - set(SLEPC_INCLUDE_DIRS ${SLEPC_INCLUDE_DIRS} CACHE STRING "SLEPc include paths." FORCE) - set(SLEPC_LIBRARIES ${SLEPC_LIBRARY} CACHE STRING "SLEPc libraries." FORCE) + set(SLEPC_INCLUDE_DIRS + ${SLEPC_INCLUDE_DIRS} + CACHE STRING "SLEPc include paths." 
FORCE) + set(SLEPC_LIBRARIES + ${SLEPC_LIBRARY} + CACHE STRING "SLEPc libraries." FORCE) mark_as_advanced(SLEPC_INCLUDE_DIRS SLEPC_LIBRARIES) endif() -if (SLEPC_SKIP_BUILD_TESTS) +if(SLEPC_SKIP_BUILD_TESTS) set(SLEPC_VERSION "UNKNOWN") set(SLEPC_VERSION_OK TRUE) -elseif (SLEPC_LIBRARIES AND SLEPC_INCLUDE_DIRS) +elseif(SLEPC_LIBRARIES AND SLEPC_INCLUDE_DIRS) # Set flags for building test program set(CMAKE_REQUIRED_INCLUDES ${SLEPC_INCLUDE_DIRS}) @@ -147,8 +154,10 @@ elseif (SLEPC_LIBRARIES AND SLEPC_INCLUDE_DIRS) # Check SLEPc version set(SLEPC_CONFIG_TEST_VERSION_CPP - "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/slepc_config_test_version.cpp") - file(WRITE ${SLEPC_CONFIG_TEST_VERSION_CPP} " + "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/slepc_config_test_version.cpp") + file( + WRITE ${SLEPC_CONFIG_TEST_VERSION_CPP} + " #include #include \"slepcversion.h\" @@ -161,53 +170,58 @@ int main() { ") try_run( - SLEPC_CONFIG_TEST_VERSION_EXITCODE - SLEPC_CONFIG_TEST_VERSION_COMPILED - ${CMAKE_CURRENT_BINARY_DIR} - ${SLEPC_CONFIG_TEST_VERSION_CPP} - CMAKE_FLAGS - "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}" + SLEPC_CONFIG_TEST_VERSION_EXITCODE SLEPC_CONFIG_TEST_VERSION_COMPILED + ${CMAKE_CURRENT_BINARY_DIR} ${SLEPC_CONFIG_TEST_VERSION_CPP} + CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}" COMPILE_OUTPUT_VARIABLE COMPILE_OUTPUT - RUN_OUTPUT_VARIABLE OUTPUT - ) + RUN_OUTPUT_VARIABLE OUTPUT) - if (SLEPC_CONFIG_TEST_VERSION_EXITCODE EQUAL 0) - set(SLEPC_VERSION "${OUTPUT}" CACHE STRING "SLEPC version number") + if(SLEPC_CONFIG_TEST_VERSION_EXITCODE EQUAL 0) + set(SLEPC_VERSION + "${OUTPUT}" + CACHE STRING "SLEPC version number") string(REPLACE "." 
";" SLEPC_VERSION_LIST ${SLEPC_VERSION}) list(GET SLEPC_VERSION_LIST 0 SLEPC_VERSION_MAJOR) list(GET SLEPC_VERSION_LIST 1 SLEPC_VERSION_MINOR) list(GET SLEPC_VERSION_LIST 2 SLEPC_VERSION_SUBMINOR) mark_as_advanced(SLEPC_VERSION) - mark_as_advanced(SLEPC_VERSION_MAJOR SLEPC_VERSION_MINOR SLEPC_VERSION_SUBMINOR) + mark_as_advanced(SLEPC_VERSION_MAJOR SLEPC_VERSION_MINOR + SLEPC_VERSION_SUBMINOR) endif() - if (SLEPc_FIND_VERSION) + if(SLEPc_FIND_VERSION) # Check if version found is >= required version - if (NOT "${SLEPC_VERSION}" VERSION_LESS "${SLEPc_FIND_VERSION}") - set(SLEPC_VERSION_OK TRUE CACHE BOOL "") + if(NOT "${SLEPC_VERSION}" VERSION_LESS "${SLEPc_FIND_VERSION}") + set(SLEPC_VERSION_OK + TRUE + CACHE BOOL "") endif() else() # No specific version requested - set(SLEPC_VERSION_OK TRUE CACHE BOOL "") + set(SLEPC_VERSION_OK + TRUE + CACHE BOOL "") endif() mark_as_advanced(SLEPC_VERSION_OK) endif() # Standard package handling include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(SLEPc +find_package_handle_standard_args( + SLEPc FOUND_VAR SLEPC_FOUND - FAIL_MESSAGE "SLEPc could not be found. Be sure to set SLEPC_DIR, PETSC_DIR, and PETSC_ARCH." + FAIL_MESSAGE + "SLEPc could not be found. Be sure to set SLEPC_DIR, PETSC_DIR, and PETSC_ARCH." 
VERSION_VAR SLEPC_VERSION REQUIRED_VARS SLEPC_LIBRARIES SLEPC_DIR SLEPC_INCLUDE_DIRS SLEPC_VERSION_OK) -if (SLEPC_FOUND) - if (NOT TARGET SLEPc::SLEPc) +if(SLEPC_FOUND) + if(NOT TARGET SLEPc::SLEPc) add_library(SLEPc::SLEPc UNKNOWN IMPORTED) - set_target_properties(SLEPc::SLEPc PROPERTIES - IMPORTED_LOCATION "${SLEPC_LIBRARIES}" - INTERFACE_INCLUDE_DIRECTORIES "${SLEPC_INCLUDE_DIRS}" - INTERFACE_LINK_LIBRARIES PETSc::PETSc - ) + set_target_properties( + SLEPc::SLEPc + PROPERTIES IMPORTED_LOCATION "${SLEPC_LIBRARIES}" + INTERFACE_INCLUDE_DIRECTORIES "${SLEPC_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES PETSc::PETSc) endif() endif() diff --git a/cmake/FindSUNDIALS.cmake b/cmake/FindSUNDIALS.cmake index 15b266d06a..1063d2b900 100644 --- a/cmake/FindSUNDIALS.cmake +++ b/cmake/FindSUNDIALS.cmake @@ -1,79 +1,73 @@ # FindSUNDIALS # ------------ # -# Find SUNDIALS, the SUite of Nonlinear and DIfferential/ALgebraic equation Solvers +# Find SUNDIALS, the SUite of Nonlinear and DIfferential/ALgebraic equation +# Solvers # -# Currently only actually looks for arkode, cvode and ida, as well as nvecparallel +# Currently only actually looks for arkode, cvode and ida, as well as +# nvecparallel # # This module will define the following variables: # # :: # -# SUNDIALS_FOUND - true if SUNDIALS was found on the system -# SUNDIALS_INCLUDE_DIRS - Location of the SUNDIALS includes -# SUNDIALS_LIBRARIES - Required libraries -# SUNDIALS_VERSION - Full version string +# SUNDIALS_FOUND - true if SUNDIALS was found on the system +# SUNDIALS_INCLUDE_DIRS - Location of the SUNDIALS includes SUNDIALS_LIBRARIES - +# Required libraries SUNDIALS_VERSION - Full version string # # This module will export the following targets: # -# ``SUNDIALS::NVecParallel`` -# ``SUNDIALS::arkode`` -# ``SUNDIALS::cvode`` +# ``SUNDIALS::NVecParallel`` ``SUNDIALS::arkode`` ``SUNDIALS::cvode`` # ``SUNDIALS::ida`` # # You can also set the following variables: # -# ``SUNDIALS_ROOT`` or ``SUNDIALS_DIR`` (as an environment 
variable) -# Specify the path to the SUNDIALS installation to use +# ``SUNDIALS_ROOT`` or ``SUNDIALS_DIR`` (as an environment variable) Specify the +# path to the SUNDIALS installation to use # -# ``SUNDIALS_DEBUG`` -# Set to TRUE to get extra debugging output +# ``SUNDIALS_DEBUG`` Set to TRUE to get extra debugging output include(FindPackageHandleStandardArgs) find_package(SUNDIALS CONFIG QUIET) -if (SUNDIALS_FOUND) - if (TARGET SUNDIALS::nvecparallel) +if(SUNDIALS_FOUND) + if(TARGET SUNDIALS::nvecparallel) return() else() message(STATUS "SUNDIALS found but not SUNDIALS::nvecparallel") endif() endif() -find_path(SUNDIALS_INCLUDE_DIR - sundials_config.h - HINTS - "${SUNDIALS_ROOT}" - ENV SUNDIALS_DIR +find_path( + SUNDIALS_INCLUDE_DIR sundials_config.h + HINTS "${SUNDIALS_ROOT}" ENV SUNDIALS_DIR PATH_SUFFIXES include include/sundials DOC "SUNDIALS Directory") -if (SUNDIALS_DEBUG) +if(SUNDIALS_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " SUNDIALS_INCLUDE_DIR = ${SUNDIALS_INCLUDE_DIR}" - " SUNDIALS_ROOT = ${SUNDIALS_ROOT}") + " SUNDIALS_INCLUDE_DIR = ${SUNDIALS_INCLUDE_DIR}" + " SUNDIALS_ROOT = ${SUNDIALS_ROOT}") endif() set(SUNDIALS_INCLUDE_DIRS - "${SUNDIALS_INCLUDE_DIR}" - "${SUNDIALS_INCLUDE_DIR}/.." - CACHE STRING "SUNDIALS include directories") + "${SUNDIALS_INCLUDE_DIR}" "${SUNDIALS_INCLUDE_DIR}/.." + CACHE STRING "SUNDIALS include directories") -find_library(SUNDIALS_nvecparallel_LIBRARY +find_library( + SUNDIALS_nvecparallel_LIBRARY NAMES sundials_nvecparallel - HINTS - "${SUNDIALS_INCLUDE_DIR}/.." - "${SUNDIALS_INCLUDE_DIR}/../.." - PATH_SUFFIXES lib lib64 - ) + HINTS "${SUNDIALS_INCLUDE_DIR}/.." "${SUNDIALS_INCLUDE_DIR}/../.." 
+ PATH_SUFFIXES lib lib64) -if (SUNDIALS_DEBUG) - message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " SUNDIALS_nvecparallel_LIBRARY = ${SUNDIALS_nvecparallel_LIBRARY}") +if(SUNDIALS_DEBUG) + message( + STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " + " SUNDIALS_nvecparallel_LIBRARY = ${SUNDIALS_nvecparallel_LIBRARY}") endif() -if (NOT SUNDIALS_nvecparallel_LIBRARY) +if(NOT SUNDIALS_nvecparallel_LIBRARY) message(FATAL_ERROR "Sundials requested but SUNDIALS nvecparallel not found.") endif() list(APPEND SUNDIALS_LIBRARIES "${SUNDIALS_nvecparallel_LIBRARY}") @@ -81,62 +75,73 @@ mark_as_advanced(SUNDIALS_nvecparallel_LIBRARY) set(SUNDIALS_COMPONENTS arkode cvode ida) -foreach (LIB ${SUNDIALS_COMPONENTS}) - find_library(SUNDIALS_${LIB}_LIBRARY +foreach(LIB ${SUNDIALS_COMPONENTS}) + find_library( + SUNDIALS_${LIB}_LIBRARY NAMES sundials_${LIB} - HINTS - "${SUNDIALS_INCLUDE_DIR}/.." - "${SUNDIALS_INCLUDE_DIR}/../.." - PATH_SUFFIXES lib lib64 - ) + HINTS "${SUNDIALS_INCLUDE_DIR}/.." "${SUNDIALS_INCLUDE_DIR}/../.." 
+ PATH_SUFFIXES lib lib64) - if (SUNDIALS_DEBUG) + if(SUNDIALS_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " SUNDIALS_${LIB}_LIBRARY = ${SUNDIALS_${LIB}_LIBRARY}") + " SUNDIALS_${LIB}_LIBRARY = ${SUNDIALS_${LIB}_LIBRARY}") endif() - if (NOT SUNDIALS_${LIB}_LIBRARY) + if(NOT SUNDIALS_${LIB}_LIBRARY) message(FATAL_ERROR "Sundials requested but SUNDIALS ${LIB} not found.") endif() list(APPEND SUNDIALS_LIBRARIES "${SUNDIALS_${LIB}_LIBRARY}") mark_as_advanced(SUNDIALS_${LIB}_LIBRARY) endforeach() -if (SUNDIALS_INCLUDE_DIR) +if(SUNDIALS_INCLUDE_DIR) file(READ "${SUNDIALS_INCLUDE_DIR}/sundials_config.h" SUNDIALS_CONFIG_FILE) set(SUNDIALS_VERSION_REGEX_PATTERN - ".*#define SUNDIALS_VERSION \"([0-9]+)\\.([0-9]+)\\.([0-9]+)\".*") - string(REGEX MATCH ${SUNDIALS_VERSION_REGEX_PATTERN} _ "${SUNDIALS_CONFIG_FILE}") - set(SUNDIALS_VERSION_MAJOR ${CMAKE_MATCH_1} CACHE STRING "") - set(SUNDIALS_VERSION_MINOR ${CMAKE_MATCH_2} CACHE STRING "") - set(SUNDIALS_VERSION_PATCH ${CMAKE_MATCH_3} CACHE STRING "") - set(SUNDIALS_VERSION "${SUNDIALS_VERSION_MAJOR}.${SUNDIALS_VERSION_MINOR}.${SUNDIALS_VERSION_PATCH}" CACHE STRING "SUNDIALS version") + ".*#define SUNDIALS_VERSION \"([0-9]+)\\.([0-9]+)\\.([0-9]+)\".*") + string(REGEX MATCH ${SUNDIALS_VERSION_REGEX_PATTERN} _ + "${SUNDIALS_CONFIG_FILE}") + set(SUNDIALS_VERSION_MAJOR + ${CMAKE_MATCH_1} + CACHE STRING "") + set(SUNDIALS_VERSION_MINOR + ${CMAKE_MATCH_2} + CACHE STRING "") + set(SUNDIALS_VERSION_PATCH + ${CMAKE_MATCH_3} + CACHE STRING "") + set(SUNDIALS_VERSION + "${SUNDIALS_VERSION_MAJOR}.${SUNDIALS_VERSION_MINOR}.${SUNDIALS_VERSION_PATCH}" + CACHE STRING "SUNDIALS version") endif() -if (SUNDIALS_DEBUG) +if(SUNDIALS_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " SUNDIALS_VERSION = ${SUNDIALS_VERSION}") + " SUNDIALS_VERSION = ${SUNDIALS_VERSION}") endif() -find_package_handle_standard_args(SUNDIALS +find_package_handle_standard_args( + SUNDIALS 
REQUIRED_VARS SUNDIALS_LIBRARIES SUNDIALS_INCLUDE_DIR SUNDIALS_INCLUDE_DIRS - VERSION_VAR SUNDIALS_VERSION - ) + VERSION_VAR SUNDIALS_VERSION) -set(SUNDIALS_LIBRARIES "${SUNDIALS_LIBRARIES}" CACHE STRING "SUNDIALS libraries") +set(SUNDIALS_LIBRARIES + "${SUNDIALS_LIBRARIES}" + CACHE STRING "SUNDIALS libraries") mark_as_advanced(SUNDIALS_LIBRARIES SUNDIALS_INCLUDE_DIR SUNDIALS_INCLUDE_DIRS) -if (SUNDIALS_FOUND AND NOT TARGET SUNDIALS::SUNDIALS) +if(SUNDIALS_FOUND AND NOT TARGET SUNDIALS::SUNDIALS) add_library(SUNDIALS::nvecparallel UNKNOWN IMPORTED) - set_target_properties(SUNDIALS::nvecparallel PROPERTIES - IMPORTED_LOCATION "${SUNDIALS_nvecparallel_LIBRARY}" - INTERFACE_INCLUDE_DIRECTORIES "${SUNDIALS_INCLUDE_DIRS}") + set_target_properties( + SUNDIALS::nvecparallel + PROPERTIES IMPORTED_LOCATION "${SUNDIALS_nvecparallel_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES "${SUNDIALS_INCLUDE_DIRS}") - foreach (LIB ${SUNDIALS_COMPONENTS}) + foreach(LIB ${SUNDIALS_COMPONENTS}) add_library(SUNDIALS::${LIB} UNKNOWN IMPORTED) - set_target_properties(SUNDIALS::${LIB} PROPERTIES - IMPORTED_LOCATION "${SUNDIALS_${LIB}_LIBRARY}" - INTERFACE_INCLUDE_DIRECTORIES "${SUNDIALS_INCLUDE_DIRS}" - INTERFACE_LINK_LIBRARIES SUNDIALS::nvecparallel) + set_target_properties( + SUNDIALS::${LIB} + PROPERTIES IMPORTED_LOCATION "${SUNDIALS_${LIB}_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES "${SUNDIALS_INCLUDE_DIRS}" + INTERFACE_LINK_LIBRARIES SUNDIALS::nvecparallel) endforeach() endif() diff --git a/cmake/FindScoreP.cmake b/cmake/FindScoreP.cmake index dd119545e8..39f2efe307 100644 --- a/cmake/FindScoreP.cmake +++ b/cmake/FindScoreP.cmake @@ -7,60 +7,58 @@ # # :: # -# ScoreP_FOUND - true if ScoreP was found -# ScoreP_EXECUTABLE - Path to the ``scorep`` compiler wrapper -# ScoreP_INCLUDE_DIRS - Location of the ScoreP includes -# ScoreP_LIBRARIES - List of libraries need to link against ScoreP -# ScoreP_CXX_FLAGS - Compile definitions +# ScoreP_FOUND - true if ScoreP was found ScoreP_EXECUTABLE - Path to 
the +# ``scorep`` compiler wrapper ScoreP_INCLUDE_DIRS - Location of the ScoreP +# includes ScoreP_LIBRARIES - List of libraries need to link against ScoreP +# ScoreP_CXX_FLAGS - Compile definitions # # This module will also export the ``ScoreP::ScoreP`` target. # # You can also set the following variables: # -# ``ScoreP_ROOT`` -# Specify the path to the ScoreP installation to use +# ``ScoreP_ROOT`` Specify the path to the ScoreP installation to use # -# ``ScoreP_FLAGS`` -# The flags to pass to the ``scorep`` executable as a -# semicolon-separated list. This defaults to ``--user;--nocompiler`` +# ``ScoreP_FLAGS`` The flags to pass to the ``scorep`` executable as a +# semicolon-separated list. This defaults to ``--user;--nocompiler`` # -# ``ScoreP_COMPILER_LAUNCHER`` -# The full path to the compiler wrapper plus flags as a -# semicolon-separated list. This defaults to -# ``/path/to/scorep;${ScoreP_FLAGS}`` +# ``ScoreP_COMPILER_LAUNCHER`` The full path to the compiler wrapper plus flags +# as a semicolon-separated list. This defaults to +# ``/path/to/scorep;${ScoreP_FLAGS}`` # -# ``ScoreP_DEBUG`` -# Set to TRUE to get extra debugging output +# ``ScoreP_DEBUG`` Set to TRUE to get extra debugging output # # ---------------- -# Part of this module (the bit that parses scorep-config for the libraries) was lifted from +# Part of this module (the bit that parses scorep-config for the libraries) was +# lifted from # https://raw.githubusercontent.com/score-p/scorep_plugin_common/master/FindScorep.cmake # -# Copyright (c) 2016, Technische Universität Dresden, Germany -# All rights reserved. +# Copyright (c) 2016, Technische Universität Dresden, Germany All rights +# reserved. 
# -# Redistribution and use in source and binary forms, with or without modification, are permitted -provided that the following conditions are met: +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: # -# 1. Redistributions of source code must retain the above copyright notice, this list of conditions -# and the following disclaimer. +# 1. Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. # -# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions -# and the following disclaimer in the documentation and/or other materials provided with the -# distribution. +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse -# or promote products derived from this software without specific prior written permission. +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. # -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR -# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND -# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR -# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER -# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF -# THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. find_program(ScoreP_CONFIG scorep-config) mark_as_advanced(ScoreP_CONFIG) @@ -68,38 +66,40 @@ mark_as_advanced(ScoreP_CONFIG) get_filename_component(ScoreP_TMP "${ScoreP_CONFIG}" DIRECTORY) get_filename_component(ScoreP_EXEC_LOCATION "${ScoreP_TMP}" DIRECTORY) -if (ScoreP_DEBUG) +if(ScoreP_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " ScoreP_CONFIG = ${ScoreP_CONFIG}" - " ScoreP_EXEC_LOCATION = ${ScoreP_EXEC_LOCATION}") + " ScoreP_CONFIG = ${ScoreP_CONFIG}" + " ScoreP_EXEC_LOCATION = ${ScoreP_EXEC_LOCATION}") endif() if(ScoreP_CONFIG) message(STATUS "SCOREP library found. 
(using ${ScoreP_CONFIG})") execute_process(COMMAND ${ScoreP_CONFIG} "--user" "--nocompiler" "--cppflags" - OUTPUT_VARIABLE ScoreP_CONFIG_FLAGS) + OUTPUT_VARIABLE ScoreP_CONFIG_FLAGS) - string(REGEX MATCHALL "-I[^ ]*" ScoreP_CONFIG_INCLUDES "${ScoreP_CONFIG_FLAGS}") + string(REGEX MATCHALL "-I[^ ]*" ScoreP_CONFIG_INCLUDES + "${ScoreP_CONFIG_FLAGS}") foreach(inc ${ScoreP_CONFIG_INCLUDES}) string(SUBSTRING ${inc} 2 -1 inc) list(APPEND ScoreP_INCLUDE_DIRS ${inc}) endforeach() - if (ScoreP_DEBUG) + if(ScoreP_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " ScoreP_INCLUDE_DIRS = ${ScoreP_INCLUDE_DIRS}") + " ScoreP_INCLUDE_DIRS = ${ScoreP_INCLUDE_DIRS}") endif() - string(REGEX MATCHALL "(^| +)-[^I][^ ]*" ScoreP_CONFIG_CXXFLAGS "${ScoreP_CONFIG_FLAGS}") + string(REGEX MATCHALL "(^| +)-[^I][^ ]*" ScoreP_CONFIG_CXXFLAGS + "${ScoreP_CONFIG_FLAGS}") foreach(flag ${ScoreP_CONFIG_CXXFLAGS}) string(STRIP ${flag} flag) list(APPEND ScoreP_CXX_FLAGS ${flag}) endforeach() - if (ScoreP_DEBUG) + if(ScoreP_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " ScoreP_CXX_FLAGS = ${ScoreP_CXX_FLAGS}") + " ScoreP_CXX_FLAGS = ${ScoreP_CXX_FLAGS}") endif() unset(ScoreP_CONFIG_FLAGS) @@ -107,30 +107,30 @@ if(ScoreP_CONFIG) unset(ScoreP_CONFIG_CXXFLAGS) execute_process(COMMAND ${ScoreP_CONFIG} "--user" "--nocompiler" "--ldflags" - OUTPUT_VARIABLE _LINK_LD_ARGS) - string( REPLACE " " ";" _LINK_LD_ARGS ${_LINK_LD_ARGS} ) - foreach( _ARG ${_LINK_LD_ARGS} ) + OUTPUT_VARIABLE _LINK_LD_ARGS) + string(REPLACE " " ";" _LINK_LD_ARGS ${_LINK_LD_ARGS}) + foreach(_ARG ${_LINK_LD_ARGS}) if(${_ARG} MATCHES "^-L") - STRING(REGEX REPLACE "^-L" "" _ARG ${_ARG}) - SET(ScoreP_LINK_DIRS ${ScoreP_LINK_DIRS} ${_ARG}) + string(REGEX REPLACE "^-L" "" _ARG ${_ARG}) + set(ScoreP_LINK_DIRS ${ScoreP_LINK_DIRS} ${_ARG}) endif() endforeach() - if (ScoreP_DEBUG) + if(ScoreP_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] 
" - " ScoreP_LINK_DIRS = ${ScoreP_LINK_DIRS}") + " ScoreP_LINK_DIRS = ${ScoreP_LINK_DIRS}") endif() execute_process(COMMAND ${ScoreP_CONFIG} "--user" "--nocompiler" "--libs" - OUTPUT_VARIABLE _LINK_LD_ARGS) - string( REPLACE " " ";" _LINK_LD_ARGS ${_LINK_LD_ARGS} ) - foreach( _ARG ${_LINK_LD_ARGS} ) + OUTPUT_VARIABLE _LINK_LD_ARGS) + string(REPLACE " " ";" _LINK_LD_ARGS ${_LINK_LD_ARGS}) + foreach(_ARG ${_LINK_LD_ARGS}) if(${_ARG} MATCHES "^-l") string(REGEX REPLACE "^-l" "" _ARG ${_ARG}) - find_library(_SCOREP_LIB_FROM_ARG NAMES ${_ARG} - PATHS - ${ScoreP_LINK_DIRS} - ) + find_library( + _SCOREP_LIB_FROM_ARG + NAMES ${_ARG} + PATHS ${ScoreP_LINK_DIRS}) if(_SCOREP_LIB_FROM_ARG) set(ScoreP_LIBRARIES ${ScoreP_LIBRARIES} ${_SCOREP_LIB_FROM_ARG}) endif() @@ -138,26 +138,23 @@ if(ScoreP_CONFIG) endif() endforeach() - if (ScoreP_DEBUG) + if(ScoreP_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " ScoreP_LIBRARIES = ${ScoreP_LIBRARIES}") + " ScoreP_LIBRARIES = ${ScoreP_LIBRARIES}") endif() endif() -include (FindPackageHandleStandardArgs) -find_package_handle_standard_args(ScoreP DEFAULT_MSG - ScoreP_CONFIG - ScoreP_LIBRARIES - ScoreP_INCLUDE_DIRS - ) +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(ScoreP DEFAULT_MSG ScoreP_CONFIG + ScoreP_LIBRARIES ScoreP_INCLUDE_DIRS) -if (ScoreP_FOUND AND NOT TARGET ScoreP::ScoreP) +if(ScoreP_FOUND AND NOT TARGET ScoreP::ScoreP) add_library(ScoreP::ScoreP UNKNOWN IMPORTED) - set_target_properties(ScoreP::ScoreP PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES "${ScoreP_INCLUDE_DIRS}" - IMPORTED_LINK_INTERFACE_LIBRARIES "${ScoreP_LIBRARIES}" - INTERFACE_INCLUDE_DEFINITIONS "${ScoreP_CXX_FLAGS}" - IMPORTED_LINK_INTERFACE_LANGUAGES "C" - ) + set_target_properties( + ScoreP::ScoreP + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${ScoreP_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LIBRARIES "${ScoreP_LIBRARIES}" + INTERFACE_INCLUDE_DEFINITIONS "${ScoreP_CXX_FLAGS}" + 
IMPORTED_LINK_INTERFACE_LANGUAGES "C") endif() diff --git a/cmake/FindSphinx.cmake b/cmake/FindSphinx.cmake index fd377d0d00..2d48fe21fc 100644 --- a/cmake/FindSphinx.cmake +++ b/cmake/FindSphinx.cmake @@ -7,20 +7,20 @@ # # :: # -# Sphinx_FOUND - true if Sphinx was found -# Sphinx_EXECUTABLE - Path to the ``sphinx-build`` executable +# Sphinx_FOUND - true if Sphinx was found Sphinx_EXECUTABLE - Path to the +# ``sphinx-build`` executable # Taken from # https://devblogs.microsoft.com/cppblog/clear-functional-c-documentation-with-sphinx-breathe-doxygen-cmake/ -#Look for an executable called sphinx-build -find_program(SPHINX_EXECUTABLE - NAMES sphinx-build sphinx-build-3 - DOC "Path to sphinx-build executable") +# Look for an executable called sphinx-build +find_program( + SPHINX_EXECUTABLE + NAMES sphinx-build sphinx-build-3 + DOC "Path to sphinx-build executable") include(FindPackageHandleStandardArgs) -#Handle standard arguments to find_package like REQUIRED and QUIET -find_package_handle_standard_args(Sphinx - "Failed to find sphinx-build executable" - SPHINX_EXECUTABLE) +# Handle standard arguments to find_package like REQUIRED and QUIET +find_package_handle_standard_args( + Sphinx "Failed to find sphinx-build executable" SPHINX_EXECUTABLE) diff --git a/cmake/FindnetCDF.cmake b/cmake/FindnetCDF.cmake index 361095954e..42dae86b14 100644 --- a/cmake/FindnetCDF.cmake +++ b/cmake/FindnetCDF.cmake @@ -3,144 +3,138 @@ # # Find the netCDF IO library # -# This module uses the ``nc-config`` helper script as a hint for the -# location of the netCDF libraries. It should be in your PATH. +# This module uses the ``nc-config`` helper script as a hint for the location of +# the netCDF libraries. It should be in your PATH. 
# # This module will define the following variables: # # :: # -# netCDF_FOUND - true if netCDF was found -# netCDF_VERSION - netCDF version in format Major.Minor.Release -# netCDF_INCLUDE_DIRS - Location of the netCDF includes -# netCDF_LIBRARIES - Required libraries +# netCDF_FOUND - true if netCDF was found netCDF_VERSION - netCDF version in +# format Major.Minor.Release netCDF_INCLUDE_DIRS - Location of the netCDF +# includes netCDF_LIBRARIES - Required libraries # # This module will also export the ``netCDF::netcdf`` target. # # You can also set the following variables: # -# ``netCDF_ROOT`` -# Specify the path to the netCDF installation to use +# ``netCDF_ROOT`` Specify the path to the netCDF installation to use # -# ``netCDF_DEBUG`` -# Set to TRUE to get extra debugging output +# ``netCDF_DEBUG`` Set to TRUE to get extra debugging output include(BOUT++functions) include(CMakePrintHelpers) -if (NOT netCDF_ROOT AND EXISTS "${BOUT_USE_NETCDF}") +if(NOT netCDF_ROOT AND EXISTS "${BOUT_USE_NETCDF}") set(netCDF_ROOT "${BOUT_USE_NETCDF}") endif() enable_language(C) find_package(netCDF QUIET CONFIG) -if (netCDF_FOUND) +if(netCDF_FOUND) message(STATUS "netCDF CONFIG found") set(netCDF_FOUND TRUE) - if (NOT TARGET netCDF::netcdf) + if(NOT TARGET netCDF::netcdf) bout_add_library_alias(netCDF::netcdf netcdf) endif() - if (netCDF_DEBUG) - cmake_print_properties(TARGETS netcdf PROPERTIES LOCATION VERSION) - endif (netCDF_DEBUG) + if(netCDF_DEBUG) + cmake_print_properties(TARGETS netcdf PROPERTIES LOCATION VERSION) + endif(netCDF_DEBUG) return() endif() -find_program(NC_CONFIG "nc-config" +find_program( + NC_CONFIG "nc-config" PATHS "${netCDF_ROOT}" PATH_SUFFIXES bin DOC "Path to netCDF C config helper" - NO_DEFAULT_PATH - ) + NO_DEFAULT_PATH) -find_program(NC_CONFIG "nc-config" - DOC "Path to netCDF C config helper" - ) +find_program(NC_CONFIG "nc-config" DOC "Path to netCDF C config helper") get_filename_component(NC_CONFIG_TMP "${NC_CONFIG}" DIRECTORY) 
get_filename_component(NC_CONFIG_LOCATION "${NC_CONFIG_TMP}" DIRECTORY) -if (netCDF_DEBUG) +if(netCDF_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " NC_CONFIG_LOCATION = ${NC_CONFIG_LOCATION}" - " netCDF_ROOT = ${netCDF_ROOT}") + " NC_CONFIG_LOCATION = ${NC_CONFIG_LOCATION}" + " netCDF_ROOT = ${netCDF_ROOT}") endif() bout_inspect_netcdf_config(NC_HINTS_INCLUDE_DIR "${NC_CONFIG}" "--includedir") bout_inspect_netcdf_config(NC_HINTS_PREFIX "${NC_CONFIG}" "--prefix") -find_path(netCDF_C_INCLUDE_DIR +find_path( + netCDF_C_INCLUDE_DIR NAMES netcdf.h DOC "netCDF C include directories" - HINTS - "${NC_HINTS_INCLUDE_DIR}" - "${NC_HINTS_PREFIX}" - "${NC_CONFIG_LOCATION}" - PATH_SUFFIXES - "include" - ) -if (netCDF_DEBUG) - message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " netCDF_C_INCLUDE_DIR = ${netCDF_C_INCLUDE_DIR}" - " NC_HINTS_INCLUDE_DIR = ${NC_HINTS_INCLUDE_DIR}" - " NC_HINTS_PREFIX = ${NC_HINTS_PREFIX}" - ) + HINTS "${NC_HINTS_INCLUDE_DIR}" "${NC_HINTS_PREFIX}" "${NC_CONFIG_LOCATION}" + PATH_SUFFIXES "include") +if(netCDF_DEBUG) + message( + STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " + " netCDF_C_INCLUDE_DIR = ${netCDF_C_INCLUDE_DIR}" + " NC_HINTS_INCLUDE_DIR = ${NC_HINTS_INCLUDE_DIR}" + " NC_HINTS_PREFIX = ${NC_HINTS_PREFIX}") endif() mark_as_advanced(netCDF_C_INCLUDE_DIR) -find_library(netCDF_C_LIBRARY +find_library( + netCDF_C_LIBRARY NAMES netcdf DOC "netCDF C library" - HINTS - "${NC_HINTS_INCLUDE_DIR}" - "${NC_HINTS_PREFIX}" - "${NC_CONFIG_LOCATION}" - PATH_SUFFIXES - "lib" "lib64" - ) -if (netCDF_DEBUG) - message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " netCDF_C_LIBRARY = ${netCDF_C_LIBRARY}" - " NC_HINTS_INCLUDE_DIR = ${NC_HINTS_INCLUDE_DIR}" - " NC_HINTS_PREFIX = ${NC_HINTS_PREFIX}" - ) + HINTS "${NC_HINTS_INCLUDE_DIR}" "${NC_HINTS_PREFIX}" "${NC_CONFIG_LOCATION}" + PATH_SUFFIXES "lib" "lib64") +if(netCDF_DEBUG) + message( + STATUS "[ 
${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " + " netCDF_C_LIBRARY = ${netCDF_C_LIBRARY}" + " NC_HINTS_INCLUDE_DIR = ${NC_HINTS_INCLUDE_DIR}" + " NC_HINTS_PREFIX = ${NC_HINTS_PREFIX}") endif() mark_as_advanced(netCDF_C_LIBRARY) -if (netCDF_C_INCLUDE_DIR) +if(netCDF_C_INCLUDE_DIR) file(STRINGS "${netCDF_C_INCLUDE_DIR}/netcdf_meta.h" _netcdf_version_lines - REGEX "#define[ \t]+NC_VERSION_(MAJOR|MINOR|PATCH|NOTE)") - string(REGEX REPLACE ".*NC_VERSION_MAJOR *\([0-9]*\).*" "\\1" _netcdf_version_major "${_netcdf_version_lines}") - string(REGEX REPLACE ".*NC_VERSION_MINOR *\([0-9]*\).*" "\\1" _netcdf_version_minor "${_netcdf_version_lines}") - string(REGEX REPLACE ".*NC_VERSION_PATCH *\([0-9]*\).*" "\\1" _netcdf_version_patch "${_netcdf_version_lines}") - string(REGEX REPLACE ".*NC_VERSION_NOTE *\"\([^\"]*\)\".*" "\\1" _netcdf_version_note "${_netcdf_version_lines}") - if (NOT _netcdf_version_note STREQUAL "") + REGEX "#define[ \t]+NC_VERSION_(MAJOR|MINOR|PATCH|NOTE)") + string(REGEX REPLACE ".*NC_VERSION_MAJOR *\([0-9]*\).*" "\\1" + _netcdf_version_major "${_netcdf_version_lines}") + string(REGEX REPLACE ".*NC_VERSION_MINOR *\([0-9]*\).*" "\\1" + _netcdf_version_minor "${_netcdf_version_lines}") + string(REGEX REPLACE ".*NC_VERSION_PATCH *\([0-9]*\).*" "\\1" + _netcdf_version_patch "${_netcdf_version_lines}") + string(REGEX REPLACE ".*NC_VERSION_NOTE *\"\([^\"]*\)\".*" "\\1" + _netcdf_version_note "${_netcdf_version_lines}") + if(NOT _netcdf_version_note STREQUAL "") # Make development version compare higher than any patch level set(_netcdf_version_note ".99") endif() - set(netCDF_VERSION "${_netcdf_version_major}.${_netcdf_version_minor}.${_netcdf_version_patch}${_netcdf_version_note}") + set(netCDF_VERSION + "${_netcdf_version_major}.${_netcdf_version_minor}.${_netcdf_version_patch}${_netcdf_version_note}" + ) unset(_netcdf_version_major) unset(_netcdf_version_minor) unset(_netcdf_version_patch) unset(_netcdf_version_note) unset(_netcdf_version_lines) 
-endif () +endif() include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(netCDF +find_package_handle_standard_args( + netCDF REQUIRED_VARS netCDF_C_LIBRARY netCDF_C_INCLUDE_DIR VERSION_VAR netCDF_VERSION) -if (netCDF_FOUND) +if(netCDF_FOUND) set(netCDF_INCLUDE_DIR "${netCDF_C_INCLUDE_DIR}") set(netCDF_INCLUDE_DIRS "${netCDF_C_INCLUDE_DIR}") set(netCDF_LIBRARIES "${netCDF_C_LIBRARY}") - if (NOT TARGET netCDF::netcdf) + if(NOT TARGET netCDF::netcdf) add_library(netCDF::netcdf UNKNOWN IMPORTED) - set_target_properties(netCDF::netcdf PROPERTIES - IMPORTED_LOCATION "${netCDF_C_LIBRARY}" - INTERFACE_INCLUDE_DIRECTORIES "${netCDF_C_INCLUDE_DIR}" - ) - endif () -endif () + set_target_properties( + netCDF::netcdf + PROPERTIES IMPORTED_LOCATION "${netCDF_C_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES "${netCDF_C_INCLUDE_DIR}") + endif() +endif() diff --git a/cmake/FindnetCDFCxx.cmake b/cmake/FindnetCDFCxx.cmake index d4155dc760..6f646d2e45 100644 --- a/cmake/FindnetCDFCxx.cmake +++ b/cmake/FindnetCDFCxx.cmake @@ -3,40 +3,37 @@ # # Find the netCDF C++ API # -# This module uses the ``ncxx4-config`` helper script as a hint for -# the location of the NetCDF C++ library. It should be in your PATH. +# This module uses the ``ncxx4-config`` helper script as a hint for the location +# of the NetCDF C++ library. It should be in your PATH. # # This module will define the following variables: # # :: # -# netCDFCxx_FOUND - true if netCDFCxx was found -# netCDFCxx_VERSION - netCDFCxx version in format Major.Minor.Release -# netCDFCxx_INCLUDE_DIRS - Location of the netCDFCxx includes -# netCDFCxx_LIBRARIES - Required libraries +# netCDFCxx_FOUND - true if netCDFCxx was found netCDFCxx_VERSION - netCDFCxx +# version in format Major.Minor.Release netCDFCxx_INCLUDE_DIRS - Location of the +# netCDFCxx includes netCDFCxx_LIBRARIES - Required libraries # # This module will also export the ``netCDF::netcdf-cxx4`` target. 
# # You can also set the following variables: # -# ``netCDFCxx_ROOT`` -# Specify the path to the netCDF C++ installation to use +# ``netCDFCxx_ROOT`` Specify the path to the netCDF C++ installation to use # -# ``netCDFCxx_DEBUG`` -# Set to TRUE to get extra debugging output +# ``netCDFCxx_DEBUG`` Set to TRUE to get extra debugging output include(BOUT++functions) -if (NOT netCDFCxx_ROOT AND EXISTS "${BOUT_USE_NETCDF}") +if(NOT netCDFCxx_ROOT AND EXISTS "${BOUT_USE_NETCDF}") set(netCDFCxx_ROOT "${BOUT_USE_NETCDF}") endif() -if (NOT EXISTS ${NCXX4_CONFIG}) +if(NOT EXISTS ${NCXX4_CONFIG}) # Only search if NCXX4_CONFIG was not set explicitly find_package(netCDFCxx QUIET CONFIG) - if (netCDFCxx_FOUND) + if(netCDFCxx_FOUND) set(netCDFCxx_FOUND TRUE) - if (NOT TARGET netCDF::netcdf-cxx4) + if(NOT TARGET netCDF::netcdf-cxx4) bout_add_library_alias(netCDF::netcdf-cxx4 netcdf-cxx4) endif() return() @@ -45,73 +42,66 @@ endif() find_package(netCDF REQUIRED) -find_program(NCXX4_CONFIG "ncxx4-config" +find_program( + NCXX4_CONFIG "ncxx4-config" PATHS "${netCDFCxx_ROOT}" PATH_SUFFIXES bin DOC "Path to netCDF C++ config helper" - NO_DEFAULT_PATH - ) + NO_DEFAULT_PATH) -find_program(NCXX4_CONFIG "ncxx4-config" - DOC "Path to netCDF C++ config helper" - ) +find_program(NCXX4_CONFIG "ncxx4-config" DOC "Path to netCDF C++ config helper") get_filename_component(NCXX4_CONFIG_TMP "${NCXX4_CONFIG}" DIRECTORY) get_filename_component(NCXX4_CONFIG_LOCATION "${NCXX4_CONFIG_TMP}" DIRECTORY) -if (netCDFCxx_DEBUG) +if(netCDFCxx_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " NCXX4_CONFIG_LOCATION = ${NCXX4_CONFIG_LOCATION}") + " NCXX4_CONFIG_LOCATION = ${NCXX4_CONFIG_LOCATION}") endif() -bout_inspect_netcdf_config(NCXX4_HINTS_INCLUDE_DIR "${NCXX4_CONFIG}" "--includedir") +bout_inspect_netcdf_config(NCXX4_HINTS_INCLUDE_DIR "${NCXX4_CONFIG}" + "--includedir") bout_inspect_netcdf_config(NCXX4_HINTS_PREFIX "${NCXX4_CONFIG}" "--prefix") 
-find_path(netCDF_CXX_INCLUDE_DIR +find_path( + netCDF_CXX_INCLUDE_DIR NAMES netcdf DOC "netCDF C++ include directories" - HINTS - "${netCDF_C_INCLUDE_DIR}" - "${NCXX4_HINTS_INCLUDE_DIR}" - "${NCXX4_HINTS_PREFIX}" - "${NCXX4_CONFIG_LOCATION}" - PATH_SUFFIXES - "include" - ) -if (netCDFCxx_DEBUG) - message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " netCDF_CXX_INCLUDE_DIR = ${netCDF_CXX_INCLUDE_DIR}" - " NCXX4_HINTS_INCLUDE_DIR = ${NCXX4_HINTS_INCLUDE_DIR}" - " NCXX4_HINTS_PREFIX = ${NCXX4_HINTS_PREFIX}" - ) + HINTS "${netCDF_C_INCLUDE_DIR}" "${NCXX4_HINTS_INCLUDE_DIR}" + "${NCXX4_HINTS_PREFIX}" "${NCXX4_CONFIG_LOCATION}" + PATH_SUFFIXES "include") +if(netCDFCxx_DEBUG) + message( + STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " + " netCDF_CXX_INCLUDE_DIR = ${netCDF_CXX_INCLUDE_DIR}" + " NCXX4_HINTS_INCLUDE_DIR = ${NCXX4_HINTS_INCLUDE_DIR}" + " NCXX4_HINTS_PREFIX = ${NCXX4_HINTS_PREFIX}") endif() mark_as_advanced(netCDF_CXX_INCLUDE_DIR) -find_library(netCDF_CXX_LIBRARY +find_library( + netCDF_CXX_LIBRARY NAMES netcdf_c++4 netcdf-cxx4 DOC "netCDF C++ library" - HINTS - "${NCXX4_HINTS_INCLUDE_DIR}" - "${NCXX4_HINTS_PREFIX}" - "${NCXX4_CONFIG_LOCATION}" - PATH_SUFFIXES - "lib" "lib64" - ) -if (netCDFCxx_DEBUG) - message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " - " netCDF_CXX_LIBRARY = ${netCDF_CXX_LIBRARY}" - " NCXX4_HINTS_INCLUDE_DIR = ${NCXX4_HINTS_INCLUDE_DIR}" - " NCXX4_HINTS_PREFIX = ${NCXX4_HINTS_PREFIX}" - ) + HINTS "${NCXX4_HINTS_INCLUDE_DIR}" "${NCXX4_HINTS_PREFIX}" + "${NCXX4_CONFIG_LOCATION}" + PATH_SUFFIXES "lib" "lib64") +if(netCDFCxx_DEBUG) + message( + STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " + " netCDF_CXX_LIBRARY = ${netCDF_CXX_LIBRARY}" + " NCXX4_HINTS_INCLUDE_DIR = ${NCXX4_HINTS_INCLUDE_DIR}" + " NCXX4_HINTS_PREFIX = ${NCXX4_HINTS_PREFIX}") endif() mark_as_advanced(netCDF_CXX_LIBRARY) bout_inspect_netcdf_config(_ncxx4_version "${NCXX4_CONFIG}" 
"--version") -if (_ncxx4_version) +if(_ncxx4_version) # Change to lower case before matching, to avoid case problems string(TOLOWER "${_ncxx4_version}" _ncxx4_version_lower) - string(REGEX REPLACE "netcdf-cxx4 \([0-9]+\\.[0-9]+\\.[0-9]+\).*" "\\1" netCDFCxx_VERSION "${_ncxx4_version_lower}") + string(REGEX REPLACE "netcdf-cxx4 \([0-9]+\\.[0-9]+\\.[0-9]+\).*" "\\1" + netCDFCxx_VERSION "${_ncxx4_version_lower}") message(STATUS "Found netCDFCxx version ${netCDFCxx_VERSION}") -else () +else() message(WARNING "Couldn't get NetCDF version") endif() @@ -121,19 +111,21 @@ unset(_netcdf_version_minor) unset(_netcdf_version_patch) include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(netCDFCxx +find_package_handle_standard_args( + netCDFCxx REQUIRED_VARS netCDF_CXX_LIBRARY netCDF_CXX_INCLUDE_DIR VERSION_VAR netCDFCxx_VERSION) -if (netCDFCxx_FOUND) +if(netCDFCxx_FOUND) set(netCDFCxx_INCLUDE_DIRS "${netCDF_CXX_INCLUDE_DIR}") set(netCDFCxx_LIBRARIES "${netCDF_CXX_LIBRARY}") - if (NOT TARGET netCDF::netcdf-cxx4) + if(NOT TARGET netCDF::netcdf-cxx4) add_library(netCDF::netcdf-cxx4 UNKNOWN IMPORTED) - set_target_properties(netCDF::netcdf-cxx4 PROPERTIES - IMPORTED_LINK_INTERFACE_LIBRARIES netCDF::netcdf - IMPORTED_LOCATION "${netCDF_CXX_LIBRARY}" - INTERFACE_INCLUDE_DIRECTORIES "${netCDF_CXX_INCLUDE_DIR}") - endif () -endif () + set_target_properties( + netCDF::netcdf-cxx4 + PROPERTIES IMPORTED_LINK_INTERFACE_LIBRARIES netCDF::netcdf + IMPORTED_LOCATION "${netCDF_CXX_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES "${netCDF_CXX_INCLUDE_DIR}") + endif() +endif() diff --git a/cmake/GenerateDateTimeFile.cmake b/cmake/GenerateDateTimeFile.cmake index ef48fc4638..edd72b76d7 100644 --- a/cmake/GenerateDateTimeFile.cmake +++ b/cmake/GenerateDateTimeFile.cmake @@ -1,7 +1,9 @@ -# Creates "bout++-time.cxx" in the build directory with the -# compilation date and time as variables +# Creates "bout++-time.cxx" in the build directory with the compilation date and +# time as 
variables set(bout_date_time_file - "const char* boutcompiledate{__DATE__}; const char* boutcompiletime{__TIME__};") + "const char* boutcompiledate{__DATE__}; const char* boutcompiletime{__TIME__};" +) -file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/bout++-time.cxx" "${bout_date_time_file}") +file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/bout++-time.cxx" + "${bout_date_time_file}") diff --git a/cmake/GetGitRevisionDescription.cmake b/cmake/GetGitRevisionDescription.cmake index 8ab03bc5f0..b843fdd7f2 100644 --- a/cmake/GetGitRevisionDescription.cmake +++ b/cmake/GetGitRevisionDescription.cmake @@ -1,168 +1,176 @@ -# - Returns a version string from Git +# * Returns a version string from Git # -# These functions force a re-configure on each git commit so that you can -# trust the values of the variables in your build system. +# These functions force a re-configure on each git commit so that you can trust +# the values of the variables in your build system. # -# get_git_head_revision( [ ...]) +# get_git_head_revision( [ ...]) # # Returns the refspec and sha hash of the current head revision # -# git_describe( [ ...]) +# git_describe( [ ...]) # -# Returns the results of git describe on the source tree, and adjusting -# the output so that it tests false if an error occurs. +# Returns the results of git describe on the source tree, and adjusting the +# output so that it tests false if an error occurs. # -# git_get_exact_tag( [ ...]) +# git_get_exact_tag( [ ...]) # -# Returns the results of git describe --exact-match on the source tree, -# and adjusting the output so that it tests false if there was no exact -# matching tag. +# Returns the results of git describe --exact-match on the source tree, and +# adjusting the output so that it tests false if there was no exact matching +# tag. # -# git_local_changes() +# git_local_changes() # -# Returns either "CLEAN" or "DIRTY" with respect to uncommitted changes. -# Uses the return code of "git diff-index --quiet HEAD --". 
-# Does not regard untracked files. +# Returns either "CLEAN" or "DIRTY" with respect to uncommitted changes. Uses +# the return code of "git diff-index --quiet HEAD --". Does not regard untracked +# files. # # Requires CMake 2.6 or newer (uses the 'function' command) # -# Original Author: -# 2009-2010 Ryan Pavlik -# http://academic.cleardefinition.com -# Iowa State University HCI Graduate Program/VRAC +# Original Author: 2009-2010 Ryan Pavlik +# http://academic.cleardefinition.com Iowa State University +# HCI Graduate Program/VRAC # -# Copyright Iowa State University 2009-2010. -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or copy at -# http://www.boost.org/LICENSE_1_0.txt) +# Copyright Iowa State University 2009-2010. Distributed under the Boost +# Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy +# at http://www.boost.org/LICENSE_1_0.txt) if(__get_git_revision_description) - return() + return() endif() set(__get_git_revision_description YES) -# We must run the following at "include" time, not at function call time, -# to find the path to this module rather than the path to a calling list file +# We must run the following at "include" time, not at function call time, to +# find the path to this module rather than the path to a calling list file get_filename_component(_gitdescmoddir ${CMAKE_CURRENT_LIST_FILE} PATH) function(get_git_head_revision _refspecvar _hashvar) - set(GIT_PARENT_DIR "${CMAKE_CURRENT_SOURCE_DIR}") - set(GIT_DIR "${GIT_PARENT_DIR}/.git") - while(NOT EXISTS "${GIT_DIR}") # .git dir not found, search parent directories - set(GIT_PREVIOUS_PARENT "${GIT_PARENT_DIR}") - get_filename_component(GIT_PARENT_DIR ${GIT_PARENT_DIR} PATH) - if(GIT_PARENT_DIR STREQUAL GIT_PREVIOUS_PARENT) - # We have reached the root directory, we are not in git - set(${_refspecvar} "GITDIR-NOTFOUND" PARENT_SCOPE) - set(${_hashvar} "GITDIR-NOTFOUND" PARENT_SCOPE) - return() - endif() - 
set(GIT_DIR "${GIT_PARENT_DIR}/.git") - endwhile() - # check if this is a submodule - if(NOT IS_DIRECTORY ${GIT_DIR}) - file(READ ${GIT_DIR} submodule) - string(REGEX REPLACE "gitdir: (.*)\n$" "\\1" GIT_DIR_RELATIVE ${submodule}) - get_filename_component(SUBMODULE_DIR ${GIT_DIR} PATH) - get_filename_component(GIT_DIR ${SUBMODULE_DIR}/${GIT_DIR_RELATIVE} ABSOLUTE) - endif() - set(GIT_DATA "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/git-data") - if(NOT EXISTS "${GIT_DATA}") - file(MAKE_DIRECTORY "${GIT_DATA}") - endif() + set(GIT_PARENT_DIR "${CMAKE_CURRENT_SOURCE_DIR}") + set(GIT_DIR "${GIT_PARENT_DIR}/.git") + while(NOT EXISTS "${GIT_DIR}") # .git dir not found, search parent directories + set(GIT_PREVIOUS_PARENT "${GIT_PARENT_DIR}") + get_filename_component(GIT_PARENT_DIR ${GIT_PARENT_DIR} PATH) + if(GIT_PARENT_DIR STREQUAL GIT_PREVIOUS_PARENT) + # We have reached the root directory, we are not in git + set(${_refspecvar} + "GITDIR-NOTFOUND" + PARENT_SCOPE) + set(${_hashvar} + "GITDIR-NOTFOUND" + PARENT_SCOPE) + return() + endif() + set(GIT_DIR "${GIT_PARENT_DIR}/.git") + endwhile() + # check if this is a submodule + if(NOT IS_DIRECTORY ${GIT_DIR}) + file(READ ${GIT_DIR} submodule) + string(REGEX REPLACE "gitdir: (.*)\n$" "\\1" GIT_DIR_RELATIVE ${submodule}) + get_filename_component(SUBMODULE_DIR ${GIT_DIR} PATH) + get_filename_component(GIT_DIR ${SUBMODULE_DIR}/${GIT_DIR_RELATIVE} + ABSOLUTE) + endif() + set(GIT_DATA "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/git-data") + if(NOT EXISTS "${GIT_DATA}") + file(MAKE_DIRECTORY "${GIT_DATA}") + endif() - if(NOT EXISTS "${GIT_DIR}/HEAD") - return() - endif() - set(HEAD_FILE "${GIT_DATA}/HEAD") - configure_file("${GIT_DIR}/HEAD" "${HEAD_FILE}" COPYONLY) + if(NOT EXISTS "${GIT_DIR}/HEAD") + return() + endif() + set(HEAD_FILE "${GIT_DATA}/HEAD") + configure_file("${GIT_DIR}/HEAD" "${HEAD_FILE}" COPYONLY) - configure_file("${_gitdescmoddir}/GetGitRevisionDescription.cmake.in" - "${GIT_DATA}/grabRef.cmake" - @ONLY) - 
include("${GIT_DATA}/grabRef.cmake") + configure_file("${_gitdescmoddir}/GetGitRevisionDescription.cmake.in" + "${GIT_DATA}/grabRef.cmake" @ONLY) + include("${GIT_DATA}/grabRef.cmake") - set(${_refspecvar} "${HEAD_REF}" PARENT_SCOPE) - set(${_hashvar} "${HEAD_HASH}" PARENT_SCOPE) + set(${_refspecvar} + "${HEAD_REF}" + PARENT_SCOPE) + set(${_hashvar} + "${HEAD_HASH}" + PARENT_SCOPE) endfunction() function(git_describe _var) - if(NOT GIT_FOUND) - find_package(Git QUIET) - endif() - get_git_head_revision(refspec hash) - if(NOT GIT_FOUND) - set(${_var} "GIT-NOTFOUND" PARENT_SCOPE) - return() - endif() - if(NOT hash) - set(${_var} "HEAD-HASH-NOTFOUND" PARENT_SCOPE) - return() - endif() + if(NOT GIT_FOUND) + find_package(Git QUIET) + endif() + get_git_head_revision(refspec hash) + if(NOT GIT_FOUND) + set(${_var} + "GIT-NOTFOUND" + PARENT_SCOPE) + return() + endif() + if(NOT hash) + set(${_var} + "HEAD-HASH-NOTFOUND" + PARENT_SCOPE) + return() + endif() - # TODO sanitize - #if((${ARGN}" MATCHES "&&") OR - # (ARGN MATCHES "||") OR - # (ARGN MATCHES "\\;")) - # message("Please report the following error to the project!") - # message(FATAL_ERROR "Looks like someone's doing something nefarious with git_describe! Passed arguments ${ARGN}") - #endif() + # TODO sanitize if((${ARGN}" MATCHES "&&") OR (ARGN MATCHES "||") OR (ARGN + # MATCHES "\\;")) message("Please report the following error to the project!") + # message(FATAL_ERROR "Looks like someone's doing something nefarious with + # git_describe! 
Passed arguments ${ARGN}") endif() - #message(STATUS "Arguments to execute_process: ${ARGN}") + # message(STATUS "Arguments to execute_process: ${ARGN}") - execute_process(COMMAND - "${GIT_EXECUTABLE}" - describe - ${hash} - ${ARGN} - WORKING_DIRECTORY - "${CMAKE_CURRENT_SOURCE_DIR}" - RESULT_VARIABLE - res - OUTPUT_VARIABLE - out - ERROR_QUIET - OUTPUT_STRIP_TRAILING_WHITESPACE) - if(NOT res EQUAL 0) - set(out "${out}-${res}-NOTFOUND") - endif() + execute_process( + COMMAND "${GIT_EXECUTABLE}" describe ${hash} ${ARGN} + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" + RESULT_VARIABLE res + OUTPUT_VARIABLE out + ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE) + if(NOT res EQUAL 0) + set(out "${out}-${res}-NOTFOUND") + endif() - set(${_var} "${out}" PARENT_SCOPE) + set(${_var} + "${out}" + PARENT_SCOPE) endfunction() function(git_get_exact_tag _var) - git_describe(out --exact-match ${ARGN}) - set(${_var} "${out}" PARENT_SCOPE) + git_describe(out --exact-match ${ARGN}) + set(${_var} + "${out}" + PARENT_SCOPE) endfunction() function(git_local_changes _var) - if(NOT GIT_FOUND) - find_package(Git QUIET) - endif() - get_git_head_revision(refspec hash) - if(NOT GIT_FOUND) - set(${_var} "GIT-NOTFOUND" PARENT_SCOPE) - return() - endif() - if(NOT hash) - set(${_var} "HEAD-HASH-NOTFOUND" PARENT_SCOPE) - return() - endif() + if(NOT GIT_FOUND) + find_package(Git QUIET) + endif() + get_git_head_revision(refspec hash) + if(NOT GIT_FOUND) + set(${_var} + "GIT-NOTFOUND" + PARENT_SCOPE) + return() + endif() + if(NOT hash) + set(${_var} + "HEAD-HASH-NOTFOUND" + PARENT_SCOPE) + return() + endif() - execute_process(COMMAND - "${GIT_EXECUTABLE}" - diff-index --quiet HEAD -- - WORKING_DIRECTORY - "${CMAKE_CURRENT_SOURCE_DIR}" - RESULT_VARIABLE - res - OUTPUT_VARIABLE - out - ERROR_QUIET - OUTPUT_STRIP_TRAILING_WHITESPACE) - if(res EQUAL 0) - set(${_var} "CLEAN" PARENT_SCOPE) - else() - set(${_var} "DIRTY" PARENT_SCOPE) - endif() + execute_process( + COMMAND "${GIT_EXECUTABLE}" diff-index 
--quiet HEAD -- + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" + RESULT_VARIABLE res + OUTPUT_VARIABLE out + ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE) + if(res EQUAL 0) + set(${_var} + "CLEAN" + PARENT_SCOPE) + else() + set(${_var} + "DIRTY" + PARENT_SCOPE) + endif() endfunction() diff --git a/cmake/ResolveCompilerPaths.cmake b/cmake/ResolveCompilerPaths.cmake index 54787fa38f..102fb751e4 100644 --- a/cmake/ResolveCompilerPaths.cmake +++ b/cmake/ResolveCompilerPaths.cmake @@ -1,105 +1,110 @@ # ResolveCompilerPaths - this module defines two macros # -# RESOLVE_LIBRARIES (XXX_LIBRARIES LINK_LINE) -# This macro is intended to be used by FindXXX.cmake modules. -# It parses a compiler link line and resolves all libraries -# (-lfoo) using the library path contexts (-L/path) in scope. -# The result in XXX_LIBRARIES is the list of fully resolved libs. -# Example: +# RESOLVE_LIBRARIES (XXX_LIBRARIES LINK_LINE) This macro is intended to be used +# by FindXXX.cmake modules. It parses a compiler link line and resolves all +# libraries (-lfoo) using the library path contexts (-L/path) in scope. The +# result in XXX_LIBRARIES is the list of fully resolved libs. Example: # -# RESOLVE_LIBRARIES (FOO_LIBRARIES "-L/A -la -L/B -lb -lc -ld") +# RESOLVE_LIBRARIES (FOO_LIBRARIES "-L/A -la -L/B -lb -lc -ld") # -# will be resolved to +# will be resolved to # -# FOO_LIBRARIES:STRING="/A/liba.so;/B/libb.so;/A/libc.so;/usr/lib/libd.so" +# FOO_LIBRARIES:STRING="/A/liba.so;/B/libb.so;/A/libc.so;/usr/lib/libd.so" # -# if the filesystem looks like +# if the filesystem looks like # -# /A: liba.so libc.so -# /B: liba.so libb.so -# /usr/lib: liba.so libb.so libc.so libd.so +# /A: liba.so libc.so /B: liba.so libb.so /usr/lib: liba.so +# libb.so libc.so libd.so # -# and /usr/lib is a system directory. +# and /usr/lib is a system directory. # -# Note: If RESOLVE_LIBRARIES() resolves a link line differently from -# the native linker, there is a bug in this macro (please report it). 
+# Note: If RESOLVE_LIBRARIES() resolves a link line differently from the native +# linker, there is a bug in this macro (please report it). # -# RESOLVE_INCLUDES (XXX_INCLUDES INCLUDE_LINE) -# This macro is intended to be used by FindXXX.cmake modules. -# It parses a compile line and resolves all includes -# (-I/path/to/include) to a list of directories. Other flags are ignored. -# Example: +# RESOLVE_INCLUDES (XXX_INCLUDES INCLUDE_LINE) This macro is intended to be used +# by FindXXX.cmake modules. It parses a compile line and resolves all includes +# (-I/path/to/include) to a list of directories. Other flags are ignored. +# Example: # -# RESOLVE_INCLUDES (FOO_INCLUDES "-I/A -DBAR='\"irrelevant -I/string here\"' -I/B") +# RESOLVE_INCLUDES (FOO_INCLUDES "-I/A -DBAR='\"irrelevant -I/string here\"' +# -I/B") # -# will be resolved to +# will be resolved to # -# FOO_INCLUDES:STRING="/A;/B" +# FOO_INCLUDES:STRING="/A;/B" # -# assuming both directories exist. -# Note: as currently implemented, the -I/string will be picked up mistakenly (cry, cry) -include (CorrectWindowsPaths) +# assuming both directories exist. 
Note: as currently implemented, the -I/string +# will be picked up mistakenly (cry, cry) +include(CorrectWindowsPaths) -macro (RESOLVE_LIBRARIES LIBS LINK_LINE) - string (REGEX MATCHALL "((-L|-l|-Wl)([^\" ]+|\"[^\"]+\")|[^\" ]+\\.(a|so|dll|lib))" _all_tokens "${LINK_LINE}") - set (_libs_found "") - set (_directory_list "") - foreach (token ${_all_tokens}) - if (token MATCHES "-L([^\" ]+|\"[^\"]+\")") +macro(RESOLVE_LIBRARIES LIBS LINK_LINE) + string(REGEX MATCHALL + "((-L|-l|-Wl)([^\" ]+|\"[^\"]+\")|[^\" ]+\\.(a|so|dll|lib))" + _all_tokens "${LINK_LINE}") + set(_libs_found "") + set(_directory_list "") + foreach(token ${_all_tokens}) + if(token MATCHES "-L([^\" ]+|\"[^\"]+\")") # If it's a library path, add it to the list - string (REGEX REPLACE "^-L" "" token ${token}) - string (REGEX REPLACE "//" "/" token ${token}) + string(REGEX REPLACE "^-L" "" token ${token}) + string(REGEX REPLACE "//" "/" token ${token}) convert_cygwin_path(token) - list (APPEND _directory_list ${token}) - elseif (token MATCHES "^(-l([^\" ]+|\"[^\"]+\")|[^\" ]+\\.(a|so|dll|lib))") - # It's a library, resolve the path by looking in the list and then (by default) in system directories - if (WIN32) #windows expects "libfoo", linux expects "foo" - string (REGEX REPLACE "^-l" "lib" token ${token}) - else (WIN32) - string (REGEX REPLACE "^-l" "" token ${token}) - endif (WIN32) - set (_root "") - if (token MATCHES "^/") # We have an absolute path - #separate into a path and a library name: - string (REGEX MATCH "[^/]*\\.(a|so|dll|lib)$" libname ${token}) - string (REGEX MATCH ".*[^${libname}$]" libpath ${token}) + list(APPEND _directory_list ${token}) + elseif(token MATCHES "^(-l([^\" ]+|\"[^\"]+\")|[^\" ]+\\.(a|so|dll|lib))") + # It's a library, resolve the path by looking in the list and then (by + # default) in system directories + if(WIN32) # windows expects "libfoo", linux expects "foo" + string(REGEX REPLACE "^-l" "lib" token ${token}) + else(WIN32) + string(REGEX REPLACE "^-l" "" token 
${token}) + endif(WIN32) + set(_root "") + if(token MATCHES "^/") # We have an absolute path + # separate into a path and a library name: + string(REGEX MATCH "[^/]*\\.(a|so|dll|lib)$" libname ${token}) + string(REGEX MATCH ".*[^${libname}$]" libpath ${token}) convert_cygwin_path(libpath) - set (_directory_list ${_directory_list} ${libpath}) - set (token ${libname}) - endif (token MATCHES "^/") - set (_lib "NOTFOUND" CACHE FILEPATH "Cleared" FORCE) - find_library (_lib ${token} HINTS ${_directory_list} ${_root}) - if (_lib) - string (REPLACE "//" "/" _lib ${_lib}) - list (APPEND _libs_found ${_lib}) - else (_lib) - message (STATUS "Unable to find library ${token}") - endif (_lib) - endif (token MATCHES "-L([^\" ]+|\"[^\"]+\")") - endforeach (token) - set (_lib "NOTFOUND" CACHE INTERNAL "Scratch variable" FORCE) - # only the LAST occurence of each library is required since there should be no circular dependencies - if (_libs_found) - list (REVERSE _libs_found) - list (REMOVE_DUPLICATES _libs_found) - list (REVERSE _libs_found) - endif (_libs_found) - set (${LIBS} "${_libs_found}") -endmacro (RESOLVE_LIBRARIES) + set(_directory_list ${_directory_list} ${libpath}) + set(token ${libname}) + endif(token MATCHES "^/") + set(_lib + "NOTFOUND" + CACHE FILEPATH "Cleared" FORCE) + find_library(_lib ${token} HINTS ${_directory_list} ${_root}) + if(_lib) + string(REPLACE "//" "/" _lib ${_lib}) + list(APPEND _libs_found ${_lib}) + else(_lib) + message(STATUS "Unable to find library ${token}") + endif(_lib) + endif(token MATCHES "-L([^\" ]+|\"[^\"]+\")") + endforeach(token) + set(_lib + "NOTFOUND" + CACHE INTERNAL "Scratch variable" FORCE) + # only the LAST occurence of each library is required since there should be no + # circular dependencies + if(_libs_found) + list(REVERSE _libs_found) + list(REMOVE_DUPLICATES _libs_found) + list(REVERSE _libs_found) + endif(_libs_found) + set(${LIBS} "${_libs_found}") +endmacro(RESOLVE_LIBRARIES) -macro (RESOLVE_INCLUDES INCS COMPILE_LINE) 
- string (REGEX MATCHALL "-I([^\" ]+|\"[^\"]+\")" _all_tokens "${COMPILE_LINE}") - set (_incs_found "") - foreach (token ${_all_tokens}) - string (REGEX REPLACE "^-I" "" token ${token}) - string (REGEX REPLACE "//" "/" token ${token}) +macro(RESOLVE_INCLUDES INCS COMPILE_LINE) + string(REGEX MATCHALL "-I([^\" ]+|\"[^\"]+\")" _all_tokens "${COMPILE_LINE}") + set(_incs_found "") + foreach(token ${_all_tokens}) + string(REGEX REPLACE "^-I" "" token ${token}) + string(REGEX REPLACE "//" "/" token ${token}) convert_cygwin_path(token) - if (EXISTS ${token}) - list (APPEND _incs_found ${token}) - else (EXISTS ${token}) - message (STATUS "Include directory ${token} does not exist") - endif (EXISTS ${token}) - endforeach (token) - list (REMOVE_DUPLICATES _incs_found) - set (${INCS} "${_incs_found}") -endmacro (RESOLVE_INCLUDES) + if(EXISTS ${token}) + list(APPEND _incs_found ${token}) + else(EXISTS ${token}) + message(STATUS "Include directory ${token} does not exist") + endif(EXISTS ${token}) + endforeach(token) + list(REMOVE_DUPLICATES _incs_found) + set(${INCS} "${_incs_found}") +endmacro(RESOLVE_INCLUDES) diff --git a/cmake/Sanitizers.cmake b/cmake/Sanitizers.cmake index 715a08ab88..af68d2dd4c 100644 --- a/cmake/Sanitizers.cmake +++ b/cmake/Sanitizers.cmake @@ -4,7 +4,8 @@ function(enable_sanitizers target_name) - if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES ".*Clang") + if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES + ".*Clang") option(ENABLE_COVERAGE "Enable coverage reporting for gcc/clang" FALSE) message(STATUS "Enable coverage: ${ENABLE_COVERAGE}") @@ -17,33 +18,37 @@ function(enable_sanitizers target_name) find_program(genhtml_FOUND genhtml) message(STATUS "Looking for genhtml: ${genhtml_FOUND}") - if (lcov_FOUND AND genhtml_FOUND) - set(COVERAGE_NAME coverage CACHE STRING "Name of coverage output file") + if(lcov_FOUND AND genhtml_FOUND) + set(COVERAGE_NAME + coverage + CACHE STRING "Name of coverage output 
file") set(COVERAGE_FILE "${COVERAGE_NAME}.info") - set(COVERAGE_MSG "Open file://${PROJECT_SOURCE_DIR}/${COVERAGE_NAME}/index.html in your browser to view coverage HTML output") + set(COVERAGE_MSG + "Open file://${PROJECT_SOURCE_DIR}/${COVERAGE_NAME}/index.html in your browser to view coverage HTML output" + ) - add_custom_target(code-coverage-capture + add_custom_target( + code-coverage-capture COMMAND - lcov -c --directory "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/bout++.dir/src" - --output-file "${COVERAGE_FILE}" - COMMAND - genhtml --output-directory "${COVERAGE_NAME}" --demangle-cpp --legend --show-details "${COVERAGE_FILE}" - COMMAND - "${CMAKE_COMMAND}" -E echo ${COVERAGE_MSG} + lcov -c --directory + "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/bout++.dir/src" + --output-file "${COVERAGE_FILE}" + COMMAND genhtml --output-directory "${COVERAGE_NAME}" --demangle-cpp + --legend --show-details "${COVERAGE_FILE}" + COMMAND "${CMAKE_COMMAND}" -E echo ${COVERAGE_MSG} WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" COMMENT "Capturing coverage information" - BYPRODUCTS - "${COVERAGE_FILE}" - "${COVERAGE_NAME}/index.html" - ) + BYPRODUCTS "${COVERAGE_FILE}" "${COVERAGE_NAME}/index.html") - add_custom_target(code-coverage-clean - COMMAND - lcov --zerocounters - COMMENT "Cleaning coverage information" - ) + add_custom_target( + code-coverage-clean + COMMAND lcov --zerocounters + COMMENT "Cleaning coverage information") else() - message(FATAL_ERROR "Coverage enabled, but coverage-capture not available. Please install lcov") + message( + FATAL_ERROR + "Coverage enabled, but coverage-capture not available. 
Please install lcov" + ) endif() endif() @@ -60,7 +65,8 @@ function(enable_sanitizers target_name) list(APPEND SANITIZERS "leak") endif() - option(ENABLE_SANITIZER_UNDEFINED_BEHAVIOR "Enable undefined behavior sanitizer" FALSE) + option(ENABLE_SANITIZER_UNDEFINED_BEHAVIOR + "Enable undefined behavior sanitizer" FALSE) if(ENABLE_SANITIZER_UNDEFINED_BEHAVIOR) list(APPEND SANITIZERS "undefined") endif() @@ -68,7 +74,10 @@ function(enable_sanitizers target_name) option(ENABLE_SANITIZER_THREAD "Enable thread sanitizer" FALSE) if(ENABLE_SANITIZER_THREAD) if("address" IN_LIST SANITIZERS OR "leak" IN_LIST SANITIZERS) - message(WARNING "Thread sanitizer does not work with Address and Leak sanitizer enabled") + message( + WARNING + "Thread sanitizer does not work with Address and Leak sanitizer enabled" + ) else() list(APPEND SANITIZERS "thread") endif() @@ -79,31 +88,34 @@ function(enable_sanitizers target_name) if("address" IN_LIST SANITIZERS OR "thread" IN_LIST SANITIZERS OR "leak" IN_LIST SANITIZERS) - message(WARNING "Memory sanitizer does not work with Address, Thread and Leak sanitizer enabled") + message( + WARNING + "Memory sanitizer does not work with Address, Thread and Leak sanitizer enabled" + ) else() list(APPEND SANITIZERS "memory") endif() endif() - list( - JOIN - SANITIZERS - "," - LIST_OF_SANITIZERS) + list(JOIN SANITIZERS "," LIST_OF_SANITIZERS) endif() # Default value gets overridden below - set(BOUT_USE_SANITIZERS "None" PARENT_SCOPE) + set(BOUT_USE_SANITIZERS + "None" + PARENT_SCOPE) if(LIST_OF_SANITIZERS) - if(NOT - "${LIST_OF_SANITIZERS}" - STREQUAL - "") - set(BOUT_USE_SANITIZERS ${LIST_OF_SANITIZERS} PARENT_SCOPE) - target_compile_options(${target_name} PUBLIC -fsanitize=${LIST_OF_SANITIZERS} -fno-omit-frame-pointer) - target_link_options(${target_name} PUBLIC -fsanitize=${LIST_OF_SANITIZERS}) + if(NOT "${LIST_OF_SANITIZERS}" STREQUAL "") + set(BOUT_USE_SANITIZERS + ${LIST_OF_SANITIZERS} + PARENT_SCOPE) + target_compile_options( + ${target_name} 
PUBLIC -fsanitize=${LIST_OF_SANITIZERS} + -fno-omit-frame-pointer) + target_link_options(${target_name} PUBLIC + -fsanitize=${LIST_OF_SANITIZERS}) endif() endif() diff --git a/cmake/SetupBOUTThirdParty.cmake b/cmake/SetupBOUTThirdParty.cmake index 42ba52b948..48ee0b5efe 100644 --- a/cmake/SetupBOUTThirdParty.cmake +++ b/cmake/SetupBOUTThirdParty.cmake @@ -1,12 +1,12 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${PROJECT_SOURCE_DIR}/cmake/") # determined in SetupCompilers.cmake -if (BOUT_USE_MPI) +if(BOUT_USE_MPI) target_link_libraries(bout++ PUBLIC MPI::MPI_CXX) -endif () +endif() # determined in SetupCompilers.cmake -if (BOUT_USE_OPENMP) +if(BOUT_USE_OPENMP) target_link_libraries(bout++ PUBLIC OpenMP::OpenMP_CXX) set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS} -fopenmp") set(CONFIG_LDFLAGS_SHARED "${CONFIG_LDFLAGS_SHARED} -fopenmp") @@ -14,7 +14,7 @@ if (BOUT_USE_OPENMP) endif() # determined in SetupCompilers.cmake -if (BOUT_HAS_CUDA) +if(BOUT_HAS_CUDA) enable_language(CUDA) message(STATUS "BOUT_HAS_CUDA ${CMAKE_CUDA_COMPILER}") @@ -22,65 +22,69 @@ if (BOUT_HAS_CUDA) set(BOUT_SOURCES_CXX ${BOUT_SOURCES}) list(FILTER BOUT_SOURCES_CXX INCLUDE REGEX ".*\.cxx") - # NOTE: CUDA inherits the CXX standard setting from the top-level - # compile features, set for the bout++ target. + # NOTE: CUDA inherits the CXX standard setting from the top-level compile + # features, set for the bout++ target. 
set_source_files_properties(${BOUT_SOURCES_CXX} PROPERTIES LANGUAGE CUDA) find_package(CUDAToolkit) set_target_properties(bout++ PROPERTIES CUDA_SEPARABLE_COMPILATION ON) set_target_properties(bout++ PROPERTIES POSITION_INDEPENDENT_CODE ON) set_target_properties(bout++ PROPERTIES LINKER_LANGUAGE CUDA) -endif () +endif() # Caliper option(BOUT_ENABLE_CALIPER "Enable Caliper" OFF) -if (BOUT_ENABLE_CALIPER) +if(BOUT_ENABLE_CALIPER) find_package(caliper REQUIRED) target_include_directories(bout++ PUBLIC ${caliper_INCLUDE_DIR}) target_link_libraries(bout++ PUBLIC caliper) -endif () +endif() set(BOUT_HAS_CALIPER ${BOUT_ENABLE_CALIPER}) # UMPIRE option(BOUT_ENABLE_UMPIRE "Enable UMPIRE memory management" OFF) -if (BOUT_ENABLE_UMPIRE) +if(BOUT_ENABLE_UMPIRE) find_package(UMPIRE REQUIRED) target_include_directories(bout++ PUBLIC ${UMPIRE_INCLUDE_DIRS}/include) target_link_libraries(bout++ PUBLIC umpire) -endif () +endif() set(BOUT_HAS_UMPIRE ${BOUT_ENABLE_UMPIRE}) # RAJA option(BOUT_ENABLE_RAJA "Enable RAJA" OFF) -if (BOUT_ENABLE_RAJA) +if(BOUT_ENABLE_RAJA) find_package(RAJA REQUIRED) - message(STATUS "RAJA_CONFIG:" ${RAJA_CONFIG}) + message(STATUS "RAJA_CONFIG:" ${RAJA_CONFIG}) string(FIND ${RAJA_CONFIG} "raja" loc) math(EXPR value "${loc} + 5" OUTPUT_FORMAT DECIMAL) - string(SUBSTRING ${RAJA_CONFIG} 0 ${value} RAJA_PATH) + string(SUBSTRING ${RAJA_CONFIG} 0 ${value} RAJA_PATH) message(STATUS "RAJA_PATH" ${RAJA_PATH}) target_include_directories(bout++ PUBLIC ${RAJA_PATH}/include) target_link_libraries(bout++ PUBLIC RAJA) -endif () +endif() set(BOUT_HAS_RAJA ${BOUT_ENABLE_RAJA}) # Hypre option(BOUT_USE_HYPRE "Enable support for Hypre solvers" OFF) -if (BOUT_USE_HYPRE) +if(BOUT_USE_HYPRE) enable_language(C) find_package(HYPRE REQUIRED) target_link_libraries(bout++ PUBLIC HYPRE::HYPRE) - if (HYPRE_WITH_CUDA AND BOUT_HAS_CUDA) - target_compile_definitions(bout++ PUBLIC "HYPRE_USING_CUDA;HYPRE_USING_UNIFIED_MEMORY") - target_link_libraries(bout++ PUBLIC CUDA::cusparse 
CUDA::curand CUDA::culibos CUDA::cublas CUDA::cublasLt) - endif () -endif () + if(HYPRE_WITH_CUDA AND BOUT_HAS_CUDA) + target_compile_definitions( + bout++ PUBLIC "HYPRE_USING_CUDA;HYPRE_USING_UNIFIED_MEMORY") + target_link_libraries( + bout++ PUBLIC CUDA::cusparse CUDA::curand CUDA::culibos CUDA::cublas + CUDA::cublasLt) + endif() +endif() message(STATUS "HYPRE support: ${BOUT_USE_HYPRE}") set(BOUT_HAS_HYPRE ${BOUT_USE_HYPRE}) # PETSc -option(BOUT_USE_PETSC "Enable support for PETSc time solvers and inversions" OFF) -if (BOUT_USE_PETSC) - if (NOT CMAKE_SYSTEM_NAME STREQUAL "CrayLinuxEnvironment") +option(BOUT_USE_PETSC "Enable support for PETSc time solvers and inversions" + OFF) +if(BOUT_USE_PETSC) + if(NOT CMAKE_SYSTEM_NAME STREQUAL "CrayLinuxEnvironment") # Cray wrappers sort this out for us find_package(PETSc REQUIRED) target_link_libraries(bout++ PUBLIC PETSc::PETSc) @@ -94,28 +98,40 @@ endif() message(STATUS "PETSc support: ${BOUT_USE_PETSC}") set(BOUT_HAS_PETSC ${BOUT_USE_PETSC}) - -cmake_dependent_option(BOUT_USE_SYSTEM_MPARK_VARIANT "Use external installation of mpark.variant" OFF - "BOUT_UPDATE_GIT_SUBMODULE OR EXISTS ${PROJECT_SOURCE_DIR}/externalpackages/mpark.variant/CMakeLists.txt" ON) +cmake_dependent_option( + BOUT_USE_SYSTEM_MPARK_VARIANT + "Use external installation of mpark.variant" + OFF + "BOUT_UPDATE_GIT_SUBMODULE OR EXISTS ${PROJECT_SOURCE_DIR}/externalpackages/mpark.variant/CMakeLists.txt" + ON) if(BOUT_USE_SYSTEM_MPARK_VARIANT) message(STATUS "Using external mpark.variant") find_package(mpark_variant REQUIRED) - get_target_property(MPARK_VARIANT_INCLUDE_PATH mpark_variant INTERFACE_INCLUDE_DIRECTORIES) + get_target_property(MPARK_VARIANT_INCLUDE_PATH mpark_variant + INTERFACE_INCLUDE_DIRECTORIES) else() message(STATUS "Using mpark.variant submodule") bout_update_submodules() add_subdirectory(externalpackages/mpark.variant) if(NOT TARGET mpark_variant) - message(FATAL_ERROR "mpark_variant not found! 
Have you disabled the git submodules (BOUT_UPDATE_GIT_SUBMODULE)?") + message( + FATAL_ERROR + "mpark_variant not found! Have you disabled the git submodules (BOUT_UPDATE_GIT_SUBMODULE)?" + ) endif() - set(MPARK_VARIANT_INCLUDE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/externalpackages/mpark.variant/include") + set(MPARK_VARIANT_INCLUDE_PATH + "${CMAKE_CURRENT_SOURCE_DIR}/externalpackages/mpark.variant/include") set(CONFIG_CFLAGS "${CONFIG_CFLAGS} -I\${MPARK_VARIANT_INCLUDE_PATH}") endif() target_link_libraries(bout++ PUBLIC mpark_variant) -cmake_dependent_option(BOUT_USE_SYSTEM_FMT "Use external installation of fmt" OFF - "BOUT_UPDATE_GIT_SUBMODULE OR EXISTS ${PROJECT_SOURCE_DIR}/externalpackages/fmt/CMakeLists.txt" ON) +cmake_dependent_option( + BOUT_USE_SYSTEM_FMT + "Use external installation of fmt" + OFF + "BOUT_UPDATE_GIT_SUBMODULE OR EXISTS ${PROJECT_SOURCE_DIR}/externalpackages/fmt/CMakeLists.txt" + ON) if(BOUT_USE_SYSTEM_FMT) message(STATUS "Using external fmt") @@ -125,32 +141,41 @@ else() message(STATUS "Using fmt submodule") bout_update_submodules() # Need to install fmt alongside BOUT++ - set(FMT_INSTALL ON CACHE BOOL "") - set(FMT_DEBUG_POSTFIX "" CACHE STRING "") + set(FMT_INSTALL + ON + CACHE BOOL "") + set(FMT_DEBUG_POSTFIX + "" + CACHE STRING "") add_subdirectory(externalpackages/fmt) if(NOT TARGET fmt::fmt) - message(FATAL_ERROR "fmt not found! Have you disabled the git submodules (BOUT_UPDATE_GIT_SUBMODULE)?") + message( + FATAL_ERROR + "fmt not found! Have you disabled the git submodules (BOUT_UPDATE_GIT_SUBMODULE)?" 
+ ) endif() - # Build the library in /lib: this makes updating the path - # for bout-config much easier - set_target_properties(fmt PROPERTIES - LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/lib" - ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/lib") - set(FMT_INCLUDE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/externalpackages/fmt/include") + # Build the library in /lib: this makes updating the path for + # bout-config much easier + set_target_properties( + fmt PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/lib" + ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/lib") + set(FMT_INCLUDE_PATH + "${CMAKE_CURRENT_SOURCE_DIR}/externalpackages/fmt/include") set(CONFIG_CFLAGS "${CONFIG_CFLAGS} -I\${FMT_INCLUDE_PATH}") set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS} -lfmt") endif() target_link_libraries(bout++ PUBLIC fmt::fmt) option(BOUT_USE_PVODE "Enable support for bundled PVODE" ON) -if (BOUT_USE_PVODE) +if(BOUT_USE_PVODE) add_subdirectory(externalpackages/PVODE) target_link_libraries(bout++ PUBLIC pvode pvpre) - # Build the libraries in /lib: this makes updating the - # path for bout-config much easier - set_target_properties(pvode pvpre PROPERTIES - LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/lib" - ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/lib") + # Build the libraries in /lib: this makes updating the path for + # bout-config much easier + set_target_properties( + pvode pvpre + PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/lib" + ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/lib") set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS} -lpvode -lpvpre") endif() message(STATUS "PVODE support: ${BOUT_USE_PVODE}") @@ -158,25 +183,27 @@ set(BOUT_HAS_PVODE ${BOUT_USE_PVODE}) option(BOUT_USE_NETCDF "Enable support for NetCDF output" ON) option(BOUT_DOWNLOAD_NETCDF_CXX4 "Download and build netCDF-cxx4" OFF) -if (BOUT_USE_NETCDF) - if (BOUT_DOWNLOAD_NETCDF_CXX4) +if(BOUT_USE_NETCDF) + if(BOUT_DOWNLOAD_NETCDF_CXX4) 
message(STATUS "Downloading and configuring NetCDF-cxx4") include(FetchContent) FetchContent_Declare( netcdf-cxx4 GIT_REPOSITORY https://github.com/Unidata/netcdf-cxx4 - GIT_TAG "a43d6d4d415d407712c246faca553bd951730dc1" - ) + GIT_TAG "a43d6d4d415d407712c246faca553bd951730dc1") # Don't build the netcdf tests, they have lots of warnings - set(NCXX_ENABLE_TESTS OFF CACHE BOOL "" FORCE) + set(NCXX_ENABLE_TESTS + OFF + CACHE BOOL "" FORCE) # Use our own FindnetCDF module which uses nc-config find_package(netCDF REQUIRED) FetchContent_MakeAvailable(netcdf-cxx4) target_link_libraries(bout++ PUBLIC netCDF::netcdf-cxx4) else() find_package(netCDFCxx) - if (netCDFCxx_FOUND) - set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS} ${netCDF_CXX_LIBRARY} ${netCDF_LIBRARIES}") + if(netCDFCxx_FOUND) + set(CONFIG_LDFLAGS + "${CONFIG_LDFLAGS} ${netCDF_CXX_LIBRARY} ${netCDF_LIBRARIES}") target_link_libraries(bout++ PUBLIC netCDF::netcdf-cxx4) else() find_package(PkgConfig REQUIRED) @@ -186,10 +213,10 @@ if (BOUT_USE_NETCDF) set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS} ${NETCDF_LDFLAGS_STRING}") endif() endif() - if (netCDF_DIR) + if(netCDF_DIR) set(netCDF_ROOT "${netCDF_DIR}") endif() - if (netCDFCxx_DIR) + if(netCDFCxx_DIR) set(netCDFCxx_ROOT "${netCDFCxx_DIR}") endif() endif() @@ -198,27 +225,38 @@ set(BOUT_HAS_NETCDF ${BOUT_USE_NETCDF}) option(BOUT_USE_ADIOS2 "Enable support for ADIOS output" OFF) option(BOUT_DOWNLOAD_ADIOS2 "Download and build ADIOS2" OFF) -if (BOUT_USE_ADIOS2) +if(BOUT_USE_ADIOS2) enable_language(C) find_package(MPI REQUIRED COMPONENTS C) - if (BOUT_DOWNLOAD_ADIOS2) + if(BOUT_DOWNLOAD_ADIOS2) message(STATUS "Downloading and configuring ADIOS2") include(FetchContent) FetchContent_Declare( adios2 GIT_REPOSITORY https://github.com/ornladios/ADIOS2.git GIT_TAG origin/master - GIT_SHALLOW 1 - ) - set(ADIOS2_USE_MPI ON CACHE BOOL "" FORCE) - set(ADIOS2_USE_Fortran OFF CACHE BOOL "" FORCE) - set(ADIOS2_USE_Python OFF CACHE BOOL "" FORCE) - set(ADIOS2_BUILD_EXAMPLES OFF CACHE BOOL "" FORCE) 
+ GIT_SHALLOW 1) + set(ADIOS2_USE_MPI + ON + CACHE BOOL "" FORCE) + set(ADIOS2_USE_Fortran + OFF + CACHE BOOL "" FORCE) + set(ADIOS2_USE_Python + OFF + CACHE BOOL "" FORCE) + set(ADIOS2_BUILD_EXAMPLES + OFF + CACHE BOOL "" FORCE) # Disable testing, or ADIOS will try to find or install GTEST - set(BUILD_TESTING OFF CACHE BOOL "" FORCE) + set(BUILD_TESTING + OFF + CACHE BOOL "" FORCE) # Note: SST requires but doesn't check at configure time - set(ADIOS2_USE_SST OFF CACHE BOOL "" FORCE) + set(ADIOS2_USE_SST + OFF + CACHE BOOL "" FORCE) FetchContent_MakeAvailable(adios2) message(STATUS "ADIOS2 done configuring") else() @@ -229,9 +267,8 @@ endif() message(STATUS "ADIOS2 support: ${BOUT_USE_ADIOS2}") set(BOUT_HAS_ADIOS2 ${BOUT_USE_ADIOS2}) - option(BOUT_USE_FFTW "Enable support for FFTW" ON) -if (BOUT_USE_FFTW) +if(BOUT_USE_FFTW) find_package(FFTW REQUIRED) target_link_libraries(bout++ PUBLIC FFTW::FFTW) set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS} ${FFTW_LIBRARIES}") @@ -244,15 +281,15 @@ option(BOUT_USE_LAPACK "Enable support for LAPACK" AUTO) set_property(CACHE BOUT_USE_LAPACK PROPERTY STRINGS ${ON_OFF_AUTO}) set(LAPACK_FOUND OFF) -if (BOUT_USE_LAPACK) - if (NOT CMAKE_SYSTEM_NAME STREQUAL "CrayLinuxEnvironment") +if(BOUT_USE_LAPACK) + if(NOT CMAKE_SYSTEM_NAME STREQUAL "CrayLinuxEnvironment") # Cray wrappers sort this out for us - if (BOUT_USE_LAPACK STREQUAL ON) + if(BOUT_USE_LAPACK STREQUAL ON) find_package(LAPACK REQUIRED) else() find_package(LAPACK) endif() - if (LAPACK_FOUND) + if(LAPACK_FOUND) target_link_libraries(bout++ PUBLIC "${LAPACK_LIBRARIES}") string(JOIN " " CONFIG_LAPACK_LIBRARIES ${LAPACK_LIBRARIES}) set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS} ${CONFIG_LAPACK_LIBRARIES}") @@ -264,7 +301,7 @@ message(STATUS "LAPACK support: ${LAPACK_FOUND}") set(BOUT_HAS_LAPACK ${LAPACK_FOUND}) option(BOUT_USE_SLEPC "Enable support for SLEPc eigen solver" OFF) -if (BOUT_USE_SLEPC) +if(BOUT_USE_SLEPC) find_package(SLEPc REQUIRED) target_link_libraries(bout++ PUBLIC SLEPc::SLEPc) 
string(JOIN " " CONFIG_SLEPC_LIBRARIES ${SLEPC_LIBRARIES}) @@ -275,44 +312,61 @@ set(BOUT_HAS_SLEPC ${BOUT_USE_SLEPC}) option(BOUT_DOWNLOAD_SUNDIALS "Download and build SUNDIALS" OFF) # Force BOUT_USE_SUNDIALS if we're downloading it! -cmake_dependent_option(BOUT_USE_SUNDIALS "Enable support for SUNDIALS time solvers" OFF +cmake_dependent_option( + BOUT_USE_SUNDIALS "Enable support for SUNDIALS time solvers" OFF "NOT BOUT_DOWNLOAD_SUNDIALS" ON) -if (BOUT_USE_SUNDIALS) +if(BOUT_USE_SUNDIALS) enable_language(C) - if (BOUT_DOWNLOAD_SUNDIALS) + if(BOUT_DOWNLOAD_SUNDIALS) message(STATUS "Downloading and configuring SUNDIALS") include(FetchContent) FetchContent_Declare( sundials GIT_REPOSITORY https://github.com/LLNL/sundials - GIT_TAG v7.2.1 - ) + GIT_TAG v7.2.1) # Note: These are settings for building SUNDIALS - set(EXAMPLES_ENABLE_C OFF CACHE BOOL "" FORCE) - set(EXAMPLES_INSTALL OFF CACHE BOOL "" FORCE) - set(ENABLE_MPI ${BOUT_USE_MPI} CACHE BOOL "" FORCE) - set(ENABLE_OPENMP OFF CACHE BOOL "" FORCE) - if (BUILD_SHARED_LIBS) - set(BUILD_STATIC_LIBS OFF CACHE BOOL "" FORCE) + set(EXAMPLES_ENABLE_C + OFF + CACHE BOOL "" FORCE) + set(EXAMPLES_INSTALL + OFF + CACHE BOOL "" FORCE) + set(ENABLE_MPI + ${BOUT_USE_MPI} + CACHE BOOL "" FORCE) + set(ENABLE_OPENMP + OFF + CACHE BOOL "" FORCE) + if(BUILD_SHARED_LIBS) + set(BUILD_STATIC_LIBS + OFF + CACHE BOOL "" FORCE) else() - set(BUILD_STATIC_LIBS ON CACHE BOOL "" FORCE) + set(BUILD_STATIC_LIBS + ON + CACHE BOOL "" FORCE) endif() FetchContent_MakeAvailable(sundials) message(STATUS "SUNDIALS done configuring") else() find_package(SUNDIALS REQUIRED) - if (SUNDIALS_VERSION VERSION_LESS 4.0.0) - message(FATAL_ERROR "SUNDIALS_VERSION 4.0.0 or newer is required. Found version ${SUNDIALS_VERSION}.") + if(SUNDIALS_VERSION VERSION_LESS 4.0.0) + message( + FATAL_ERROR + "SUNDIALS_VERSION 4.0.0 or newer is required. Found version ${SUNDIALS_VERSION}." 
+ ) endif() endif() - if (SUNDIALS_DIR) + if(SUNDIALS_DIR) set(SUNDIALS_ROOT "${SUNDIALS_DIR}") endif() target_link_libraries(bout++ PUBLIC SUNDIALS::nvecparallel) target_link_libraries(bout++ PUBLIC SUNDIALS::cvode) target_link_libraries(bout++ PUBLIC SUNDIALS::ida) target_link_libraries(bout++ PUBLIC SUNDIALS::arkode) - set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS} ${SUNDIALS_cvode_LIBRARY} ${SUNDIALS_ida_LIBRARY} ${SUNDIALS_arkode_LIBRARY} ${SUNDIALS_nvecparallel_LIBRARY}") + set(CONFIG_LDFLAGS + "${CONFIG_LDFLAGS} ${SUNDIALS_cvode_LIBRARY} ${SUNDIALS_ida_LIBRARY} ${SUNDIALS_arkode_LIBRARY} ${SUNDIALS_nvecparallel_LIBRARY}" + ) endif() message(STATUS "SUNDIALS support: ${BOUT_USE_SUNDIALS}") set(BOUT_HAS_SUNDIALS ${BOUT_USE_SUNDIALS}) @@ -321,46 +375,48 @@ set(BOUT_HAS_CVODE ${BOUT_USE_SUNDIALS}) set(BOUT_HAS_IDA ${BOUT_USE_SUNDIALS}) set(ON_OFF_AUTO ON OFF AUTO) -set(BOUT_USE_NLS AUTO CACHE STRING "Enable Native Language Support") +set(BOUT_USE_NLS + AUTO + CACHE STRING "Enable Native Language Support") set_property(CACHE BOUT_USE_NLS PROPERTY STRINGS ${ON_OFF_AUTO}) set(BOUT_HAS_GETTEXT OFF) -if (BOUT_USE_NLS) +if(BOUT_USE_NLS) find_package(Gettext) - if (GETTEXT_FOUND) + if(GETTEXT_FOUND) find_package(Intl) - if (Intl_FOUND) - target_link_libraries(bout++ - PUBLIC ${Intl_LIBRARIES}) - target_include_directories(bout++ - PUBLIC ${Intl_INCLUDE_DIRS}) + if(Intl_FOUND) + target_link_libraries(bout++ PUBLIC ${Intl_LIBRARIES}) + target_include_directories(bout++ PUBLIC ${Intl_INCLUDE_DIRS}) set(BOUT_HAS_GETTEXT ON) else() - if (NOT BOUT_USE_NLS STREQUAL "AUTO") - message(FATAL_ERROR "Intl not found but requested!") + if(NOT BOUT_USE_NLS STREQUAL "AUTO") + message(FATAL_ERROR "Intl not found but requested!") endif() endif() else() - if (NOT BOUT_USE_NLS STREQUAL "AUTO") + if(NOT BOUT_USE_NLS STREQUAL "AUTO") message(FATAL_ERROR "GETTEXT not found but requested!") endif() endif() endif() option(BOUT_USE_SCOREP "Enable support for Score-P based instrumentation" OFF) -if 
(BOUT_USE_SCOREP) - message(STATUS "Score-P support enabled. Please make sure you are calling CMake like so: +if(BOUT_USE_SCOREP) + message( + STATUS + "Score-P support enabled. Please make sure you are calling CMake like so: SCOREP_WRAPPER=off cmake -DCMAKE_C_COMPILER=scorep-mpicc -DCMAKE_CXX_COMPILER=scorep-mpicxx ") endif() set(BOUT_HAS_SCOREP ${BOUT_USE_SCOREP}) -option(BOUT_USE_UUID_SYSTEM_GENERATOR "Enable support for using a system UUID generator" ON) -if (BOUT_USE_UUID_SYSTEM_GENERATOR) +option(BOUT_USE_UUID_SYSTEM_GENERATOR + "Enable support for using a system UUID generator" ON) +if(BOUT_USE_UUID_SYSTEM_GENERATOR) find_package(Libuuid QUIET) - if (Libuuid_FOUND) - target_link_libraries(bout++ - PUBLIC Libuuid::libuuid) + if(Libuuid_FOUND) + target_link_libraries(bout++ PUBLIC Libuuid::libuuid) set(CONFIG_LDFLAGS "${CONFIG_LDFLAGS} ${Libuuid_LIBRARIES}") else() message(STATUS "libuuid not found, using fallback UUID generator") @@ -370,24 +426,37 @@ endif() message(STATUS "UUID_SYSTEM_GENERATOR: ${BOUT_USE_UUID_SYSTEM_GENERATOR}") set(BOUT_HAS_UUID_SYSTEM_GENERATOR ${BOUT_USE_UUID_SYSTEM_GENERATOR}) -cmake_dependent_option(BOUT_USE_SYSTEM_CPPTRACE "Use external installation of cpptrace" OFF - "BOUT_UPDATE_GIT_SUBMODULE OR EXISTS ${PROJECT_SOURCE_DIR}/externalpackages/cpptrace/CMakeLists.txt" ON) +cmake_dependent_option( + BOUT_USE_SYSTEM_CPPTRACE + "Use external installation of cpptrace" + OFF + "BOUT_UPDATE_GIT_SUBMODULE OR EXISTS ${PROJECT_SOURCE_DIR}/externalpackages/cpptrace/CMakeLists.txt" + ON) if(BOUT_USE_SYSTEM_CPPTRACE) message(STATUS "Using external cpptrace") find_package(cpptrace REQUIRED) - get_target_property(CPPTRACE_INCLUDE_PATH cpptrace::cpptrace INTERFACE_INCLUDE_DIRECTORIES) + get_target_property(CPPTRACE_INCLUDE_PATH cpptrace::cpptrace + INTERFACE_INCLUDE_DIRECTORIES) else() message(STATUS "Using cpptrace submodule") bout_update_submodules() # Need a fork with some fixes for CMake - set(CPPTRACE_LIBDWARF_REPO 
"https://github.com/ZedThree/libdwarf-lite.git" CACHE STRING "" FORCE) - set(CPPTRACE_LIBDWARF_TAG "ebe10a39afd56b8247de633bfe17666ad50ab95e" CACHE STRING "" FORCE) + set(CPPTRACE_LIBDWARF_REPO + "https://github.com/ZedThree/libdwarf-lite.git" + CACHE STRING "" FORCE) + set(CPPTRACE_LIBDWARF_TAG + "ebe10a39afd56b8247de633bfe17666ad50ab95e" + CACHE STRING "" FORCE) add_subdirectory(externalpackages/cpptrace) if(NOT TARGET cpptrace::cpptrace) - message(FATAL_ERROR "cpptrace not found! Have you disabled the git submodules (BOUT_UPDATE_GIT_SUBMODULE)?") + message( + FATAL_ERROR + "cpptrace not found! Have you disabled the git submodules (BOUT_UPDATE_GIT_SUBMODULE)?" + ) endif() - set(CPPTRACE_INCLUDE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/externalpackages/cpptrace/include") + set(CPPTRACE_INCLUDE_PATH + "${CMAKE_CURRENT_SOURCE_DIR}/externalpackages/cpptrace/include") endif() set(CONFIG_CFLAGS "${CONFIG_CFLAGS} -I\${CPPTRACE_INCLUDE_PATH}") target_link_libraries(bout++ PUBLIC cpptrace::cpptrace) diff --git a/cmake/SetupCompilers.cmake b/cmake/SetupCompilers.cmake index 647cb20f75..4aa1dff512 100644 --- a/cmake/SetupCompilers.cmake +++ b/cmake/SetupCompilers.cmake @@ -1,44 +1,55 @@ - -# Note: Currently BOUT++ always needs MPI. This option just determines -# whether the find_* routines are used +# Note: Currently BOUT++ always needs MPI. 
This option just determines whether +# the find_* routines are used option(BOUT_ENABLE_MPI "Enable MPI support" ON) if(BOUT_ENABLE_MPI) - # This might not be entirely sensible, but helps CMake to find the - # correct MPI, workaround for https://gitlab.kitware.com/cmake/cmake/issues/18895 - find_program(MPIEXEC_EXECUTABLE NAMES mpiexec mpirun) - find_package(MPI REQUIRED) -endif () + # This might not be entirely sensible, but helps CMake to find the correct + # MPI, workaround for https://gitlab.kitware.com/cmake/cmake/issues/18895 + find_program(MPIEXEC_EXECUTABLE NAMES mpiexec mpirun) + find_package(MPI REQUIRED) +endif() set(BOUT_USE_MPI ${BOUT_ENABLE_MPI}) option(BOUT_ENABLE_OPENMP "Enable OpenMP support" OFF) -set(BOUT_OPENMP_SCHEDULE static CACHE STRING "Set OpenMP schedule") -set_property(CACHE BOUT_OPENMP_SCHEDULE PROPERTY STRINGS static dynamic guided auto) -if (BOUT_ENABLE_OPENMP) +set(BOUT_OPENMP_SCHEDULE + static + CACHE STRING "Set OpenMP schedule") +set_property(CACHE BOUT_OPENMP_SCHEDULE PROPERTY STRINGS static dynamic guided + auto) +if(BOUT_ENABLE_OPENMP) find_package(OpenMP REQUIRED) set(possible_openmp_schedules static dynamic guided auto) - if (NOT BOUT_OPENMP_SCHEDULE IN_LIST possible_openmp_schedules) - message(FATAL_ERROR "BOUT_OPENMP_SCHEDULE must be one of ${possible_openmp_schedules}; got ${BOUT_OPENMP_SCHEDULE}") + if(NOT BOUT_OPENMP_SCHEDULE IN_LIST possible_openmp_schedules) + message( + FATAL_ERROR + "BOUT_OPENMP_SCHEDULE must be one of ${possible_openmp_schedules}; got ${BOUT_OPENMP_SCHEDULE}" + ) endif() message(STATUS "OpenMP schedule: ${BOUT_OPENMP_SCHEDULE}") -endif () +endif() set(BOUT_USE_OPENMP ${BOUT_ENABLE_OPENMP}) message(STATUS "Enable OpenMP: ${BOUT_ENABLE_OPENMP}") option(BOUT_ENABLE_CUDA "Enable CUDA support" OFF) -set(CUDA_ARCH "compute_70,code=sm_70" CACHE STRING "CUDA architecture") +set(CUDA_ARCH + "compute_70,code=sm_70" + CACHE STRING "CUDA architecture") if(BOUT_ENABLE_CUDA) - # Set specific options for CUDA if 
enabled - enable_language(CUDA) - set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} -gencode arch=${CUDA_ARCH} -ccbin ${CMAKE_CXX_COMPILER}") - if (BOUT_ENABLE_RAJA) - # RAJA uses lambda expressions - set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} --expt-extended-lambda --expt-relaxed-constexpr") - endif () + # Set specific options for CUDA if enabled + enable_language(CUDA) + set(CMAKE_CUDA_FLAGS + "${CMAKE_CUDA_FLAGS} -gencode arch=${CUDA_ARCH} -ccbin ${CMAKE_CXX_COMPILER}" + ) + if(BOUT_ENABLE_RAJA) + # RAJA uses lambda expressions + set(CMAKE_CUDA_FLAGS + "${CMAKE_CUDA_FLAGS} --expt-extended-lambda --expt-relaxed-constexpr") + endif() -# TODO Ensure openmp flags are not enabled twice! - if (BOUT_ENABLE_OPENMP) - # CMAKE_CUDA_FLAGS does not pass OpenMP_CXX_FLAGS to the host compiler by default - set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} -Xcompiler ${OpenMP_CXX_FLAGS}") - endif () + # TODO Ensure openmp flags are not enabled twice! + if(BOUT_ENABLE_OPENMP) + # CMAKE_CUDA_FLAGS does not pass OpenMP_CXX_FLAGS to the host compiler by + # default + set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} -Xcompiler ${OpenMP_CXX_FLAGS}") + endif() endif() set(BOUT_HAS_CUDA ${BOUT_ENABLE_CUDA}) diff --git a/examples/6field-simple/CMakeLists.txt b/examples/6field-simple/CMakeLists.txt index 6a51327cda..fb2b1ced86 100644 --- a/examples/6field-simple/CMakeLists.txt +++ b/examples/6field-simple/CMakeLists.txt @@ -2,10 +2,9 @@ cmake_minimum_required(VERSION 3.13) project(6field-simple LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(elm_6f - SOURCES elm_6f.cxx - EXTRA_FILES cbm18_dens8.grid_nx68ny64.nc) +bout_add_example(elm_6f SOURCES elm_6f.cxx EXTRA_FILES + cbm18_dens8.grid_nx68ny64.nc) diff --git a/examples/IMEX/advection-diffusion/CMakeLists.txt b/examples/IMEX/advection-diffusion/CMakeLists.txt index 334d48d767..f2008f3479 100644 --- a/examples/IMEX/advection-diffusion/CMakeLists.txt +++ 
b/examples/IMEX/advection-diffusion/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.13) project(advection-diffusion LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/IMEX/advection-reaction/CMakeLists.txt b/examples/IMEX/advection-reaction/CMakeLists.txt index 03e8686371..f28daa4c0f 100644 --- a/examples/IMEX/advection-reaction/CMakeLists.txt +++ b/examples/IMEX/advection-reaction/CMakeLists.txt @@ -2,10 +2,9 @@ cmake_minimum_required(VERSION 3.13) project(advection-reaction LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(split_operator - SOURCES split_operator.cxx - EXTRA_FILES simple_xz.nc) +bout_add_example(split_operator SOURCES split_operator.cxx EXTRA_FILES + simple_xz.nc) diff --git a/examples/IMEX/diffusion-nl/CMakeLists.txt b/examples/IMEX/diffusion-nl/CMakeLists.txt index 664d16e042..73c7250bb5 100644 --- a/examples/IMEX/diffusion-nl/CMakeLists.txt +++ b/examples/IMEX/diffusion-nl/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.13) project(diffusion-nl LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/IMEX/drift-wave-constraint/CMakeLists.txt b/examples/IMEX/drift-wave-constraint/CMakeLists.txt index 5680b5367e..b72396d2f0 100644 --- a/examples/IMEX/drift-wave-constraint/CMakeLists.txt +++ b/examples/IMEX/drift-wave-constraint/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.13) project(drift-wave-constraint LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/IMEX/drift-wave/CMakeLists.txt b/examples/IMEX/drift-wave/CMakeLists.txt index e3e2a1b8ee..44fbbd7b0f 100644 --- a/examples/IMEX/drift-wave/CMakeLists.txt +++ b/examples/IMEX/drift-wave/CMakeLists.txt @@ -2,7 
+2,7 @@ cmake_minimum_required(VERSION 3.13) project(drift-wave LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/blob2d-laplacexz/CMakeLists.txt b/examples/blob2d-laplacexz/CMakeLists.txt index 17f9ebe97a..855d86af70 100644 --- a/examples/blob2d-laplacexz/CMakeLists.txt +++ b/examples/blob2d-laplacexz/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.13) project(blob2d-laplacexz LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/blob2d-outerloop/CMakeLists.txt b/examples/blob2d-outerloop/CMakeLists.txt index cd7187ee3f..e991b45d51 100644 --- a/examples/blob2d-outerloop/CMakeLists.txt +++ b/examples/blob2d-outerloop/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.13) project(blob2d-outerloop LANGUAGES CXX C) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/blob2d/CMakeLists.txt b/examples/blob2d/CMakeLists.txt index a4772874d9..e93cd2ea12 100644 --- a/examples/blob2d/CMakeLists.txt +++ b/examples/blob2d/CMakeLists.txt @@ -2,11 +2,19 @@ cmake_minimum_required(VERSION 3.13) project(blob2d LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(blob2d - SOURCES blob2d.cxx - DATA_DIRS delta_0.25 delta_1 delta_10 two_blobs data - EXTRA_FILES blob_velocity.py) +bout_add_example( + blob2d + SOURCES + blob2d.cxx + DATA_DIRS + delta_0.25 + delta_1 + delta_10 + two_blobs + data + EXTRA_FILES + blob_velocity.py) diff --git a/examples/blob2d/blob_velocity.py b/examples/blob2d/blob_velocity.py index d044946320..d374cb795f 100644 --- a/examples/blob2d/blob_velocity.py +++ b/examples/blob2d/blob_velocity.py @@ -3,96 +3,101 @@ import pickle try: - from past.utils import old_div + from past.utils import old_div except ImportError: 
- def old_div(a,b): - return a/b - -def blob_velocity(n,**kwargs): - - from boututils import calculus as Calc - # Calculate blob velocity in normalized time and normalized grid spacing - # - # Input: Blob density as a 3D vector in the form n[t,x,z] where t is time and x,z are the perpendicular spatial coordinates - # - # Keywords: - # - # type='peak' -> Calculate velocity of the peak density - # type='COM' -> Calculate centre of mass velocity - # Index=True -> return indices used to create velocity - # - # Default: Peak velocity with no index returning - - size = n.shape - - try: - v_type = kwargs['type'] - except: - v_type = 'peak' #Default to peak velocity calculation - try: - return_index = kwargs['Index'] - except: - return_index = False #Default to no index returning - - - if v_type == 'peak': - x = np.zeros(size[0]) - z = np.zeros(size[0]) - for i in np.arange(size[0]): - nmax,nmin = np.amax((n[i,:,:])),np.amin((n[i,:,:])) - xpos,zpos = np.where(n[i,:,:]==nmax) - x[i] = xpos[0] - z[i] = zpos[0] - - if v_type == 'COM': - x = np.zeros(size[0]) - z = np.zeros(size[0]) - for i in np.arange(size[0]): - data = n[i,:,:] - n[0,0,0] #use corner cell rather than nmin - ntot = np.sum(data[:,:]) - - z[i] = old_div(np.sum(np.sum(data[:,:],axis=0)*(np.arange(size[2]))),ntot) - x[i] = old_div(np.sum(np.sum(data[:,:],axis=1)*(np.arange(size[1]))),ntot) - - vx = Calc.deriv(x) - vz = Calc.deriv(z) - - if return_index: - return vx,vz,x,z - else: - return vx,vz - - - -data='data' + def old_div(a, b): + return a / b + + +def blob_velocity(n, **kwargs): + + from boututils import calculus as Calc + # Calculate blob velocity in normalized time and normalized grid spacing + # + # Input: Blob density as a 3D vector in the form n[t,x,z] where t is time and x,z are the perpendicular spatial coordinates + # + # Keywords: + # + # type='peak' -> Calculate velocity of the peak density + # type='COM' -> Calculate centre of mass velocity + # Index=True -> return indices used to create 
velocity + # + # Default: Peak velocity with no index returning + + size = n.shape + + try: + v_type = kwargs["type"] + except: + v_type = "peak" # Default to peak velocity calculation + try: + return_index = kwargs["Index"] + except: + return_index = False # Default to no index returning + + if v_type == "peak": + x = np.zeros(size[0]) + z = np.zeros(size[0]) + for i in np.arange(size[0]): + nmax, nmin = np.amax((n[i, :, :])), np.amin((n[i, :, :])) + xpos, zpos = np.where(n[i, :, :] == nmax) + x[i] = xpos[0] + z[i] = zpos[0] + + if v_type == "COM": + x = np.zeros(size[0]) + z = np.zeros(size[0]) + for i in np.arange(size[0]): + data = n[i, :, :] - n[0, 0, 0] # use corner cell rather than nmin + ntot = np.sum(data[:, :]) + + z[i] = old_div( + np.sum(np.sum(data[:, :], axis=0) * (np.arange(size[2]))), ntot + ) + x[i] = old_div( + np.sum(np.sum(data[:, :], axis=1) * (np.arange(size[1]))), ntot + ) + + vx = Calc.deriv(x) + vz = Calc.deriv(z) + + if return_index: + return vx, vz, x, z + else: + return vx, vz + + +data = "data" if True: - import sys - if len(sys.argv) > 1: - data=sys.argv[1] + import sys + + if len(sys.argv) > 1: + data = sys.argv[1] -n = collect('n', path=data, info=False) +n = collect("n", path=data, info=False) -vx,vy,xx,yy = blob_velocity(n[:,:,0,:],type='COM',Index=True) +vx, vy, xx, yy = blob_velocity(n[:, :, 0, :], type="COM", Index=True) -f = open('Velocity.dat','wb') -pickle.dump(vx,f) +f = open("Velocity.dat", "wb") +pickle.dump(vx, f) f.close() -f = open('Position.dat','wb') -pickle.dump(xx,f) +f = open("Position.dat", "wb") +pickle.dump(xx, f) f.close() -f = open('Velocity.dat','rb') +f = open("Velocity.dat", "rb") vx = pickle.load(f) f.close() try: - import matplotlib.pyplot as plt - plt.plot(vx) - plt.show() + import matplotlib.pyplot as plt + + plt.plot(vx) + plt.show() except ImportError: - pass + pass diff --git a/examples/boundary-conditions/advection/CMakeLists.txt b/examples/boundary-conditions/advection/CMakeLists.txt index 
a4ab73a24d..c1f2c22b5d 100644 --- a/examples/boundary-conditions/advection/CMakeLists.txt +++ b/examples/boundary-conditions/advection/CMakeLists.txt @@ -2,13 +2,16 @@ cmake_minimum_required(VERSION 3.13) project(advection LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(advection - SOURCES advection.cxx - DATA_DIRS central-dirichlet - central-free - central-free-o3 - upwind) +bout_add_example( + advection + SOURCES + advection.cxx + DATA_DIRS + central-dirichlet + central-free + central-free-o3 + upwind) diff --git a/examples/boutpp/CMakeLists.txt b/examples/boutpp/CMakeLists.txt index e46a7ae990..1cc5c9619b 100644 --- a/examples/boutpp/CMakeLists.txt +++ b/examples/boutpp/CMakeLists.txt @@ -1,6 +1,6 @@ cmake_minimum_required(VERSION 3.13) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/boutpp/simulation.py b/examples/boutpp/simulation.py index 1b57ff8b33..9948bd9b03 100755 --- a/examples/boutpp/simulation.py +++ b/examples/boutpp/simulation.py @@ -3,12 +3,13 @@ bc.init("mesh:n=48") + class Model(bc.PhysicsModel): - def init(self,restart): + def init(self, restart): self.dens = bc.create3D("sin(x)") self.solve_for(n=self.dens) - def rhs(self,time): + def rhs(self, time): self.dens.ddt(bc.DDX(self.dens)) diff --git a/examples/conducting-wall-mode/CMakeLists.txt b/examples/conducting-wall-mode/CMakeLists.txt index 857a22038e..c15df78c2a 100644 --- a/examples/conducting-wall-mode/CMakeLists.txt +++ b/examples/conducting-wall-mode/CMakeLists.txt @@ -2,10 +2,8 @@ cmake_minimum_required(VERSION 3.13) project(conducting-wall-mode LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(conducting-wall-mode - SOURCES cwm.cxx - EXTRA_FILES cwm_grid.nc) +bout_add_example(conducting-wall-mode SOURCES cwm.cxx EXTRA_FILES cwm_grid.nc) diff 
--git a/examples/conduction-snb/CMakeLists.txt b/examples/conduction-snb/CMakeLists.txt index 45072dbe59..af5ee819be 100644 --- a/examples/conduction-snb/CMakeLists.txt +++ b/examples/conduction-snb/CMakeLists.txt @@ -2,11 +2,22 @@ cmake_minimum_required(VERSION 3.13) project(conduction-snb LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(conduction-snb - SOURCES conduction-snb.cxx - EXTRA_FILES fit_temperature.py sinusoid.py snb.csv spitzer-harm.csv step.py temperature.csv vfp.csv - DATA_DIRS data step) +bout_add_example( + conduction-snb + SOURCES + conduction-snb.cxx + EXTRA_FILES + fit_temperature.py + sinusoid.py + snb.csv + spitzer-harm.csv + step.py + temperature.csv + vfp.csv + DATA_DIRS + data + step) diff --git a/examples/conduction-snb/fit_temperature.py b/examples/conduction-snb/fit_temperature.py index 1e7f3ddf94..81faa9cf93 100644 --- a/examples/conduction-snb/fit_temperature.py +++ b/examples/conduction-snb/fit_temperature.py @@ -3,23 +3,31 @@ import matplotlib.pyplot as plt Te_ref = np.loadtxt("temperature.csv", delimiter=",") -Te_ref[:,0] *= 1e-4 # Convert X axis to m +Te_ref[:, 0] *= 1e-4 # Convert X axis to m + def te_function(ypos, mid, wwid, w0, w1, w2, Tmax, Tmin, clip=False): - width = w0 + ((ypos - mid)*w1 + (ypos - mid)**2 * w2) * np.exp(-((ypos - mid)/wwid)**2) + width = w0 + ((ypos - mid) * w1 + (ypos - mid) ** 2 * w2) * np.exp( + -(((ypos - mid) / wwid) ** 2) + ) if clip: width = np.clip(width, 1e-10, None) - return Tmax - 0.5 * (1 + np.tanh((ypos - mid)/width)) * (Tmax - Tmin) + return Tmax - 0.5 * (1 + np.tanh((ypos - mid) / width)) * (Tmax - Tmin) + -popt, pcov = optimize.curve_fit(te_function, Te_ref[:,0], Te_ref[:,1], - p0 = [2.2e-4, 1e-4, 1e-4, 0.0, 0.0, 0.960, 0.190]) +popt, pcov = optimize.curve_fit( + te_function, + Te_ref[:, 0], + Te_ref[:, 1], + p0=[2.2e-4, 1e-4, 1e-4, 0.0, 0.0, 0.960, 0.190], +) print(popt) -xfit = np.linspace(Te_ref[0,0], 
Te_ref[-1,0], 100) +xfit = np.linspace(Te_ref[0, 0], Te_ref[-1, 0], 100) -plt.plot(xfit, te_function(xfit, *popt, clip=True), '-k') -plt.plot(Te_ref[:,0], Te_ref[:,1], 'or') +plt.plot(xfit, te_function(xfit, *popt, clip=True), "-k") +plt.plot(Te_ref[:, 0], Te_ref[:, 1], "or") plt.show() diff --git a/examples/conduction-snb/sinusoid.py b/examples/conduction-snb/sinusoid.py index 10c81923a3..439530383e 100644 --- a/examples/conduction-snb/sinusoid.py +++ b/examples/conduction-snb/sinusoid.py @@ -11,7 +11,7 @@ build_and_log("Sinusoidal SNB") # Electron temperature in eV -Telist = 10 ** np.linspace(0,3,20) +Telist = 10 ** np.linspace(0, 3, 20) # Electron density in m^-3 Ne = 1e20 @@ -19,29 +19,30 @@ # Length of the domain in m length = 1.0 -c = 299792458 -mu0 = 4.e-7*np.pi -e0 = 1/(c*c*mu0) +c = 299792458 +mu0 = 4.0e-7 * np.pi +e0 = 1 / (c * c * mu0) qe = 1.602176634e-19 me = 9.10938356e-31 -thermal_speed = np.sqrt(2.*qe * Telist / me) -Y = (qe**2 / (e0 * me))**2 / (4 * np.pi) +thermal_speed = np.sqrt(2.0 * qe * Telist / me) +Y = (qe**2 / (e0 * me)) ** 2 / (4 * np.pi) coulomb_log = 6.6 - 0.5 * np.log(Ne * 1e-20) + 1.5 * np.log(Telist) -lambda_ee_T = thermal_speed**4 / (Y * Ne * coulomb_log) +lambda_ee_T = thermal_speed**4 / (Y * Ne * coulomb_log) beta_max_list = [5, 10, 20, 40] -colors = ['k','b','g','r'] +colors = ["k", "b", "g", "r"] ngroups_list = [20, 40, 80] -syms = ['x', 'o', 'D'] +syms = ["x", "o", "D"] for beta_max, color in zip(beta_max_list, colors): for ngroups, sym in zip(ngroups_list, syms): - flux_ratio = [] for Te in Telist: - cmd = "./conduction-snb \"Te={0}+0.01*sin(y)\" Ne={1} mesh:length={2} snb:beta_max={3} snb:ngroups={4}".format(Te, Ne, length, beta_max, ngroups) + cmd = './conduction-snb "Te={0}+0.01*sin(y)" Ne={1} mesh:length={2} snb:beta_max={3} snb:ngroups={4}'.format( + Te, Ne, length, beta_max, ngroups + ) # Run the case s, out = launch_safe(cmd, nproc=1, mthread=1, pipe=True) @@ -54,7 +55,12 @@ flux_ratio.append(div_q[ind] / div_q_SH[ind]) 
- plt.plot(lambda_ee_T / length, flux_ratio, '-'+sym+color, label=r"$\beta_{{max}}={0}, N_g={1}$".format(beta_max,ngroups)) + plt.plot( + lambda_ee_T / length, + flux_ratio, + "-" + sym + color, + label=r"$\beta_{{max}}={0}, N_g={1}$".format(beta_max, ngroups), + ) plt.legend() plt.xlabel(r"$\lambda_{ee,T} / L$") diff --git a/examples/conduction-snb/step.py b/examples/conduction-snb/step.py index 1f8933e66b..63a13149f4 100644 --- a/examples/conduction-snb/step.py +++ b/examples/conduction-snb/step.py @@ -3,7 +3,7 @@ # # Uses a step in the temperature, intended for comparison to VFP results -length = 6e-4 # Domain length in m +length = 6e-4 # Domain length in m qe = 1.602176634e-19 @@ -40,37 +40,37 @@ # Read reference values Te_ref = np.loadtxt("temperature.csv", delimiter=",") -Te_ref[:,0] *= 1e-4 # Convert X axis to m +Te_ref[:, 0] *= 1e-4 # Convert X axis to m SH_ref = np.loadtxt("spitzer-harm.csv", delimiter=",") -SH_ref[:,0] *= 1e-4 +SH_ref[:, 0] *= 1e-4 SNB_ref = np.loadtxt("snb.csv", delimiter=",") -SNB_ref[:,0] *= 1e-4 +SNB_ref[:, 0] *= 1e-4 VFP_ref = np.loadtxt("vfp.csv", delimiter=",") -VFP_ref[:,0] *= 1e-4 +VFP_ref[:, 0] *= 1e-4 ######################################### fig, ax1 = plt.subplots() -color='tab:red' +color = "tab:red" ax1.plot(position, Te * 1e-3, color=color, label="Te") -ax1.plot(Te_ref[:,0], Te_ref[:,1], color=color, marker="o", label="Reference Te") +ax1.plot(Te_ref[:, 0], Te_ref[:, 1], color=color, marker="o", label="Reference Te") ax1.set_xlabel("position [m]") ax1.set_ylabel("Electron temperature [keV]", color=color) -ax1.set_ylim(0,1) -ax1.tick_params(axis='y', colors=color) +ax1.set_ylim(0, 1) +ax1.tick_params(axis="y", colors=color) ax2 = ax1.twinx() -ax2.plot(position, q_SH * 1e-4, '-k', label="Spitzer-Harm") -ax2.plot(SH_ref[:,0], SH_ref[:,1], '--k', label="Reference SH") +ax2.plot(position, q_SH * 1e-4, "-k", label="Spitzer-Harm") +ax2.plot(SH_ref[:, 0], SH_ref[:, 1], "--k", label="Reference SH") -ax2.plot(position, q * 1e-4, 
'-b', label="SNB") -ax2.plot(SNB_ref[:,0], SNB_ref[:,1], '--b', label="Reference SNB") +ax2.plot(position, q * 1e-4, "-b", label="SNB") +ax2.plot(SNB_ref[:, 0], SNB_ref[:, 1], "--b", label="Reference SNB") -ax2.plot(VFP_ref[:,0], VFP_ref[:,1], '--g', label="Reference VFP") +ax2.plot(VFP_ref[:, 0], VFP_ref[:, 1], "--g", label="Reference VFP") ax2.set_ylabel("Heat flux W/cm^2") ax2.set_ylim(bottom=0.0) @@ -78,8 +78,7 @@ plt.legend() fig.tight_layout() -plt.xlim(0,3.5e-4) +plt.xlim(0, 3.5e-4) plt.savefig("snb-step.png") plt.show() - diff --git a/examples/conduction/CMakeLists.txt b/examples/conduction/CMakeLists.txt index f26b838621..4d0adfa34d 100644 --- a/examples/conduction/CMakeLists.txt +++ b/examples/conduction/CMakeLists.txt @@ -2,11 +2,16 @@ cmake_minimum_required(VERSION 3.13) project(conduction LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(conduction - SOURCES conduction.cxx - DATA_DIRS data fromfile - EXTRA_FILES generate.py) +bout_add_example( + conduction + SOURCES + conduction.cxx + DATA_DIRS + data + fromfile + EXTRA_FILES + generate.py) diff --git a/examples/conduction/generate.py b/examples/conduction/generate.py index 7138026898..4a2a2acfc0 100755 --- a/examples/conduction/generate.py +++ b/examples/conduction/generate.py @@ -4,9 +4,9 @@ # Generate an input mesh # -from boututils.datafile import DataFile # Wrapper around NetCDF4 libraries +from boututils.datafile import DataFile # Wrapper around NetCDF4 libraries -nx = 5 # Minimum is 5: 2 boundary, one evolved +nx = 5 # Minimum is 5: 2 boundary, one evolved ny = 64 # Minimum 5. 
Should be divisible by number of processors (so powers of 2 nice) f = DataFile() diff --git a/examples/constraints/alfven-wave/CMakeLists.txt b/examples/constraints/alfven-wave/CMakeLists.txt index a95ace4086..aaa7f964a5 100644 --- a/examples/constraints/alfven-wave/CMakeLists.txt +++ b/examples/constraints/alfven-wave/CMakeLists.txt @@ -2,12 +2,17 @@ cmake_minimum_required(VERSION 3.13) project(constraints-alfven-wave LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(constraints-alfven-wave - SOURCES alfven.cxx - DATA_DIRS cbm18 data - EXTRA_FILES cbm18_dens8.grid_nx68ny64.nc d3d_119919.nc) - +bout_add_example( + constraints-alfven-wave + SOURCES + alfven.cxx + DATA_DIRS + cbm18 + data + EXTRA_FILES + cbm18_dens8.grid_nx68ny64.nc + d3d_119919.nc) diff --git a/examples/constraints/laplace-dae/CMakeLists.txt b/examples/constraints/laplace-dae/CMakeLists.txt index e487bd6a0a..0e5dee580d 100644 --- a/examples/constraints/laplace-dae/CMakeLists.txt +++ b/examples/constraints/laplace-dae/CMakeLists.txt @@ -2,10 +2,9 @@ cmake_minimum_required(VERSION 3.13) project(constraints-laplace-dae LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(constraints-laplace-dae - SOURCES laplace_dae.cxx - EXTRA_FILES simple_xz.nc) +bout_add_example(constraints-laplace-dae SOURCES laplace_dae.cxx EXTRA_FILES + simple_xz.nc) diff --git a/examples/dalf3/CMakeLists.txt b/examples/dalf3/CMakeLists.txt index 5f5b3d701d..0bc8795247 100644 --- a/examples/dalf3/CMakeLists.txt +++ b/examples/dalf3/CMakeLists.txt @@ -2,11 +2,9 @@ cmake_minimum_required(VERSION 3.13) project(dalf3 LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() - -bout_add_example(dalf3 - SOURCES dalf3.cxx - EXTRA_FILES cbm18_8_y064_x516_090309.nc) +bout_add_example(dalf3 SOURCES dalf3.cxx EXTRA_FILES 
+ cbm18_8_y064_x516_090309.nc) diff --git a/examples/eigen-box/CMakeLists.txt b/examples/eigen-box/CMakeLists.txt index 76af4fbaa6..010ecffcf9 100644 --- a/examples/eigen-box/CMakeLists.txt +++ b/examples/eigen-box/CMakeLists.txt @@ -2,10 +2,8 @@ cmake_minimum_required(VERSION 3.13) project(eigen-box LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(eigen-box - SOURCES eigen-box.cxx - EXTRA_FILES eigenvals.py) +bout_add_example(eigen-box SOURCES eigen-box.cxx EXTRA_FILES eigenvals.py) diff --git a/examples/eigen-box/eigenvals.py b/examples/eigen-box/eigenvals.py index 7b52fc60f2..5a71aabfc6 100755 --- a/examples/eigen-box/eigenvals.py +++ b/examples/eigen-box/eigenvals.py @@ -27,7 +27,8 @@ def plot_eigenvals(eigenvalues, eigenvectors=None): raise ValueError("Expecting eigenvectors to be 2D") if eigenvectors.shape[0] != len(eigenvalues): raise ValueError( - "First dimension of eigenvectors must match length of eigenvalues") + "First dimension of eigenvectors must match length of eigenvalues" + ) # If no eigenvectors supplied, only plot eigenvalues, otherwise # eigenvalues and eigenvectors @@ -44,18 +45,18 @@ def plot_eigenvals(eigenvalues, eigenvectors=None): range_r = amax(eigs_r) - amin(eigs_r) range_i = amax(eigs_i) - amin(eigs_i) - ax[0].plot(eigs_r, eigs_i, 'x') + ax[0].plot(eigs_r, eigs_i, "x") ax[0].set_xlabel("Real component") ax[0].set_ylabel("Imaginary component") ax[0].set_title("Eigenvalue") - overplot, = ax[0].plot([], [], 'ok') + (overplot,) = ax[0].plot([], [], "ok") if eigenvectors is not None: # Add a eigenvectors plot - vector_r, = ax[1].plot([], [], '-k', label="Real") - vector_i, = ax[1].plot([], [], '-r', label="Imag") - ax[1].legend(loc='upper right') + (vector_r,) = ax[1].plot([], [], "-k", label="Real") + (vector_i,) = ax[1].plot([], [], "-r", label="Imag") + ax[1].legend(loc="upper right") ax[1].set_xlabel("X") ax[1].set_ylabel("Amplitude") 
ax[1].set_title("Eigenvector") @@ -68,33 +69,33 @@ def onclick(event): # Find closest eigenvectors point, but stretch axes so # real and imaginary components are weighted equally - if(range_r == 0): - dist = ((eigs_i - event.ydata)/range_i)**2 - elif(range_i == 0): - dist = ((eigs_r - event.xdata)/range_r)**2 + if range_r == 0: + dist = ((eigs_i - event.ydata) / range_i) ** 2 + elif range_i == 0: + dist = ((eigs_r - event.xdata) / range_r) ** 2 else: - dist = ((eigs_r - event.xdata)/range_r)**2 + \ - ((eigs_i - event.ydata)/range_i)**2 + dist = ((eigs_r - event.xdata) / range_r) ** 2 + ( + (eigs_i - event.ydata) / range_i + ) ** 2 ind = argmin(dist) # Update the highlight plot overplot.set_data([eigs_r[ind]], [eigs_i[ind]]) - print("Eigenvalue number: %d (%e,%e)" % - (ind, eigs_r[ind], eigs_i[ind])) + print("Eigenvalue number: %d (%e,%e)" % (ind, eigs_r[ind], eigs_i[ind])) if eigenvectors is not None: # Update plots nx = eigenvectors.shape[1] - vector_r.set_data(arange(nx), eigenvectors[2*ind, :]) - vector_i.set_data(arange(nx), eigenvectors[2*ind+1, :]) + vector_r.set_data(arange(nx), eigenvectors[2 * ind, :]) + vector_i.set_data(arange(nx), eigenvectors[2 * ind + 1, :]) ax[1].relim() ax[1].autoscale_view() fig.canvas.draw() - fig.canvas.mpl_connect('button_press_event', onclick) + fig.canvas.mpl_connect("button_press_event", onclick) plt.show() diff --git a/examples/elm-pb-outerloop/CMakeLists.txt b/examples/elm-pb-outerloop/CMakeLists.txt index 008918a87f..5aee8cc431 100644 --- a/examples/elm-pb-outerloop/CMakeLists.txt +++ b/examples/elm-pb-outerloop/CMakeLists.txt @@ -2,16 +2,13 @@ cmake_minimum_required(VERSION 3.13) project(elm_pb LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(elm_pb_outerloop - SOURCES elm_pb_outerloop.cxx - EXTRA_FILES cbm18_dens8.grid_nx68ny64.nc -) +bout_add_example(elm_pb_outerloop SOURCES elm_pb_outerloop.cxx EXTRA_FILES + 
cbm18_dens8.grid_nx68ny64.nc) if(BOUT_HAS_CUDA) - set_source_files_properties(elm_pb_outerloop.cxx PROPERTIES LANGUAGE CUDA ) + set_source_files_properties(elm_pb_outerloop.cxx PROPERTIES LANGUAGE CUDA) endif() - diff --git a/examples/elm-pb/CMakeLists.txt b/examples/elm-pb/CMakeLists.txt index 8c672822cf..cb7b318de9 100644 --- a/examples/elm-pb/CMakeLists.txt +++ b/examples/elm-pb/CMakeLists.txt @@ -2,14 +2,16 @@ cmake_minimum_required(VERSION 3.13) project(elm_pb LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(elm_pb - SOURCES elm_pb.cxx - EXTRA_FILES cbm18_dens8.grid_nx68ny64.nc - data/BOUT.inp - data-hypre/BOUT.inp - data-nonlinear/BOUT.inp) - +bout_add_example( + elm_pb + SOURCES + elm_pb.cxx + EXTRA_FILES + cbm18_dens8.grid_nx68ny64.nc + data/BOUT.inp + data-hypre/BOUT.inp + data-nonlinear/BOUT.inp) diff --git a/examples/elm-pb/Python/2dprofile.py b/examples/elm-pb/Python/2dprofile.py index 5bb01972f9..e95709ab5b 100644 --- a/examples/elm-pb/Python/2dprofile.py +++ b/examples/elm-pb/Python/2dprofile.py @@ -4,81 +4,92 @@ import numpy as np import matplotlib.pyplot as plt from boututils.datafile import DataFile -from matplotlib.ticker import FixedFormatter, FormatStrFormatter, AutoLocator, AutoMinorLocator +from matplotlib.ticker import ( + FixedFormatter, + FormatStrFormatter, + AutoLocator, + AutoMinorLocator, +) with DataFile("./cbm18_dens8.grid_nx68ny64.nc") as f: g = {v: f.read(v) for v in f.keys()} -majorLocator = AutoLocator() -majorFormatter = FormatStrFormatter('%3.0e') -minorLocator = AutoMinorLocator() -Fm = FixedFormatter(['0','$1 \\times 10^4$','$2 \\times 10^4$','$3 \\times 10^4$','$4 \\times 10^4$']) -Fm2 = FixedFormatter(['0','$2 \\times 10^5$','$4 \\times 10^5$','$6 \\times 10^5$']) - -bxy=g.get('Bxy') -p=g.get('pressure') -jpar0=g.get('Jpar0') -psixy=g.get('psixy') -btxy=g.get('Btxy') -shiftangle=g.get('ShiftAngle') - -nx=g.get('nx') -ny=g.get('ny') 
+majorLocator = AutoLocator() +majorFormatter = FormatStrFormatter("%3.0e") +minorLocator = AutoMinorLocator() +Fm = FixedFormatter( + [ + "0", + "$1 \\times 10^4$", + "$2 \\times 10^4$", + "$3 \\times 10^4$", + "$4 \\times 10^4$", + ] +) +Fm2 = FixedFormatter(["0", "$2 \\times 10^5$", "$4 \\times 10^5$", "$6 \\times 10^5$"]) + +bxy = g.get("Bxy") +p = g.get("pressure") +jpar0 = g.get("Jpar0") +psixy = g.get("psixy") +btxy = g.get("Btxy") +shiftangle = g.get("ShiftAngle") + +nx = g.get("nx") +ny = g.get("ny") q = np.zeros((nx, ny)) for i in range(ny): - q[:,i] = old_div(- shiftangle, (2 * np.pi)) - - + q[:, i] = old_div(-shiftangle, (2 * np.pi)) -xarr = psixy[:,0] +xarr = psixy[:, 0] xarr = old_div((xarr + 0.854856), (0.854856 + 0.0760856)) -fig=plt.figure() -plt.plot(xarr,q[:,32]) -plt.xlabel('normalized $\psi$', fontsize=25) -plt.ylabel('$q$',rotation='horizontal',fontsize=25) +fig = plt.figure() +plt.plot(xarr, q[:, 32]) +plt.xlabel("normalized $\psi$", fontsize=25) +plt.ylabel("$q$", rotation="horizontal", fontsize=25) fig.set_tight_layout(True) fig, ax1 = plt.subplots() -ax1.plot(xarr, p[:,32], 'r-', markevery=1, linewidth=3) -ax1.set_xlabel('normalized $\psi$',fontsize=25) +ax1.plot(xarr, p[:, 32], "r-", markevery=1, linewidth=3) +ax1.set_xlabel("normalized $\psi$", fontsize=25) # Make the y-axis label and tick labels match the line color. 
-ax1.set_ylabel('Pressure [Pa]', color='k',fontsize=25) +ax1.set_ylabel("Pressure [Pa]", color="k", fontsize=25) -#set y limit -ax1.set_ylim(0,40000,10000) +# set y limit +ax1.set_ylim(0, 40000, 10000) -#define ticks# +# define ticks# ax1.yaxis.set_ticks(np.arange(0, 40000, 10000)) -#ax1.yaxis.set_major_locator(majorLocator) -#ax1.yaxis.set_major_formatter(majorFormatter) +# ax1.yaxis.set_major_locator(majorLocator) +# ax1.yaxis.set_major_formatter(majorFormatter) ax1.yaxis.set_major_formatter(Fm) -#for the minor ticks, use no labels; default NullFormatter +# for the minor ticks, use no labels; default NullFormatter ax1.xaxis.set_minor_locator(AutoMinorLocator()) ax1.yaxis.set_minor_locator(AutoMinorLocator(10)) -#format tick labels +# format tick labels for tl in ax1.get_yticklabels(): - tl.set_color('k') + tl.set_color("k") ax2 = ax1.twinx() s2 = -jpar0 -ax2.plot(xarr, s2[:,32], 'r-',markevery=1,linewidth=3) -ax2.set_ylabel('$J_\parallel [A/m^2]$', color='k',fontsize=25) -ax2.set_ylim(0,600000) +ax2.plot(xarr, s2[:, 32], "r-", markevery=1, linewidth=3) +ax2.set_ylabel("$J_\parallel [A/m^2]$", color="k", fontsize=25) +ax2.set_ylim(0, 600000) ax2.yaxis.set_ticks(np.arange(0, 600000, 200000)) ax2.yaxis.set_major_formatter(Fm2) for tl in ax2.get_yticklabels(): - tl.set_color('k') + tl.set_color("k") fig.set_tight_layout(True) @@ -86,4 +97,4 @@ plt.show() -#plt.savefig('2d.png', transparent=True) +# plt.savefig('2d.png', transparent=True) diff --git a/examples/elm-pb/Python/analysis.py b/examples/elm-pb/Python/analysis.py index 4faf85f8ce..0e4a3acfdd 100644 --- a/examples/elm-pb/Python/analysis.py +++ b/examples/elm-pb/Python/analysis.py @@ -6,24 +6,24 @@ import pylab as plt from boutdata.collect import collect -path='./data/' -var=collect('P', path=path) +path = "./data/" +var = collect("P", path=path) -dcvar=np.mean(var, axis=3) -rmsvar=np.sqrt(np.mean(var**2,axis=3)-dcvar**2) +dcvar = np.mean(var, axis=3) +rmsvar = np.sqrt(np.mean(var**2, axis=3) - dcvar**2) 
plt.figure() -plt.plot(rmsvar[:,34,32]) +plt.plot(rmsvar[:, 34, 32]) plt.show(block=False) -fvar=np.fft.rfft(var,axis=3) +fvar = np.fft.rfft(var, axis=3) plt.figure() -plt.plot(abs(fvar[:,34,32,1:10])) +plt.plot(abs(fvar[:, 34, 32, 1:10])) plt.show(block=False) plt.figure() -plt.semilogy(abs(fvar[:,34,32,1:7])) +plt.semilogy(abs(fvar[:, 34, 32, 1:7])) plt.show(block=False) plt.show() diff --git a/examples/elm-pb/Python/elm_size.py b/examples/elm-pb/Python/elm_size.py index 0d5c105870..061cad3e1e 100644 --- a/examples/elm-pb/Python/elm_size.py +++ b/examples/elm-pb/Python/elm_size.py @@ -4,87 +4,127 @@ from past.utils import old_div import numpy as np -def elm_size(dcp,p0,uedge,xmin=None,xmax=None,yind=None,Bbar=None): - - lis=[dcp,p0,uedge] - if np.size(lis) != 3 : + +def elm_size(dcp, p0, uedge, xmin=None, xmax=None, yind=None, Bbar=None): + + lis = [dcp, p0, uedge] + if np.size(lis) != 3: print("lack of parameters") return 0 - - if xmin == None : xmin=0 - if xmax == None : xmax=327 - if yind == None : yind=63 # choose the poloidal location for 1D size - if Bbar == None : Bbar=1.992782 # the normalized magnetic field + if xmin == None: + xmin = 0 + if xmax == None: + xmax = 327 + if yind == None: + yind = 63 # choose the poloidal location for 1D size + if Bbar == None: + Bbar = 1.992782 # the normalized magnetic field - mydcp=dcp - myp0=p0 - g=uedge + mydcp = dcp + myp0 = p0 + g = uedge PI = 3.1415926 - MU0 = 4.0e-7*PI + MU0 = 4.0e-7 * PI - s=np.shape(mydcp) + s = np.shape(mydcp) - if np.ndim(mydcp) != 3 : + if np.ndim(mydcp) != 3: print("dcp should be 3D(t,x,y)") - - - nt=s[0] - nx=s[1] - ny=s[2] - - Dtheta=g['dy'] #using correct poloidal angle - psixy=g['psixy'] - R=g['Rxy'] - Bp=g['Bpxy'] - hthe=g['hthe'] - - Dpsi=np.zeros((nx,ny)) - Dpsi[0,:]=psixy[1,:]-psixy[0,:] - Dpsi[nx-1,:]=psixy[nx-1,:]-psixy[nx-2,:] - for i in range(1,nx-2): - Dpsi[i,:]=old_div((psixy[i+1,:]-psixy[i-1,:]),2) - - - Ddcp1=np.zeros(nt) - Ddcp2=np.zeros(nt) - Ddcp3=np.zeros(nt) - Tp01=0. 
- Tp02=0. - Tp03=0. - - for t in range(nt) : - Ddcp3[t]=2.0*PI*np.sum(mydcp[t,xmin:xmax,:]*hthe[xmin:xmax,:]*Dtheta[xmin:xmax,:]*Dpsi[xmin:xmax,:]/Bp[xmin:xmax,:]) - Ddcp2[t]=np.sum(mydcp[t,xmin:xmax,:]*hthe[xmin:xmax,:]*Dtheta[xmin:xmax,:]*Dpsi[xmin:xmax,:]/(R[xmin:xmax,:]*Bp[xmin:xmax,:])) - Ddcp1[t]=np.sum(mydcp[t,xmin:xmax,yind]*Dpsi[xmin:xmax,yind]/(R[xmin:xmax,yind]*Bp[xmin:xmax,yind])) - - - Tp03=2.0*PI*np.sum(myp0[xmin:xmax,:]*hthe[xmin:xmax,:]*Dtheta[xmin:xmax,:]*Dpsi[xmin:xmax,:]/Bp[xmin:xmax,:]) - Tp02=np.sum(myp0[xmin:xmax,:]*hthe[xmin:xmax,:]*Dtheta[xmin:xmax,:]*Dpsi[xmin:xmax,:]/(R[xmin:xmax,:]*Bp[xmin:xmax,:])) - Tp01=np.sum(myp0[xmin:xmax,yind]*Dpsi[xmin:xmax,yind]/(R[xmin:xmax,yind]*Bp[xmin:xmax,yind])) - - s1=np.zeros(nt) - s2=np.zeros(nt) - s3=np.zeros(nt) - E_loss=np.zeros(nt) - - s1=old_div(-Ddcp1,Tp01) #1D elm size - s2=old_div(-Ddcp2,Tp02) #2D elm size - s3=old_div(-Ddcp3,Tp03) #3D elm size - - E_loss=-Ddcp3*(0.5*Bbar*Bbar/MU0) #energy loss, unit J - E_total=Tp03*(0.5*Bbar*Bbar/MU0) #total energy, unit J + + nt = s[0] + nx = s[1] + ny = s[2] + + Dtheta = g["dy"] # using correct poloidal angle + psixy = g["psixy"] + R = g["Rxy"] + Bp = g["Bpxy"] + hthe = g["hthe"] + + Dpsi = np.zeros((nx, ny)) + Dpsi[0, :] = psixy[1, :] - psixy[0, :] + Dpsi[nx - 1, :] = psixy[nx - 1, :] - psixy[nx - 2, :] + for i in range(1, nx - 2): + Dpsi[i, :] = old_div((psixy[i + 1, :] - psixy[i - 1, :]), 2) + + Ddcp1 = np.zeros(nt) + Ddcp2 = np.zeros(nt) + Ddcp3 = np.zeros(nt) + Tp01 = 0.0 + Tp02 = 0.0 + Tp03 = 0.0 + + for t in range(nt): + Ddcp3[t] = ( + 2.0 + * PI + * np.sum( + mydcp[t, xmin:xmax, :] + * hthe[xmin:xmax, :] + * Dtheta[xmin:xmax, :] + * Dpsi[xmin:xmax, :] + / Bp[xmin:xmax, :] + ) + ) + Ddcp2[t] = np.sum( + mydcp[t, xmin:xmax, :] + * hthe[xmin:xmax, :] + * Dtheta[xmin:xmax, :] + * Dpsi[xmin:xmax, :] + / (R[xmin:xmax, :] * Bp[xmin:xmax, :]) + ) + Ddcp1[t] = np.sum( + mydcp[t, xmin:xmax, yind] + * Dpsi[xmin:xmax, yind] + / (R[xmin:xmax, yind] * Bp[xmin:xmax, 
yind]) + ) + + Tp03 = ( + 2.0 + * PI + * np.sum( + myp0[xmin:xmax, :] + * hthe[xmin:xmax, :] + * Dtheta[xmin:xmax, :] + * Dpsi[xmin:xmax, :] + / Bp[xmin:xmax, :] + ) + ) + Tp02 = np.sum( + myp0[xmin:xmax, :] + * hthe[xmin:xmax, :] + * Dtheta[xmin:xmax, :] + * Dpsi[xmin:xmax, :] + / (R[xmin:xmax, :] * Bp[xmin:xmax, :]) + ) + Tp01 = np.sum( + myp0[xmin:xmax, yind] + * Dpsi[xmin:xmax, yind] + / (R[xmin:xmax, yind] * Bp[xmin:xmax, yind]) + ) + + s1 = np.zeros(nt) + s2 = np.zeros(nt) + s3 = np.zeros(nt) + E_loss = np.zeros(nt) + + s1 = old_div(-Ddcp1, Tp01) # 1D elm size + s2 = old_div(-Ddcp2, Tp02) # 2D elm size + s3 = old_div(-Ddcp3, Tp03) # 3D elm size + + E_loss = -Ddcp3 * (0.5 * Bbar * Bbar / MU0) # energy loss, unit J + E_total = Tp03 * (0.5 * Bbar * Bbar / MU0) # total energy, unit J class ELM: pass - elmsize=ELM() - elmsize.s1=s1 - elmsize.s2=s2 - elmsize.s3=s3 - elmsize.E_loss=E_loss - elmsize.E_total=E_total + + elmsize = ELM() + elmsize.s1 = s1 + elmsize.s2 = s2 + elmsize.s3 = s3 + elmsize.E_loss = E_loss + elmsize.E_total = E_total return elmsize - - diff --git a/examples/elm-pb/Python/fftall.py b/examples/elm-pb/Python/fftall.py index e0a4385764..c6d5b87ded 100644 --- a/examples/elm-pb/Python/fftall.py +++ b/examples/elm-pb/Python/fftall.py @@ -2,8 +2,7 @@ from numpy import * from scipy.io import readsav -print('Calculating P..') -a=transpose(readsav('phi.idl.dat')['phi']) -fa=fft.fft(a,axis=2) -save('fp',fa) - +print("Calculating P..") +a = transpose(readsav("phi.idl.dat")["phi"]) +fa = fft.fft(a, axis=2) +save("fp", fa) diff --git a/examples/elm-pb/Python/fftall2.py b/examples/elm-pb/Python/fftall2.py index d864f356d0..64b08c437a 100644 --- a/examples/elm-pb/Python/fftall2.py +++ b/examples/elm-pb/Python/fftall2.py @@ -2,9 +2,9 @@ from numpy import * from boutdata.collect import collect -path='./data/' -data=collect('P',path=path) +path = "./data/" +data = collect("P", path=path) -print('Saving P..') -fa=fft.fft(data,axis=3) -save('fp',rollaxis(fa,0,4)) 
+print("Saving P..") +fa = fft.fft(data, axis=3) +save("fp", rollaxis(fa, 0, 4)) diff --git a/examples/elm-pb/Python/grate.py b/examples/elm-pb/Python/grate.py index 003f38ff8e..65d6da6449 100644 --- a/examples/elm-pb/Python/grate.py +++ b/examples/elm-pb/Python/grate.py @@ -8,12 +8,11 @@ from boututils.moment_xyzt import moment_xyzt +path = "./data/" -path='./data/' +p = collect("P", path=path) +rmsp_f = moment_xyzt(p[:, 34:35, 32:33, :], "RMS").rms -p=collect('P',path=path) -rmsp_f=moment_xyzt(p[:,34:35,32:33,:], 'RMS').rms - -print(np.gradient(np.log(rmsp_f[:,0,0]))[-1]) +print(np.gradient(np.log(rmsp_f[:, 0, 0]))[-1]) diff --git a/examples/elm-pb/Python/grate2.py b/examples/elm-pb/Python/grate2.py index f8172bcf08..af0768803a 100644 --- a/examples/elm-pb/Python/grate2.py +++ b/examples/elm-pb/Python/grate2.py @@ -2,6 +2,7 @@ from __future__ import division from builtins import range from past.utils import old_div + ### # computes average growth rate for all points at the final timestep # computes average growth rate for points in the mead plane at the final timestep @@ -10,40 +11,44 @@ from boutdata.collect import collect from boututils.moment_xyzt import moment_xyzt -path='./data/' +path = "./data/" + +p = collect("P", path=path) -p=collect('P',path=path) +nmpy = old_div(p.shape[2], 2) # define mead plane -nmpy=old_div(p.shape[2],2) # define mead plane +ik = 50 # disregard the first ik timesteps -ik = 50 # disregard the first ik timesteps def gr(p): - rmsp_f=moment_xyzt(p, 'RMS').rms + rmsp_f = moment_xyzt(p, "RMS").rms - ni=np.shape(rmsp_f)[1] - nj=np.shape(rmsp_f)[2] + ni = np.shape(rmsp_f)[1] + nj = np.shape(rmsp_f)[2] - growth=np.zeros((ni,nj)) + growth = np.zeros((ni, nj)) - for i in range(ni): - for j in range(nj): - growth[i,j]=np.gradient(np.log(rmsp_f[ik::,i,j]))[-1] + for i in range(ni): + for j in range(nj): + growth[i, j] = np.gradient(np.log(rmsp_f[ik::, i, j]))[-1] - return growth + return growth -growth=gr(p) +growth = gr(p) 
-d=np.ma.masked_array(growth,np.isnan(growth)) +d = np.ma.masked_array(growth, np.isnan(growth)) # masked arrays # http://stackoverflow.com/questions/5480694/numpy-calculate-averages-with-nans-removed -print('Total mean value = ', np.mean(np.ma.masked_array(d,np.isinf(d)))) -mm=np.ma.masked_array(growth[:,nmpy],np.isnan(growth[:,nmpy])) -if np.isinf(np.mean(mm)) : - print('There is an Inf value in the mead plane') - print('Mean value of floating numbers in mead plane is = ', np.mean(np.ma.masked_array(mm,np.isinf(mm)))) +print("Total mean value = ", np.mean(np.ma.masked_array(d, np.isinf(d)))) +mm = np.ma.masked_array(growth[:, nmpy], np.isnan(growth[:, nmpy])) +if np.isinf(np.mean(mm)): + print("There is an Inf value in the mead plane") + print( + "Mean value of floating numbers in mead plane is = ", + np.mean(np.ma.masked_array(mm, np.isinf(mm))), + ) else: - print('Mean value in mead plane= ', np.mean(mm)) + print("Mean value in mead plane= ", np.mean(mm)) diff --git a/examples/elm-pb/Python/plotcollapse.py b/examples/elm-pb/Python/plotcollapse.py index ee64ca381f..f68e7b06aa 100755 --- a/examples/elm-pb/Python/plotcollapse.py +++ b/examples/elm-pb/Python/plotcollapse.py @@ -10,49 +10,52 @@ import os from pathlib import Path -#Dynamic matplotlib settings +# Dynamic matplotlib settings from matplotlib import rcParams -rcParams['font.size'] = 20. 
-rcParams['legend.fontsize'] = 'small' -rcParams['lines.linewidth'] = 2 -if not os.path.exists('image'): - os.makedirs('image') +rcParams["font.size"] = 20.0 +rcParams["legend.fontsize"] = "small" +rcParams["lines.linewidth"] = 2 + +if not os.path.exists("image"): + os.makedirs("image") filename = Path(__file__).with_name("cbm18_dens8.grid_nx68ny64.nc") with DataFile(str(filename)) as f: g = {v: f.read(v) for v in f.keys()} -psi = old_div((g['psixy'][:, 32] - g['psi_axis']), (g['psi_bndry'] - g['psi_axis'])) +psi = old_div((g["psixy"][:, 32] - g["psi_axis"]), (g["psi_bndry"] - g["psi_axis"])) -path = './data' +path = "./data" plt.figure() -p0=collect('P0', path=path) +p0 = collect("P0", path=path) -p=collect('P', path=path) -res = moment_xyzt(p,'RMS','DC') +p = collect("P", path=path) +res = moment_xyzt(p, "RMS", "DC") rmsp = res.rms dcp = res.dc nt = dcp.shape[0] -plt.plot(psi, p0[:, 32], 'k--', label='t=0') -plt.plot(psi, p0[:, 32] + dcp[nt//4, :, 32], 'r-', label='t='+np.str(nt//4)) -plt.plot(psi, p0[:, 32] + dcp[nt//2, :, 32], 'g-', label='t='+np.str(nt//2)) -plt.plot(psi, p0[:, 32] + dcp[3*nt//4, :, 32], 'b-', label='t='+np.str(3*nt//4)) -plt.plot(psi, p0[:, 32] + dcp[-1, :, 32], 'c-', label='t='+np.str(nt)) +plt.plot(psi, p0[:, 32], "k--", label="t=0") +plt.plot(psi, p0[:, 32] + dcp[nt // 4, :, 32], "r-", label="t=" + np.str(nt // 4)) +plt.plot(psi, p0[:, 32] + dcp[nt // 2, :, 32], "g-", label="t=" + np.str(nt // 2)) +plt.plot( + psi, p0[:, 32] + dcp[3 * nt // 4, :, 32], "b-", label="t=" + np.str(3 * nt // 4) +) +plt.plot(psi, p0[:, 32] + dcp[-1, :, 32], "c-", label="t=" + np.str(nt)) plt.legend() -#plt.xlim(0.6, 1.0) -plt.xlabel(r'Normalized poloidal flux ($\psi$)') -plt.ylabel(r'$\langle p\rangle_\xi$') -plt.title(r'Pressure') +# plt.xlim(0.6, 1.0) +plt.xlabel(r"Normalized poloidal flux ($\psi$)") +plt.ylabel(r"$\langle p\rangle_\xi$") +plt.title(r"Pressure") xmin, xmax = plt.xlim() ymin, ymax = plt.ylim() -#plt.savefig('image/plotcollapse.png', 
bbox_inches='tight') -#plt.savefig('image/plotcollapse.eps', bbox_inches='tight') +# plt.savefig('image/plotcollapse.png', bbox_inches='tight') +# plt.savefig('image/plotcollapse.eps', bbox_inches='tight') plt.tight_layout() diff --git a/examples/elm-pb/Python/plotmode.py b/examples/elm-pb/Python/plotmode.py index d89fdaf940..9325ee8764 100644 --- a/examples/elm-pb/Python/plotmode.py +++ b/examples/elm-pb/Python/plotmode.py @@ -4,60 +4,57 @@ from builtins import range from past.utils import old_div -from numpy import *; -#from scipy.io import readsav; -import matplotlib.pyplot as plt; +from numpy import * + +# from scipy.io import readsav; +import matplotlib.pyplot as plt # Dynamic matplotlib settings -from matplotlib import rcParams; -rcParams['font.size'] = 20; -rcParams['legend.fontsize'] = 'small'; -rcParams['legend.labelspacing'] = 0.1; -rcParams['lines.linewidth'] = 2; -rcParams['savefig.bbox'] = 'tight'; +from matplotlib import rcParams +rcParams["font.size"] = 20 +rcParams["legend.fontsize"] = "small" +rcParams["legend.labelspacing"] = 0.1 +rcParams["lines.linewidth"] = 2 +rcParams["savefig.bbox"] = "tight" # Create image directory if not exists -import os; -if not os.path.exists('image'): - os.makedirs('image'); - -#fphi = transpose(readsav('fphi.idl.dat')['fphi'])[:,:,:,]; -fphi = load('fp.npy') - -plt.figure(); -for i in range(1, 9): - print("Growth rate for mode number", i) - print(gradient(log(abs(fphi[34, 32, i, :])))) - plt.semilogy(((abs(fphi[34, 32, i, :]))), label = 'n=' + str(i * 5)); +import os -plt.legend(loc=2); -plt.xlabel('Time'); -plt.savefig('image/plotmode.png'); -plt.savefig('image/plotmode.eps'); +if not os.path.exists("image"): + os.makedirs("image") +# fphi = transpose(readsav('fphi.idl.dat')['fphi'])[:,:,:,]; +fphi = load("fp.npy") -plt.show(block=False); -plt.figure(); +plt.figure() for i in range(1, 9): - plt.plot(abs(fphi[:, 32, i, -1]), label = 'n=' + str(i * 5)); - -plt.legend(); -plt.xlabel('X index'); - 
-plt.savefig('image/plotmodeamp.png'); -plt.savefig('image/plotmodeamp.eps'); - -plt.show(block=False); - -plt.figure(); + print("Growth rate for mode number", i) + print(gradient(log(abs(fphi[34, 32, i, :])))) + plt.semilogy((abs(fphi[34, 32, i, :])), label="n=" + str(i * 5)) + +plt.legend(loc=2) +plt.xlabel("Time") +plt.savefig("image/plotmode.png") +plt.savefig("image/plotmode.eps") +plt.show(block=False) +plt.figure() for i in range(1, 9): - plt.plot(old_div(abs(fphi[:, 32, i, -1]),abs(fphi[:, 32, i, -1]).max()), label = 'n=' + str(i * 5)); - -plt.legend(); -plt.xlabel('X index'); - -plt.savefig('image/plotmodenorm.png'); -plt.savefig('image/plotmodenorm.eps'); - -plt.show(); - + plt.plot(abs(fphi[:, 32, i, -1]), label="n=" + str(i * 5)) + +plt.legend() +plt.xlabel("X index") +plt.savefig("image/plotmodeamp.png") +plt.savefig("image/plotmodeamp.eps") +plt.show(block=False) +plt.figure() +for i in range(1, 9): + plt.plot( + old_div(abs(fphi[:, 32, i, -1]), abs(fphi[:, 32, i, -1]).max()), + label="n=" + str(i * 5), + ) + +plt.legend() +plt.xlabel("X index") +plt.savefig("image/plotmodenorm.png") +plt.savefig("image/plotmodenorm.eps") +plt.show() diff --git a/examples/elm-pb/Python/plotmode2.py b/examples/elm-pb/Python/plotmode2.py index d0c63f32d9..c298a3a5ef 100644 --- a/examples/elm-pb/Python/plotmode2.py +++ b/examples/elm-pb/Python/plotmode2.py @@ -4,63 +4,61 @@ from builtins import range from past.utils import old_div -from numpy import *; -#from scipy.io import readsav; -import matplotlib.pyplot as plt; +from numpy import * + +# from scipy.io import readsav; +import matplotlib.pyplot as plt from boutdata.collect import collect # Dynamic matplotlib settings -from matplotlib import rcParams; -rcParams['font.size'] = 20; -rcParams['legend.fontsize'] = 'small'; -rcParams['legend.labelspacing'] = 0.1; -rcParams['lines.linewidth'] = 2; -rcParams['savefig.bbox'] = 'tight'; +from matplotlib import rcParams +rcParams["font.size"] = 20 +rcParams["legend.fontsize"] = 
"small" +rcParams["legend.labelspacing"] = 0.1 +rcParams["lines.linewidth"] = 2 +rcParams["savefig.bbox"] = "tight" # Create image directory if not exists -import os; -if not os.path.exists('image'): - os.makedirs('image'); +import os + +if not os.path.exists("image"): + os.makedirs("image") -path='./data/' -data=collect('P',path=path) +path = "./data/" +data = collect("P", path=path) -#fphi = transpose(readsav('fphi.idl.dat')['fphi'])[:,:,:,]; +# fphi = transpose(readsav('fphi.idl.dat')['fphi'])[:,:,:,]; fphi = fft.fft(data, axis=3) -plt.figure(); +plt.figure() for i in range(1, 9): - print("Growth rate for mode number", i) - print(gradient(log(abs(fphi[:,34, 32, i])))) - plt.semilogy(((abs(fphi[:,34, 32, i]))), label = 'n=' + str(i * 5)); - -plt.legend(loc=2); -plt.xlabel('Time'); -plt.savefig('image/plotmode.png'); -plt.savefig('image/plotmode.eps'); - - -plt.show(block=False); -plt.figure(); + print("Growth rate for mode number", i) + print(gradient(log(abs(fphi[:, 34, 32, i])))) + plt.semilogy((abs(fphi[:, 34, 32, i])), label="n=" + str(i * 5)) + +plt.legend(loc=2) +plt.xlabel("Time") +plt.savefig("image/plotmode.png") +plt.savefig("image/plotmode.eps") +plt.show(block=False) +plt.figure() for i in range(1, 9): - plt.plot(abs(fphi[-1, :, 32, i]), label = 'n=' + str(i * 5)); - -plt.legend(); -plt.xlabel('X index'); - -plt.savefig('image/plotmodeamp.png'); -plt.savefig('image/plotmodeamp.eps'); - -plt.show(block=False); - -plt.figure(); + plt.plot(abs(fphi[-1, :, 32, i]), label="n=" + str(i * 5)) + +plt.legend() +plt.xlabel("X index") +plt.savefig("image/plotmodeamp.png") +plt.savefig("image/plotmodeamp.eps") +plt.show(block=False) +plt.figure() for i in range(1, 9): - plt.plot(old_div(abs(fphi[-1, :, 32, i]),abs(fphi[-1, :, 32, i]).max()), label = 'n=' + str(i * 5)); - -plt.legend(); -plt.xlabel('X index'); - -plt.savefig('image/plotmodenorm.png'); -plt.savefig('image/plotmodenorm.eps'); - -plt.show(); + plt.plot( + old_div(abs(fphi[-1, :, 32, i]), abs(fphi[-1, 
:, 32, i]).max()), + label="n=" + str(i * 5), + ) + +plt.legend() +plt.xlabel("X index") +plt.savefig("image/plotmodenorm.png") +plt.savefig("image/plotmodenorm.eps") +plt.show() diff --git a/examples/elm-pb/Python/plotphase.py b/examples/elm-pb/Python/plotphase.py index 9225e498ae..10f4279cf4 100755 --- a/examples/elm-pb/Python/plotphase.py +++ b/examples/elm-pb/Python/plotphase.py @@ -4,34 +4,33 @@ from numpy import save, load, angle import matplotlib.pyplot as plt -fphi = load('fphi.npy') +fphi = load("fphi.npy") -fte = load('fte.npy') -phase_te = angle(old_div(fphi,fte)) -save('phase_te', phase_te) +fte = load("fte.npy") +phase_te = angle(old_div(fphi, fte)) +save("phase_te", phase_te) plt.figure() -plt.plot(mean(mean(phase_te[:,:,3,:],axis=1),axis=1)) -plt.title('Te') -plt.savefig('image/phase_te.png') -plt.savefig('image/phase_te.eps') +plt.plot(mean(mean(phase_te[:, :, 3, :], axis=1), axis=1)) +plt.title("Te") +plt.savefig("image/phase_te.png") +plt.savefig("image/phase_te.eps") -fti = load('fti.npy') -phase_ti = angle(old_div(fphi,fti)) -save('phase_ti', phase_ti) +fti = load("fti.npy") +phase_ti = angle(old_div(fphi, fti)) +save("phase_ti", phase_ti) plt.figure() -plt.plot(mean(mean(phase_ti[:,:,3,:],axis=1),axis=1)) -plt.title('ti') -plt.savefig('image/phase_ti.png') -plt.savefig('image/phase_ti.eps') +plt.plot(mean(mean(phase_ti[:, :, 3, :], axis=1), axis=1)) +plt.title("ti") +plt.savefig("image/phase_ti.png") +plt.savefig("image/phase_ti.eps") -fni = load('fni.npy') -phase_ni = angle(old_div(fphi,fni)) -save('phase_ni', phase_ni) +fni = load("fni.npy") +phase_ni = angle(old_div(fphi, fni)) +save("phase_ni", phase_ni) plt.figure() -plt.plot(mean(mean(phase_ni[:,:,3,:],axis=1),axis=1)) -plt.title('ni') -plt.savefig('image/phase_ni.png') -plt.savefig('image/phase_ni.eps') +plt.plot(mean(mean(phase_ni[:, :, 3, :], axis=1), axis=1)) +plt.title("ni") +plt.savefig("image/phase_ni.png") +plt.savefig("image/phase_ni.eps") plt.show() - diff --git 
a/examples/elm-pb/Python/polslice.py b/examples/elm-pb/Python/polslice.py index fe78495b5a..6a1179b2ae 100644 --- a/examples/elm-pb/Python/polslice.py +++ b/examples/elm-pb/Python/polslice.py @@ -11,31 +11,33 @@ # Specify parameters -path='./data/' +path = "./data/" -variable="P" +variable = "P" p = collect(variable, path=path) -period=15 +period = 15 -grid='../cbm18_dens8.grid_nx68ny64.nc' +grid = "../cbm18_dens8.grid_nx68ny64.nc" ######################################################## # Call plotpolslice once to get extended poloidal grid -r,z,fun=plotpolslice(p[0,:,:,:],grid,period=period,rz=1) +r, z, fun = plotpolslice(p[0, :, :, :], grid, period=period, rz=1) -nx=r.shape[0] # number of points in r -ny=r.shape[1] # number of points in z -nt=p.shape[0] # time intervals +nx = r.shape[0] # number of points in r +ny = r.shape[1] # number of points in z +nt = p.shape[0] # time intervals -fm=np.zeros((nt,nx,ny)) # array to store the time sequence of the poloidal cross section +fm = np.zeros( + (nt, nx, ny) +) # array to store the time sequence of the poloidal cross section -#Compute all time frames +# Compute all time frames for k in range(nt): - fm[k,:,:]=plotpolslice(p[k,:,:,:],grid,period=period,rz=0) + fm[k, :, :] = plotpolslice(p[k, :, :, :], grid, period=period, rz=0) -np.savez('pslice',fm=fm, z=z, r=r) +np.savez("pslice", fm=fm, z=z, r=r) diff --git a/examples/elm-pb/Python/post.py b/examples/elm-pb/Python/post.py index 9bb67347f0..cdaefa98cf 100644 --- a/examples/elm-pb/Python/post.py +++ b/examples/elm-pb/Python/post.py @@ -15,12 +15,12 @@ from mayavi import mlab -path0="./data0/" -path1="./data/" +path0 = "./data0/" +path1 = "./data/" -period=15 +period = 15 -gfile='./cbm18_dens8.grid_nx68ny64.nc' +gfile = "./cbm18_dens8.grid_nx68ny64.nc" with DataFile(gfile) as f: @@ -28,71 +28,115 @@ Dphi0 = collect("Dphi0", path=path0) -phi0 = collect("phi0", path=path1) # needs diamagnetic effects +phi0 = collect("phi0", path=path1) # needs diamagnetic effects # 
-psixy=g.get('psixy') -PSI_AXIS=g.get('psi_axis') -PSI_BNDRY=g.get('psi_bndry') +psixy = g.get("psixy") +PSI_AXIS = g.get("psi_axis") +PSI_BNDRY = g.get("psi_bndry") # -psix=old_div((psixy[:,32]-PSI_AXIS),(PSI_BNDRY-PSI_AXIS)) -Epsi=-deriv(phi0[:,32],psix) +psix = old_div((psixy[:, 32] - PSI_AXIS), (PSI_BNDRY - PSI_AXIS)) +Epsi = -deriv(phi0[:, 32], psix) # # -fig=figure() -plot(psix,-Dphi0[:,32], 'r', linewidth=5) -plot(psix,Epsi,'k',linewidth=5) -annotate('w/o flow', xy=(.3, .7), xycoords='axes fraction',horizontalalignment='center', verticalalignment='center', size=30) -annotate('w/ flow', xy=(.7, .4), xycoords='axes fraction',horizontalalignment='center', verticalalignment='center', color='r', size=30) -xlabel('Radial $\psi$',fontsize=25) -ylabel('$\Omega(\psi)/\omega_A$',fontsize=25) -ylim([-.05,0]) -xlim([0.4,1.2]) +fig = figure() +plot(psix, -Dphi0[:, 32], "r", linewidth=5) +plot(psix, Epsi, "k", linewidth=5) +annotate( + "w/o flow", + xy=(0.3, 0.7), + xycoords="axes fraction", + horizontalalignment="center", + verticalalignment="center", + size=30, +) +annotate( + "w/ flow", + xy=(0.7, 0.4), + xycoords="axes fraction", + horizontalalignment="center", + verticalalignment="center", + color="r", + size=30, +) +xlabel("Radial $\psi$", fontsize=25) +ylabel("$\Omega(\psi)/\omega_A$", fontsize=25) +ylim([-0.05, 0]) +xlim([0.4, 1.2]) fig.set_tight_layout(True) show(block=False) p_f0 = collect("P", path=path0) p_f = collect("P", path=path1) # -rmsp_f0=moment_xyzt(p_f0, 'RMS').rms -rmsp_f=moment_xyzt(p_f, 'RMS').rms +rmsp_f0 = moment_xyzt(p_f0, "RMS").rms +rmsp_f = moment_xyzt(p_f, "RMS").rms # -fig=figure(figsize=(10, 8)) -plot(np.gradient(np.log(rmsp_f0[:,34,32])), color='k',linewidth=3) -plot(np.gradient(np.log(rmsp_f[:,34,32])),color='red',linewidth=3) - -ylabel('$\gamma / \omega_A$',fontsize=25) -xlabel('Time$(\\tau_A)$',fontsize=25) -annotate('w/o flow', xy=(.5, .7), xycoords='axes fraction',horizontalalignment='center', verticalalignment='center', size=30) 
-annotate('w/ flow', xy=(.5, .4), xycoords='axes fraction',horizontalalignment='center', verticalalignment='center', color='r', size=30) -ylim([0,0.5]) -xlim([0,100]) +fig = figure(figsize=(10, 8)) +plot(np.gradient(np.log(rmsp_f0[:, 34, 32])), color="k", linewidth=3) +plot(np.gradient(np.log(rmsp_f[:, 34, 32])), color="red", linewidth=3) + +ylabel("$\gamma / \omega_A$", fontsize=25) +xlabel("Time$(\\tau_A)$", fontsize=25) +annotate( + "w/o flow", + xy=(0.5, 0.7), + xycoords="axes fraction", + horizontalalignment="center", + verticalalignment="center", + size=30, +) +annotate( + "w/ flow", + xy=(0.5, 0.4), + xycoords="axes fraction", + horizontalalignment="center", + verticalalignment="center", + color="r", + size=30, +) +ylim([0, 0.5]) +xlim([0, 100]) fig.set_tight_layout(True) show(block=False) - -plotpolslice(p_f0[50,:,:,:],gfile,period=period, fig=1) -mlab.text(.01,.99,"w/o flow") - -plotpolslice(p_f[50,:,:,:],gfile,period=period, fig=1) -mlab.text(.01,.99,"w/ flow") - -fig=figure() -mode_structure(p_f0[50,:,:,:], g, period=period) -plot([40,40],[0,.014],'k--',linewidth=5) -annotate('w/o flow', xy=(.3, .7), xycoords='axes fraction',horizontalalignment='center', verticalalignment='center', size=30) -ylim([0,0.014]) -xlim([0,80]) +plotpolslice(p_f0[50, :, :, :], gfile, period=period, fig=1) +mlab.text(0.01, 0.99, "w/o flow") + +plotpolslice(p_f[50, :, :, :], gfile, period=period, fig=1) +mlab.text(0.01, 0.99, "w/ flow") + +fig = figure() +mode_structure(p_f0[50, :, :, :], g, period=period) +plot([40, 40], [0, 0.014], "k--", linewidth=5) +annotate( + "w/o flow", + xy=(0.3, 0.7), + xycoords="axes fraction", + horizontalalignment="center", + verticalalignment="center", + size=30, +) +ylim([0, 0.014]) +xlim([0, 80]) fig.set_tight_layout(True) show(block=False) figure() -mode_structure(p_f[50,:,:,:], g, period=period) -plot([40,40],[0,.014],'k--',linewidth=5) -annotate('w/ flow', xy=(.3, .7), xycoords='axes fraction',horizontalalignment='center', 
verticalalignment='center', color='k', size=30) -ylim([0,0.0001]) -xlim([0,80]) +mode_structure(p_f[50, :, :, :], g, period=period) +plot([40, 40], [0, 0.014], "k--", linewidth=5) +annotate( + "w/ flow", + xy=(0.3, 0.7), + xycoords="axes fraction", + horizontalalignment="center", + verticalalignment="center", + color="k", + size=30, +) +ylim([0, 0.0001]) +xlim([0, 80]) show(block=False) show() diff --git a/examples/elm-pb/Python/read_elmsize.py b/examples/elm-pb/Python/read_elmsize.py index fcd3d24e53..340f57ce7b 100644 --- a/examples/elm-pb/Python/read_elmsize.py +++ b/examples/elm-pb/Python/read_elmsize.py @@ -4,12 +4,12 @@ from pylab import save, figure, plot, title, xlabel, ylabel, show, tight_layout from elm_size import elm_size -path='./data' +path = "./data" -t_array=collect('t_array', path=path) -save('t_array.dat', t_array) -p0=collect('P0', path=path) -save('p0.dat', p0) +t_array = collect("t_array", path=path) +save("t_array.dat", t_array) +p0 = collect("P0", path=path) +save("p0.dat", p0) # n0=collect('n0', path=path) @@ -22,27 +22,27 @@ with DataFile("./cbm18_dens8.grid_nx68ny64.nc") as f: gfile = {v: f.read(v) for v in f.keys()} -p=collect('P', path=path) -save('p.dat', p) -res=moment_xyzt(p,'RMS','DC') -rmsp=res.rms -dcp=res.dc -save('rmsp.dat', rmsp) -save('dcp.dat', dcp) -elmsp=elm_size(dcp,p0,gfile,yind=32,Bbar=gfile['bmag']) -save('elmsp.dat', elmsp) +p = collect("P", path=path) +save("p.dat", p) +res = moment_xyzt(p, "RMS", "DC") +rmsp = res.rms +dcp = res.dc +save("rmsp.dat", rmsp) +save("dcp.dat", dcp) +elmsp = elm_size(dcp, p0, gfile, yind=32, Bbar=gfile["bmag"]) +save("elmsp.dat", elmsp) figure(0) -plot(t_array,elmsp.s2, 'k-') -xlabel('t/Ta') -ylabel('Elm size') -title('Elm size, P') +plot(t_array, elmsp.s2, "k-") +xlabel("t/Ta") +ylabel("Elm size") +title("Elm size, P") tight_layout() show() -phi=collect('phi', path=path ) -save('phi.dat', phi) -res=moment_xyzt( phi, 'DC', 'RMS') -save('dcphi.dat',res.dc) -save('rmsphi.dat', res.rms) +phi = 
collect("phi", path=path) +save("phi.dat", phi) +res = moment_xyzt(phi, "DC", "RMS") +save("dcphi.dat", res.dc) +save("rmsphi.dat", res.rms) diff --git a/examples/elm-pb/Python/showpolslice.py b/examples/elm-pb/Python/showpolslice.py index d292575e35..072c53dcd6 100644 --- a/examples/elm-pb/Python/showpolslice.py +++ b/examples/elm-pb/Python/showpolslice.py @@ -6,10 +6,12 @@ import numpy as np from tvtk.tools import visual + try: from enthought.mayavi import mlab except ImportError: - try: from mayavi import mlab + try: + from mayavi import mlab except ImportError: print("No mlab available") @@ -17,40 +19,42 @@ ########################### # Read polslice array -npzfile=np.load('pslice.npz') -r=npzfile['r'] -z=npzfile['z'] -fm=npzfile['fm'] +npzfile = np.load("pslice.npz") +r = npzfile["r"] +z = npzfile["z"] +fm = npzfile["fm"] ######################################################## # Set up the window -f = mlab.figure(size=(800,600)) +f = mlab.figure(size=(800, 600)) # Tell visual to use this as the viewer. visual.set_viewer(f) ######################################################## # Do the appropriate graph -#s = mlab.contour_surf(r,z,fun, contours=30, line_width=.5, transparent=True) -#s=mlab.surf(r,z,fun, colormap='Spectral') -s = mlab.mesh(r,z,fm[0,:,:], scalars=fm[0,:,:], colormap='PuOr')#, wrap_scale='true')#, representation='wireframe') -s.enable_contours=True -s.contour.filled_contours=True +# s = mlab.contour_surf(r,z,fun, contours=30, line_width=.5, transparent=True) +# s=mlab.surf(r,z,fun, colormap='Spectral') +s = mlab.mesh( + r, z, fm[0, :, :], scalars=fm[0, :, :], colormap="PuOr" +) # , wrap_scale='true')#, representation='wireframe') +s.enable_contours = True +s.contour.filled_contours = True # Define perspective and optional attributes. 
You can also implement from the window afterwards -mlab.view(0,0) -#mlab.view(-94.159958841373324, +mlab.view(0, 0) +# mlab.view(-94.159958841373324, # 53.777002382688906, # 8.2311808018087582) mlab.draw(f) mlab.colorbar(orientation="vertical") -#mlab.axes() -#mlab.outline() +# mlab.axes() +# mlab.outline() ######################################################## # mlab animation -anim(s,fm, save=True) +anim(s, fm, save=True) diff --git a/examples/elm-pb/Python/sprofiles.py b/examples/elm-pb/Python/sprofiles.py index 9d8eea48de..244599f8af 100644 --- a/examples/elm-pb/Python/sprofiles.py +++ b/examples/elm-pb/Python/sprofiles.py @@ -16,45 +16,46 @@ with DataFile(gfile) as f: g = {v: f.read(v) for v in f.keys()} -var=collect("P", path=path) +var = collect("P", path=path) -sol=surface_average(var, g) -#sol=np.mean(var,axis=3) +sol = surface_average(var, g) +# sol=np.mean(var,axis=3) -p0av=collect("P0", path=path) +p0av = collect("P0", path=path) -q=np.zeros(sol.shape) +q = np.zeros(sol.shape) for i in range(sol.shape[1]): - q[:,i]=sol[:,i]+p0av[:,0] + q[:, i] = sol[:, i] + p0av[:, 0] -psixy=g.get('psixy') -psi0=g.get('psi_axis') -psix=g.get('psi_bndry') +psixy = g.get("psixy") +psi0 = g.get("psi_axis") +psix = g.get("psi_bndry") -xarr = psixy[:,0] -xarr = old_div((xarr - psi0), (-psi0 + psix)) #for this grid +xarr = psixy[:, 0] +xarr = old_div((xarr - psi0), (-psi0 + psix)) # for this grid -fig=figure() +fig = figure() -nt=q.shape[1] +nt = q.shape[1] -plot(xarr, p0av,'k',label='t=0') -plot(xarr,q[:,nt/4],'r',label='t='+np.str(nt/4)) -plot(xarr,q[:,nt/2],'b',label='t='+np.str(nt/2)) -plot(xarr,q[:,3*nt/4],'g',label='t='+np.str(3*nt/4)) -plot(xarr, q[:,-1],'k',label='t='+np.str(nt)) +plot(xarr, p0av, "k", label="t=0") +plot(xarr, q[:, nt / 4], "r", label="t=" + np.str(nt / 4)) +plot(xarr, q[:, nt / 2], "b", label="t=" + np.str(nt / 2)) +plot(xarr, q[:, 3 * nt / 4], "g", label="t=" + np.str(3 * nt / 4)) +plot(xarr, q[:, -1], "k", label="t=" + np.str(nt)) from 
collections import OrderedDict + handles, labels = gca().get_legend_handles_labels() by_label = OrderedDict(list(zip(labels, handles))) legend(list(by_label.values()), list(by_label.keys())) -xlabel(r"$\psi$",fontsize=25) -ylabel(r"$2 \mu_0 / B^2$",fontsize=25) +xlabel(r"$\psi$", fontsize=25) +ylabel(r"$2 \mu_0 / B^2$", fontsize=25) fig.set_tight_layout(True) diff --git a/examples/elm-pb/runexample.py b/examples/elm-pb/runexample.py index f7ebc01028..b49902d29c 100755 --- a/examples/elm-pb/runexample.py +++ b/examples/elm-pb/runexample.py @@ -28,18 +28,22 @@ # Calculate RMS in toroidal direction prms = np.sqrt(np.mean(p**2, axis=3)) -growth = np.gradient(np.log(prms[:,42,32])) +growth = np.gradient(np.log(prms[:, 42, 32])) # Final growth-rate gamma = growth[-2] import matplotlib.pyplot as plt -plt.plot(tarr, prms[:,42,32], label='Outboard midplane') -plt.plot( [tarr[0], tarr[-1]], - [prms[-1,42,32]*np.exp(gamma*(tarr[0] - tarr[-1])), prms[-1,42,32]], '--', label=r'$\gamma =$'+str(gamma)) +plt.plot(tarr, prms[:, 42, 32], label="Outboard midplane") +plt.plot( + [tarr[0], tarr[-1]], + [prms[-1, 42, 32] * np.exp(gamma * (tarr[0] - tarr[-1])), prms[-1, 42, 32]], + "--", + label=r"$\gamma =$" + str(gamma), +) -plt.yscale('log') +plt.yscale("log") plt.grid() plt.xlabel(r"Time [$1/\tau_A$]") @@ -57,19 +61,21 @@ # Take a poloidal slice at fixed toroidal angle from boutdata.pol_slice import pol_slice -p2d = pol_slice(p[-1,:,:,:], 'cbm18_dens8.grid_nx68ny64.nc', n=15, zangle=0.0) + +p2d = pol_slice(p[-1, :, :, :], "cbm18_dens8.grid_nx68ny64.nc", n=15, zangle=0.0) # Read grid file to get coordinates from boututils.datafile import DataFile -g = DataFile('cbm18_dens8.grid_nx68ny64.nc') -Rxy = g.read("Rxy") # Major radius [m] -Zxy = g.read("Zxy") # Height [m] +g = DataFile("cbm18_dens8.grid_nx68ny64.nc") + +Rxy = g.read("Rxy") # Major radius [m] +Zxy = g.read("Zxy") # Height [m] plt.contourf(Rxy, Zxy, p2d, 30) -plt.axis('equal') # Maintain aspect ratio +plt.axis("equal") # 
Maintain aspect ratio -plt.colorbar() # Plot a bar down the side with a color scale +plt.colorbar() # Plot a bar down the side with a color scale plt.savefig("poloidal_slice.pdf") diff --git a/examples/fci-wave/CMakeLists.txt b/examples/fci-wave/CMakeLists.txt index 2680b1310e..8d4a2523d5 100644 --- a/examples/fci-wave/CMakeLists.txt +++ b/examples/fci-wave/CMakeLists.txt @@ -2,11 +2,17 @@ cmake_minimum_required(VERSION 3.13) project(fci-wave LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(fci-wave - SOURCES fci-wave.cxx - DATA_DIRS div div-integrate logn - EXTRA_FILES compare-density.py) +bout_add_example( + fci-wave + SOURCES + fci-wave.cxx + DATA_DIRS + div + div-integrate + logn + EXTRA_FILES + compare-density.py) diff --git a/examples/fci-wave/compare-density.py b/examples/fci-wave/compare-density.py index c039c250b6..16e620a08f 100644 --- a/examples/fci-wave/compare-density.py +++ b/examples/fci-wave/compare-density.py @@ -1,4 +1,3 @@ - import matplotlib.pyplot as plt from boutdata import collect import numpy as np @@ -9,60 +8,61 @@ # Note: Data from fci-wave-logn examples commented out. 
data_noboundary = [ - ("div", "Model 1 (density, point interpolation)") - ,("div-integrate", "Model 2 (density, area integration)") - ,("logn", "Model 3 (log density, area integration)") - #,("../fci-wave-logn/div-integrate", "Model 5 (velocity, log density, area integration)") + ("div", "Model 1 (density, point interpolation)"), + ("div-integrate", "Model 2 (density, area integration)"), + ("logn", "Model 3 (log density, area integration)"), + # ,("../fci-wave-logn/div-integrate", "Model 5 (velocity, log density, area integration)") ] data_boundary = [ - ("boundary", "Model 2 (density, momentum)") - ,("boundary-logn", "Model 3 (log density, momentum)") - #,("../fci-wave-logn/boundary", "Model 5 (log density, velocity)") - ] + ("boundary", "Model 2 (density, momentum)"), + ("boundary-logn", "Model 3 (log density, momentum)"), + # ,("../fci-wave-logn/boundary", "Model 5 (log density, velocity)") +] # Change this to select no boundary or boundary cases data = data_noboundary if run: from boututils.run_wrapper import shell_safe, launch_safe + shell_safe("make > make.log") - for path,label in data: - launch_safe("./fci-wave -d "+path, nproc=nproc, pipe=False) + for path, label in data: + launch_safe("./fci-wave -d " + path, nproc=nproc, pipe=False) -# Collect the results into a dictionary +# Collect the results into a dictionary sum_n_B = {} -for path,label in data: +for path, label in data: n = collect("n", path=path) Bxyz = collect("Bxyz", path=path) time = collect("t_array", path=path) - + nt, nx, ny, nz = n.shape - + n_B = np.ndarray(nt) for t in range(nt): - n_B[t] = np.sum(n[t,:,:,:] / Bxyz) + n_B[t] = np.sum(n[t, :, :, :] / Bxyz) sum_n_B[path] = (time, n_B) # Plot the density at the final time - + plt.figure() - plt.contourf(n[-1,:,0,:].T, 100) + plt.contourf(n[-1, :, 0, :].T, 100) plt.colorbar() plt.xlabel("Major radius") plt.ylabel("Height") - plt.title("Density n, "+label) - plt.savefig(path+".pdf") + plt.title("Density n, " + label) + plt.savefig(path + 
".pdf") plt.show() # Make a plot comparing total sum density / B - + plt.figure() -for path,label in data: +for path, label in data: time, n_B = sum_n_B[path] plt.plot(time, n_B, label=label) plt.legend() @@ -71,4 +71,3 @@ plt.savefig("compare-density.pdf") plt.show() - diff --git a/examples/finite-volume/diffusion/CMakeLists.txt b/examples/finite-volume/diffusion/CMakeLists.txt index 0dd7d220f6..718a7f3e61 100644 --- a/examples/finite-volume/diffusion/CMakeLists.txt +++ b/examples/finite-volume/diffusion/CMakeLists.txt @@ -2,10 +2,9 @@ cmake_minimum_required(VERSION 3.13) project(finite-volume-diffusion LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(finite-volume-diffusion - SOURCES diffusion.cxx - EXTRA_FILES mms.py) +bout_add_example(finite-volume-diffusion SOURCES diffusion.cxx EXTRA_FILES + mms.py) diff --git a/examples/finite-volume/diffusion/mms.py b/examples/finite-volume/diffusion/mms.py index 2c609ca82e..31b7714727 100644 --- a/examples/finite-volume/diffusion/mms.py +++ b/examples/finite-volume/diffusion/mms.py @@ -5,26 +5,26 @@ from math import pi # Length of the y domain -Ly = 10. 
+Ly = 10.0 # metric tensor metric = Metric() # Identity # Define solution in terms of input x,y,z -f = 1 + 0.1*sin(2*y - t) -k = 1 + 0.1*sin(y) +f = 1 + 0.1 * sin(2 * y - t) +k = 1 + 0.1 * sin(y) # Turn solution into real x and z coordinates -replace = [ (y, metric.y*2*pi/Ly) ] +replace = [(y, metric.y * 2 * pi / Ly)] f = f.subs(replace) -k = k.subs(replace) +k = k.subs(replace) ############################## # Calculate time derivatives -dfdt = Div_par( k * Grad_par(f) ) +dfdt = Div_par(k * Grad_par(f)) ############################# # Calculate sources @@ -32,7 +32,7 @@ Sf = diff(f, t) - dfdt # Substitute back to get input y coordinates -replace = [ (metric.y, y*Ly/(2*pi) ) ] +replace = [(metric.y, y * Ly / (2 * pi))] k = k.subs(replace) f = f.subs(replace) diff --git a/examples/finite-volume/fluid/CMakeLists.txt b/examples/finite-volume/fluid/CMakeLists.txt index e9028459ec..76f8687547 100644 --- a/examples/finite-volume/fluid/CMakeLists.txt +++ b/examples/finite-volume/fluid/CMakeLists.txt @@ -2,11 +2,16 @@ cmake_minimum_required(VERSION 3.13) project(finite-volume-fluid LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(finite-volume-fluid - SOURCES fluid.cxx - DATA_DIRS data mms - EXTRA_FILES mms.py) +bout_add_example( + finite-volume-fluid + SOURCES + fluid.cxx + DATA_DIRS + data + mms + EXTRA_FILES + mms.py) diff --git a/examples/finite-volume/fluid/mms.py b/examples/finite-volume/fluid/mms.py index 8ed8fba517..a31782e2c7 100644 --- a/examples/finite-volume/fluid/mms.py +++ b/examples/finite-volume/fluid/mms.py @@ -5,19 +5,19 @@ from math import pi # Length of the y domain -Ly = 10. 
+Ly = 10.0 # metric tensor metric = Metric() # Identity # Define solution in terms of input x,y,z -n = 1 + 0.1*sin(2*y - t) -p = 1 + 0.1*cos(3*y + t) -nv = 0.1*sin(y + 2*t) +n = 1 + 0.1 * sin(2 * y - t) +p = 1 + 0.1 * cos(3 * y + t) +nv = 0.1 * sin(y + 2 * t) # Turn solution into real x and z coordinates -replace = [ (y, metric.y*2*pi/Ly) ] +replace = [(y, metric.y * 2 * pi / Ly)] n = n.subs(replace) p = p.subs(replace) @@ -27,16 +27,16 @@ # Calculate time derivatives v = nv / n -gamma = 5./3 +gamma = 5.0 / 3 # Density equation -dndt = - Div_par(nv) +dndt = -Div_par(nv) # Pressure equation -dpdt = - Div_par(p*v) - (gamma-1.0)*p*Div_par(v) +dpdt = -Div_par(p * v) - (gamma - 1.0) * p * Div_par(v) # Momentum equation -dnvdt = - Div_par(nv*v) - Grad_par(p) +dnvdt = -Div_par(nv * v) - Grad_par(p) ############################# # Calculate sources @@ -46,7 +46,7 @@ Snv = diff(nv, t) - dnvdt # Substitute back to get input y coordinates -replace = [ (metric.y, y*Ly/(2*pi) ) ] +replace = [(metric.y, y * Ly / (2 * pi))] n = n.subs(replace) p = p.subs(replace) diff --git a/examples/finite-volume/test/CMakeLists.txt b/examples/finite-volume/test/CMakeLists.txt index 73fe99f960..d09567e193 100644 --- a/examples/finite-volume/test/CMakeLists.txt +++ b/examples/finite-volume/test/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.13) project(finite-volume-test LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/gas-compress/CMakeLists.txt b/examples/gas-compress/CMakeLists.txt index 1b4416d32b..b5db63571a 100644 --- a/examples/gas-compress/CMakeLists.txt +++ b/examples/gas-compress/CMakeLists.txt @@ -2,11 +2,18 @@ cmake_minimum_required(VERSION 3.13) project(gas-compress LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(gas-compress - SOURCES gas_compress.cxx gas_compress.hxx - DATA_DIRS 
rayleigh-taylor sod-shock - EXTRA_FILES rt.grd.nc sod.grd.nc) +bout_add_example( + gas-compress + SOURCES + gas_compress.cxx + gas_compress.hxx + DATA_DIRS + rayleigh-taylor + sod-shock + EXTRA_FILES + rt.grd.nc + sod.grd.nc) diff --git a/examples/gyro-gem/CMakeLists.txt b/examples/gyro-gem/CMakeLists.txt index 7189bb06b8..2f52f94810 100644 --- a/examples/gyro-gem/CMakeLists.txt +++ b/examples/gyro-gem/CMakeLists.txt @@ -2,10 +2,8 @@ cmake_minimum_required(VERSION 3.13) project(gyro-gem LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(gyro-gem - SOURCES gem.cxx - EXTRA_FILES cyclone_68x32.nc) +bout_add_example(gyro-gem SOURCES gem.cxx EXTRA_FILES cyclone_68x32.nc) diff --git a/examples/hasegawa-wakatani-3d/CMakeLists.txt b/examples/hasegawa-wakatani-3d/CMakeLists.txt index 0cdb5207f8..c555d6d7f9 100644 --- a/examples/hasegawa-wakatani-3d/CMakeLists.txt +++ b/examples/hasegawa-wakatani-3d/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.13) project(hw3d LANGUAGES CXX C) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/hasegawa-wakatani/CMakeLists.txt b/examples/hasegawa-wakatani/CMakeLists.txt index c9b9401b3a..53f4e5ed4f 100644 --- a/examples/hasegawa-wakatani/CMakeLists.txt +++ b/examples/hasegawa-wakatani/CMakeLists.txt @@ -2,9 +2,8 @@ cmake_minimum_required(VERSION 3.13) project(hasegawa-wakatani LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() bout_add_example(hasegawa-wakatani SOURCES hw.cxx) - diff --git a/examples/invertable_operator/CMakeLists.txt b/examples/invertable_operator/CMakeLists.txt index f054466f23..3bc181f1b4 100644 --- a/examples/invertable_operator/CMakeLists.txt +++ b/examples/invertable_operator/CMakeLists.txt @@ -2,10 +2,9 @@ cmake_minimum_required(VERSION 3.13) project(invertable_operator LANGUAGES CXX) 
-if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(invertable_operator - SOURCES invertable_operator.cxx - REQUIRES BOUT_HAS_PETSC) +bout_add_example(invertable_operator SOURCES invertable_operator.cxx REQUIRES + BOUT_HAS_PETSC) diff --git a/examples/laplace-petsc3d/plotcheck.py b/examples/laplace-petsc3d/plotcheck.py index 707e5db1ee..7f2758dd81 100755 --- a/examples/laplace-petsc3d/plotcheck.py +++ b/examples/laplace-petsc3d/plotcheck.py @@ -10,27 +10,27 @@ xg = 2 -f = collect('f', path=datadir)[xg:-xg, :, :] -rhs = collect('rhs', path=datadir)[xg:-xg, :, :] -rhs_check = collect('rhs_check', path=datadir)[xg:-xg, :, :] -error = collect('error', path=datadir)[xg:-xg, :, :] +f = collect("f", path=datadir)[xg:-xg, :, :] +rhs = collect("rhs", path=datadir)[xg:-xg, :, :] +rhs_check = collect("rhs_check", path=datadir)[xg:-xg, :, :] +error = collect("error", path=datadir)[xg:-xg, :, :] pyplot.subplot(221) pyplot.pcolormesh(f[:, yind, :]) pyplot.colorbar() -pyplot.title('f') +pyplot.title("f") pyplot.subplot(222) pyplot.pcolormesh(rhs[:, yind, :]) pyplot.colorbar() -pyplot.title('rhs') +pyplot.title("rhs") pyplot.subplot(223) pyplot.pcolormesh(rhs_check[:, yind, :]) pyplot.colorbar() -pyplot.title('rhs_check') +pyplot.title("rhs_check") pyplot.subplot(224) pyplot.pcolormesh(error[:, yind, :]) pyplot.colorbar() -pyplot.title('error') +pyplot.title("error") pyplot.show() diff --git a/examples/laplacexy/alfven-wave/CMakeLists.txt b/examples/laplacexy/alfven-wave/CMakeLists.txt index 2423400519..43c4304c94 100644 --- a/examples/laplacexy/alfven-wave/CMakeLists.txt +++ b/examples/laplacexy/alfven-wave/CMakeLists.txt @@ -2,11 +2,17 @@ cmake_minimum_required(VERSION 3.13) project(laplacexy-alfven-wave LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(laplacexy-alfven-wave - SOURCES alfven.cxx - DATA_DIRS cbm18 data - 
EXTRA_FILES cbm18_dens8.grid_nx68ny64.nc d3d_119919.nc) +bout_add_example( + laplacexy-alfven-wave + SOURCES + alfven.cxx + DATA_DIRS + cbm18 + data + EXTRA_FILES + cbm18_dens8.grid_nx68ny64.nc + d3d_119919.nc) diff --git a/examples/laplacexy/laplace_perp/CMakeLists.txt b/examples/laplacexy/laplace_perp/CMakeLists.txt index 388513b044..83a696db96 100644 --- a/examples/laplacexy/laplace_perp/CMakeLists.txt +++ b/examples/laplacexy/laplace_perp/CMakeLists.txt @@ -2,11 +2,16 @@ cmake_minimum_required(VERSION 3.13) project(laplacexy-laplace_perp LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(laplacexy-laplace_perp - SOURCES test.cxx - EXTRA_FILES cbm18_dens8.grid_nx68ny64.nc - DATA_DIRS square torus) +bout_add_example( + laplacexy-laplace_perp + SOURCES + test.cxx + EXTRA_FILES + cbm18_dens8.grid_nx68ny64.nc + DATA_DIRS + square + torus) diff --git a/examples/laplacexy/simple/CMakeLists.txt b/examples/laplacexy/simple/CMakeLists.txt index 7859a08259..b7bf7f04b4 100644 --- a/examples/laplacexy/simple/CMakeLists.txt +++ b/examples/laplacexy/simple/CMakeLists.txt @@ -2,11 +2,9 @@ cmake_minimum_required(VERSION 3.13) project(laplacexy-simple LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(laplacexy-simple - SOURCES test-laplacexy.cxx - DATA_DIRS data hypre -) +bout_add_example(laplacexy-simple SOURCES test-laplacexy.cxx DATA_DIRS data + hypre) diff --git a/examples/monitor-newapi/CMakeLists.txt b/examples/monitor-newapi/CMakeLists.txt index 0ee3ee7f85..5c2022b792 100644 --- a/examples/monitor-newapi/CMakeLists.txt +++ b/examples/monitor-newapi/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.13) project(monitor-newapi LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/orszag-tang/CMakeLists.txt 
b/examples/orszag-tang/CMakeLists.txt index 9ac8fd8d1c..f7e5f96848 100644 --- a/examples/orszag-tang/CMakeLists.txt +++ b/examples/orszag-tang/CMakeLists.txt @@ -2,10 +2,8 @@ cmake_minimum_required(VERSION 3.13) project(orszag-tang LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(orszag-tang - SOURCES mhd.cxx - EXTRA_FILES data/otv.grd.nc) +bout_add_example(orszag-tang SOURCES mhd.cxx EXTRA_FILES data/otv.grd.nc) diff --git a/examples/orszag-tang/generate.py b/examples/orszag-tang/generate.py index 3ba6dbbea9..acce7497c3 100644 --- a/examples/orszag-tang/generate.py +++ b/examples/orszag-tang/generate.py @@ -2,14 +2,16 @@ from __future__ import division from builtins import range from past.utils import old_div + # the Scientific Python netCDF 3 interface # http://dirac.cnrs-orleans.fr/ScientificPython/ -#from Scientific.IO.NetCDF import NetCDFFile as Dataset +# from Scientific.IO.NetCDF import NetCDFFile as Dataset # the 'classic' version of the netCDF4 python interface # http://code.google.com/p/netcdf4-python/ import numpy as np from netCDF4 import Dataset -from numpy import dtype # array module from http://numpy.scipy.org +from numpy import dtype # array module from http://numpy.scipy.org + """ This example writes some surface pressure and temperatures The companion program sfc_pres_temp_rd.py shows how to read the netCDF @@ -30,44 +32,45 @@ # # the output array to write will be nx x ny -ny = 100; nx = ny + 4 +ny = 100 +nx = ny + 4 # dy of grid dy = old_div(1.0, np.float(ny)) dx = dy # create grid -dxarr=np.zeros((nx,ny),dtype='float32')+dx -dyarr=np.zeros((nx,ny),dtype='float32')+dy +dxarr = np.zeros((nx, ny), dtype="float32") + dx +dyarr = np.zeros((nx, ny), dtype="float32") + dy -xarr=np.arange(0.,np.float(nx),1.,dtype='float32')*dx -yarr=np.arange(0.,np.float(ny),1.,dtype='float32')*dy +xarr = np.arange(0.0, np.float(nx), 1.0, dtype="float32") * dx +yarr = np.arange(0.0, 
np.float(ny), 1.0, dtype="float32") * dy # compute initial variables -rho=np.zeros((nx,ny),dtype='float32')+old_div(25.,(36.*np.pi)) -p=np.zeros((nx,ny),dtype='float32')+old_div(5.,(12.*np.pi)) +rho = np.zeros((nx, ny), dtype="float32") + old_div(25.0, (36.0 * np.pi)) +p = np.zeros((nx, ny), dtype="float32") + old_div(5.0, (12.0 * np.pi)) -rho=1. -p=old_div(rho,3.) +rho = 1.0 +p = old_div(rho, 3.0) -v_x=np.zeros((nx,ny),dtype='float32') -Bx=np.zeros((nx,ny),dtype='float32') +v_x = np.zeros((nx, ny), dtype="float32") +Bx = np.zeros((nx, ny), dtype="float32") for y in range(ny): - v_x[:,y]=-np.sin(2.*np.pi*yarr[y]) - Bx[:,y]=-np.sin(2.*np.pi*yarr[y]) - -#Bx=Bx/np.sqrt(4.*np.pi) + v_x[:, y] = -np.sin(2.0 * np.pi * yarr[y]) + Bx[:, y] = -np.sin(2.0 * np.pi * yarr[y]) +# Bx=Bx/np.sqrt(4.*np.pi) -v_y=np.zeros((nx,ny),dtype='float32') -By=np.zeros((nx,ny),dtype='float32') + +v_y = np.zeros((nx, ny), dtype="float32") +By = np.zeros((nx, ny), dtype="float32") for x in range(nx): - v_y[x,:]=np.sin(2.*np.pi*xarr[x]) - By[x,:]=np.sin(4.*np.pi*xarr[x]) - -#By=By/np.sqrt(4.*np.pi) + v_y[x, :] = np.sin(2.0 * np.pi * xarr[x]) + By[x, :] = np.sin(4.0 * np.pi * xarr[x]) + +# By=By/np.sqrt(4.*np.pi) # Domain inside core (periodic) @@ -76,55 +79,62 @@ ixseps2 = nx # open a new netCDF file for writing. -ncfile = Dataset('otv.grd.128.nc','w', format='NETCDF3_CLASSIC') +ncfile = Dataset("otv.grd.128.nc", "w", format="NETCDF3_CLASSIC") # output data. # create the nx and ny dimensions. 
-ncfile.createDimension('x',nx) -ncfile.createDimension('y',ny) -ncfile.createDimension('single',1) +ncfile.createDimension("x", nx) +ncfile.createDimension("y", ny) +ncfile.createDimension("single", 1) # create and write nx,ny variables -nxx=ncfile.createVariable('nx','i4',('single')) -nyy=ncfile.createVariable('ny','i4',('single')) +nxx = ncfile.createVariable("nx", "i4", ("single")) +nyy = ncfile.createVariable("ny", "i4", ("single")) -nxx[:]=nx -nyy[:]=ny +nxx[:] = nx +nyy[:] = ny # Define the coordinate variables. They will hold the coordinate # information, that is, xarr,yarr -dx = ncfile.createVariable('dx',dtype('float32').char,('x','y')) -dy = ncfile.createVariable('dy',dtype('float32').char,('x','y',)) +dx = ncfile.createVariable("dx", dtype("float32").char, ("x", "y")) +dy = ncfile.createVariable( + "dy", + dtype("float32").char, + ( + "x", + "y", + ), +) # write data to coordinate vars. -dx[:,:] = dxarr -dy[:,:] = dyarr +dx[:, :] = dxarr +dy[:, :] = dyarr # create and write ixseps* dimensions. 
-ix1=ncfile.createVariable('ixseps1','i4',('single')) -ix2=ncfile.createVariable('ixseps2','i4',('single')) +ix1 = ncfile.createVariable("ixseps1", "i4", ("single")) +ix2 = ncfile.createVariable("ixseps2", "i4", ("single")) -ix1[:]=ixseps1 -ix2[:]=ixseps2 +ix1[:] = ixseps1 +ix2[:] = ixseps2 -# create the corresponding variables -rho0 = ncfile.createVariable('rho0',dtype('float32').char,('x','y')) -p0 = ncfile.createVariable('p0',dtype('float32').char,('x','y')) -v0_x = ncfile.createVariable('v0_x',dtype('float32').char,('x','y')) -v0_y = ncfile.createVariable('v0_y',dtype('float32').char,('x','y')) -B0x = ncfile.createVariable('B0x',dtype('float32').char,('x','y')) -B0y = ncfile.createVariable('B0y',dtype('float32').char,('x','y')) +# create the corresponding variables +rho0 = ncfile.createVariable("rho0", dtype("float32").char, ("x", "y")) +p0 = ncfile.createVariable("p0", dtype("float32").char, ("x", "y")) +v0_x = ncfile.createVariable("v0_x", dtype("float32").char, ("x", "y")) +v0_y = ncfile.createVariable("v0_y", dtype("float32").char, ("x", "y")) +B0x = ncfile.createVariable("B0x", dtype("float32").char, ("x", "y")) +B0y = ncfile.createVariable("B0y", dtype("float32").char, ("x", "y")) # write data to variables. 
-rho0[:,:]=rho -p0[:,:]=p -v0_x[:,:]=v_x -v0_y[:,:]=v_y -B0x[:,:]=Bx -B0y[:,:]=By +rho0[:, :] = rho +p0[:, :] = p +v0_x[:, :] = v_x +v0_y[:, :] = v_y +B0x[:, :] = Bx +B0y[:, :] = By ncfile.close() -print('*** SUCCESS writing file otv.grd.py.nc!') +print("*** SUCCESS writing file otv.grd.py.nc!") diff --git a/examples/performance/bracket/scaling_parser.py b/examples/performance/bracket/scaling_parser.py index d7a23842d4..15f0ea8780 100644 --- a/examples/performance/bracket/scaling_parser.py +++ b/examples/performance/bracket/scaling_parser.py @@ -2,8 +2,7 @@ def read_file(filename): - reader = csv.reader(open(filename, 'r'), delimiter='\t', - skipinitialspace=True) + reader = csv.reader(open(filename, "r"), delimiter="\t", skipinitialspace=True) # Skip header for _, _ in zip(range(4), reader): @@ -13,7 +12,7 @@ def read_file(filename): for line in reader: if line == []: break - case_lines[line[0].rstrip('.')] = line[1] + case_lines[line[0].rstrip(".")] = line[1] titles = next(reader) cases_weak = {col.strip(): [] for col in titles[:-1]} diff --git a/examples/performance/ddx/new_scaling_parser.py b/examples/performance/ddx/new_scaling_parser.py index d2a5fee191..5c0c92ff5e 100644 --- a/examples/performance/ddx/new_scaling_parser.py +++ b/examples/performance/ddx/new_scaling_parser.py @@ -2,19 +2,19 @@ def read_file(filename): - reader = csv.reader(open(filename, 'r'), delimiter='\t', - skipinitialspace=True) + reader = csv.reader(open(filename, "r"), delimiter="\t", skipinitialspace=True) # Skip header for _, _ in zip(range(4), reader): continue from collections import OrderedDict - case_lines = OrderedDict() #{} + + case_lines = OrderedDict() # {} for line in reader: if line == []: break - case_lines[line[0].rstrip('.')] = line[1] + case_lines[line[0].rstrip(".")] = line[1] titles = next(reader) cases_weak = {col.strip(): [] for col in titles[:-1]} @@ -25,12 +25,13 @@ def read_file(filename): for title, col in zip(titles, line[:-1]): 
cases_weak[title].append(float(col)) - axis = cases_weak['Local grid'] + axis = cases_weak["Local grid"] data = [cases_weak[x] for x in case_lines] labels = [case_lines[x] for x in case_lines] - return {'axis':axis, 'data':data, 'labels':labels} + return {"axis": axis, "data": data, "labels": labels} + -def getScan(baseName = "timing_{n}.txt", nthreads = (1,2,4,8,16,32)): +def getScan(baseName="timing_{n}.txt", nthreads=(1, 2, 4, 8, 16, 32)): from numpy import zeros dataS = [] @@ -38,17 +39,16 @@ def getScan(baseName = "timing_{n}.txt", nthreads = (1,2,4,8,16,32)): f = baseName.format(n=n) dataS.append(read_file(f)) - nnt = len(dataS) - nlines = len(dataS[0]['data']) - nval = len(dataS[0]['data'][0]) - rawDat = zeros([nnt,nval,nlines]) + nlines = len(dataS[0]["data"]) + nval = len(dataS[0]["data"][0]) + rawDat = zeros([nnt, nval, nlines]) for i, dat in enumerate(dataS): - print(len(dat['data'])) - - rawDat[i,:,:] = dat['data'] + print(len(dat["data"])) + + rawDat[i, :, :] = dat["data"] - axes = [nthreads, dataS[0]['axis']] + axes = [nthreads, dataS[0]["axis"]] - return axes, rawDat, dataS[0]['labels'] + return axes, rawDat, dataS[0]["labels"] diff --git a/examples/performance/ddy/new_scaling_parser.py b/examples/performance/ddy/new_scaling_parser.py index d2a5fee191..5c0c92ff5e 100644 --- a/examples/performance/ddy/new_scaling_parser.py +++ b/examples/performance/ddy/new_scaling_parser.py @@ -2,19 +2,19 @@ def read_file(filename): - reader = csv.reader(open(filename, 'r'), delimiter='\t', - skipinitialspace=True) + reader = csv.reader(open(filename, "r"), delimiter="\t", skipinitialspace=True) # Skip header for _, _ in zip(range(4), reader): continue from collections import OrderedDict - case_lines = OrderedDict() #{} + + case_lines = OrderedDict() # {} for line in reader: if line == []: break - case_lines[line[0].rstrip('.')] = line[1] + case_lines[line[0].rstrip(".")] = line[1] titles = next(reader) cases_weak = {col.strip(): [] for col in titles[:-1]} @@ -25,12 
+25,13 @@ def read_file(filename): for title, col in zip(titles, line[:-1]): cases_weak[title].append(float(col)) - axis = cases_weak['Local grid'] + axis = cases_weak["Local grid"] data = [cases_weak[x] for x in case_lines] labels = [case_lines[x] for x in case_lines] - return {'axis':axis, 'data':data, 'labels':labels} + return {"axis": axis, "data": data, "labels": labels} + -def getScan(baseName = "timing_{n}.txt", nthreads = (1,2,4,8,16,32)): +def getScan(baseName="timing_{n}.txt", nthreads=(1, 2, 4, 8, 16, 32)): from numpy import zeros dataS = [] @@ -38,17 +39,16 @@ def getScan(baseName = "timing_{n}.txt", nthreads = (1,2,4,8,16,32)): f = baseName.format(n=n) dataS.append(read_file(f)) - nnt = len(dataS) - nlines = len(dataS[0]['data']) - nval = len(dataS[0]['data'][0]) - rawDat = zeros([nnt,nval,nlines]) + nlines = len(dataS[0]["data"]) + nval = len(dataS[0]["data"][0]) + rawDat = zeros([nnt, nval, nlines]) for i, dat in enumerate(dataS): - print(len(dat['data'])) - - rawDat[i,:,:] = dat['data'] + print(len(dat["data"])) + + rawDat[i, :, :] = dat["data"] - axes = [nthreads, dataS[0]['axis']] + axes = [nthreads, dataS[0]["axis"]] - return axes, rawDat, dataS[0]['labels'] + return axes, rawDat, dataS[0]["labels"] diff --git a/examples/performance/ddz/new_scaling_parser.py b/examples/performance/ddz/new_scaling_parser.py index d2a5fee191..5c0c92ff5e 100644 --- a/examples/performance/ddz/new_scaling_parser.py +++ b/examples/performance/ddz/new_scaling_parser.py @@ -2,19 +2,19 @@ def read_file(filename): - reader = csv.reader(open(filename, 'r'), delimiter='\t', - skipinitialspace=True) + reader = csv.reader(open(filename, "r"), delimiter="\t", skipinitialspace=True) # Skip header for _, _ in zip(range(4), reader): continue from collections import OrderedDict - case_lines = OrderedDict() #{} + + case_lines = OrderedDict() # {} for line in reader: if line == []: break - case_lines[line[0].rstrip('.')] = line[1] + case_lines[line[0].rstrip(".")] = line[1] titles = 
next(reader) cases_weak = {col.strip(): [] for col in titles[:-1]} @@ -25,12 +25,13 @@ def read_file(filename): for title, col in zip(titles, line[:-1]): cases_weak[title].append(float(col)) - axis = cases_weak['Local grid'] + axis = cases_weak["Local grid"] data = [cases_weak[x] for x in case_lines] labels = [case_lines[x] for x in case_lines] - return {'axis':axis, 'data':data, 'labels':labels} + return {"axis": axis, "data": data, "labels": labels} + -def getScan(baseName = "timing_{n}.txt", nthreads = (1,2,4,8,16,32)): +def getScan(baseName="timing_{n}.txt", nthreads=(1, 2, 4, 8, 16, 32)): from numpy import zeros dataS = [] @@ -38,17 +39,16 @@ def getScan(baseName = "timing_{n}.txt", nthreads = (1,2,4,8,16,32)): f = baseName.format(n=n) dataS.append(read_file(f)) - nnt = len(dataS) - nlines = len(dataS[0]['data']) - nval = len(dataS[0]['data'][0]) - rawDat = zeros([nnt,nval,nlines]) + nlines = len(dataS[0]["data"]) + nval = len(dataS[0]["data"][0]) + rawDat = zeros([nnt, nval, nlines]) for i, dat in enumerate(dataS): - print(len(dat['data'])) - - rawDat[i,:,:] = dat['data'] + print(len(dat["data"])) + + rawDat[i, :, :] = dat["data"] - axes = [nthreads, dataS[0]['axis']] + axes = [nthreads, dataS[0]["axis"]] - return axes, rawDat, dataS[0]['labels'] + return axes, rawDat, dataS[0]["labels"] diff --git a/examples/performance/iterator/scaling_parser.py b/examples/performance/iterator/scaling_parser.py index d7a23842d4..15f0ea8780 100644 --- a/examples/performance/iterator/scaling_parser.py +++ b/examples/performance/iterator/scaling_parser.py @@ -2,8 +2,7 @@ def read_file(filename): - reader = csv.reader(open(filename, 'r'), delimiter='\t', - skipinitialspace=True) + reader = csv.reader(open(filename, "r"), delimiter="\t", skipinitialspace=True) # Skip header for _, _ in zip(range(4), reader): @@ -13,7 +12,7 @@ def read_file(filename): for line in reader: if line == []: break - case_lines[line[0].rstrip('.')] = line[1] + case_lines[line[0].rstrip(".")] = line[1] 
titles = next(reader) cases_weak = {col.strip(): [] for col in titles[:-1]} diff --git a/examples/preconditioning/wave/CMakeLists.txt b/examples/preconditioning/wave/CMakeLists.txt index 437f39fe3a..05d7548832 100644 --- a/examples/preconditioning/wave/CMakeLists.txt +++ b/examples/preconditioning/wave/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.13) project(preconditioning-wave LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() diff --git a/examples/staggered_grid/CMakeLists.txt b/examples/staggered_grid/CMakeLists.txt index dd2b3b463e..c9222f05af 100644 --- a/examples/staggered_grid/CMakeLists.txt +++ b/examples/staggered_grid/CMakeLists.txt @@ -2,11 +2,18 @@ cmake_minimum_required(VERSION 3.13) project(staggered_grid LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(staggered_grid - SOURCES test_staggered.cxx - EXTRA_FILES generate.py run test-staggered.nc - DATA_DIRS data test) +bout_add_example( + staggered_grid + SOURCES + test_staggered.cxx + EXTRA_FILES + generate.py + run + test-staggered.nc + DATA_DIRS + data + test) diff --git a/examples/staggered_grid/generate.py b/examples/staggered_grid/generate.py index da84e9c2e6..a5a0120612 100755 --- a/examples/staggered_grid/generate.py +++ b/examples/staggered_grid/generate.py @@ -4,11 +4,11 @@ # Generate an input mesh # -from boututils.datafile import DataFile # Wrapper around NetCDF4 libraries +from boututils.datafile import DataFile # Wrapper around NetCDF4 libraries -nx = 5 # Minimum is 5: 2 boundary, one evolved +nx = 5 # Minimum is 5: 2 boundary, one evolved ny = 32 # Minimum 5. Should be divisible by number of processors (so powers of 2 nice) -dy = 1. 
# distance between points in y, in m/g22/lengthunit +dy = 1.0 # distance between points in y, in m/g22/lengthunit ixseps1 = -1 ixseps2 = -1 diff --git a/examples/subsampling/CMakeLists.txt b/examples/subsampling/CMakeLists.txt index 86f71d98f5..98aa5cff45 100644 --- a/examples/subsampling/CMakeLists.txt +++ b/examples/subsampling/CMakeLists.txt @@ -2,11 +2,8 @@ cmake_minimum_required(VERSION 3.13) project(subsampling LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(subsampling - SOURCES monitor.cxx - EXTRA_FILES show.py) - +bout_add_example(subsampling SOURCES monitor.cxx EXTRA_FILES show.py) diff --git a/examples/subsampling/show.py b/examples/subsampling/show.py index 34c6ee3084..b79c5e2964 100755 --- a/examples/subsampling/show.py +++ b/examples/subsampling/show.py @@ -11,14 +11,14 @@ for pack in monitors: filename, data_name = pack - t = DataFile(path+'/'+filename+'.dmp.0.nc').read('t_array') - data = DataFile(path+'/'+filename+'.dmp.0.nc').read(data_name).flatten() + t = DataFile(path + "/" + filename + ".dmp.0.nc").read("t_array") + data = DataFile(path + "/" + filename + ".dmp.0.nc").read(data_name).flatten() plt.plot(t, data, label="{} {}".format(filename, data_name)) -time = DataFile(path+'/BOUT.dmp.0.nc').read('t_array') -data = DataFile(path+'/BOUT.dmp.0.nc').read("T")[:, 2, 2, 0] +time = DataFile(path + "/BOUT.dmp.0.nc").read("t_array") +data = DataFile(path + "/BOUT.dmp.0.nc").read("T")[:, 2, 2, 0] -plt.plot(time, data, marker='+', label="BOUT++ T") +plt.plot(time, data, marker="+", label="BOUT++ T") plt.xlabel("Time") plt.legend() diff --git a/examples/wave-slab/CMakeLists.txt b/examples/wave-slab/CMakeLists.txt index b1943c4e1c..0350dda486 100644 --- a/examples/wave-slab/CMakeLists.txt +++ b/examples/wave-slab/CMakeLists.txt @@ -2,10 +2,8 @@ cmake_minimum_required(VERSION 3.13) project(wave-slab LANGUAGES CXX) -if (NOT TARGET bout++::bout++) +if(NOT TARGET 
bout++::bout++) find_package(bout++ REQUIRED) endif() -bout_add_example(wave-slab - SOURCES wave_slab.cxx - EXTRA_FILES generate.py) +bout_add_example(wave-slab SOURCES wave_slab.cxx EXTRA_FILES generate.py) diff --git a/examples/wave-slab/generate.py b/examples/wave-slab/generate.py index 0a2e8f3049..12698d1e24 100755 --- a/examples/wave-slab/generate.py +++ b/examples/wave-slab/generate.py @@ -51,7 +51,7 @@ for x in range(nx): Bpxy[x, :] = Bpx[x] -Bxy = sqrt(Bpxy ** 2 + Bt ** 2) +Bxy = sqrt(Bpxy**2 + Bt**2) # Calculate change in poloidal flux dr = Lx / nx # Constant mesh spacing in radius diff --git a/externalpackages/PVODE/CMakeLists.txt b/externalpackages/PVODE/CMakeLists.txt index 5e3f8f2f63..d73665fe03 100644 --- a/externalpackages/PVODE/CMakeLists.txt +++ b/externalpackages/PVODE/CMakeLists.txt @@ -6,14 +6,16 @@ else() cmake_policy(VERSION 3.12) endif() -project(PVODE +project( + PVODE DESCRIPTION "ODE Solver" VERSION 0.1 LANGUAGES CXX) find_package(MPI REQUIRED) -add_library(pvode +add_library( + pvode source/cvode.cpp source/nvector.cpp source/llnlmath.cpp @@ -32,64 +34,55 @@ add_library(pvode include/pvode/nvector.h include/pvode/smalldense.h include/pvode/spgmr.h - include/pvode/vector.h - ) + include/pvode/vector.h) -target_include_directories(pvode PUBLIC - $ - $ - $ - ) +target_include_directories( + pvode + PUBLIC $ + $ + $) target_link_libraries(pvode PUBLIC MPI::MPI_CXX) -add_library(pvpre - include/pvode/pvbbdpre.h - precon/pvbbdpre.cpp - precon/band.cpp - precon/band.h - ) +add_library(pvpre include/pvode/pvbbdpre.h precon/pvbbdpre.cpp precon/band.cpp + precon/band.h) +set_target_properties(pvode PROPERTIES SOVERSION 1.0.0) -set_target_properties(pvode PROPERTIES - SOVERSION 1.0.0) - -target_include_directories(pvpre PUBLIC - $ - $ - $ - ) +target_include_directories( + pvpre + PUBLIC $ + $ + $) target_link_libraries(pvpre PUBLIC pvode MPI::MPI_CXX) - -set_target_properties(pvpre PROPERTIES - SOVERSION 1.0.0) +set_target_properties(pvpre 
PROPERTIES SOVERSION 1.0.0) include(GNUInstallDirs) -install(TARGETS pvode pvpre +install( + TARGETS pvode pvpre EXPORT PVODETargets LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}" ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}" RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" - INCLUDES DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}" - ) + INCLUDES + DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}") install(DIRECTORY include/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) include(CMakePackageConfigHelpers) write_basic_package_version_file( PVODEConfigVersion.cmake VERSION ${PACKAGE_VERSION} - COMPATIBILITY SameMajorVersion - ) + COMPATIBILITY SameMajorVersion) -install(EXPORT PVODETargets +install( + EXPORT PVODETargets FILE PVODEConfig.cmake NAMESPACE PVODE:: - DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/PVODE" - ) + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/PVODE") -export(EXPORT PVODETargets +export( + EXPORT PVODETargets FILE "${CMAKE_CURRENT_BINARY_DIR}/PVODEConfig.cmake" - NAMESPACE PVODE:: - ) + NAMESPACE PVODE::) export(PACKAGE PVODE) diff --git a/manual/CMakeLists.txt b/manual/CMakeLists.txt index b5224440bf..eae3fe4ffe 100644 --- a/manual/CMakeLists.txt +++ b/manual/CMakeLists.txt @@ -6,33 +6,35 @@ find_package(Sphinx REQUIRED) set(BOUT_SPHINX_SOURCE ${CMAKE_CURRENT_SOURCE_DIR}/sphinx) set(BOUT_SPHINX_BUILD ${CMAKE_CURRENT_BINARY_DIR}/docs) -set(env_command - ${CMAKE_COMMAND} -E env - PYTHONPATH=${BOUT_PYTHONPATH}:$ENV{PYTHONPATH} -) +set(env_command ${CMAKE_COMMAND} -E env + PYTHONPATH=${BOUT_PYTHONPATH}:$ENV{PYTHONPATH}) -add_custom_target(sphinx-html +add_custom_target( + sphinx-html COMMAND ${env_command} - COMMAND ${SPHINX_EXECUTABLE} -b html ${BOUT_SPHINX_SOURCE} ${BOUT_SPHINX_BUILD} - COMMAND ${CMAKE_COMMAND} -E echo "Generated HTML docs in file://${BOUT_SPHINX_BUILD}/index.html" + COMMAND ${SPHINX_EXECUTABLE} -b html ${BOUT_SPHINX_SOURCE} + ${BOUT_SPHINX_BUILD} + COMMAND ${CMAKE_COMMAND} -E echo + "Generated HTML docs in file://${BOUT_SPHINX_BUILD}/index.html" 
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMENT "Generating HTML documentation with Sphinx in ${BOUT_SPHINX_BUILD}" -) + COMMENT "Generating HTML documentation with Sphinx in ${BOUT_SPHINX_BUILD}") -add_custom_target(sphinx-pdf +add_custom_target( + sphinx-pdf COMMAND ${env_command} - COMMAND ${SPHINX_EXECUTABLE} -M latexpdf ${BOUT_SPHINX_SOURCE} ${BOUT_SPHINX_BUILD} - COMMAND ${CMAKE_COMMAND} -E echo "Generated PDF docs in file://${BOUT_SPHINX_BUILD}" + COMMAND ${SPHINX_EXECUTABLE} -M latexpdf ${BOUT_SPHINX_SOURCE} + ${BOUT_SPHINX_BUILD} + COMMAND ${CMAKE_COMMAND} -E echo + "Generated PDF docs in file://${BOUT_SPHINX_BUILD}" WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMENT "Generating PDF documentation with Sphinx in ${BOUT_SPHINX_BUILD}" -) + COMMENT "Generating PDF documentation with Sphinx in ${BOUT_SPHINX_BUILD}") add_custom_target(docs) add_dependencies(docs sphinx-html) -install(DIRECTORY ${BOUT_SPHINX_BUILD}/ +install( + DIRECTORY ${BOUT_SPHINX_BUILD}/ DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/doc/bout++/ EXCLUDE_FROM_ALL COMPONENT docs - PATTERN .* EXCLUDE -) + PATTERN .* EXCLUDE) diff --git a/manual/sphinx/figs/figure_creators/LaplacianMatrices.ipynb b/manual/sphinx/figs/figure_creators/LaplacianMatrices.ipynb index c3c39a3c12..4e940b998f 100644 --- a/manual/sphinx/figs/figure_creators/LaplacianMatrices.ipynb +++ b/manual/sphinx/figs/figure_creators/LaplacianMatrices.ipynb @@ -40,10 +40,10 @@ "outputs": [], "source": [ "# Note, these can be max 9 due to the current index convention\n", - "nx = 3 # Not including ghost points\n", + "nx = 3 # Not including ghost points\n", "nz = 3\n", "\n", - "startXIndex = 10 # The x indices are the slowest growing indices\n", + "startXIndex = 10 # The x indices are the slowest growing indices\n", "startZIndex = 1 # The z indices are the fastest growing indices" ] }, @@ -85,13 +85,13 @@ "for z in range(nz):\n", " f.append([])\n", " xStart = startZIndex\n", - " xEnd = startXIndex*(nx+1) # +1 due to ghost 
point\n", + " xEnd = startXIndex * (nx + 1) # +1 due to ghost point\n", " # +startXIndex in the range as the range does not include endpoint\n", - " for xInd in range(xStart, xEnd+startXIndex, 10):\n", - " ind = str(xInd+z)\n", - " if (xInd+z) < startXIndex:\n", - " ind = '0'+str(xInd+z)\n", - " f[z].append(symbols('f_' + ind))\n", + " for xInd in range(xStart, xEnd + startXIndex, 10):\n", + " ind = str(xInd + z)\n", + " if (xInd + z) < startXIndex:\n", + " ind = \"0\" + str(xInd + z)\n", + " f[z].append(symbols(\"f_\" + ind))\n", "\n", "mesh = Matrix(f[::-1])\n", "display(mesh)" @@ -195,21 +195,21 @@ } ], "source": [ - "xVec=[]\n", - "bVec=[]\n", + "xVec = []\n", + "bVec = []\n", "# Do the inner loop, so start ranges at 1\n", "# (nx+1) to include outer ghost point, +1 in the range as the range does not include endpoint\n", - "for x in range(1, (nx+1)+1):\n", - " for z in range(1,nz+1):\n", - " xVec.append(symbols('x_'+str(x)+'_'+str(z)))\n", - " bVec.append(symbols('b_'+str(x)+'_'+str(z)))\n", + "for x in range(1, (nx + 1) + 1):\n", + " for z in range(1, nz + 1):\n", + " xVec.append(symbols(\"x_\" + str(x) + \"_\" + str(z)))\n", + " bVec.append(symbols(\"b_\" + str(x) + \"_\" + str(z)))\n", "\n", "# Do the inner ghost points\n", "# Must count backwards since we are inserting in the front\n", - "for ind in range(nz,0,-1):\n", - " xVec.insert(0, symbols('x_0_'+str(ind)))\n", - " bVec.insert(0, symbols('b_0_'+str(ind)))\n", - " \n", + "for ind in range(nz, 0, -1):\n", + " xVec.insert(0, symbols(\"x_0_\" + str(ind)))\n", + " bVec.insert(0, symbols(\"b_0_\" + str(ind)))\n", + "\n", "display(Matrix(xVec))\n", "display(Matrix(bVec))" ] @@ -274,7 +274,7 @@ "globInd = []\n", "for rows in range(len(xVec)):\n", " cols = []\n", - " for col in range(rows*len(xVec), (rows+1)*len(xVec)):\n", + " for col in range(rows * len(xVec), (rows + 1) * len(xVec)):\n", " cols.append(col)\n", " globInd.append(cols)\n", "\n", @@ -305,10 +305,14 @@ "source": [ "c = []\n", "for x in 
range(nx):\n", - " indexStart = (startXIndex+1)+(startXIndex*x) # Multiply by 10 due to index system\n", - " indexEnd = (startXIndex+nz+1)+(startXIndex*x) # Multiply by 10 due to index system\n", + " indexStart = (startXIndex + 1) + (\n", + " startXIndex * x\n", + " ) # Multiply by 10 due to index system\n", + " indexEnd = (startXIndex + nz + 1) + (\n", + " startXIndex * x\n", + " ) # Multiply by 10 due to index system\n", " for ind in range(indexStart, indexEnd):\n", - " c.append(symbols('c_'+str(ind)))" + " c.append(symbols(\"c_\" + str(ind)))" ] }, { @@ -328,11 +332,11 @@ "source": [ "# The inner ghost\n", "innerGhostStart = startZIndex\n", - "innerGhostEnd = nz\n", + "innerGhostEnd = nz\n", "ig = []\n", "# +1 in the range as last point is not included\n", - "for z in range(innerGhostStart, innerGhostEnd+1):\n", - " ig.append(symbols('ig_0_'+str(z)))" + "for z in range(innerGhostStart, innerGhostEnd + 1):\n", + " ig.append(symbols(\"ig_0_\" + str(z)))" ] }, { @@ -345,12 +349,12 @@ "source": [ "# The outer ghost\n", "# nx+1 as we want to go past the last inner x grid point\n", - "outerGhostStart = startXIndex*(nx+1) + startZIndex\n", - "outerGhostEnd = startXIndex*(nx+1) + nz\n", + "outerGhostStart = startXIndex * (nx + 1) + startZIndex\n", + "outerGhostEnd = startXIndex * (nx + 1) + nz\n", "og = []\n", "# +1 in the range as last point is not included\n", - "for z in range(outerGhostStart, outerGhostEnd+1):\n", - " og.append(symbols('og_'+str(z)))" + "for z in range(outerGhostStart, outerGhostEnd + 1):\n", + " og.append(symbols(\"og_\" + str(z)))" ] }, { @@ -396,21 +400,27 @@ "for x in range(nx):\n", " # The indices referring to the matrix index\n", " # The last -1 is there as the matrix indices count from 0\n", - " startRow = (nz+1)+(x*nz)-1 # Starting at row+1 after inner ghost point sub-matrix\n", - " endRow = (nz+1)+(x*nz)+(nz-1)-1 # Ending row-1 before the last z-index (last z will be wrapped around)\n", + " startRow = (\n", + " (nz + 1) + (x * nz) - 1\n", + 
" ) # Starting at row+1 after inner ghost point sub-matrix\n", + " endRow = (\n", + " (nz + 1) + (x * nz) + (nz - 1) - 1\n", + " ) # Ending row-1 before the last z-index (last z will be wrapped around)\n", " # +1 in range as last point is not included\n", - " rows = range(startRow, endRow+1)\n", - " cols = range(startRow+1, endRow+1) # Column is shifted +1 from the diagonal\n", - " \n", + " rows = range(startRow, endRow + 1)\n", + " cols = range(startRow + 1, endRow + 1) # Column is shifted +1 from the diagonal\n", + "\n", " # The indices referring to the spatial point in the grid\n", " # The last \"+1\" is fue to the fact that the column is shifted +1 from the diagonal\n", - " startInd = (startXIndex+startZIndex) + (startXIndex*x) + 1\n", - " endInd = (startXIndex+startZIndex) + (nz-1) + (startXIndex*x) + 1 # Wrap around last point\n", + " startInd = (startXIndex + startZIndex) + (startXIndex * x) + 1\n", + " endInd = (\n", + " (startXIndex + startZIndex) + (nz - 1) + (startXIndex * x) + 1\n", + " ) # Wrap around last point\n", " # +1 in range as last point is not included\n", - " inds = range(startInd, endInd+1)\n", - " \n", + " inds = range(startInd, endInd + 1)\n", + "\n", " for rInd, cInd, ind in zip(rows, cols, inds):\n", - " InvM[rInd, cInd] = symbols('zp_'+str(ind))" + " InvM[rInd, cInd] = symbols(\"zp_\" + str(ind))" ] }, { @@ -423,15 +433,17 @@ "source": [ "# The wrap around\n", "# The index referring to the spatial point in the grid\n", - "startInd = startXIndex+startZIndex\n", + "startInd = startXIndex + startZIndex\n", "# The indices referring to the matrix index\n", - "# Last -1 as the matrix indices are counted from 0 \n", - "startRow = (nz+1) + (nz-1) - 1 # nz+1 below from the ghost sub matrix, nz-1 below after that\n", - "startCol = (nz+1)-1 # nz+1 left of the ghost sub matrix\n", + "# Last -1 as the matrix indices are counted from 0\n", + "startRow = (\n", + " (nz + 1) + (nz - 1) - 1\n", + ") # nz+1 below from the ghost sub matrix, nz-1 below 
after that\n", + "startCol = (nz + 1) - 1 # nz+1 left of the ghost sub matrix\n", "for wrap in range(nx):\n", - " row = startRow+wrap*nz\n", - " col = startCol+wrap*nz\n", - " InvM[row, col] = symbols('zp_'+str(startInd+startXIndex*wrap))" + " row = startRow + wrap * nz\n", + " col = startCol + wrap * nz\n", + " InvM[row, col] = symbols(\"zp_\" + str(startInd + startXIndex * wrap))" ] }, { @@ -452,20 +464,26 @@ "for x in range(nx):\n", " # The indices referring to the matrix index\n", " # The last -1 is there as the matrix indices count from 0\n", - " startRow = (nz+1)+(x*nz)-1 # Starting at row+1 after inner ghost point sub-matrix\n", - " endRow = (nz+1)+(x*nz)+(nz-1)-1 # Ending row-1 before the last z-index (last z will be wrapped around)\n", + " startRow = (\n", + " (nz + 1) + (x * nz) - 1\n", + " ) # Starting at row+1 after inner ghost point sub-matrix\n", + " endRow = (\n", + " (nz + 1) + (x * nz) + (nz - 1) - 1\n", + " ) # Ending row-1 before the last z-index (last z will be wrapped around)\n", " # +1 in range as last point is not included\n", - " rows = range(startRow+1, endRow+1) # Row is shifted +1 from the diagonal\n", - " cols = range(startRow, endRow+1)\n", - " \n", + " rows = range(startRow + 1, endRow + 1) # Row is shifted +1 from the diagonal\n", + " cols = range(startRow, endRow + 1)\n", + "\n", " # The indices referring to the spatial point in the grid\n", - " startInd = (startXIndex+startZIndex) + (startXIndex*x)\n", - " endInd = (startXIndex+startZIndex) + (nz-1) + (startXIndex*x) # Wrap around last point\n", + " startInd = (startXIndex + startZIndex) + (startXIndex * x)\n", + " endInd = (\n", + " (startXIndex + startZIndex) + (nz - 1) + (startXIndex * x)\n", + " ) # Wrap around last point\n", " # +1 in range as last point is not included\n", - " inds = range(startInd, endInd+1)\n", - " \n", + " inds = range(startInd, endInd + 1)\n", + "\n", " for rInd, cInd, ind in zip(rows, cols, inds):\n", - " InvM[rInd, cInd] = symbols('zm_'+str(ind))" + " 
InvM[rInd, cInd] = symbols(\"zm_\" + str(ind))" ] }, { @@ -478,15 +496,19 @@ "source": [ "# The wrap around\n", "# The index referring to the spatial point in the grid\n", - "startInd = startXIndex+startZIndex+(nz-1) # +(nz-1) as this will be the last z point for the current x\n", + "startInd = (\n", + " startXIndex + startZIndex + (nz - 1)\n", + ") # +(nz-1) as this will be the last z point for the current x\n", "# The indices referring to the matrix index\n", - "# Last -1 as the matrix indices are counted from 0 \n", - "startRow = (nz+1)-1 # nz+1 below the ghost sub matrix\n", - "startCol = (nz+1) + (nz-1) - 1 # nz+1 left from the ghost sub matrix, nz-1 left after that\n", + "# Last -1 as the matrix indices are counted from 0\n", + "startRow = (nz + 1) - 1 # nz+1 below the ghost sub matrix\n", + "startCol = (\n", + " (nz + 1) + (nz - 1) - 1\n", + ") # nz+1 left from the ghost sub matrix, nz-1 left after that\n", "for wrap in range(nx):\n", - " row = startRow+wrap*nz\n", - " col = startCol+wrap*nz\n", - " InvM[row, col] = symbols('zm_'+str(startInd+startXIndex*wrap))" + " row = startRow + wrap * nz\n", + " col = startCol + wrap * nz\n", + " InvM[row, col] = symbols(\"zm_\" + str(startInd + startXIndex * wrap))" ] }, { @@ -505,23 +527,29 @@ "outputs": [], "source": [ "# Indices referring to the spatial points in the grid\n", - "startInd = startXIndex*2 + startZIndex # *2 as we start at the second inner x-index\n", - "endInd = startInd + (startZIndex*nz) # *nz as this is the last z-index in the current x-index\n", + "startInd = startXIndex * 2 + startZIndex # *2 as we start at the second inner x-index\n", + "endInd = startInd + (\n", + " startZIndex * nz\n", + ") # *nz as this is the last z-index in the current x-index\n", "\n", "for x in range(nx):\n", " # The indices referring to the matrix index\n", " # The last -1 as the matrix indices counts from 0\n", - " startRow = (nz+1)+(x*nz)-1 # Starting at row+1 after inner ghost point sub-matrix\n", - " endRow = 
(nz+1)+(x*nz)+(nz)-1 # Ending at the row referring to the last z-index\n", + " startRow = (\n", + " (nz + 1) + (x * nz) - 1\n", + " ) # Starting at row+1 after inner ghost point sub-matrix\n", + " endRow = (\n", + " (nz + 1) + (x * nz) + (nz) - 1\n", + " ) # Ending at the row referring to the last z-index\n", " # Not +1 in range as we do not want to include last point\n", - " rows = range(startRow, endRow)\n", - " cols = range(startRow+nz, endRow+nz) # Start at first index after last z-index\n", - " \n", + " rows = range(startRow, endRow)\n", + " cols = range(startRow + nz, endRow + nz) # Start at first index after last z-index\n", + "\n", " # Indices referring to the spatial points in the grid\n", - " inds = range(startInd+startXIndex*x, endInd+startXIndex*x)\n", - " \n", + " inds = range(startInd + startXIndex * x, endInd + startXIndex * x)\n", + "\n", " for rInd, cInd, ind in zip(rows, cols, inds):\n", - " InvM[rInd, cInd] = symbols('xp_'+str(ind))" + " InvM[rInd, cInd] = symbols(\"xp_\" + str(ind))" ] }, { @@ -534,22 +562,22 @@ "source": [ "# x+1 for inner ghost point\n", "# Indices referring to the spatial points in the grid\n", - "startInd = startXIndex + startZIndex # First inner point for first z\n", - "endInd = startInd + (startZIndex*nz) # First inner point for last z\n", + "startInd = startXIndex + startZIndex # First inner point for first z\n", + "endInd = startInd + (startZIndex * nz) # First inner point for last z\n", "\n", "# The indices referring to the matrix index\n", "# The last -1 as the matrix indices counts from 0\n", - "startRow = startZIndex-1 # Starting at first row\n", - "endRow = startZIndex+nz-1 # Ending at the row referring to the last z-index\n", + "startRow = startZIndex - 1 # Starting at first row\n", + "endRow = startZIndex + nz - 1 # Ending at the row referring to the last z-index\n", "# Not +1 in range as we do not want to include last point\n", - "rows = range(startRow, endRow)\n", - "cols = range(startRow+nz, endRow+nz) # Start 
at first index after last z-index\n", - " \n", + "rows = range(startRow, endRow)\n", + "cols = range(startRow + nz, endRow + nz) # Start at first index after last z-index\n", + "\n", "# Indices referring to the spatial points in the grid\n", - "inds = range(startInd, endInd)\n", - " \n", + "inds = range(startInd, endInd)\n", + "\n", "for rInd, cInd, ind in zip(rows, cols, inds):\n", - " InvM[rInd, cInd] = symbols('igxp_'+str(ind))" + " InvM[rInd, cInd] = symbols(\"igxp_\" + str(ind))" ] }, { @@ -569,25 +597,25 @@ "source": [ "# Indices referring to the spatial points in the grid\n", "startInd = startZIndex\n", - "endInd = startInd + nz\n", + "endInd = startInd + nz\n", "\n", "for x in range(nx):\n", " # The indices referring to the matrix index\n", " # Note that x starts counting from zero, so we must add 1 to x in the rows\n", - " startRow = ((x+1)*nz) # Starting at row+1 after inner ghost point sub-matrix\n", - " endRow = ((x+1)*nz)+(nz) # Ending at the row referring to the last z-index\n", + " startRow = (x + 1) * nz # Starting at row+1 after inner ghost point sub-matrix\n", + " endRow = ((x + 1) * nz) + (nz) # Ending at the row referring to the last z-index\n", " # Not +1 in range as we do not want to include last point\n", - " rows = range(startRow, endRow)\n", - " cols = range(startRow-nz, endRow-nz) # Start at first index after last z-index\n", - " \n", + " rows = range(startRow, endRow)\n", + " cols = range(startRow - nz, endRow - nz) # Start at first index after last z-index\n", + "\n", " # Indices referring to the spatial points in the grid\n", - " inds = range(startInd+startXIndex*x, endInd+startXIndex*x)\n", - " \n", + " inds = range(startInd + startXIndex * x, endInd + startXIndex * x)\n", + "\n", " for rInd, cInd, ind in zip(rows, cols, inds):\n", " if (ind) < startXIndex:\n", - " ind = '0'+str(ind)\n", + " ind = \"0\" + str(ind)\n", "\n", - " InvM[rInd, cInd] = symbols('xm_'+str(ind))" + " InvM[rInd, cInd] = symbols(\"xm_\" + str(ind))" ] }, { @@ 
-600,22 +628,24 @@ "source": [ "# x-1 for inner ghost point\n", "# Indices referring to the spatial points in the grid\n", - "startInd = startXIndex*nx + startZIndex # Last inner point for first z\n", - "endInd = startInd + (startZIndex*nz) # Last inner point for last z\n", + "startInd = startXIndex * nx + startZIndex # Last inner point for first z\n", + "endInd = startInd + (startZIndex * nz) # Last inner point for last z\n", "\n", "# The indices referring to the matrix index\n", "# The last -1 as the matrix indices counts from 0\n", - "startRow = len(xVec)-nz-1 # Starting at last inner point row\n", - "endRow = len(xVec)-1 # Ending at the last row\n", + "startRow = len(xVec) - nz - 1 # Starting at last inner point row\n", + "endRow = len(xVec) - 1 # Ending at the last row\n", "# +1 in range as last point is not included\n", - "rows = range(startRow+1, endRow+1)\n", - "cols = range(startRow-nz+1, endRow-nz+1) # Start at first index after last z-index\n", - " \n", + "rows = range(startRow + 1, endRow + 1)\n", + "cols = range(\n", + " startRow - nz + 1, endRow - nz + 1\n", + ") # Start at first index after last z-index\n", + "\n", "# Indices referring to the spatial points in the grid\n", - "inds = range(startInd, endInd)\n", - " \n", + "inds = range(startInd, endInd)\n", + "\n", "for rInd, cInd, ind in zip(rows, cols, inds):\n", - " InvM[rInd, cInd] = symbols('ogxm_'+str(ind))" + " InvM[rInd, cInd] = symbols(\"ogxm_\" + str(ind))" ] }, { diff --git a/manual/sphinx/figs/figure_creators/bout_runners_folder_structure.py b/manual/sphinx/figs/figure_creators/bout_runners_folder_structure.py index 8a95a13436..5a4a2a0843 100644 --- a/manual/sphinx/figs/figure_creators/bout_runners_folder_structure.py +++ b/manual/sphinx/figs/figure_creators/bout_runners_folder_structure.py @@ -10,10 +10,10 @@ mmag@fysik.dtu.dk """ -__authors__ = 'Michael Loeiten' -__email__ = 'mmag@fysik.dtu.dk' -__version__ = '1.0' -__date__ = '21.01.2016' +__authors__ = "Michael Loeiten" +__email__ = 
"mmag@fysik.dtu.dk" +__version__ = "1.0" +__date__ = "21.01.2016" import pygraphviz as pgv @@ -21,113 +21,118 @@ tree = pgv.AGraph() # Appendable lists -files = [] +files = [] dead_ends = [] # Default node attributes -tree.node_attr['shape']='box' -tree.node_attr['style']='bold' +tree.node_attr["shape"] = "box" +tree.node_attr["style"] = "bold" # Adding nodes and edges -l0 = 'project' -l1 = ['data', 'source\nfiles', 'driver.py'] +l0 = "project" +l1 = ["data", "source\nfiles", "driver.py"] # Append the files files.append(l1[1]) files.append(l1[2]) # Add the boxes to the mother node for box in l1: - tree.add_edge(l0,box) + tree.add_edge(l0, box) -l2 = ['solver1', 'solver2',\ - 'BOUT.inp', 'run_log.txt'] +l2 = ["solver1", "solver2", "BOUT.inp", "run_log.txt"] # Append the files files.append(l2[2]) files.append(l2[3]) # Add the boxes to the mother node for box in l2: - tree.add_edge('data', box) -tree.add_edge('solver2', 'solver2/...') + tree.add_edge("data", box) +tree.add_edge("solver2", "solver2/...") # Append the dead_end -de = l2[1] + '/...' +de = l2[1] + "/..." dead_ends.append(de) -l3 = ['method1', 'method2', 'solver1/...'] +l3 = ["method1", "method2", "solver1/..."] for box in l3: - tree.add_edge('solver1', box) -tree.add_edge('method2', 'method2/...') + tree.add_edge("solver1", box) +tree.add_edge("method2", "method2/...") # Append the dead_end de = l3[2] dead_ends.append(de) -de = l3[1] + '/...' +de = l3[1] + "/..." dead_ends.append(de) -l4 = ['nout\ntimestep1', 'nout\ntimestep2', 'method1/...'] +l4 = ["nout\ntimestep1", "nout\ntimestep2", "method1/..."] for box in l4: - tree.add_edge('method1', box) -tree.add_edge('nout\ntimestep2', 'nout\ntimestep2/...') + tree.add_edge("method1", box) +tree.add_edge("nout\ntimestep2", "nout\ntimestep2/...") # Append the dead_end de = l4[2] dead_ends.append(de) -de = l4[1] + '/...' +de = l4[1] + "/..." 
dead_ends.append(de) -l5 = ['mesh1', 'mesh2', 'nout\ntimestep1/...'] +l5 = ["mesh1", "mesh2", "nout\ntimestep1/..."] for box in l5: - tree.add_edge('nout\ntimestep1', box) -tree.add_edge('mesh2', 'mesh2/...') + tree.add_edge("nout\ntimestep1", box) +tree.add_edge("mesh2", "mesh2/...") # Append the dead_end de = l5[2] dead_ends.append(de) -de = l5[1] + '/...' +de = l5[1] + "/..." dead_ends.append(de) -l6 = ['additional1', 'additional2', 'mesh1/...'] +l6 = ["additional1", "additional2", "mesh1/..."] for box in l6: - tree.add_edge('mesh1', box) -tree.add_edge('additional2', 'additional2/...') + tree.add_edge("mesh1", box) +tree.add_edge("additional2", "additional2/...") # Append the dead_end de = l6[2] dead_ends.append(de) -de = l6[1] + '/...' +de = l6[1] + "/..." dead_ends.append(de) -l7 = ['grid_file1', 'grid_file2', 'additional1/...'] +l7 = ["grid_file1", "grid_file2", "additional1/..."] for box in l7: - tree.add_edge('additional1', box) -tree.add_edge('grid_file2', 'grid_file2/...') + tree.add_edge("additional1", box) +tree.add_edge("grid_file2", "grid_file2/...") # Append the dead_end de = l7[2] dead_ends.append(de) -de = l7[1] + '/...' +de = l7[1] + "/..." 
dead_ends.append(de) -l8 = ['BOUT.inp\n(copy)', 'BOUT.log', 'BOUT.dmp',\ - 'BOUT.restart', '(source_files\n(copy))', '(grid_file\n(copy))'] +l8 = [ + "BOUT.inp\n(copy)", + "BOUT.log", + "BOUT.dmp", + "BOUT.restart", + "(source_files\n(copy))", + "(grid_file\n(copy))", +] # Add l8 to the files list for cur_file in l8: files.append(cur_file) # Append them to the mother node for box in l8: - tree.add_edge('grid_file1', box) + tree.add_edge("grid_file1", box) # Change colors for the files for the_file in files: - member=tree.get_node(the_file) -# member.attr['fontcolor'] = 'limegreen' - member.attr['color'] = 'limegreen' + member = tree.get_node(the_file) + # member.attr['fontcolor'] = 'limegreen' + member.attr["color"] = "limegreen" # Change colors for the dead_ends for dead_end in dead_ends: - member=tree.get_node(dead_end) -# member.attr['fontcolor'] = 'darksalmon' - member.attr['color'] = 'darksalmon' + member = tree.get_node(dead_end) + # member.attr['fontcolor'] = 'darksalmon' + member.attr["color"] = "darksalmon" # Print the graph print(tree.string()) # Set layout -tree.layout('dot') +tree.layout("dot") # Write to file -tree.draw('folder_tree.svg') +tree.draw("folder_tree.svg") diff --git a/src/mesh/parallel/fci.cxx b/src/mesh/parallel/fci.cxx index 08e56584e1..7832e48fc6 100644 --- a/src/mesh/parallel/fci.cxx +++ b/src/mesh/parallel/fci.cxx @@ -309,8 +309,8 @@ void FCITransform::checkInputGrid() { "to generate metric components for FCITransform. 
Should be 'fci'."); } } // else: parallel_transform variable not found in grid input, indicates older input - // file or grid from options so must rely on the user having ensured the type is - // correct + // file or grid from options so must rely on the user having ensured the type is + // correct } void FCITransform::calcParallelSlices(Field3D& f) { diff --git a/src/mesh/parallel/fci.hxx b/src/mesh/parallel/fci.hxx index 1a02f558e1..74922bd0e5 100644 --- a/src/mesh/parallel/fci.hxx +++ b/src/mesh/parallel/fci.hxx @@ -73,7 +73,7 @@ public: FCITransform() = delete; FCITransform(Mesh& mesh, const Coordinates::FieldMetric& dy, bool zperiodic = true, Options* opt = nullptr) - : ParallelTransform(mesh, opt), R{&mesh}, Z{&mesh} { + : ParallelTransform(mesh, opt), R{&mesh}, Z{&mesh} { // check the coordinate system used for the grid data source FCITransform::checkInputGrid(); diff --git a/tests/MMS/CMakeLists.txt b/tests/MMS/CMakeLists.txt index 0c42da7074..1c80e3b6a6 100644 --- a/tests/MMS/CMakeLists.txt +++ b/tests/MMS/CMakeLists.txt @@ -13,19 +13,17 @@ add_subdirectory(time-petsc) add_subdirectory(wave-1d) add_subdirectory(wave-1d-y) -######################################## +# ############################################################################## # The following require boutpp: -if (BOUT_ENABLE_PYTHON) +if(BOUT_ENABLE_PYTHON) add_subdirectory(bracket) add_subdirectory(derivatives3) add_subdirectory(shiftedmetricinterp) add_subdirectory(upwinding3) endif() -######################################## +# ############################################################################## # The following are marked as broken -# add_subdirectory(elm-pb) -# add_subdirectory(fieldalign) -# add_subdirectory(GBS) +# add_subdirectory(elm-pb) add_subdirectory(fieldalign) add_subdirectory(GBS) # add_subdirectory(tokamak) diff --git a/tests/MMS/advection/arakawa/CMakeLists.txt b/tests/MMS/advection/arakawa/CMakeLists.txt index 267146ed51..2acd15d711 100644 --- 
a/tests/MMS/advection/arakawa/CMakeLists.txt +++ b/tests/MMS/advection/arakawa/CMakeLists.txt @@ -1,8 +1,12 @@ -bout_add_mms_test(MMS-advection-arakawa +bout_add_mms_test( + MMS-advection-arakawa USE_RUNTEST USE_DATA_BOUT_INP - EXTRA_FILES parent.py - EXTRA_DEPENDS MMS-advection - REQUIRES BOUT_RUN_ALL_TESTS - PROCESSORS 8 -) + EXTRA_FILES + parent.py + EXTRA_DEPENDS + MMS-advection + REQUIRES + BOUT_RUN_ALL_TESTS + PROCESSORS + 8) diff --git a/tests/MMS/advection/central/CMakeLists.txt b/tests/MMS/advection/central/CMakeLists.txt index 9b2288856e..7821e315f5 100644 --- a/tests/MMS/advection/central/CMakeLists.txt +++ b/tests/MMS/advection/central/CMakeLists.txt @@ -1,8 +1,12 @@ -bout_add_mms_test(MMS-advection-central +bout_add_mms_test( + MMS-advection-central USE_RUNTEST USE_DATA_BOUT_INP - EXTRA_FILES parent.py - EXTRA_DEPENDS MMS-advection - REQUIRES BOUT_RUN_ALL_TESTS - PROCESSORS 8 -) + EXTRA_FILES + parent.py + EXTRA_DEPENDS + MMS-advection + REQUIRES + BOUT_RUN_ALL_TESTS + PROCESSORS + 8) diff --git a/tests/MMS/advection/runtest b/tests/MMS/advection/runtest index 9a0a691287..5109c47815 100755 --- a/tests/MMS/advection/runtest +++ b/tests/MMS/advection/runtest @@ -50,7 +50,7 @@ def run_mms(options, exit=True): dx = 2.0 * pi / (nx) - args = f"{opts} mesh:nx={nx+4} mesh:dx={dx} MZ={nx}" + args = f"{opts} mesh:nx={nx + 4} mesh:dx={dx} MZ={nx}" print(" Running with " + args) diff --git a/tests/MMS/advection/upwind/CMakeLists.txt b/tests/MMS/advection/upwind/CMakeLists.txt index 3a9db4630a..bf91170f43 100644 --- a/tests/MMS/advection/upwind/CMakeLists.txt +++ b/tests/MMS/advection/upwind/CMakeLists.txt @@ -1,8 +1,12 @@ -bout_add_mms_test(MMS-advection-upwind +bout_add_mms_test( + MMS-advection-upwind USE_RUNTEST USE_DATA_BOUT_INP - EXTRA_FILES parent.py - EXTRA_DEPENDS MMS-advection - REQUIRES BOUT_RUN_ALL_TESTS - PROCESSORS 8 -) + EXTRA_FILES + parent.py + EXTRA_DEPENDS + MMS-advection + REQUIRES + BOUT_RUN_ALL_TESTS + PROCESSORS + 8) diff --git 
a/tests/MMS/advection/weno3/CMakeLists.txt b/tests/MMS/advection/weno3/CMakeLists.txt index 293b6b95c9..deac4bd8fc 100644 --- a/tests/MMS/advection/weno3/CMakeLists.txt +++ b/tests/MMS/advection/weno3/CMakeLists.txt @@ -1,8 +1,12 @@ -bout_add_mms_test(MMS-advection-weno3 +bout_add_mms_test( + MMS-advection-weno3 USE_RUNTEST USE_DATA_BOUT_INP - EXTRA_FILES parent.py - EXTRA_DEPENDS MMS-advection - REQUIRES BOUT_RUN_ALL_TESTS - PROCESSORS 8 -) + EXTRA_FILES + parent.py + EXTRA_DEPENDS + MMS-advection + REQUIRES + BOUT_RUN_ALL_TESTS + PROCESSORS + 8) diff --git a/tests/MMS/bracket/CMakeLists.txt b/tests/MMS/bracket/CMakeLists.txt index fc8103aab0..4f67fbde9f 100644 --- a/tests/MMS/bracket/CMakeLists.txt +++ b/tests/MMS/bracket/CMakeLists.txt @@ -1,7 +1,4 @@ -bout_add_mms_test(MMS-bracket - USE_RUNTEST - USE_DATA_BOUT_INP - REQUIRES BOUT_ENABLE_PYTHON -) +bout_add_mms_test(MMS-bracket USE_RUNTEST USE_DATA_BOUT_INP REQUIRES + BOUT_ENABLE_PYTHON) add_dependencies(MMS-bracket boutpp) diff --git a/tests/MMS/derivatives3/CMakeLists.txt b/tests/MMS/derivatives3/CMakeLists.txt index 5615a46a38..1fc3d2936f 100644 --- a/tests/MMS/derivatives3/CMakeLists.txt +++ b/tests/MMS/derivatives3/CMakeLists.txt @@ -1,7 +1,4 @@ -bout_add_mms_test(MMS-derivatives3 - USE_RUNTEST - USE_DATA_BOUT_INP - REQUIRES BOUT_ENABLE_PYTHON -) +bout_add_mms_test(MMS-derivatives3 USE_RUNTEST USE_DATA_BOUT_INP REQUIRES + BOUT_ENABLE_PYTHON) add_dependencies(MMS-derivatives3 boutpp) diff --git a/tests/MMS/diffusion/CMakeLists.txt b/tests/MMS/diffusion/CMakeLists.txt index b91b6a8327..4187f15916 100644 --- a/tests/MMS/diffusion/CMakeLists.txt +++ b/tests/MMS/diffusion/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_mms_test(MMS-diffusion - SOURCES diffusion.cxx - EXECUTABLE_NAME cyto +bout_add_mms_test( + MMS-diffusion + SOURCES + diffusion.cxx + EXECUTABLE_NAME + cyto USE_RUNTEST - USE_DATA_BOUT_INP - ) + USE_DATA_BOUT_INP) diff --git a/tests/MMS/diffusion2/CMakeLists.txt b/tests/MMS/diffusion2/CMakeLists.txt 
index 910be46481..d96f50181c 100644 --- a/tests/MMS/diffusion2/CMakeLists.txt +++ b/tests/MMS/diffusion2/CMakeLists.txt @@ -1,11 +1,15 @@ -bout_add_mms_test(MMS-diffusion2 - SOURCES diffusion.cxx - EXECUTABLE_NAME cyto +bout_add_mms_test( + MMS-diffusion2 + SOURCES + diffusion.cxx + EXECUTABLE_NAME + cyto USE_RUNTEST EXTRA_FILES - X/BOUT.inp - Y/BOUT.inp - Z/BOUT.inp - REQUIRES BOUT_RUN_ALL_TESTS - PROCESSORS 2 - ) + X/BOUT.inp + Y/BOUT.inp + Z/BOUT.inp + REQUIRES + BOUT_RUN_ALL_TESTS + PROCESSORS + 2) diff --git a/tests/MMS/hw/CMakeLists.txt b/tests/MMS/hw/CMakeLists.txt index 6fda0448be..f4f4d164fe 100644 --- a/tests/MMS/hw/CMakeLists.txt +++ b/tests/MMS/hw/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_mms_test(MMS-hw - SOURCES hw.cxx +bout_add_mms_test( + MMS-hw + SOURCES + hw.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_RUN_ALL_TESTS - PROCESSORS 4 -) + REQUIRES + BOUT_RUN_ALL_TESTS + PROCESSORS + 4) diff --git a/tests/MMS/laplace/CMakeLists.txt b/tests/MMS/laplace/CMakeLists.txt index 4825fc7fd6..a93628a3e5 100644 --- a/tests/MMS/laplace/CMakeLists.txt +++ b/tests/MMS/laplace/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_mms_test(MMS-laplace - SOURCES laplace.cxx +bout_add_mms_test( + MMS-laplace + SOURCES + laplace.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_RUN_ALL_TESTS - PROCESSORS 2 -) + REQUIRES + BOUT_RUN_ALL_TESTS + PROCESSORS + 2) diff --git a/tests/MMS/shiftedmetricinterp/CMakeLists.txt b/tests/MMS/shiftedmetricinterp/CMakeLists.txt index 1693428da6..832c2d9416 100644 --- a/tests/MMS/shiftedmetricinterp/CMakeLists.txt +++ b/tests/MMS/shiftedmetricinterp/CMakeLists.txt @@ -1,7 +1,4 @@ -bout_add_mms_test(MMS-shiftedmetricinterp - USE_RUNTEST - USE_DATA_BOUT_INP - REQUIRES BOUT_ENABLE_PYTHON -) +bout_add_mms_test(MMS-shiftedmetricinterp USE_RUNTEST USE_DATA_BOUT_INP + REQUIRES BOUT_ENABLE_PYTHON) add_dependencies(MMS-shiftedmetricinterp boutpp) diff --git a/tests/MMS/spatial/advection/CMakeLists.txt b/tests/MMS/spatial/advection/CMakeLists.txt index 
4adb108612..cc2709e8ce 100644 --- a/tests/MMS/spatial/advection/CMakeLists.txt +++ b/tests/MMS/spatial/advection/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_mms_test(MMS-spatial-advection - SOURCES advection.cxx +bout_add_mms_test( + MMS-spatial-advection + SOURCES + advection.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_RUN_ALL_TESTS - PROCESSORS 4 -) + REQUIRES + BOUT_RUN_ALL_TESTS + PROCESSORS + 4) diff --git a/tests/MMS/spatial/d2dx2/CMakeLists.txt b/tests/MMS/spatial/d2dx2/CMakeLists.txt index 0affa9beee..ba0fed4e9d 100644 --- a/tests/MMS/spatial/d2dx2/CMakeLists.txt +++ b/tests/MMS/spatial/d2dx2/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_mms_test(MMS-spatial-d2dx2 - SOURCES test_d2dx2.cxx +bout_add_mms_test( + MMS-spatial-d2dx2 + SOURCES + test_d2dx2.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_RUN_ALL_TESTS -) + REQUIRES + BOUT_RUN_ALL_TESTS) diff --git a/tests/MMS/spatial/d2dz2/CMakeLists.txt b/tests/MMS/spatial/d2dz2/CMakeLists.txt index 01b61eaa5d..998df35ebc 100644 --- a/tests/MMS/spatial/d2dz2/CMakeLists.txt +++ b/tests/MMS/spatial/d2dz2/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_mms_test(MMS-spatial-d2dz2 - SOURCES test_d2dz2.cxx +bout_add_mms_test( + MMS-spatial-d2dz2 + SOURCES + test_d2dz2.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_RUN_ALL_TESTS -) + REQUIRES + BOUT_RUN_ALL_TESTS) diff --git a/tests/MMS/spatial/diffusion/CMakeLists.txt b/tests/MMS/spatial/diffusion/CMakeLists.txt index 67a27aafa4..76053cc195 100644 --- a/tests/MMS/spatial/diffusion/CMakeLists.txt +++ b/tests/MMS/spatial/diffusion/CMakeLists.txt @@ -1,7 +1,11 @@ -bout_add_mms_test(MMS-spatial-diffusion - SOURCES diffusion.cxx +bout_add_mms_test( + MMS-spatial-diffusion + SOURCES + diffusion.cxx USE_RUNTEST - EXTRA_FILES X/BOUT.inp - REQUIRES BOUT_RUN_ALL_TESTS - PROCESSORS 2 -) + EXTRA_FILES + X/BOUT.inp + REQUIRES + BOUT_RUN_ALL_TESTS + PROCESSORS + 2) diff --git a/tests/MMS/spatial/fci/CMakeLists.txt b/tests/MMS/spatial/fci/CMakeLists.txt index 94b9682c9c..6f5a686f0a 
100644 --- a/tests/MMS/spatial/fci/CMakeLists.txt +++ b/tests/MMS/spatial/fci/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_mms_test(MMS-spatial-fci - SOURCES fci_mms.cxx +bout_add_mms_test( + MMS-spatial-fci + SOURCES + fci_mms.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES zoidberg_FOUND - PROCESSORS 2 -) + REQUIRES + zoidberg_FOUND + PROCESSORS + 2) diff --git a/tests/MMS/time-petsc/CMakeLists.txt b/tests/MMS/time-petsc/CMakeLists.txt index 3cbbe6f717..6697c804b1 100644 --- a/tests/MMS/time-petsc/CMakeLists.txt +++ b/tests/MMS/time-petsc/CMakeLists.txt @@ -1,7 +1,8 @@ -bout_add_mms_test(MMS-time-petsc +bout_add_mms_test( + MMS-time-petsc USE_RUNTEST REQUIRES - BOUT_HAS_PETSC - BOUT_RUN_ALL_TESTS - EXTRA_DEPENDS MMS-time -) + BOUT_HAS_PETSC + BOUT_RUN_ALL_TESTS + EXTRA_DEPENDS + MMS-time) diff --git a/tests/MMS/time/CMakeLists.txt b/tests/MMS/time/CMakeLists.txt index 9c3480629e..c99b79ecd6 100644 --- a/tests/MMS/time/CMakeLists.txt +++ b/tests/MMS/time/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_mms_test(MMS-time - SOURCES time.cxx +bout_add_mms_test( + MMS-time + SOURCES + time.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_RUN_ALL_TESTS -) + REQUIRES + BOUT_RUN_ALL_TESTS) diff --git a/tests/MMS/upwinding3/CMakeLists.txt b/tests/MMS/upwinding3/CMakeLists.txt index 5daf58b29b..6c83f62aae 100644 --- a/tests/MMS/upwinding3/CMakeLists.txt +++ b/tests/MMS/upwinding3/CMakeLists.txt @@ -1,7 +1,4 @@ -bout_add_mms_test(MMS-upwinding3 - USE_RUNTEST - USE_DATA_BOUT_INP - REQUIRES BOUT_ENABLE_PYTHON -) +bout_add_mms_test(MMS-upwinding3 USE_RUNTEST USE_DATA_BOUT_INP REQUIRES + BOUT_ENABLE_PYTHON) add_dependencies(MMS-upwinding3 boutpp) diff --git a/tests/MMS/wave-1d-y/CMakeLists.txt b/tests/MMS/wave-1d-y/CMakeLists.txt index cc5cddfff4..4bf846ba6b 100644 --- a/tests/MMS/wave-1d-y/CMakeLists.txt +++ b/tests/MMS/wave-1d-y/CMakeLists.txt @@ -1,5 +1 @@ -bout_add_mms_test(MMS-wave-1d-y - SOURCES wave.cxx - USE_RUNTEST - USE_DATA_BOUT_INP -) +bout_add_mms_test(MMS-wave-1d-y SOURCES 
wave.cxx USE_RUNTEST USE_DATA_BOUT_INP) diff --git a/tests/MMS/wave-1d/CMakeLists.txt b/tests/MMS/wave-1d/CMakeLists.txt index a9ae3d748c..8f64b22fa8 100644 --- a/tests/MMS/wave-1d/CMakeLists.txt +++ b/tests/MMS/wave-1d/CMakeLists.txt @@ -1,5 +1 @@ -bout_add_mms_test(MMS-wave-1d - SOURCES wave.cxx - USE_RUNTEST - USE_DATA_BOUT_INP -) +bout_add_mms_test(MMS-wave-1d SOURCES wave.cxx USE_RUNTEST USE_DATA_BOUT_INP) diff --git a/tests/integrated/test-backtrace/CMakeLists.txt b/tests/integrated/test-backtrace/CMakeLists.txt index 579f4863ac..1f1dd47abb 100644 --- a/tests/integrated/test-backtrace/CMakeLists.txt +++ b/tests/integrated/test-backtrace/CMakeLists.txt @@ -1,4 +1 @@ -bout_add_integrated_test(test-backtrace - SOURCES boutexcept.cxx - USE_RUNTEST -) +bout_add_integrated_test(test-backtrace SOURCES boutexcept.cxx USE_RUNTEST) diff --git a/tests/integrated/test-beuler/CMakeLists.txt b/tests/integrated/test-beuler/CMakeLists.txt index c67b2dce44..fb33f0c465 100644 --- a/tests/integrated/test-beuler/CMakeLists.txt +++ b/tests/integrated/test-beuler/CMakeLists.txt @@ -1,2 +1,2 @@ -bout_add_integrated_test(test_beuler SOURCES test_beuler.cxx - REQUIRES BOUT_HAS_PETSC) +bout_add_integrated_test(test_beuler SOURCES test_beuler.cxx REQUIRES + BOUT_HAS_PETSC) diff --git a/tests/integrated/test-bout-override-default-option/CMakeLists.txt b/tests/integrated/test-bout-override-default-option/CMakeLists.txt index 7c3d6390b0..809ebf4af3 100644 --- a/tests/integrated/test-bout-override-default-option/CMakeLists.txt +++ b/tests/integrated/test-bout-override-default-option/CMakeLists.txt @@ -1,3 +1,2 @@ -bout_add_integrated_test(test-bout-override-default-option - SOURCES test-bout-override-default-option.cxx - ) +bout_add_integrated_test(test-bout-override-default-option SOURCES + test-bout-override-default-option.cxx) diff --git a/tests/integrated/test-boutpp/CMakeLists.txt b/tests/integrated/test-boutpp/CMakeLists.txt index 0a16bba43e..8aa0d55869 100644 --- 
a/tests/integrated/test-boutpp/CMakeLists.txt +++ b/tests/integrated/test-boutpp/CMakeLists.txt @@ -1,4 +1,4 @@ -if (BOUT_ENABLE_PYTHON) +if(BOUT_ENABLE_PYTHON) add_subdirectory(collect) add_subdirectory(collect-staggered) add_subdirectory(legacy-model) diff --git a/tests/integrated/test-boutpp/collect-staggered/CMakeLists.txt b/tests/integrated/test-boutpp/collect-staggered/CMakeLists.txt index 7a41adc15d..f334c41f19 100644 --- a/tests/integrated/test-boutpp/collect-staggered/CMakeLists.txt +++ b/tests/integrated/test-boutpp/collect-staggered/CMakeLists.txt @@ -1,8 +1,5 @@ -bout_add_integrated_test(test-boutpp-collect-staggered - USE_RUNTEST - USE_DATA_BOUT_INP - REQUIRES BOUT_ENABLE_PYTHON -) +bout_add_integrated_test(test-boutpp-collect-staggered USE_RUNTEST + USE_DATA_BOUT_INP REQUIRES BOUT_ENABLE_PYTHON) add_dependencies(test-boutpp-collect-staggered boutpp) add_dependencies(build-check-integrated-tests boutpp) diff --git a/tests/integrated/test-boutpp/collect/CMakeLists.txt b/tests/integrated/test-boutpp/collect/CMakeLists.txt index beb78086c6..9ad4d94a7c 100644 --- a/tests/integrated/test-boutpp/collect/CMakeLists.txt +++ b/tests/integrated/test-boutpp/collect/CMakeLists.txt @@ -1,8 +1,5 @@ -bout_add_integrated_test(test-boutpp-collect - USE_RUNTEST - EXTRA_FILES input/BOUT.inp - REQUIRES BOUT_ENABLE_PYTHON -) +bout_add_integrated_test(test-boutpp-collect USE_RUNTEST EXTRA_FILES + input/BOUT.inp REQUIRES BOUT_ENABLE_PYTHON) add_dependencies(test-boutpp-collect boutpp) add_dependencies(build-check-integrated-tests boutpp) diff --git a/tests/integrated/test-boutpp/legacy-model/CMakeLists.txt b/tests/integrated/test-boutpp/legacy-model/CMakeLists.txt index a32b1f4f4b..9f730cd9e8 100644 --- a/tests/integrated/test-boutpp/legacy-model/CMakeLists.txt +++ b/tests/integrated/test-boutpp/legacy-model/CMakeLists.txt @@ -1,8 +1,5 @@ -bout_add_integrated_test(test-boutpp-legacy-model - USE_RUNTEST - USE_DATA_BOUT_INP - REQUIRES BOUT_ENABLE_PYTHON -) 
+bout_add_integrated_test(test-boutpp-legacy-model USE_RUNTEST USE_DATA_BOUT_INP + REQUIRES BOUT_ENABLE_PYTHON) add_dependencies(test-boutpp-legacy-model boutpp) add_dependencies(build-check-integrated-tests boutpp) diff --git a/tests/integrated/test-boutpp/mms-ddz/CMakeLists.txt b/tests/integrated/test-boutpp/mms-ddz/CMakeLists.txt index dfac88662a..4f4f5b7e90 100644 --- a/tests/integrated/test-boutpp/mms-ddz/CMakeLists.txt +++ b/tests/integrated/test-boutpp/mms-ddz/CMakeLists.txt @@ -1,8 +1,5 @@ -bout_add_integrated_test(test-boutpp-mms-ddz - USE_RUNTEST - USE_DATA_BOUT_INP - REQUIRES BOUT_ENABLE_PYTHON -) +bout_add_integrated_test(test-boutpp-mms-ddz USE_RUNTEST USE_DATA_BOUT_INP + REQUIRES BOUT_ENABLE_PYTHON) add_dependencies(test-boutpp-mms-ddz boutpp) add_dependencies(build-check-integrated-tests boutpp) diff --git a/tests/integrated/test-boutpp/print/CMakeLists.txt b/tests/integrated/test-boutpp/print/CMakeLists.txt index 2e61b29257..75272a5451 100644 --- a/tests/integrated/test-boutpp/print/CMakeLists.txt +++ b/tests/integrated/test-boutpp/print/CMakeLists.txt @@ -1,8 +1,11 @@ -bout_add_integrated_test(test-boutpp-print +bout_add_integrated_test( + test-boutpp-print USE_RUNTEST - EXTRA_FILES test/BOUT.inp test.py - REQUIRES BOUT_ENABLE_PYTHON -) + EXTRA_FILES + test/BOUT.inp + test.py + REQUIRES + BOUT_ENABLE_PYTHON) add_dependencies(test-boutpp-print boutpp) add_dependencies(build-check-integrated-tests boutpp) diff --git a/tests/integrated/test-boutpp/simple-model/CMakeLists.txt b/tests/integrated/test-boutpp/simple-model/CMakeLists.txt index 891a2ce018..2a7887fad0 100644 --- a/tests/integrated/test-boutpp/simple-model/CMakeLists.txt +++ b/tests/integrated/test-boutpp/simple-model/CMakeLists.txt @@ -1,8 +1,5 @@ -bout_add_integrated_test(test-boutpp-simple-model - USE_RUNTEST - EXTRA_FILES mini/BOUT.inp - REQUIRES BOUT_ENABLE_PYTHON -) +bout_add_integrated_test(test-boutpp-simple-model USE_RUNTEST EXTRA_FILES + mini/BOUT.inp REQUIRES BOUT_ENABLE_PYTHON) 
add_dependencies(test-boutpp-simple-model boutpp) add_dependencies(build-check-integrated-tests boutpp) diff --git a/tests/integrated/test-boutpp/slicing/CMakeLists.txt b/tests/integrated/test-boutpp/slicing/CMakeLists.txt index 4f314a40cc..08d2a5a5d1 100644 --- a/tests/integrated/test-boutpp/slicing/CMakeLists.txt +++ b/tests/integrated/test-boutpp/slicing/CMakeLists.txt @@ -1,8 +1,11 @@ -bout_add_integrated_test(test-boutpp-slicing +bout_add_integrated_test( + test-boutpp-slicing USE_RUNTEST - EXTRA_FILES test/BOUT.inp test.py - REQUIRES BOUT_ENABLE_PYTHON -) + EXTRA_FILES + test/BOUT.inp + test.py + REQUIRES + BOUT_ENABLE_PYTHON) add_dependencies(test-boutpp-slicing boutpp) add_dependencies(build-check-integrated-tests boutpp) diff --git a/tests/integrated/test-collect/CMakeLists.txt b/tests/integrated/test-collect/CMakeLists.txt index e5fa43dde8..c83dd8460f 100644 --- a/tests/integrated/test-collect/CMakeLists.txt +++ b/tests/integrated/test-collect/CMakeLists.txt @@ -1,5 +1,2 @@ -bout_add_integrated_test(test-collect - SOURCES test-collect.cxx - USE_RUNTEST - USE_DATA_BOUT_INP - ) +bout_add_integrated_test(test-collect SOURCES test-collect.cxx USE_RUNTEST + USE_DATA_BOUT_INP) diff --git a/tests/integrated/test-command-args/CMakeLists.txt b/tests/integrated/test-command-args/CMakeLists.txt index 1a2cf88a5e..969715ec8e 100644 --- a/tests/integrated/test-command-args/CMakeLists.txt +++ b/tests/integrated/test-command-args/CMakeLists.txt @@ -1,5 +1,2 @@ -bout_add_integrated_test(test-command-args - SOURCES command-args.cxx - USE_RUNTEST - EXTRA_FILES BOUT.inp - ) +bout_add_integrated_test(test-command-args SOURCES command-args.cxx USE_RUNTEST + EXTRA_FILES BOUT.inp) diff --git a/tests/integrated/test-communications/CMakeLists.txt b/tests/integrated/test-communications/CMakeLists.txt index de6197ce3b..75b0982844 100644 --- a/tests/integrated/test-communications/CMakeLists.txt +++ b/tests/integrated/test-communications/CMakeLists.txt @@ -1,9 +1,12 @@ 
-bout_add_integrated_test(test-communications - SOURCES test-communications.cxx +bout_add_integrated_test( + test-communications + SOURCES + test-communications.cxx USE_RUNTEST USE_DATA_BOUT_INP EXTRA_FILES - data_limiter/BOUT.inp - REQUIRES BOUT_HAS_NETCDF - PROCESSORS 18 - ) + data_limiter/BOUT.inp + REQUIRES + BOUT_HAS_NETCDF + PROCESSORS + 18) diff --git a/tests/integrated/test-coordinates-initialization/CMakeLists.txt b/tests/integrated/test-coordinates-initialization/CMakeLists.txt index ad47be272c..0f3a0ff8fb 100644 --- a/tests/integrated/test-coordinates-initialization/CMakeLists.txt +++ b/tests/integrated/test-coordinates-initialization/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_integrated_test(test-coordinates-initialization - SOURCES test-coordinates-initialization.cxx +bout_add_integrated_test( + test-coordinates-initialization + SOURCES + test-coordinates-initialization.cxx USE_RUNTEST USE_DATA_BOUT_INP - PROCESSORS 3 - ) + PROCESSORS + 3) diff --git a/tests/integrated/test-cyclic/CMakeLists.txt b/tests/integrated/test-cyclic/CMakeLists.txt index a6296858e2..1b18517f73 100644 --- a/tests/integrated/test-cyclic/CMakeLists.txt +++ b/tests/integrated/test-cyclic/CMakeLists.txt @@ -1,8 +1,12 @@ -bout_add_integrated_test(test-cyclic - SOURCES test_cyclic.cxx +bout_add_integrated_test( + test-cyclic + SOURCES + test_cyclic.cxx USE_RUNTEST USE_DATA_BOUT_INP - EXTRA_FILES test_io.grd.nc - REQUIRES BOUT_HAS_NETCDF - PROCESSORS 4 - ) + EXTRA_FILES + test_io.grd.nc + REQUIRES + BOUT_HAS_NETCDF + PROCESSORS + 4) diff --git a/tests/integrated/test-datafilefacade/CMakeLists.txt b/tests/integrated/test-datafilefacade/CMakeLists.txt index a6d1876e16..23149022e7 100644 --- a/tests/integrated/test-datafilefacade/CMakeLists.txt +++ b/tests/integrated/test-datafilefacade/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_integrated_test(test-datafile-facade - SOURCES test-datafile-facade.cxx +bout_add_integrated_test( + test-datafile-facade + SOURCES + test-datafile-facade.cxx 
USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_NETCDF - PROCESSORS 4 - ) + REQUIRES + BOUT_HAS_NETCDF + PROCESSORS + 4) diff --git a/tests/integrated/test-delp2/CMakeLists.txt b/tests/integrated/test-delp2/CMakeLists.txt index c1de3ed940..a0cec131bf 100644 --- a/tests/integrated/test-delp2/CMakeLists.txt +++ b/tests/integrated/test-delp2/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_integrated_test(test-delp2 - SOURCES test_delp2.cxx +bout_add_integrated_test( + test-delp2 + SOURCES + test_delp2.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_FFTW - PROCESSORS 4 - ) + REQUIRES + BOUT_HAS_FFTW + PROCESSORS + 4) diff --git a/tests/integrated/test-drift-instability-staggered/CMakeLists.txt b/tests/integrated/test-drift-instability-staggered/CMakeLists.txt index 18383286cd..d703c1a515 100644 --- a/tests/integrated/test-drift-instability-staggered/CMakeLists.txt +++ b/tests/integrated/test-drift-instability-staggered/CMakeLists.txt @@ -1,10 +1,16 @@ -bout_add_integrated_test(test-drift-instability-staggered - SOURCES 2fluid.cxx - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-drift-instability-staggered + SOURCES + 2fluid.cxx + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST USE_DATA_BOUT_INP - EXTRA_FILES uedge.grd_std.cdl - REQUIRES BOUT_HAS_NETCDF - REQUIRES BOUT_HAS_FFTW - PROCESSORS 2 - ) + EXTRA_FILES + uedge.grd_std.cdl + REQUIRES + BOUT_HAS_NETCDF + REQUIRES + BOUT_HAS_FFTW + PROCESSORS + 2) diff --git a/tests/integrated/test-drift-instability/CMakeLists.txt b/tests/integrated/test-drift-instability/CMakeLists.txt index b0b73eca01..10ead49309 100644 --- a/tests/integrated/test-drift-instability/CMakeLists.txt +++ b/tests/integrated/test-drift-instability/CMakeLists.txt @@ -1,10 +1,16 @@ -bout_add_integrated_test(test-drift-instability - SOURCES 2fluid.cxx - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-drift-instability + SOURCES + 2fluid.cxx + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST USE_DATA_BOUT_INP - EXTRA_FILES 
uedge.grd_std.cdl - REQUIRES BOUT_HAS_NETCDF - REQUIRES BOUT_HAS_FFTW - PROCESSORS 2 - ) + EXTRA_FILES + uedge.grd_std.cdl + REQUIRES + BOUT_HAS_NETCDF + REQUIRES + BOUT_HAS_FFTW + PROCESSORS + 2) diff --git a/tests/integrated/test-fci-boundary/CMakeLists.txt b/tests/integrated/test-fci-boundary/CMakeLists.txt index bf25cd7c57..f71f474154 100644 --- a/tests/integrated/test-fci-boundary/CMakeLists.txt +++ b/tests/integrated/test-fci-boundary/CMakeLists.txt @@ -1,22 +1,27 @@ -bout_add_mms_test(test-fci-boundary - SOURCES get_par_bndry.cxx +bout_add_mms_test( + test-fci-boundary + SOURCES + get_par_bndry.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES zoidberg_FOUND - PROCESSORS 1 - ) + REQUIRES + zoidberg_FOUND + PROCESSORS + 1) -if (zoidberg_FOUND) +if(zoidberg_FOUND) set(gridfile ${CMAKE_CURRENT_BINARY_DIR}/grid.fci.nc) - add_custom_command(OUTPUT ${gridfile} - COMMAND ${CMAKE_COMMAND} -E env PYTHONPATH=${BOUT_PYTHONPATH}:$ENV{PYTHONPATH} ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/grid.py ${gridfile} + add_custom_command( + OUTPUT ${gridfile} + COMMAND + ${CMAKE_COMMAND} -E env PYTHONPATH=${BOUT_PYTHONPATH}:$ENV{PYTHONPATH} + ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/grid.py ${gridfile} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/../../../tools/pylib/boutconfig/__init__.py + DEPENDS + ${CMAKE_CURRENT_BINARY_DIR}/../../../tools/pylib/boutconfig/__init__.py DEPENDS grid.py - IMPLICIT_DEPENDS ${CMAKE_CURRENT_BINARY_DIR} - COMMENT "Creating test-fci-boundary grid file" - ) + IMPLICIT_DEPENDS ${CMAKE_CURRENT_BINARY_DIR} + COMMENT "Creating test-fci-boundary grid file") add_custom_target(test-fci-boundary-grid DEPENDS ${gridfile}) - add_dependencies(test-fci-boundary - test-fci-boundary-grid) + add_dependencies(test-fci-boundary test-fci-boundary-grid) endif() diff --git a/tests/integrated/test-fci-mpi/CMakeLists.txt b/tests/integrated/test-fci-mpi/CMakeLists.txt index 0dd38487a3..82742d276d 100644 --- 
a/tests/integrated/test-fci-mpi/CMakeLists.txt +++ b/tests/integrated/test-fci-mpi/CMakeLists.txt @@ -1,9 +1,14 @@ -bout_add_mms_test(test-fci-mpi - SOURCES fci_mpi.cxx +bout_add_mms_test( + test-fci-mpi + SOURCES + fci_mpi.cxx USE_RUNTEST USE_DATA_BOUT_INP - PROCESSORS 6 - DOWNLOAD https://zenodo.org/record/7614499/files/W7X-conf4-36x8x128.fci.nc?download=1 - DOWNLOAD_NAME grid.fci.nc - REQUIRES BOUT_HAS_PETSC -) + PROCESSORS + 6 + DOWNLOAD + https://zenodo.org/record/7614499/files/W7X-conf4-36x8x128.fci.nc?download=1 + DOWNLOAD_NAME + grid.fci.nc + REQUIRES + BOUT_HAS_PETSC) diff --git a/tests/integrated/test-fieldgroupComm/CMakeLists.txt b/tests/integrated/test-fieldgroupComm/CMakeLists.txt index 17a699c187..f438a8332a 100644 --- a/tests/integrated/test-fieldgroupComm/CMakeLists.txt +++ b/tests/integrated/test-fieldgroupComm/CMakeLists.txt @@ -1,9 +1,14 @@ -bout_add_integrated_test(test-fieldgroupComm - SOURCES test_fieldgroupcomm.cxx - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-fieldgroupComm + SOURCES + test_fieldgroupcomm.cxx + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST USE_DATA_BOUT_INP - EXTRA_FILES cyclone_68x32.nc - REQUIRES BOUT_HAS_NETCDF - PROCESSORS 4 - ) + EXTRA_FILES + cyclone_68x32.nc + REQUIRES + BOUT_HAS_NETCDF + PROCESSORS + 4) diff --git a/tests/integrated/test-griddata-yboundary-guards/CMakeLists.txt b/tests/integrated/test-griddata-yboundary-guards/CMakeLists.txt index b01afa5f32..35a7d690fd 100644 --- a/tests/integrated/test-griddata-yboundary-guards/CMakeLists.txt +++ b/tests/integrated/test-griddata-yboundary-guards/CMakeLists.txt @@ -1,13 +1,16 @@ -bout_add_integrated_test(test-griddata-yboundary-guards - SOURCES test_griddata.cxx +bout_add_integrated_test( + test-griddata-yboundary-guards + SOURCES + test_griddata.cxx USE_RUNTEST EXTRA_FILES - data-doublenull-0/BOUT.inp - data-doublenull-1/BOUT.inp - data-doublenull-2/BOUT.inp - data-singlenull-0/BOUT.inp - data-singlenull-1/BOUT.inp - data-singlenull-2/BOUT.inp - 
REQUIRES BOUT_HAS_NETCDF - PROCESSORS 6 - ) + data-doublenull-0/BOUT.inp + data-doublenull-1/BOUT.inp + data-doublenull-2/BOUT.inp + data-singlenull-0/BOUT.inp + data-singlenull-1/BOUT.inp + data-singlenull-2/BOUT.inp + REQUIRES + BOUT_HAS_NETCDF + PROCESSORS + 6) diff --git a/tests/integrated/test-griddata/CMakeLists.txt b/tests/integrated/test-griddata/CMakeLists.txt index 8e03b5de0d..8595dc13c0 100644 --- a/tests/integrated/test-griddata/CMakeLists.txt +++ b/tests/integrated/test-griddata/CMakeLists.txt @@ -1,5 +1,2 @@ -bout_add_integrated_test(test-griddata - SOURCES test_griddata.cxx - USE_RUNTEST - EXTRA_FILES screw/BOUT.inp - ) +bout_add_integrated_test(test-griddata SOURCES test_griddata.cxx USE_RUNTEST + EXTRA_FILES screw/BOUT.inp) diff --git a/tests/integrated/test-gyro/CMakeLists.txt b/tests/integrated/test-gyro/CMakeLists.txt index 00be4d29a0..4bcc1f99fa 100644 --- a/tests/integrated/test-gyro/CMakeLists.txt +++ b/tests/integrated/test-gyro/CMakeLists.txt @@ -1,10 +1,17 @@ -bout_add_integrated_test(test-gyro - SOURCES test_gyro.cxx - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-gyro + SOURCES + test_gyro.cxx + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST USE_DATA_BOUT_INP - EXTRA_FILES cyclone_68x32.nc data/benchmark.0.nc - REQUIRES BOUT_HAS_NETCDF - REQUIRES BOUT_HAS_FFTW - PROCESSORS 4 - ) + EXTRA_FILES + cyclone_68x32.nc + data/benchmark.0.nc + REQUIRES + BOUT_HAS_NETCDF + REQUIRES + BOUT_HAS_FFTW + PROCESSORS + 4) diff --git a/tests/integrated/test-initial/CMakeLists.txt b/tests/integrated/test-initial/CMakeLists.txt index 33a03d89ca..023edfcf9c 100644 --- a/tests/integrated/test-initial/CMakeLists.txt +++ b/tests/integrated/test-initial/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_integrated_test(test-initial - SOURCES test_initial.cxx +bout_add_integrated_test( + test-initial + SOURCES + test_initial.cxx USE_RUNTEST USE_DATA_BOUT_INP - PROCESSORS 4 - ) + PROCESSORS + 4) diff --git 
a/tests/integrated/test-interchange-instability/CMakeLists.txt b/tests/integrated/test-interchange-instability/CMakeLists.txt index 380c93d029..bc94d90f7e 100644 --- a/tests/integrated/test-interchange-instability/CMakeLists.txt +++ b/tests/integrated/test-interchange-instability/CMakeLists.txt @@ -1,9 +1,18 @@ -bout_add_integrated_test(test-interchange-instability - SOURCES 2fluid.cxx - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-interchange-instability + SOURCES + 2fluid.cxx + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST - EXTRA_FILES slab.6b5.r1.cdl slab.6b5.r10.cdl data_1/BOUT.inp data_10/BOUT.inp - REQUIRES BOUT_HAS_NETCDF - REQUIRES BOUT_HAS_FFTW - PROCESSORS 2 - ) + EXTRA_FILES + slab.6b5.r1.cdl + slab.6b5.r10.cdl + data_1/BOUT.inp + data_10/BOUT.inp + REQUIRES + BOUT_HAS_NETCDF + REQUIRES + BOUT_HAS_FFTW + PROCESSORS + 2) diff --git a/tests/integrated/test-interpolate-z/CMakeLists.txt b/tests/integrated/test-interpolate-z/CMakeLists.txt index 399edcd8ff..80034cf5ef 100644 --- a/tests/integrated/test-interpolate-z/CMakeLists.txt +++ b/tests/integrated/test-interpolate-z/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_integrated_test(test-interpolate-z - SOURCES test_interpolate.cxx +bout_add_integrated_test( + test-interpolate-z + SOURCES + test_interpolate.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_FFTW - ) + REQUIRES + BOUT_HAS_FFTW) diff --git a/tests/integrated/test-interpolate/CMakeLists.txt b/tests/integrated/test-interpolate/CMakeLists.txt index de4bd14f8a..849c0938ca 100644 --- a/tests/integrated/test-interpolate/CMakeLists.txt +++ b/tests/integrated/test-interpolate/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_integrated_test(test-interpolate - SOURCES test_interpolate.cxx +bout_add_integrated_test( + test-interpolate + SOURCES + test_interpolate.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_FFTW - ) + REQUIRES + BOUT_HAS_FFTW) diff --git a/tests/integrated/test-invertable-operator/CMakeLists.txt 
b/tests/integrated/test-invertable-operator/CMakeLists.txt index de45de14a2..602b8dacd8 100644 --- a/tests/integrated/test-invertable-operator/CMakeLists.txt +++ b/tests/integrated/test-invertable-operator/CMakeLists.txt @@ -1,8 +1,12 @@ -bout_add_integrated_test(test-invertable-operator - SOURCES invertable_operator.cxx - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-invertable-operator + SOURCES + invertable_operator.cxx + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_PETSC - PROCESSORS 2 - ) + REQUIRES + BOUT_HAS_PETSC + PROCESSORS + 2) diff --git a/tests/integrated/test-invpar/CMakeLists.txt b/tests/integrated/test-invpar/CMakeLists.txt index 8e143a1188..f64fa3d9a3 100644 --- a/tests/integrated/test-invpar/CMakeLists.txt +++ b/tests/integrated/test-invpar/CMakeLists.txt @@ -1,8 +1,12 @@ -bout_add_integrated_test(test-invpar - SOURCES test_invpar.cxx - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-invpar + SOURCES + test_invpar.cxx + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_FFTW - PROCESSORS 4 - ) + REQUIRES + BOUT_HAS_FFTW + PROCESSORS + 4) diff --git a/tests/integrated/test-laplace-hypre3d/CMakeLists.txt b/tests/integrated/test-laplace-hypre3d/CMakeLists.txt index 2645c18c67..59bff16bc4 100644 --- a/tests/integrated/test-laplace-hypre3d/CMakeLists.txt +++ b/tests/integrated/test-laplace-hypre3d/CMakeLists.txt @@ -1,10 +1,12 @@ -bout_add_integrated_test(test-laplace-hypre3d - SOURCES test-laplace3d.cxx +bout_add_integrated_test( + test-laplace-hypre3d + SOURCES + test-laplace3d.cxx EXTRA_FILES - data_circular_core/BOUT.inp - data_circular_core-sol/BOUT.inp - data_slab_core/BOUT.inp - data_slab_sol/BOUT.inp + data_circular_core/BOUT.inp + data_circular_core-sol/BOUT.inp + data_slab_core/BOUT.inp + data_slab_sol/BOUT.inp USE_RUNTEST - REQUIRES BOUT_HAS_HYPRE - ) + REQUIRES + BOUT_HAS_HYPRE) diff --git a/tests/integrated/test-laplace-petsc3d/CMakeLists.txt 
b/tests/integrated/test-laplace-petsc3d/CMakeLists.txt index 93bf4f7efa..4bf7b250fd 100644 --- a/tests/integrated/test-laplace-petsc3d/CMakeLists.txt +++ b/tests/integrated/test-laplace-petsc3d/CMakeLists.txt @@ -1,10 +1,12 @@ -bout_add_integrated_test(test-laplace-petsc3d - SOURCES test-laplace3d.cxx +bout_add_integrated_test( + test-laplace-petsc3d + SOURCES + test-laplace3d.cxx EXTRA_FILES - data_circular_core/BOUT.inp - data_circular_core-sol/BOUT.inp - data_slab_core/BOUT.inp - data_slab_sol/BOUT.inp + data_circular_core/BOUT.inp + data_circular_core-sol/BOUT.inp + data_slab_core/BOUT.inp + data_slab_sol/BOUT.inp USE_RUNTEST - REQUIRES BOUT_HAS_PETSC - ) + REQUIRES + BOUT_HAS_PETSC) diff --git a/tests/integrated/test-laplace/CMakeLists.txt b/tests/integrated/test-laplace/CMakeLists.txt index f3e95f3fa5..a05f4ff12c 100644 --- a/tests/integrated/test-laplace/CMakeLists.txt +++ b/tests/integrated/test-laplace/CMakeLists.txt @@ -1,10 +1,17 @@ -bout_add_integrated_test(test-laplace - SOURCES test_laplace.cxx - CONFLICTS BOUT_USE_METRIC_3D - EXTRA_FILES test_laplace.grd.nc data/benchmark.0.nc +bout_add_integrated_test( + test-laplace + SOURCES + test_laplace.cxx + CONFLICTS + BOUT_USE_METRIC_3D + EXTRA_FILES + test_laplace.grd.nc + data/benchmark.0.nc USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_NETCDF - REQUIRES BOUT_HAS_FFTW - PROCESSORS 4 - ) + REQUIRES + BOUT_HAS_NETCDF + REQUIRES + BOUT_HAS_FFTW + PROCESSORS + 4) diff --git a/tests/integrated/test-laplacexy-fv/CMakeLists.txt b/tests/integrated/test-laplacexy-fv/CMakeLists.txt index 57e98edd38..360a66bc41 100644 --- a/tests/integrated/test-laplacexy-fv/CMakeLists.txt +++ b/tests/integrated/test-laplacexy-fv/CMakeLists.txt @@ -1,8 +1,13 @@ -bout_add_integrated_test(test-laplacexy-fv - SOURCES test-laplacexy.cxx - REQUIRES BOUT_HAS_PETSC - CONFLICTS BOUT_USE_METRIC_3D # Test requires Laplace_perpXY operator, which is not implemented for 3d metrics +bout_add_integrated_test( + test-laplacexy-fv + SOURCES + 
test-laplacexy.cxx + REQUIRES + BOUT_HAS_PETSC + CONFLICTS + BOUT_USE_METRIC_3D # Test requires Laplace_perpXY operator, which is not + # implemented for 3d metrics USE_RUNTEST USE_DATA_BOUT_INP - PROCESSORS 8 - ) + PROCESSORS + 8) diff --git a/tests/integrated/test-laplacexy-short/CMakeLists.txt b/tests/integrated/test-laplacexy-short/CMakeLists.txt index c5576b58ac..498dfb14a5 100644 --- a/tests/integrated/test-laplacexy-short/CMakeLists.txt +++ b/tests/integrated/test-laplacexy-short/CMakeLists.txt @@ -1,8 +1,13 @@ -bout_add_integrated_test(test-laplacexy-short - SOURCES test-laplacexy.cxx - REQUIRES BOUT_HAS_PETSC - CONFLICTS BOUT_USE_METRIC_3D # Test uses cyclic Laplace solver as a preconditioner, which is not available with 3d metrics +bout_add_integrated_test( + test-laplacexy-short + SOURCES + test-laplacexy.cxx + REQUIRES + BOUT_HAS_PETSC + CONFLICTS + BOUT_USE_METRIC_3D # Test uses cyclic Laplace solver as a preconditioner, + # which is not available with 3d metrics USE_RUNTEST USE_DATA_BOUT_INP - PROCESSORS 8 - ) + PROCESSORS + 8) diff --git a/tests/integrated/test-laplacexy/CMakeLists.txt b/tests/integrated/test-laplacexy/CMakeLists.txt index 338bb46763..bf149d90b7 100644 --- a/tests/integrated/test-laplacexy/CMakeLists.txt +++ b/tests/integrated/test-laplacexy/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_integrated_test(test-laplacexy - SOURCES test-laplacexy.cxx - REQUIRES BOUT_HAS_PETSC +bout_add_integrated_test( + test-laplacexy + SOURCES + test-laplacexy.cxx + REQUIRES + BOUT_HAS_PETSC USE_RUNTEST USE_DATA_BOUT_INP - PROCESSORS 8 - ) + PROCESSORS + 8) diff --git a/tests/integrated/test-laplacexy2-hypre/CMakeLists.txt b/tests/integrated/test-laplacexy2-hypre/CMakeLists.txt index a787752ebc..570d5ff08b 100644 --- a/tests/integrated/test-laplacexy2-hypre/CMakeLists.txt +++ b/tests/integrated/test-laplacexy2-hypre/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_integrated_test(test-laplacexy2-hypre - SOURCES test-laplacexy.cxx - REQUIRES BOUT_HAS_HYPRE 
+bout_add_integrated_test( + test-laplacexy2-hypre + SOURCES + test-laplacexy.cxx + REQUIRES + BOUT_HAS_HYPRE USE_RUNTEST - USE_DATA_BOUT_INP - ) + USE_DATA_BOUT_INP) diff --git a/tests/integrated/test-laplacexz/CMakeLists.txt b/tests/integrated/test-laplacexz/CMakeLists.txt index b19aa986ad..4a8ac10736 100644 --- a/tests/integrated/test-laplacexz/CMakeLists.txt +++ b/tests/integrated/test-laplacexz/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_integrated_test(test-laplacexz - SOURCES test-laplacexz.cxx - REQUIRES BOUT_HAS_PETSC - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-laplacexz + SOURCES + test-laplacexz.cxx + REQUIRES + BOUT_HAS_PETSC + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST - USE_DATA_BOUT_INP - ) + USE_DATA_BOUT_INP) diff --git a/tests/integrated/test-multigrid_laplace/CMakeLists.txt b/tests/integrated/test-multigrid_laplace/CMakeLists.txt index 67cf38b510..6c2db1bdf7 100644 --- a/tests/integrated/test-multigrid_laplace/CMakeLists.txt +++ b/tests/integrated/test-multigrid_laplace/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_integrated_test(test-multigrid-laplace - SOURCES test_multigrid_laplace.cxx - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-multigrid-laplace + SOURCES + test_multigrid_laplace.cxx + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST USE_DATA_BOUT_INP - PROCESSORS 3 - ) + PROCESSORS + 3) diff --git a/tests/integrated/test-naulin-laplace/CMakeLists.txt b/tests/integrated/test-naulin-laplace/CMakeLists.txt index 9e63477a53..e75346b1b4 100644 --- a/tests/integrated/test-naulin-laplace/CMakeLists.txt +++ b/tests/integrated/test-naulin-laplace/CMakeLists.txt @@ -1,8 +1,12 @@ -bout_add_integrated_test(test-naulin-laplace - SOURCES test_naulin_laplace.cxx - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-naulin-laplace + SOURCES + test_naulin_laplace.cxx + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_FFTW - PROCESSORS 3 - ) + REQUIRES + BOUT_HAS_FFTW + PROCESSORS + 3) 
diff --git a/tests/integrated/test-options-netcdf/CMakeLists.txt b/tests/integrated/test-options-netcdf/CMakeLists.txt index f2d115d768..da188068e2 100644 --- a/tests/integrated/test-options-netcdf/CMakeLists.txt +++ b/tests/integrated/test-options-netcdf/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_integrated_test(test-options-netcdf - SOURCES test-options-netcdf.cxx +bout_add_integrated_test( + test-options-netcdf + SOURCES + test-options-netcdf.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_NETCDF - CONFLICTS BOUT_HAS_LEGACY_NETCDF - ) + REQUIRES + BOUT_HAS_NETCDF + CONFLICTS + BOUT_HAS_LEGACY_NETCDF) diff --git a/tests/integrated/test-petsc_laplace/CMakeLists.txt b/tests/integrated/test-petsc_laplace/CMakeLists.txt index 9492b9f34f..ffe7148414 100644 --- a/tests/integrated/test-petsc_laplace/CMakeLists.txt +++ b/tests/integrated/test-petsc_laplace/CMakeLists.txt @@ -1,8 +1,13 @@ -bout_add_integrated_test(test-petsc-laplace - SOURCES test_petsc_laplace.cxx - REQUIRES BOUT_HAS_PETSC - CONFLICTS BOUT_USE_METRIC_3D # default preconditioner uses 'cyclic' Laplace solver which is not available with 3d metrics +bout_add_integrated_test( + test-petsc-laplace + SOURCES + test_petsc_laplace.cxx + REQUIRES + BOUT_HAS_PETSC + CONFLICTS + BOUT_USE_METRIC_3D # default preconditioner uses 'cyclic' Laplace solver which + # is not available with 3d metrics USE_RUNTEST USE_DATA_BOUT_INP - PROCESSORS 4 - ) + PROCESSORS + 4) diff --git a/tests/integrated/test-petsc_laplace_MAST-grid/CMakeLists.txt b/tests/integrated/test-petsc_laplace_MAST-grid/CMakeLists.txt index 53e4acbedb..6341374469 100644 --- a/tests/integrated/test-petsc_laplace_MAST-grid/CMakeLists.txt +++ b/tests/integrated/test-petsc_laplace_MAST-grid/CMakeLists.txt @@ -1,14 +1,18 @@ -bout_add_integrated_test(test-petsc-laplace-MAST-grid - SOURCES test_petsc_laplace_MAST_grid.cxx - REQUIRES BOUT_HAS_PETSC - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-petsc-laplace-MAST-grid + SOURCES + 
test_petsc_laplace_MAST_grid.cxx + REQUIRES + BOUT_HAS_PETSC + CONFLICTS + BOUT_USE_METRIC_3D USE_RUNTEST USE_DATA_BOUT_INP EXTRA_FILES - grids/grid_MAST_SOL_jyis2.nc - grids/grid_MAST_SOL_jyis34.nc - grids/grid_MAST_SOL_jyis65.nc - grids/grid_MAST_SOL_jyis81.nc - grids/grid_MAST_SOL_jyis113.nc - PROCESSORS 4 - ) + grids/grid_MAST_SOL_jyis2.nc + grids/grid_MAST_SOL_jyis34.nc + grids/grid_MAST_SOL_jyis65.nc + grids/grid_MAST_SOL_jyis81.nc + grids/grid_MAST_SOL_jyis113.nc + PROCESSORS + 4) diff --git a/tests/integrated/test-restart-io/CMakeLists.txt b/tests/integrated/test-restart-io/CMakeLists.txt index c3de2ad0e1..7246c1663f 100644 --- a/tests/integrated/test-restart-io/CMakeLists.txt +++ b/tests/integrated/test-restart-io/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_integrated_test(test-restart-io - SOURCES test-restart-io.cxx +bout_add_integrated_test( + test-restart-io + SOURCES + test-restart-io.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_NETCDF - PROCESSORS 4 - ) + REQUIRES + BOUT_HAS_NETCDF + PROCESSORS + 4) diff --git a/tests/integrated/test-restarting/CMakeLists.txt b/tests/integrated/test-restarting/CMakeLists.txt index 83cb9c808f..e7fc5cb487 100644 --- a/tests/integrated/test-restarting/CMakeLists.txt +++ b/tests/integrated/test-restarting/CMakeLists.txt @@ -1,5 +1,2 @@ -bout_add_integrated_test(test-restarting - SOURCES test_restarting.cxx - USE_RUNTEST - USE_DATA_BOUT_INP - ) +bout_add_integrated_test(test-restarting SOURCES test_restarting.cxx + USE_RUNTEST USE_DATA_BOUT_INP) diff --git a/tests/integrated/test-slepc-solver/CMakeLists.txt b/tests/integrated/test-slepc-solver/CMakeLists.txt index bc4c178de9..6e8a4dd4fd 100644 --- a/tests/integrated/test-slepc-solver/CMakeLists.txt +++ b/tests/integrated/test-slepc-solver/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_integrated_test(test-slepc-solver - SOURCES test-slepc-solver.cxx +bout_add_integrated_test( + test-slepc-solver + SOURCES + test-slepc-solver.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES 
BOUT_HAS_SLEPC - ) + REQUIRES + BOUT_HAS_SLEPC) diff --git a/tests/integrated/test-smooth/CMakeLists.txt b/tests/integrated/test-smooth/CMakeLists.txt index 6b6f0c2001..e37ccae8c0 100644 --- a/tests/integrated/test-smooth/CMakeLists.txt +++ b/tests/integrated/test-smooth/CMakeLists.txt @@ -1,8 +1,13 @@ -bout_add_integrated_test(test-smooth - SOURCES test_smooth.cxx +bout_add_integrated_test( + test-smooth + SOURCES + test_smooth.cxx USE_RUNTEST USE_DATA_BOUT_INP - EXTRA_FILES test_smooth.nc data/benchmark.0.nc - REQUIRES BOUT_HAS_NETCDF - PROCESSORS 4 - ) + EXTRA_FILES + test_smooth.nc + data/benchmark.0.nc + REQUIRES + BOUT_HAS_NETCDF + PROCESSORS + 4) diff --git a/tests/integrated/test-snb/CMakeLists.txt b/tests/integrated/test-snb/CMakeLists.txt index 1a81c392a4..8aad11efbe 100644 --- a/tests/integrated/test-snb/CMakeLists.txt +++ b/tests/integrated/test-snb/CMakeLists.txt @@ -1,6 +1,9 @@ -bout_add_integrated_test(test-snb - SOURCES test_snb.cxx - CONFLICTS BOUT_USE_METRIC_3D +bout_add_integrated_test( + test-snb + SOURCES + test_snb.cxx + CONFLICTS + BOUT_USE_METRIC_3D USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_FFTW - ) + REQUIRES + BOUT_HAS_FFTW) diff --git a/tests/integrated/test-solver/CMakeLists.txt b/tests/integrated/test-solver/CMakeLists.txt index f20e11f1eb..de3c602621 100644 --- a/tests/integrated/test-solver/CMakeLists.txt +++ b/tests/integrated/test-solver/CMakeLists.txt @@ -1,3 +1 @@ -bout_add_integrated_test(test-solver - SOURCES test_solver.cxx - ) +bout_add_integrated_test(test-solver SOURCES test_solver.cxx) diff --git a/tests/integrated/test-squash/CMakeLists.txt b/tests/integrated/test-squash/CMakeLists.txt index 2c7e8d734d..4ac8238d2e 100644 --- a/tests/integrated/test-squash/CMakeLists.txt +++ b/tests/integrated/test-squash/CMakeLists.txt @@ -1,8 +1,12 @@ -bout_add_integrated_test(test-squash - SOURCES squash.cxx +bout_add_integrated_test( + test-squash + SOURCES + squash.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_NETCDF - TESTARGS 
"${CMAKE_CURRENT_LIST_DIR}/../../../bin" - PROCESSORS 4 - ) + REQUIRES + BOUT_HAS_NETCDF + TESTARGS + "${CMAKE_CURRENT_LIST_DIR}/../../../bin" + PROCESSORS + 4) diff --git a/tests/integrated/test-stopCheck-file/CMakeLists.txt b/tests/integrated/test-stopCheck-file/CMakeLists.txt index 60b9b23489..9f8e8d19d3 100644 --- a/tests/integrated/test-stopCheck-file/CMakeLists.txt +++ b/tests/integrated/test-stopCheck-file/CMakeLists.txt @@ -1,9 +1,10 @@ -bout_add_integrated_test(test-stopCheck-file - SOURCES test_stopCheck.cxx +bout_add_integrated_test( + test-stopCheck-file + SOURCES + test_stopCheck.cxx USE_RUNTEST USE_DATA_BOUT_INP EXTRA_FILES - data/BOUT.stop - dataSecond/BOUT.inp - dataSecond/otherStop.check - ) + data/BOUT.stop + dataSecond/BOUT.inp + dataSecond/otherStop.check) diff --git a/tests/integrated/test-stopCheck/CMakeLists.txt b/tests/integrated/test-stopCheck/CMakeLists.txt index 93cf5fb67b..e5c38b6731 100644 --- a/tests/integrated/test-stopCheck/CMakeLists.txt +++ b/tests/integrated/test-stopCheck/CMakeLists.txt @@ -1,5 +1,2 @@ -bout_add_integrated_test(test-stopCheck - SOURCES test_stopCheck.cxx - USE_RUNTEST - USE_DATA_BOUT_INP - ) +bout_add_integrated_test(test-stopCheck SOURCES test_stopCheck.cxx USE_RUNTEST + USE_DATA_BOUT_INP) diff --git a/tests/integrated/test-twistshift-staggered/CMakeLists.txt b/tests/integrated/test-twistshift-staggered/CMakeLists.txt index 747e64dda1..198e074ed2 100644 --- a/tests/integrated/test-twistshift-staggered/CMakeLists.txt +++ b/tests/integrated/test-twistshift-staggered/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_integrated_test(test-twistshift-staggered - SOURCES test-twistshift.cxx +bout_add_integrated_test( + test-twistshift-staggered + SOURCES + test-twistshift.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_FFTW - ) + REQUIRES + BOUT_HAS_FFTW) diff --git a/tests/integrated/test-twistshift/CMakeLists.txt b/tests/integrated/test-twistshift/CMakeLists.txt index e8712e9844..08bec289a0 100644 --- 
a/tests/integrated/test-twistshift/CMakeLists.txt +++ b/tests/integrated/test-twistshift/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_integrated_test(test-twistshift - SOURCES test-twistshift.cxx +bout_add_integrated_test( + test-twistshift + SOURCES + test-twistshift.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_FFTW - ) + REQUIRES + BOUT_HAS_FFTW) diff --git a/tests/integrated/test-vec/CMakeLists.txt b/tests/integrated/test-vec/CMakeLists.txt index 111840c71a..18b9e12e41 100644 --- a/tests/integrated/test-vec/CMakeLists.txt +++ b/tests/integrated/test-vec/CMakeLists.txt @@ -1,7 +1,10 @@ -bout_add_integrated_test(test-vec - SOURCES testVec.cxx +bout_add_integrated_test( + test-vec + SOURCES + testVec.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_FFTW - PROCESSORS 4 - ) + REQUIRES + BOUT_HAS_FFTW + PROCESSORS + 4) diff --git a/tests/integrated/test-yupdown-weights/CMakeLists.txt b/tests/integrated/test-yupdown-weights/CMakeLists.txt index 5b6d825c87..3b70601efb 100644 --- a/tests/integrated/test-yupdown-weights/CMakeLists.txt +++ b/tests/integrated/test-yupdown-weights/CMakeLists.txt @@ -1,5 +1,2 @@ -bout_add_integrated_test(test-yupdown-weights - SOURCES test_yupdown_weights.cxx - USE_RUNTEST - USE_DATA_BOUT_INP - ) +bout_add_integrated_test(test-yupdown-weights SOURCES test_yupdown_weights.cxx + USE_RUNTEST USE_DATA_BOUT_INP) diff --git a/tests/integrated/test-yupdown/CMakeLists.txt b/tests/integrated/test-yupdown/CMakeLists.txt index 3c6d500b4e..ab5d381afd 100644 --- a/tests/integrated/test-yupdown/CMakeLists.txt +++ b/tests/integrated/test-yupdown/CMakeLists.txt @@ -1,6 +1,8 @@ -bout_add_integrated_test(test-yupdown - SOURCES test_yupdown.cxx +bout_add_integrated_test( + test-yupdown + SOURCES + test_yupdown.cxx USE_RUNTEST USE_DATA_BOUT_INP - REQUIRES BOUT_HAS_FFTW - ) + REQUIRES + BOUT_HAS_FFTW) diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 40aa207dea..bd81f478d8 100644 --- a/tests/unit/CMakeLists.txt +++ 
b/tests/unit/CMakeLists.txt @@ -1,122 +1,139 @@ bout_update_submodules() -if (NOT EXISTS "${PROJECT_SOURCE_DIR}/externalpackages/googletest/CMakeLists.txt") - set(BOUT_ENABLE_UNIT_TESTS OFF PARENT_SCOPE) - message(WARNING "googletest not found! Have you disabled the git submodules (GIT_SUBMODULE)?") +if(NOT EXISTS + "${PROJECT_SOURCE_DIR}/externalpackages/googletest/CMakeLists.txt") + set(BOUT_ENABLE_UNIT_TESTS + OFF + PARENT_SCOPE) + message( + WARNING + "googletest not found! Have you disabled the git submodules (GIT_SUBMODULE)?" + ) return() endif() -set(BOUT_ENABLE_UNIT_TESTS ON PARENT_SCOPE) +set(BOUT_ENABLE_UNIT_TESTS + ON + PARENT_SCOPE) # disable gtest pthreads for proper execution of Death tests (on some platforms) add_definitions(-DGTEST_HAS_PTHREAD=0) -set(gtest_disable_pthreads ON CACHE BOOL "" FORCE) +set(gtest_disable_pthreads + ON + CACHE BOOL "" FORCE) add_subdirectory("${PROJECT_SOURCE_DIR}/externalpackages/googletest" - "externalpackages/googletest") + "externalpackages/googletest") if(NOT TARGET gtest) - message(FATAL_ERROR "googletest not found! Have you disabled the git submodules (GIT_SUBMODULE)?") + message( + FATAL_ERROR + "googletest not found! Have you disabled the git submodules (GIT_SUBMODULE)?" 
+ ) endif() # Some unit tests require GMOCK, so make sure we build it set(BUILD_GMOCK ON) mark_as_advanced( - BUILD_GMOCK BUILD_GTEST BUILD_SHARED_LIBS - gmock_build_tests gtest_build_samples gtest_build_tests - gtest_disable_pthreads gtest_force_shared_crt gtest_hide_internal_symbols - ) + BUILD_GMOCK + BUILD_GTEST + BUILD_SHARED_LIBS + gmock_build_tests + gtest_build_samples + gtest_build_tests + gtest_disable_pthreads + gtest_force_shared_crt + gtest_hide_internal_symbols) set(serial_tests_source - ./bout_test_main.cxx - ./field/test_field.cxx - ./field/test_field2d.cxx - ./field/test_field3d.cxx - ./field/test_field_factory.cxx - ./field/test_fieldgroup.cxx - ./field/test_fieldperp.cxx - ./field/test_initialprofiles.cxx - ./field/test_vector2d.cxx - ./field/test_vector3d.cxx - ./field/test_where.cxx - ./include/bout/test_array.cxx - ./include/bout/test_assert.cxx - ./include/bout/test_bout_enum_class.cxx - ./include/bout/test_deriv_store.cxx - ./include/bout/test_generic_factory.cxx - ./include/bout/test_macro_for_each.cxx - ./include/bout/test_monitor.cxx - ./include/bout/test_petsc_indexer.cxx - ./include/bout/test_petsc_matrix.cxx - ./include/bout/test_petsc_setters.cxx - ./include/bout/test_petsc_vector.cxx - ./include/bout/test_region.cxx - ./include/bout/test_single_index_ops.cxx - ./include/bout/test_stencil.cxx - ./include/bout/test_template_combinations.cxx - ./include/bout/test_traits.cxx - ./include/test_cyclic_reduction.cxx - ./include/test_derivs.cxx - ./include/test_mask.cxx - ./invert/test_fft.cxx - ./invert/laplace/test_laplace_petsc3damg.cxx - ./invert/laplace/test_laplace_cyclic.cxx - ./mesh/data/test_gridfromoptions.cxx - ./mesh/parallel/test_shiftedmetric.cxx - ./mesh/test_boundary_factory.cxx - ./mesh/test_boutmesh.cxx - ./mesh/test_coordinates.cxx - ./mesh/test_coordinates_accessor.cxx - ./mesh/test_interpolation.cxx - ./mesh/test_invert3x3.cxx - ./mesh/test_mesh.cxx - ./mesh/test_paralleltransform.cxx - ./solver/test_fakesolver.cxx - 
./solver/test_fakesolver.hxx - ./solver/test_solver.cxx - ./solver/test_solverfactory.cxx - ./sys/test_boutexception.cxx - ./sys/test_expressionparser.cxx - ./sys/test_msg_stack.cxx - ./sys/test_options.cxx - ./sys/test_options_adios2.cxx - ./sys/test_options_fields.cxx - ./sys/test_options_netcdf.cxx - ./sys/test_optionsreader.cxx - ./sys/test_output.cxx - ./sys/test_range.cxx - ./sys/test_timer.cxx - ./sys/test_type_name.cxx - ./sys/test_utils.cxx - ./sys/test_variant.cxx - ./sys/test_raja.cxx - ./test_extras.cxx - ./test_extras.hxx - ./fake_mesh.hxx - ./fake_mesh_fixture.hxx - ./src/test_bout++.cxx) + ./bout_test_main.cxx + ./field/test_field.cxx + ./field/test_field2d.cxx + ./field/test_field3d.cxx + ./field/test_field_factory.cxx + ./field/test_fieldgroup.cxx + ./field/test_fieldperp.cxx + ./field/test_initialprofiles.cxx + ./field/test_vector2d.cxx + ./field/test_vector3d.cxx + ./field/test_where.cxx + ./include/bout/test_array.cxx + ./include/bout/test_assert.cxx + ./include/bout/test_bout_enum_class.cxx + ./include/bout/test_deriv_store.cxx + ./include/bout/test_generic_factory.cxx + ./include/bout/test_macro_for_each.cxx + ./include/bout/test_monitor.cxx + ./include/bout/test_petsc_indexer.cxx + ./include/bout/test_petsc_matrix.cxx + ./include/bout/test_petsc_setters.cxx + ./include/bout/test_petsc_vector.cxx + ./include/bout/test_region.cxx + ./include/bout/test_single_index_ops.cxx + ./include/bout/test_stencil.cxx + ./include/bout/test_template_combinations.cxx + ./include/bout/test_traits.cxx + ./include/test_cyclic_reduction.cxx + ./include/test_derivs.cxx + ./include/test_mask.cxx + ./invert/test_fft.cxx + ./invert/laplace/test_laplace_petsc3damg.cxx + ./invert/laplace/test_laplace_cyclic.cxx + ./mesh/data/test_gridfromoptions.cxx + ./mesh/parallel/test_shiftedmetric.cxx + ./mesh/test_boundary_factory.cxx + ./mesh/test_boutmesh.cxx + ./mesh/test_coordinates.cxx + ./mesh/test_coordinates_accessor.cxx + ./mesh/test_interpolation.cxx + 
./mesh/test_invert3x3.cxx + ./mesh/test_mesh.cxx + ./mesh/test_paralleltransform.cxx + ./solver/test_fakesolver.cxx + ./solver/test_fakesolver.hxx + ./solver/test_solver.cxx + ./solver/test_solverfactory.cxx + ./sys/test_boutexception.cxx + ./sys/test_expressionparser.cxx + ./sys/test_msg_stack.cxx + ./sys/test_options.cxx + ./sys/test_options_adios2.cxx + ./sys/test_options_fields.cxx + ./sys/test_options_netcdf.cxx + ./sys/test_optionsreader.cxx + ./sys/test_output.cxx + ./sys/test_range.cxx + ./sys/test_timer.cxx + ./sys/test_type_name.cxx + ./sys/test_utils.cxx + ./sys/test_variant.cxx + ./sys/test_raja.cxx + ./test_extras.cxx + ./test_extras.hxx + ./fake_mesh.hxx + ./fake_mesh_fixture.hxx + ./src/test_bout++.cxx) if(BOUT_HAS_HYPRE) - list(APPEND serial_tests_source ./include/bout/test_hypre_interface.cxx) - list(APPEND serial_tests_source ./invert/laplace/test_laplace_hypre3d.cxx) -endif () + list(APPEND serial_tests_source ./include/bout/test_hypre_interface.cxx) + list(APPEND serial_tests_source ./invert/laplace/test_laplace_hypre3d.cxx) +endif() add_executable(serial_tests ${serial_tests_source}) -target_include_directories(serial_tests PUBLIC bout++ - $ - $ - ${CMAKE_INSTALL_PREFIX}/include - ) +target_include_directories( + serial_tests + PUBLIC bout++ $ + $ + ${CMAKE_INSTALL_PREFIX}/include) set_target_properties(serial_tests PROPERTIES LINKER_LANGUAGE CXX) target_link_libraries(serial_tests gtest gmock bout++::bout++) add_test(NAME serial_tests COMMAND serial_tests --gtest_brief=1) set_target_properties(serial_tests PROPERTIES FOLDER tests/unit) add_dependencies(build-check-unit-tests serial_tests) -if (BOUT_HAS_CUDA) - set_source_files_properties(${serial_tests_source} PROPERTIES LANGUAGE CUDA ) - set_target_properties(serial_tests PROPERTIES CUDA_STANDARD 14) -endif () - +if(BOUT_HAS_CUDA) + set_source_files_properties(${serial_tests_source} PROPERTIES LANGUAGE CUDA) + set_target_properties(serial_tests PROPERTIES CUDA_STANDARD 14) +endif() diff --git 
a/tools/GRIDUE_to_BOUT_grid_converter/README.md b/tools/GRIDUE_to_BOUT_grid_converter/README.md new file mode 100644 index 0000000000..155ab5543f --- /dev/null +++ b/tools/GRIDUE_to_BOUT_grid_converter/README.md @@ -0,0 +1,57 @@ +This script was modified from B. Dudson's original gridue_to_bout.py script. It creates a BOUT grid file from a GRIDUE file. + +It can be ran from the terminal. + +To run it directly after the creation of a gridue file on INGRID the following code needs to be added to ingrid.py: + +```python + def ExportBOUTgrid(self, gridue_file, bout_grid_name: str = 'bout_from_in.grd.nc', plotting = True, verbose = True, ignore_checks = False): + """ + Export a BOUT grid file for the created grid. + + Parameters + ---------- + gridue_file : str, mandatory + Name of gridue file to convert to BOUT grid. + + bout_grid_name : str, optional + Name of BOUT grid file to save. + + plotting : bool, optional + If True, plot the gridue file before conversion. + + verbose : bool, optional + If True, print verbose output during conversion. + + ignore_checks : bool, optional + If True, ignore checks for gridue file format and structure. 
+ + """ + bout_grid_name = gridue_file + "_" + bout_grid_name + Convert_grids(gridue_file,bout_grid_name,plotting, verbose, ignore_checks) + +``` + +And modifying the `ExportGridue` function to: + +```python + if type(self.CurrentTopology) in [SNL]: + if self.WriteGridueSNL(self.CurrentTopology.gridue_settings, fname): + print(f"# Successfully saved gridue file as '{fname}'") + self.ExportBOUTgrid(fname, 'bout_from_in.grd.nc', plotting = True, verbose = True, ignore_checks = False) + elif type(self.CurrentTopology) in [SF15, SF45, SF75, SF105, SF135, SF165, UDN, CDN]: + if self.WriteGridueDNL(self.CurrentTopology.gridue_settings, fname): + print(f"# Successfully saved gridue file as '{fname}'") + self.ExportBOUTgrid(fname, 'bout_from_in.grd.nc', plotting = True, verbose = True, ignore_checks = False) +``` + +*Note:* The created file will have +`dimensions: + x ; + y ; + z ; + x2 ; + y2 ; + t = UNLIMITED ; //` + +Where x has guard cells following INGRID's convention, and y doesn't have any. The converter takes the BOUT coordinates to be x -> y2 (and removes the guard cells) and y -> x2 (and adds the guard cells). So BOUT ends up using x2 as X and y2 as Y coordinates. \ No newline at end of file diff --git a/tools/GRIDUE_to_BOUT_grid_converter/gridue_to_bout.py b/tools/GRIDUE_to_BOUT_grid_converter/gridue_to_bout.py new file mode 100644 index 0000000000..1bca58689e --- /dev/null +++ b/tools/GRIDUE_to_BOUT_grid_converter/gridue_to_bout.py @@ -0,0 +1,962 @@ +#!/usr/bin/env python3 +# +# Convert UEDGE grid files (gridue) to BOUT++ grids +# +# Parts adapted from INGRID https://github.com/LLNL/INGRID +# Copyright (c) 2020, Lawrence Livermore National Security, LLC +# Parts adapted from Hypnotoad https://github.com/boutproject/hypnotoad/ +# Copyright 2019 J.T. Omotani +# +# +# Modified and finished by S. Ruiz, 2025 from the orinigal gridue_to_bout.py made by B. Dudson. 
+# +# + +import numpy as np +import matplotlib +import matplotlib.pyplot as plt + +from scipy import linalg + + +def _importBody(gridue_settings, f): + """ + Imports the body of a gridue file and returns a dictionary with the data. + + Parameters: + gridue_settings : dict + Dictionary with header information from the gridue file. + + f : file object + Opened file object to read the body of the gridue file. + + Returns: + gridue_settings : dict + A dictionary containing the gridue body data, with keys corresponding to + the items in BodyItems and values as numpy arrays. + """ + next(f) + BodyItems = ["rm", "zm", "psi", "br", "bz", "bpol", "bphi", "b"] + Str = {i: [] for i in BodyItems} + k = iter(Str.keys()) + Key = next(k) + + for line in f: + if line == "iogridue\n": + continue + if line == "\n": + try: + Key = next(k) + except: + continue + else: + Str[Key].append(line) + f.close() + nx = gridue_settings["nxm"] + 2 + ny = gridue_settings["nym"] + 2 + + for k, v in Str.items(): + L = ("".join(v).replace("\n", "").replace("D", "e")).split() + _l = iter(L) + vv = next(_l) + + data_ = np.zeros((nx, ny, 5)) + for n in range(5): + for j in range(ny): + for i in range(nx): + data_[i][j][n] = float(vv) + + try: + vv = next(_l) + except: + continue + gridue_settings[k] = data_ + return gridue_settings + + +def _importSN(values, f): + """ + Import a single null file. Only used for Single null divertor topology. 
+ """ + + HeaderItems = ["nxm", "nym", "ixpt1", "ixpt2", "iyseparatrix1"] + gridue_settings = dict(zip(HeaderItems, values)) + + # Add indices that are present in DN but not SN + gridue_settings.update( + { + "iyseparatrix2": gridue_settings["nym"] + 10, # Outside grid + "ix_cut1": gridue_settings["ixpt1"], + "ix_cut2": gridue_settings["nxm"] // 2, + "ix_inner": gridue_settings["nxm"] // 2, + "ix_cut3": gridue_settings["nxm"] // 2, + "ix_cut4": gridue_settings["ixpt2"], + } + ) + + return _importBody(gridue_settings, f) + + +def _importDN(values, f): + """ + Import a double null file. Used for most divertor topologies. + """ + + gridue_settings = dict(zip(["nxm", "nym"], values)) + + header_rows = [ + ["iyseparatrix1", "iyseparatrix2"], + ["ix_plate1", "ix_cut1", "_FILLER_", "ix_cut2", "ix_plate2"], + ["iyseparatrix3", "iyseparatrix4"], + ["ix_plate3", "ix_cut3", "_FILLER_", "ix_cut4", "ix_plate4"], + ] + + for row in header_rows: + values = [int(x) for x in next(f).split()] + if len(values) != len(row): + raise ValueError( + "Expected row with {} integers, found {}".format(len(row), len(values)) + ) + gridue_settings.update(zip(row, values)) + + gridue_settings.update({"ix_inner": gridue_settings["ix_plate2"]}) + + return _importBody(gridue_settings, f) + + +def importGridue(fname: str = "gridue") -> dict: + """ + Import UEDGE grid file as dictionary. + + Parameters + ---------- + fname : str, optional + Path/file name to gridue formatted file. + + Returns + ------- + A dict containing header and body information from the gridue file. 
+ + """ + f = open(fname, mode="r") + values = [int(x) for x in next(f).split()] + + if len(values) == 5: + return _importSN(values, f) + if len(values) == 2: + return _importDN(values, f) + + raise ValueError("Unrecognised gridue format") + + +def plot(GridueParams: dict, edgecolor="black", ax: object = None, show=True): + """ + Plot UEDGE grid from 'dict' obtained from method 'ImportGridue' + + Parameters + ---------- + GridueParams : dict + Gridue header and body information as a dictionary. + + edgecolor : str, optional + Color of grid. + + ax : object, optional + Matplotlib axes to plot on. + + """ + r = GridueParams["rm"] + z = GridueParams["zm"] + Nx = len(r) + Ny = len(r[0]) + patches = [] + plt.figure(figsize=(6, 10)) + if ax is None: + ax = plt.gca() + idx = [np.array([1, 2, 4, 3, 1])] + for i in range(Nx): + for j in range(Ny): + p = matplotlib.patches.Polygon( + np.concatenate((r[i][j][idx], z[i][j][idx])).reshape(2, 5).T, + fill=False, + closed=True, + edgecolor=edgecolor, + ) + ax.add_patch(p) # draw the contours + ax.set_aspect("equal", adjustable="box") + ax.set_xlabel("R") + ax.set_ylabel("Z") + ax.set_ylim(z.min(), z.max()) + ax.set_xlim(r.min(), r.max()) + if show: + plt.show() + return ax + + +def calcHy(nx, ny, g: dict, dy: float): + """ + Calculate poloidal arc length metric from gridue dictionary + """ + rm = g["rm"] + zm = g["zm"] + + hy = np.zeros((nx, ny)) + for i in range(nx): + for j in range(ny): + r = rm[j, i, :] + z = zm[j, i, :] + # Find intersection with (1) -- (3) + R1 = 0.5 * (r[1] + r[3]) + Z1 = 0.5 * (z[1] + z[3]) + # Find intersection with (2) -- (4) + R2 = 0.5 * (r[2] + r[4]) + Z2 = 0.5 * (z[2] + z[4]) + + # Distance from one side to the other + dl = np.sqrt((R2 - R1) ** 2 + (Z2 - Z1) ** 2) + hy[i, j] = dl / dy + return hy + + +def calcGridAngle(g: dict): + Brxy = g["br"][:, :, 0].T + Bzxy = g["bz"][:, :, 0].T + Bpxy = g["bpol"][:, :, 0].T + + psi = g["psi"] + rm = g["rm"] + zm = g["zm"] + + delta_y = [Brxy / Bpxy, Bzxy / 
Bpxy] # Unit vector along e_y + + R_xlow = 0.5 * (rm[:, :, 1] + rm[:, :, 2]).T + Z_xlow = 0.5 * (zm[:, :, 1] + zm[:, :, 2]).T + R_xhigh = 0.5 * (rm[:, :, 3] + rm[:, :, 4]).T + Z_xhigh = 0.5 * (zm[:, :, 3] + zm[:, :, 4]).T + dR = R_xhigh - R_xlow + dZ = Z_xhigh - Z_xlow + dl = np.sqrt(dR**2 + dZ**2) + delta_x = [dR / dl, dZ / dl] # unit vector along e_x + + # Calculate angle between x and y unit vectors. + # sin(beta) = cos(pi/2 - beta) = e_x_hat.e_y_hat = delta_x.delta_y + sinBeta = delta_x[0] * delta_y[0] + delta_x[1] * delta_y[1] + + # Rotate delta_y by 90 degrees anticlockwise to get unit vector in psi direction + delta_psi = [-delta_y[1], delta_y[0]] + + # cosBeta = delta_x.delta_psi + cosBeta = delta_x[0] * delta_psi[0] + delta_x[1] * delta_psi[1] + + return sinBeta, cosBeta + + +def calcRZderivs(R, Z, var): + """ + Calculate derivatives of var w.r.t R and Z, using 5 points + + Inputs R, Z and var should be 3D [poloidal, radial, 5] + as stored in gridue files. + + Returns (dvar/dR, dvar/dZ) as 2D arrays [poloidal, radial] + at the location of point index 0 + """ + npol, nr, _ = R.shape + dR = np.zeros((npol, nr)) + dZ = np.zeros((npol, nr)) + for i in range(npol): + for j in range(nr): + A = np.zeros((5, 5)) + for p in range(5): + # Constraint on derivatives for this point + A[p, 0] = 1.0 # Constant + A[p, 1] = R[i, j, p] - R[i, j, 0] # dR + A[p, 2] = Z[i, j, p] - Z[i, j, 0] # dZ + A[p, 3] = (R[i, j, p] - R[i, j, 0]) ** 2 # dR^2 + A[p, 4] = (Z[i, j, p] - Z[i, j, 0]) ** 2 # dR^2 + # Values of the function being fitted + b = var[i, j, :].squeeze() + # Invert using LU for stability + lu, piv = linalg.lu_factor(A) + x = linalg.lu_solve((lu, piv), b) + dR[i, j] = x[1] + dZ[i, j] = x[2] + return dR, dZ # dvar/dR, dvar/dZ at cell center + + +def calcMetric(grd: dict, bpsign, verbose=False, ignore_checks=False): + """ + Calculate metric tensor given BOUT++ cell centers at each grid point. 
+ + Parameters: + grd : dict + Dictionary containing grid data with keys: + "Rxy", "Zxy", "Brxy", "Bzxy", "Btxy", "Bpxy", "Bxy", "hy", + "cosBeta", "tanBeta", "curl_bOverB_Rhat", "curl_bOverB_Zhat", + "curl_bOverB_zetahat". + bpsign : int + Sign of the magnetic field (1 for normal, -1 for reversed). + verbose : bool, optional + If True, print detailed information about the metric tensor and Jacobian. + ignore_checks : bool, optional + If True, ignore checks on the Jacobian's relative error. + + Returns: + dict + A dictionary containing the metric tensor components, Jacobian, and other related quantities for each grid point. + --> Each component is a 2D array with shape [radial, poloidal]. + """ + + Rxy = grd["Rxy"] + Zxy = grd["Zxy"] + Brxy = grd["Brxy"] + Bzxy = grd["Bzxy"] + Btxy = grd["Btxy"] + Bpxy = grd["Bpxy"] + Bxy = grd["Bxy"] + hy = grd["hy"] + cosBeta = grd["cosBeta"] + tanBeta = grd["tanBeta"] + curl_bOverB_Rhat = grd["curl_bOverB_Rhat"] + curl_bOverB_Zhat = grd["curl_bOverB_Zhat"] + curl_bOverB_zetahat = grd["curl_bOverB_zetahat"] + + if verbose: + for var, name in [ + (cosBeta, "cos(beta)"), + (tanBeta, "tan(beta)"), + ]: + print( + "{} min {}, mean {}, max {}".format( + name, np.amin(var), np.mean(var), np.amax(var) + ) + ) + + dphidy = hy * Btxy / (Bpxy * Rxy) + + I = np.zeros(Rxy.shape) + + g11 = (Rxy * Bpxy) ** 2 + g22 = 1.0 / (hy * cosBeta) ** 2 + g33 = ( + 1.0 / Rxy**2 + + (Rxy * Bpxy * I) ** 2 + + (dphidy / (hy * cosBeta)) ** 2 + + 2.0 * Rxy * Bpxy * I * dphidy * tanBeta / hy + ) + g12 = Rxy * np.abs(Bpxy) * tanBeta / hy + g13 = -Rxy * Bpxy * dphidy * tanBeta / hy - I * (Rxy * Bpxy) ** 2 + g23 = -bpsign * dphidy / (hy * cosBeta) ** 2 - Rxy * np.abs(Bpxy) * I * tanBeta / hy + + J = hy / Bpxy + + g_11 = 1.0 / (Rxy * Bpxy * cosBeta) ** 2 + (I * Rxy) ** 2 + g_22 = hy**2 + (dphidy * Rxy) ** 2 + g_33 = Rxy**2 + g_12 = bpsign * I * dphidy * Rxy**2 - hy * tanBeta / (Rxy * np.abs(Bpxy)) + g_13 = I * Rxy**2 + g_23 = bpsign * dphidy * Rxy**2 + + Jcheck = ( 
+ bpsign + * 1.0 + / np.sqrt( + g11 * g22 * g33 + + 2.0 * g12 * g13 * g23 + - g11 * g23**2 + - g22 * g13**2 + - g33 * g12**2 + ) + ) + + rel_error = (J - Jcheck) / J + + if verbose: + for var, name in [ + (hy, "hy"), + (J, "J"), + (Jcheck, "Jcheck"), + (J - Jcheck, "J - Jcheck"), + (rel_error, "(J - Jcheck)/J"), + ]: + print( + "{} min {}, mean {}, max {}".format( + name, np.amin(var), np.mean(var), np.amax(var) + ) + ) + + if np.max(np.abs(rel_error)) > 1e-6: + if ignore_checks: + print("WARNING: Relative error in Jacobian too large.") + # else: + # raise ValueError(f"Relative error in Jacobian too large: {np.max(np.abs(rel_error))}") + + # We want to output contravariant components of Curl(b/B) in the + # locally field-aligned coordinate system. + # The contravariant components of an arbitrary vector A are + # A^x = A.Grad(x) + # A^y = A.Grad(y) + # A^z = A.Grad(z) + + # Grad in cylindrical coordinates is + # Grad(f) = df/dR Rhat + 1/R df/dzeta zetahat + df/dZ Zhat + # https://en.wikipedia.org/wiki/Del_in_cylindrical_and_spherical_coordinates, + + # x = psi - psi_min + # dpsi/dR = -R*BZ + # dpsi/dZ = R*BR + # => Grad(x) = (dpsi/dR, 0, dpsi/dZ).(Rhat, zetahat, Zhat) + # => Grad(x) = (-R BZ, 0, R BR).(Rhat, zetahat, Zhat) + curl_bOverB_x = -Rxy * Bzxy * curl_bOverB_Rhat + Rxy * Brxy * curl_bOverB_Zhat + + # Grad(y) = (d_Z, 0, -d_R)/(hy*cosBeta) + # = (BR*cosBeta-BZ*sinBeta, 0, BZ*cosBeta+BR*sinBeta) + # /(Bp*hy*cosBeta) + # = (BR-BZ*tanBeta, 0, BZ+BR*tanBeta)/(Bp*hy) + curl_bOverB_y = ( + curl_bOverB_Rhat * (Brxy - Bzxy * tanBeta) + + curl_bOverB_Zhat * (Bzxy + Brxy * tanBeta) + ) / (Bpxy * hy) + + # Grad(z) = Grad(zeta) - Bt*hy/(Bp*R)*Grad(y) - I*Grad(x) + # Grad(z) = (0, 1/R, 0) - Bt*hy/(Bp*R)*Grad(y) - I*Grad(x) + curl_bOverB_z = ( + curl_bOverB_zetahat / Rxy + - Btxy * hy / (Bpxy * Rxy) * curl_bOverB_y + - I * curl_bOverB_x + ) + + bxcvx = Bxy / 2.0 * curl_bOverB_x + bxcvy = Bxy / 2.0 * curl_bOverB_y + bxcvz = Bxy / 2.0 * curl_bOverB_z + + if verbose: + for 
var, name in [ + (bxcvx, "bxcvx"), + (bxcvy, "bxcvy"), + (bxcvz, "bxcvz"), + ]: + print( + "{} min {}, mean {}, max {}".format( + name, np.amin(var), np.mean(var), np.amax(var) + ) + ) + + return { + "dphidy": dphidy, + # Metric tensor + "g11": g11, + "g22": g22, + "g33": g33, + "g12": g12, + "g13": g13, + "g23": g23, + # Inverse metric tensor + "g_11": g_11, + "g_22": g_22, + "g_33": g_33, + "g_12": g_12, + "g_13": g_13, + "g_23": g_23, + # Jacobian + "J": J, + # Integrated shear + "sinty": I, + # Curvature + "curl_bOverB_x": curl_bOverB_x, + "curl_bOverB_y": curl_bOverB_y, + "curl_bOverB_z": curl_bOverB_z, + "bxcvx": bxcvx, + "bxcvy": bxcvy, + "bxcvz": bxcvz, + } + + +def calcRZCurvature(g: dict): + """ + Calculate curvature in (R, Z, zeta) + Returns 2D arrays [radial, poloidal] + """ + # Variables in UEDGE format [poloidal, radial, 5] + BR = g["br"] + BZ = g["bz"] + Bzeta = g["bphi"] + B2 = g["b"] ** 2 + R = g["rm"] + Z = g["zm"] + + dBzetadR, dBzetadZ = calcRZderivs(R, Z, Bzeta) + dBRdR, dBRdZ = calcRZderivs(R, Z, BR) + dBZdR, dBZdZ = calcRZderivs(R, Z, BZ) + dB2dR, dB2dZ = calcRZderivs(R, Z, B2) + + # Select point at centre of cell + BR = BR[:, :, 0] + BZ = BZ[:, :, 0] + Bzeta = Bzeta[:, :, 0] + B2 = B2[:, :, 0] + R = R[:, :, 0] + Z = Z[:, :, 0] + + # In cylindrical coords + # curl(A) = (1/R*d(AZ)/dzeta - d(Azeta)/dZ) * Rhat + # + 1/R*(d(R Azeta)/dR - d(AR)/dzeta) * Zhat + # + (d(AR)/dZ - d(AZ)/dR) * zetahat + # Where AR, AZ and Azeta are the components on a basis of unit vectors, + # i.e. 
AR = A.Rhat; AZ = A.Zhat; Azeta = A.zetahat + # https://en.wikipedia.org/wiki/Del_in_cylindrical_and_spherical_coordinates, + # + # curl(b/B) = curl((BR/B2), (BZ/B2), (Bzeta/B2)) + # curl(b/B)_Rhat = 1/R d(BZ/B2)/dzeta - d(Bzeta/B2)/dZ + # = 1/(R*B2)*d(BZ)/dzeta - BZ/(R*B4)*d(B2)/dzeta + # - 1/B2*d(Bzeta)/dZ + Bzeta/B4*d(B2)/dZ + # = -1/B2*d(Bzeta)/dZ + Bzeta/B4*d(B2)/dZ + # curl(b/B)_Zhat = 1/R * (d(R Bzeta/B2)/dR - d(BR/B2)/dzeta) + # = Bzeta/(R*B2) + 1/B2*d(Bzeta)/dR - Bzeta/B4*d(B2)/dR + # - 1/(R*B2)*d(BR)/dzeta + BR/(R*B4)*d(B2)/dzeta + # = Bzeta/(R*B2) + 1/B2*d(Bzeta)/dR - Bzeta/B4*d(B2)/dR + # curl(b/B)_zetahat = d(BR/B2)/dZ - d(BZ/B2)/dR + # = 1/B2*d(BR)/dZ - BR/B4*d(B2)/dZ + # - 1/B2*d(BZ)/dR + BZ/B4*d(B2)/dR + # remembering d/dzeta=0 for axisymmetric equilibrium + + curl_bOverB_Rhat = -dBzetadZ / B2 + Bzeta / B2**2 * dB2dZ + curl_bOverB_Zhat = Bzeta / (R * B2) + dBzetadR / B2 - Bzeta / B2**2 * dB2dR + curl_bOverB_zetahat = ( + dBRdZ / B2 - BR / B2**2 * dB2dZ - dBZdR / B2 + BZ / B2**2 * dB2dR + ) + + # Return as [radial, poloidal] + return curl_bOverB_Rhat.T, curl_bOverB_Zhat.T, curl_bOverB_zetahat.T + + +def main(): + import argparse + + parser = argparse.ArgumentParser( + description="""Converts UEDGE grid files (gridue) into BOUT++ grids. 
+Note that in most cases these grids are non-orthogonal.""" + ) + parser.add_argument("gridue_file", type=str) + parser.add_argument("-o", "--output", default="bout.grd.nc") + parser.add_argument("-p", "--plot", action="store_true", default=False) + parser.add_argument("-v", "--verbose", action="store_true", default=False) + parser.add_argument("-i", "--ignore-checks", action="store_true", default=False) + try: + import argcomplete + + argcomplete.autocomplete(parser) + except ImportError: + pass + + args = parser.parse_args() + gridue_file = args.gridue_file + output_filename = args.output + plotting = args.plot + verbose = args.verbose + ignore_checks = args.ignore_checks + + Convert_grids(gridue_file, output_filename, plotting, verbose, ignore_checks) + + +def Convert_grids( + gridue_file: str, + output_filename: str, + plotting: bool = False, + verbose: bool = False, + ignore_checks: bool = False, +): + """ + Convert UEDGE grid file to BOUT++ grid format. + + Parameters: + gridue_file : str + Path to the UEDGE grid file (gridue format). + output_filename : str + Path to save the converted BOUT++ grid file. + plotting : bool, optional + If True, plot the grid after conversion. + verbose : bool, optional + If True, print detailed information about the grid and conversion process. + ignore_checks : bool, optional + If True, ignore checks on the Jacobian's relative error. + Raises: + ------- + ValueError + If the magnetic field direction and poloidal magnetic field have opposite signs. + ValueError + If the relative error in the Jacobian is too large and ignore_checks is False. + ValueError + If the gridue file format is unrecognized. + ValueError + If the gridue file does not contain the expected header information. + ValueError + If the gridue file does not contain the expected body information. + ValueError + If the gridue file does not contain the expected single null or double null topology. 
+ ValueError + If the gridue file does not contain the expected number of grid points. + ValueError + If the gridue file does not contain the expected number of poloidal or radial points. + ValueError + If the gridue file does not contain the expected number of boundary points. + ValueError + If the gridue file does not contain the expected number of inner or outer points. + ValueError + If the gridue file does not contain the expected number of inner or outer boundary points. + ValueError + If the gridue file does not contain the expected number of inner or outer plate points. + """ + + g = importGridue(gridue_file) + + if plotting: + plot(g) + + psi = g["psi"] + rm = g["rm"] + zm = g["zm"] + + Rxy = rm[:, :, 0].T + Zxy = zm[:, :, 0].T + Brxy = g["br"][:, :, 0].T + Bzxy = g["bz"][:, :, 0].T + Bpxy = g["bpol"][:, :, 0].T + Btxy = g["bphi"][:, :, 0].T + Bxy = g["b"][:, :, 0].T + psixy = psi[:, :, 0].T + nx, ny = Rxy.shape + + # idx = [np.array([1, 2, 4, 3, 1])] + + # pol = [] + # for i in range(nx): + # for j in range(ny): + # np.concatenate((rm[i][j][idx], zm[i][j][idx])).reshape(2, 5).T + + # Ordering + # (1) -- (3) + # | | + # | (0) | -> Radial, BOUT++ "x" + # | | + # (2) -- (4) + + # calculate change in psi across cell -> dx + + dx = np.zeros((nx, ny)) + for i in range(nx): + for j in range(ny): + if i > 1 and i < nx - 2: + dx[i, j] = 0.5 * (psi[j, i + 1, 0] - psi[j, i - 1, 0]) + else: + dx[i, j] = 0.5 * ( + psi[j, i, 3] + psi[j, i, 4] - psi[j, i, 1] - psi[j, i, 2] + ) + + # Note: UEDGE grids have narrow cells on the radial + # boundaries. BOUT++ applies boundary conditions half-way between + # cells, and usually expects two boundary cells. 
+ + # Calculate direction of magnetic field Bp dot Grad(y) + Bp_dot_grady = Brxy[1, 1] * (Rxy[1, 1] - Rxy[1, 0]) + Bzxy[1, 1] * ( + Zxy[1, 1] - Zxy[1, 0] + ) + + if Bp_dot_grady * Bpxy[1, 1] < 0.0: + raise ValueError("Bp_dot_grady and Bpxy have opposite signs") + bpsign = np.sign(Bpxy[1, 1]) # Sign of the poloidal magnetic field + + # Choose angle dy across poloidal cell. This is somewhat arbitrary, + # but it is helpful if the y angle changes by 2pi for each poloidal transit of the core + + dy = 2 * np.pi / ny + + # Calculate hy, the arc length along the flux surface passing through + # the center of each cell. + hy = calcHy(nx, ny, g, dy) + + # Calculate angle between x and y coordinates. sinBeta = 0, cosBeta = 1 for an orthogonal mesh + sinBeta, cosBeta = calcGridAngle(g) + tanBeta = sinBeta / cosBeta + + # Calculate curvature + curl_bOverB_Rhat, curl_bOverB_Zhat, curl_bOverB_zetahat = calcRZCurvature(g) + + # Collect 2D variables for output + grd = { + "Rxy": Rxy, + "Zxy": Zxy, + "psixy": psixy, + "dx": dx, + "dy": np.full((nx, ny), dy), + # Magnetic field components + "Brxy": Brxy, + "Bzxy": Bzxy, + "Bpxy": Bpxy, + "Btxy": Btxy, + "Bxy": Bxy, + # Poloidal arc length + "hy": hy, + "hthe": hy, + # Curvature + "curl_bOverB_Rhat": curl_bOverB_Rhat, + "curl_bOverB_Zhat": curl_bOverB_Zhat, + "curl_bOverB_zetahat": curl_bOverB_zetahat, + # Grid angles + "cosBeta": cosBeta, + "tanBeta": tanBeta, + } + + # Remove Y (poloidal) boundary cells + for name in grd: + grd[name] = grd[name][:, 1:-1] + + Rxy = grd["Rxy"] + Zxy = grd["Zxy"] + nx, ny = Rxy.shape + + # Get Mesh Topology and remove guard cells accordingly + mesh_topology = getMeshTopology(g, nx, ny) + + if mesh_topology == "SF": + # SF case + ixseps1 = g["iyseparatrix1"] + 2 # Main X-point separatrix + ixseps2 = min(g["iyseparatrix3"] + 2, nx) # Secondary X-point separatrix + # Remove guard cells on either side of X-point. 
+ ny_inner = g["ix_inner"] + for name in grd: + var = grd[name] + nx, ny = var.shape + newvar = np.zeros((nx, ny - 2)) + newvar[:, :ny_inner] = var[:, :ny_inner] + newvar[:, ny_inner:] = var[:, (ny_inner + 2) :] + grd[name] = newvar + g["ix_cut2"] = g["ix_cut2"] - 1 + g["ix_cut3"] = g["ix_cut3"] - 1 + g["ix_cut4"] = g["ix_cut4"] - 2 + else: + ixseps1 = g["iyseparatrix1"] + 2 # Lower X-point separatrix + ixseps2 = min(g["iyseparatrix2"] + 2, nx) # Upper X-point separatrix + # Double null -> Remove upper Y guard cells + ny_inner = g["ix_inner"] + for name in grd: + var = grd[name] + nx, ny = var.shape + newvar = np.zeros((nx, ny - 2)) + newvar[:, :ny_inner] = var[:, :ny_inner] + newvar[:, ny_inner:] = var[:, (ny_inner + 2) :] + grd[name] = newvar + g["ix_cut2"] = g["ix_cut2"] - 1 + g["ix_cut3"] = g["ix_cut3"] - 3 + g["ix_cut4"] = g["ix_cut4"] - 2 + + # Extrapolate X (radial) boundary cells + # Removing one cell, adding two on each X boundary + for name in grd: + var = grd[name] + nx, ny = var.shape + newvar = np.zeros((nx + 2, ny)) + newvar[2:-2, :] = var[1:-1, :] + if name in ["Rxy", "Zxy", "psixy"]: + # Linear extrapolation + newvar[1, :] = 2.0 * newvar[2, :] - newvar[3, :] + newvar[0, :] = 2.0 * newvar[1, :] - newvar[2, :] + newvar[-2, :] = 2.0 * newvar[-3, :] - newvar[-4, :] + newvar[-1, :] = 2.0 * newvar[-2, :] - newvar[-3, :] + else: + # Constant extrapolation + newvar[1, :] = newvar[2, :] + newvar[0, :] = newvar[2, :] + newvar[-2, :] = newvar[-3, :] + newvar[-1, :] = newvar[-3, :] + grd[name] = newvar + + # Re assign grid indices after removing guard cells + jyseps1_1 = g["ix_cut1"] - 1 + jyseps2_1 = g["ix_cut2"] + ny_inner = g["ix_inner"] + jyseps1_2 = g["ix_cut3"] + jyseps2_2 = g["ix_cut4"] - 1 + + # Calculate metric tensor + grd.update(calcMetric(grd, bpsign, verbose, ignore_checks)) + + dphidy = grd["dphidy"] + Rxy = grd["Rxy"] + Zxy = grd["Zxy"] + nx, ny = Rxy.shape + + # Calculate zShift and ShiftAngle + zShift = np.zeros((nx, ny)) + + # Inner core 
region + zShift[:, jyseps1_1 + 1] = 0.5 * dphidy[:, jyseps1_1 + 1] * dy + # Note: This goes to jyseps2_1 + 1, the first cell in the upper inner leg (including upper PF) + for jy in range(jyseps1_1 + 2, jyseps2_1 + 2): + zShift[:, jy] = ( + zShift[:, jy - 1] + 0.5 * (dphidy[:, jy] + dphidy[:, jy - 1]) * dy + ) + + # Outer core + # Note: ixseps2 points are connected from inner to outer core + # If single null, ixseps2 = nx, jyseps1_2 = jyseps2_1 + zShift[:, jyseps1_2 + 1] = ( + zShift[:, jyseps2_1] + + 0.5 * (dphidy[:, jyseps2_1] + dphidy[:, jyseps1_2 + 1]) * dy + ) + for jy in range(jyseps1_2 + 2, jyseps2_2 + 2): + zShift[:, jy] = ( + zShift[:, jy - 1] + 0.5 * (dphidy[:, jy] + dphidy[:, jy - 1]) * dy + ) + + if jyseps2_1 != jyseps1_2: + # Double null, with upper X-point + + # Upper inner leg. Note that upper PF region set from core at y index jyseps2_1 + 1 + for jy in range(jyseps2_1 + 2, ny_inner): + zShift[:, jy] = ( + zShift[:, jy - 1] + 0.5 * (dphidy[:, jy] + dphidy[:, jy - 1]) * dy + ) + + # Upper outer leg. Joins onto upper inner leg (jyseps2_1 + 1) for x < ixseps2 + zShift[:ixseps2, jyseps1_2] = ( + zShift[:ixseps2, jyseps2_1 + 1] + - 0.5 * (dphidy[:ixseps2, jyseps2_1 + 1] + dphidy[:ixseps2, jyseps1_2]) * dy + ) + # joins outer core/SOL region for x >= ixseps2 + zShift[ixseps2:, jyseps1_2] = ( + zShift[ixseps2:, jyseps1_2 + 1] + - 0.5 * (dphidy[ixseps2:, jyseps1_2 + 1] + dphidy[ixseps2:, jyseps1_2]) * dy + ) + # Iterate backwards along upper outer leg from X-point towards target + for jy in range(jyseps1_2 - 1, ny_inner - 1, -1): + zShift[:, jy] = ( + zShift[:, jy + 1] - 0.5 * (dphidy[:, jy] + dphidy[:, jy - 1]) * dy + ) + + # Lower outer leg + zShift[:ixseps1, jyseps2_2 + 1] = 0.5 * dphidy[:ixseps1, jyseps2_2 + 1] * dy + for jy in range(jyseps2_2 + 2, ny): + zShift[:, jy] = ( + zShift[:, jy - 1] + 0.5 * (dphidy[:, jy] + dphidy[:, jy - 1]) * dy + ) + # Lower inner leg. 
Going backwards in Y toward the plate + zShift[:ixseps1, jyseps1_1] = -0.5 * dphidy[:ixseps1, jyseps1_1] * dy + for jy in range(jyseps1_1 - 1, -1, -1): + zShift[:, jy] = ( + zShift[:, jy + 1] - 0.5 * (dphidy[:, jy] + dphidy[:, jy + 1]) * dy + ) + + ShiftAngle = np.zeros(nx) + ShiftAngle[:ixseps1] = ( + np.sum( + dphidy[:ixseps1, (jyseps1_1 + 1) : (jyseps2_1 + 1)] * dy, + axis=1, # Inner core + ) + + np.sum( + dphidy[:ixseps1, (jyseps1_2 + 1) : (jyseps2_2 + 1)] * dy, + axis=1, # Outer core + ) + ) + + if verbose: + print( + "Safety factor: min {}, mean {}, max {}".format( + np.amin(ShiftAngle[:ixseps1]) / (2 * np.pi), + np.mean(ShiftAngle[:ixseps1]) / (2 * np.pi), + np.amax(ShiftAngle[:ixseps1]) / (2 * np.pi), + ) + ) + + if plotting: + plt.plot(Rxy, Zxy, "x") # needs to be transposed. + plt.plot(Rxy[ixseps1, :], Zxy[ixseps1, :], color="magenta", label="ixseps1") + if ixseps2 < nx: + plt.plot(Rxy[ixseps2, :], Zxy[ixseps2, :], color="r", label="ixseps2") + + plt.plot(Rxy[:, jyseps1_1], Zxy[:, jyseps1_1], color="k", label="jyseps1_1") + plt.plot(Rxy[:, jyseps1_2], Zxy[:, jyseps1_2], color="b", label="jyseps1_2") + plt.plot(Rxy[:, ny_inner], Zxy[:, ny_inner], color="c", label="ny_inner") + plt.plot(Rxy[:, jyseps2_1], Zxy[:, jyseps2_1], color="g", label="jyseps2_1") + plt.plot(Rxy[:, jyseps2_2], Zxy[:, jyseps2_2], color="r", label="jyseps2_2") + + ax = plt.gca() + ax.set_aspect("equal", adjustable="box") + ax.set_xlabel("R") + ax.set_ylabel("Z") + plt.legend() + plt.show() + + from boututils.datafile import DataFile + + if verbose: + print("Saving to " + output_filename) + + with DataFile(output_filename, create=True, format="NETCDF4") as f: + # Save unique ID for grid file + import uuid + + f.write_file_attribute("grid_id", str(uuid.uuid1())) + f.write_file_attribute("gridue", str(gridue_file)) + + f.write("nx", nx) + f.write("ny", ny) + f.write("ixseps1", ixseps1) + f.write("ixseps2", ixseps2) + f.write("jyseps1_1", jyseps1_1) + f.write("jyseps2_1", jyseps2_1) + 
f.write("ny_inner", ny_inner) + f.write("jyseps1_2", jyseps1_2) + f.write("jyseps2_2", jyseps2_2) + f.write("rm", rm) + f.write("zm", zm) + f.write("topology", mesh_topology) + + # 2D fields + for name in grd: + f.write(name, grd[name]) + + f.write("zShift", zShift) + f.write("ShiftAngle", ShiftAngle) + + +def getMeshTopology(g, nx, ny): + """ + Get mesh topology from gridue data. + + Returns: + -------- + dict + A dictionary containing the mesh topology information, including: + - "single_null": bool indicating if the mesh is single null or double null. + - "ixseps1": int, index of the lower X-point separatrix. + - "ixseps2": int, index of the upper X-point separatrix (if double null). + - "jyseps1_1": int, index of the lower inner leg separatrix. + - "jyseps2_1": int, index of the upper inner leg separatrix. + - "ny_inner": int, number of inner poloidal points. + - "jyseps1_2": int, index of the lower outer leg separatrix. + - "jyseps2_2": int, index of the upper outer leg separatrix. 
+ """ + + ixseps1 = g["iyseparatrix1"] + 2 # Lower X-point separatrix + ixseps2 = min(g["iyseparatrix2"] + 2, nx) # Upper X-point separatrix + jyseps1_1 = g["ix_cut1"] - 1 + jyseps2_1 = g["ix_cut2"] + ny_inner = g["ix_inner"] + jyseps1_2 = g["ix_cut3"] + jyseps2_2 = g["ix_cut4"] - 1 + + if jyseps1_1 < 0 and jyseps2_2 >= ny - 1: + return "CFL" + elif jyseps2_1 == jyseps1_2: + return "SN" + elif jyseps1_2 <= ny_inner and ny_inner <= jyseps2_2: + return "SF" + elif ixseps1 == ixseps2: + return "CDN" + else: + return "UDN" + + +if __name__ == "__main__": + main() diff --git a/tools/pylib/_boutpp_build/CMakeLists.txt b/tools/pylib/_boutpp_build/CMakeLists.txt index 3be2a5d2aa..14ec3c8e2d 100644 --- a/tools/pylib/_boutpp_build/CMakeLists.txt +++ b/tools/pylib/_boutpp_build/CMakeLists.txt @@ -1,19 +1,20 @@ -# Error if Python API was explicitly requested, otherwise just a -# warning and don't build Python API +# Error if Python API was explicitly requested, otherwise just a warning and +# don't build Python API macro(bout_python_maybe_error VAR NAME) - if (NOT ${VAR}) + if(NOT ${VAR}) set(_error_msg "${NAME} is required for the Python interface") - if (NOT "${BOUT_ENABLE_PYTHON}" STREQUAL "AUTO") + if(NOT "${BOUT_ENABLE_PYTHON}" STREQUAL "AUTO") message(FATAL_ERROR ${_error_msg}) else() message(WARNING ${_error_msg}) - set(BOUT_ENABLE_PYTHON OFF PARENT_SCOPE) + set(BOUT_ENABLE_PYTHON + OFF + PARENT_SCOPE) return() endif() endif() endmacro() - bout_python_maybe_error(BUILD_SHARED_LIBS "BOUT++ shared library") find_package(Numpy) @@ -26,34 +27,37 @@ find_package(Bash) bout_python_maybe_error(${Bash_FOUND} Bash) execute_process(COMMAND ${Python3_EXECUTABLE} -c "import jinja2" - RESULT_VARIABLE jinja2_FOUND) -if (jinja2_FOUND EQUAL 0) + RESULT_VARIABLE jinja2_FOUND) +if(jinja2_FOUND EQUAL 0) # We have jinja2 - all good else() bout_python_maybe_error(OFF jinja2) endif() -execute_process(COMMAND ${Python3_EXECUTABLE} -c "import sysconfig; 
print(sysconfig.get_config_var('EXT_SUFFIX')[:-3])" +execute_process( + COMMAND ${Python3_EXECUTABLE} -c + "import sysconfig; print(sysconfig.get_config_var('EXT_SUFFIX')[:-3])" RESULT_VARIABLE PYTHON_WORKING OUTPUT_VARIABLE PYTHON_EXT_SUFFIX - OUTPUT_STRIP_TRAILING_WHITESPACE - ) -if (NOT ${PYTHON_WORKING} EQUAL 0) + OUTPUT_STRIP_TRAILING_WHITESPACE) +if(NOT ${PYTHON_WORKING} EQUAL 0) set(MSG "Failed to get the extension name from python!") - if ("${BOUT_ENABLE_PYTHON}" STREQUAL "ON") + if("${BOUT_ENABLE_PYTHON}" STREQUAL "ON") message(FATAL_ERROR ${MSG}) else() message(WARNING ${MSG}) - set(BOUT_ENABLE_PYTHON OFF ) + set(BOUT_ENABLE_PYTHON OFF) endif() endif() # No errors? We can build the interface! -if ("${BOUT_ENABLE_PYTHON}" STREQUAL "AUTO") - set(BOUT_ENABLE_PYTHON ON PARENT_SCOPE) +if("${BOUT_ENABLE_PYTHON}" STREQUAL "AUTO") + set(BOUT_ENABLE_PYTHON + ON + PARENT_SCOPE) endif() -if (NOT BOUT_ENABLE_PYTHON) +if(NOT BOUT_ENABLE_PYTHON) message(WARNING "Python interface will not be built, see warnings above") return() endif() @@ -65,60 +69,67 @@ set(tar ${CMAKE_CURRENT_BINARY_DIR}) set(files "boutpp.pyx" "resolve_enum.pxd" "helper.cxx" "helper.h" "boutcpp.pxd") foreach(file IN LISTS files) # helper.py and resolve_enum_inv.pyx.in are only required by boutpp.pyx - #set(deps {src}/$file.in ${src}/common.sh) - #if (${file} STREQUAL boutpp.pyx) - #list(APPEND deps + # set(deps {src}/$file.in ${src}/common.sh) if (${file} STREQUAL boutpp.pyx) + # list(APPEND deps set(gen ${tar}/${file}) list(APPEND generated ${gen}) - #message(FATAL_ERROR "${gen} ${src}/${file}.jinja") - add_custom_command(OUTPUT ${gen} - COMMAND ${CMAKE_COMMAND} -E make_directory ${tar} - COMMAND ${CMAKE_COMMAND} -E env PYTHONPATH=${tar}/..:\${PYTHONPATH} ${Python3_EXECUTABLE} generate.py ${file}.jinja ${gen} - DEPENDS ${src}/${file}.jinja - DEPENDS ${src}/helper.py - DEPENDS ${src}/resolve_enum_inv.pyx.jinja - DEPENDS ${src}/generate.py - DEPENDS bout++ - WORKING_DIRECTORY ${src}/ - COMMENT 
"Generating ${file}") + # message(FATAL_ERROR "${gen} ${src}/${file}.jinja") + add_custom_command( + OUTPUT ${gen} + COMMAND ${CMAKE_COMMAND} -E make_directory ${tar} + COMMAND ${CMAKE_COMMAND} -E env PYTHONPATH=${tar}/..:\${PYTHONPATH} + ${Python3_EXECUTABLE} generate.py ${file}.jinja ${gen} + DEPENDS ${src}/${file}.jinja + DEPENDS ${src}/helper.py + DEPENDS ${src}/resolve_enum_inv.pyx.jinja + DEPENDS ${src}/generate.py + DEPENDS bout++ + WORKING_DIRECTORY ${src}/ + COMMENT "Generating ${file}") endforeach() set(boutpp_depends ${generated}) -set(files "boutexception_helper.hxx" "boutexception_helper.cxx" "boutpp_openmpi_compat.hxx" "bout_options.pxd" "setup.py") +set(files "boutexception_helper.hxx" "boutexception_helper.cxx" + "boutpp_openmpi_compat.hxx" "bout_options.pxd" "setup.py") foreach(file IN LISTS files) list(APPEND ${boutpp_depends} "${CMAKE_CURRENT_BINARY_DIR}/${file}") bout_copy_file("${file}") endforeach() -add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/libboutpp.cpp +add_custom_command( + OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/libboutpp.cpp COMMAND ${CMAKE_COMMAND} -E copy boutpp.pyx libboutpp.pyx - COMMAND ${Python3_EXECUTABLE} -m cython libboutpp.pyx --cplus -3 -X binding=True -X embedsignature=True + COMMAND ${Python3_EXECUTABLE} -m cython libboutpp.pyx --cplus -3 -X + binding=True -X embedsignature=True WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - DEPENDS ${boutpp_depends} - ) - -add_library(boutpp${PYTHON_EXT_SUFFIX} SHARED - ${tar}/libboutpp.cpp - ${tar}/helper.cxx - ${tar}/boutexception_helper.cxx - ) - -add_custom_target(boutpp ALL - COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/libboutpp${PYTHON_EXT_SUFFIX}.so ${CMAKE_CURRENT_BINARY_DIR}/../boutpp/libboutpp${PYTHON_EXT_SUFFIX}.so - COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/boutpp.py ${CMAKE_CURRENT_BINARY_DIR}/../boutpp/__init__.py + DEPENDS ${boutpp_depends}) + +add_library( + boutpp${PYTHON_EXT_SUFFIX} SHARED ${tar}/libboutpp.cpp ${tar}/helper.cxx + 
${tar}/boutexception_helper.cxx) + +add_custom_target( + boutpp ALL + COMMAND + ${CMAKE_COMMAND} -E copy + ${CMAKE_CURRENT_BINARY_DIR}/libboutpp${PYTHON_EXT_SUFFIX}.so + ${CMAKE_CURRENT_BINARY_DIR}/../boutpp/libboutpp${PYTHON_EXT_SUFFIX}.so + COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/boutpp.py + ${CMAKE_CURRENT_BINARY_DIR}/../boutpp/__init__.py DEPENDS boutpp${PYTHON_EXT_SUFFIX} - COMMENT "Building python interface" -) + COMMENT "Building python interface") -install(TARGETS boutpp${PYTHON_EXT_SUFFIX} - DESTINATION ${CMAKE_INSTALL_PYTHON_SITEARCH}/boutpp/ -) +install(TARGETS boutpp${PYTHON_EXT_SUFFIX} + DESTINATION ${CMAKE_INSTALL_PYTHON_SITEARCH}/boutpp/) -install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/boutpp.py +install( + FILES ${CMAKE_CURRENT_SOURCE_DIR}/boutpp.py DESTINATION ${CMAKE_INSTALL_PYTHON_SITEARCH}/boutpp/ - RENAME __init__.py - ) + RENAME __init__.py) target_link_libraries(boutpp${PYTHON_EXT_SUFFIX} bout++) -target_include_directories(boutpp${PYTHON_EXT_SUFFIX} PRIVATE $ ${Numpy_INCLUDE_DIRS} ${Python3_INCLUDE_DIRS}) +target_include_directories( + boutpp${PYTHON_EXT_SUFFIX} + PRIVATE $ ${Numpy_INCLUDE_DIRS} + ${Python3_INCLUDE_DIRS})