diff --git a/.github/workflows/build-and-test-callable.yml b/.github/workflows/build-and-test-callable.yml index d2f57d70..f810476f 100644 --- a/.github/workflows/build-and-test-callable.yml +++ b/.github/workflows/build-and-test-callable.yml @@ -67,11 +67,11 @@ jobs: CUDA_HOME: '/usr/local/cuda' # These are all passed to setup.py as one concatenated string build-flags: >- - ${{ inputs.parallel && '-C"--build-option=--with-parallel"' || '' }} - ${{ inputs.cuda && '-C"--build-option=--with-cuda"' || '' }} - ${{ inputs.libceed && '-C"--build-option=--with-libceed"' || '' }} - ${{ inputs.gslib && '-C"--build-option=--with-gslib"' || '' }} - ${{ (!(inputs.mfem-branch == 'default') && format('-C"--build-option=--mfem-branch=''{0}''"', inputs.mfem-branch)) || '' }} + ${{ inputs.parallel && '-C"with-parallel=Yes"' || '' }} + ${{ inputs.cuda && '-C"with-cuda=Yes"' || '' }} + ${{ inputs.libceed && '-C"with-libceed=Yes"' || '' }} + ${{ inputs.gslib && '-C"with-gslib=Yes"' || '' }} + ${{ (!(inputs.mfem-branch == 'default') && format('-C"mfem-branch=''{0}''"', inputs.mfem-branch)) || '' }} # ------------------------------------------------------------------------------------------------- # Begin workflow @@ -88,10 +88,17 @@ jobs: # ------------------------------------------------------------------------------------------------- # Download/install dependencies # ------------------------------------------------------------------------------------------------- - # - name: Install core dependencies via requirements.txt - # run: | - # pip install setuptools - # pip install -r requirements.txt --verbose + - name: Install core build dependencies + run: | + pip install -U "setuptools>=79.0.1" + pip install -U "numpy>=2.0.0" + pip install -U "cmake>=4.0.0" + pip install -U "swig>=4.3" + + #- name: Install chrpath on ubuntu + # if: inputs.os == 'ubuntu-latest' + # run: | + # sudo apt-get install chrpath - name: Install MPI if: inputs.parallel @@ -150,9 +157,13 @@ jobs: # if: inputs.phases # run: python setup.py install --skip-ext --skip-swig --vv ${{ env.build-flags }} + #- name: Build all (steps 1-3) + # if: inputs.phases == false + # run: pip install -e . -C"--build-option=--vv" ${{ env.build-flags }} + - name: Build all (steps 1-3) if: inputs.phases == false - run: pip install -e . -C"--build-option=--vv" ${{ env.build-flags }} + run: pip install . 
${{ env.build-flags }} --verbose # ------------------------------------------------------------------------------------------------- # Run tests diff --git a/.github/workflows/build-and-test-dispatch.yml b/.github/workflows/build-and-test-dispatch.yml index 6f0ff98d..10923346 100644 --- a/.github/workflows/build-and-test-dispatch.yml +++ b/.github/workflows/build-and-test-dispatch.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false matrix: mfem-branch: [master, default] # 'default' uses a specific commit hash defined in setup.py:repos_sha - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] # 3.12 is not supported by scipy + python-version: ['3.9', '3.10', '3.11', '3.12', '3.13'] parallel: [false] name: test-linux | ${{ matrix.mfem-branch }} | ${{ matrix.python-version }} | ${{ matrix.parallel && 'parallel' || 'serial' }} uses: ./.github/workflows/build-and-test-callable.yml @@ -46,7 +46,7 @@ jobs: fail-fast: false matrix: mfem-branch: [master, default] # 'default' uses a specific commit hash defined in setup.py:repos_sha - python-version: ['3.9', '3.10', '3.11', '3.12'] # 3.12 is not supported by scipy + python-version: ['3.9', '3.10', '3.11', '3.12', '3.13'] parallel: [true] name: test-linux | ${{ matrix.mfem-branch }} | ${{ matrix.python-version }} | ${{ matrix.parallel && 'parallel' || 'serial' }} uses: ./.github/workflows/build-and-test-callable.yml diff --git a/.github/workflows/release_binary.yml b/.github/workflows/release_binary.yml index 9b2b3ad5..89dd416c 100644 --- a/.github/workflows/release_binary.yml +++ b/.github/workflows/release_binary.yml @@ -70,7 +70,7 @@ jobs: CWD=$PWD yum install -y zlib-devel - yum install -y chrpath + #yum install -y chrpath mkdir dist diff --git a/.github/workflows/testrelease_binary.yml b/.github/workflows/testrelease_binary.yml index 04f13e14..7e574237 100644 --- a/.github/workflows/testrelease_binary.yml +++ b/.github/workflows/testrelease_binary.yml @@ -69,7 +69,7 @@ jobs: CWD=$PWD yum install -y zlib-devel - yum install -y chrpath + #yum install -y chrpath mkdir dist diff --git a/README.md b/README.md index 6c9353e6..6ce5ee20 100644 --- a/README.md +++ b/README.md @@ -29,27 +29,16 @@ $ pip download mfem --no-binary mfem (expand tar.gz file and move to the downloa or clone this repository $ git clone https://github.com/mfem/PyMFEM.git -# Then, build it from local source -$ python -m pip install ./ --install-option="--with-parallel" --install-option="--mfem-branch=master" -or -$ python setup.py install --with-parallel # it download and build metis/hypre/mfem - -# Verbose output -$ python setup.py install --verbose # SWIG output and CMAKE_VERBOSE_MAKEFILE is on +# Build it from local source with MPI +$ pip install ./ -C"with-parallel=Yes" --verbose # Cleaning $ python setup.py clean --all # clean external dependencies + wrapper code -# Choosing compiler -$ python setup.py install --with-parallel --CC=icc --CXX=icpc --MPICC=mpiicc --MPICXX=mpiicpc - # Run test cd test python test_examples.py -serial -# For other configurations, see docs/install.txt or help -$ python setup.py install --help - ``` ## Usage diff --git a/_build_system/__init__.py b/_build_system/__init__.py new file mode 100644 index 00000000..b68fde2a --- /dev/null +++ b/_build_system/__init__.py @@ -0,0 +1 @@ +k diff --git a/_build_system/backend.py b/_build_system/backend.py new file mode 100644 index 00000000..cc4e5e16 --- /dev/null +++ b/_build_system/backend.py @@ -0,0 +1,31 @@ +from setuptools import build_meta as _orig +from setuptools.build_meta import * + +import 
build_globals as bglb + + +def get_requires_for_build_wheel(config_settings=None): + ret = _orig.get_requires_for_build_wheel(config_settings) + + need_mpi = False + if config_settings is not None: + for flag in ("with-parallel", ): + value = config_settings.pop(flag, "No") + if value.upper() in ("YES", "TRUE", "1"): + need_mpi = True + break + + if need_mpi: + ret = ret + ['mpi4py'] + return ret + + +def get_requires_for_build_sdist(config_settings=None): + return _orig.get_requires_for_build_sdist(config_settings) + + +def build_wheel(*args, **kwargs): + bglb.cfs = args[1] + if bglb.cfs is None: + bglb.cfs = {} + return _orig.build_wheel(*args, **kwargs) diff --git a/_build_system/build_config.py b/_build_system/build_config.py new file mode 100644 index 00000000..1469a3c2 --- /dev/null +++ b/_build_system/build_config.py @@ -0,0 +1,488 @@ +""" +Helper functions for setup.py +""" + +import os +import sys +import configparser +from urllib import request +import itertools +import site +import re +import subprocess +import multiprocessing +import ssl +import tarfile +from collections import namedtuple +from shutil import which as find_command + +__all__ = ["print_config", + "initialize_cmd_options", + "cmd_options", + "process_cmd_options", + "configure_build" +] + +from build_utils import * +from build_consts import * +import build_globals as bglb + +def print_config(): + print("----configuration----") + print(" prefix", bglb.prefix) + print(" when needed, the dependency (mfem/hypre/metis) will be installed under " + + bglb.ext_prefix) + print(" build mfem : " + ("Yes" if bglb.build_mfem else "No")) + print(" build metis : " + ("Yes" if bglb.build_metis else "No")) + print(" build hypre : " + ("Yes" if bglb.build_hypre else "No")) + print(" build libceed : " + ("Yes" if bglb.build_libceed else "No")) + print(" build gslib : " + ("Yes" if bglb.build_gslib else "No")) + print(" call SWIG wrapper generator: " + ("Yes" if bglb.run_swig else "No")) + print(" build serial wrapper: " + ("Yes" if bglb.build_serial else "No")) + print(" build parallel wrapper : " + ("Yes" if bglb.build_parallel else "No")) + + print(" hypre prefix", bglb.hypre_prefix) + print(" metis prefix", bglb.metis_prefix) + print(" c compiler : " + bglb.cc_command) + print(" c++ compiler : " + bglb.cxx_command) + print(" mpi-c compiler : " + bglb.mpicc_command) + print(" mpi-c++ compiler : " + bglb.mpicxx_command) + + print(" verbose : " + ("Yes" if bglb.verbose else "No")) + print(" SWIG : " + swig_command) + + if bglb.blas_libraries != "": + print(" BLAS libraries : " + bglb.blas_libraries) + if bglb.lapack_libraries != "": + print(" Lapack libraries : " + bglb.lapack_libraries) + + print("") + + +def initialize_cmd_options(command_obj): + command_obj.swig = False + command_obj.skip_swig = False + command_obj.ext_only = False + + command_obj.git_sshclone = False + command_obj.skip_ext = False + command_obj.with_parallel = False + command_obj.build_only = False + command_obj.no_serial = False + command_obj.mfem_prefix = '' + command_obj.mfems_prefix = '' + command_obj.mfemp_prefix = '' + command_obj.mfem_source = bglb.mfem_source + command_obj.mfem_branch = '' + command_obj.mfem_debug = False + command_obj.mfem_build_miniapps = False + command_obj.metis_prefix = '' + command_obj.hypre_prefix = '' + + command_obj.with_cuda = False + command_obj.with_cuda_hypre = False + command_obj.cuda_arch = None + command_obj.with_metis64 = False + + command_obj.with_pumi = False + command_obj.pumi_prefix = '' + + 
command_obj.with_strumpack = False + command_obj.strumpack_prefix = '' + + command_obj.with_suitesparse = False + command_obj.suitesparse_prefix = '' + + command_obj.with_lapack = False + command_obj.blas_libraries = "" + command_obj.lapack_libraries = "" + + command_obj.with_libceed = False + command_obj.libceed_prefix = '' + command_obj.libceed_only = False + + command_obj.with_gslib = False + command_obj.gslib_prefix = '' + command_obj.gslib_only = False + + command_obj.CC = '' + command_obj.CXX = '' + command_obj.MPICC = '' + command_obj.MPICXX = '' + command_obj.vv = False + + command_obj.unverifiedSSL = False + + +cmd_options = [ + ('vv', None, 'More verbose output (CMAKE_VERBOSE_MAKEFILE etc)'), + ('with-parallel', None, 'Install both the serial and parallel versions'), + ('no-serial', None, 'Skip building the serial wrapper'), + ('mfem-prefix=', None, 'Specify location of mfem. ' + + 'libmfem.so must exist under /lib. ' + + 'This mode uses clean-swig + run-swig, unless mfem-prefix-no-swig is on'), + ('mfemp-prefix=', None, 'Specify location of parallel mfem. ' + + 'libmfem.so must exist under /lib. ' + + 'Need to use it with mfem-prefix'), + ('mfems-prefix=', None, 'Specify location of serial mfem. ' + + 'libmfem.so must exist under /lib. ' + + 'Need to use it with mfem-prefix'), + ('mfem-branch=', None, 'Specify branch of mfem. ' + + 'MFEM is cloned and built using the specified branch '), + ('mfem-source=', None, 'Specify mfem source location. ' + + 'MFEM source directory. Required to run swig '), + ('mfem-debug', None, 'Build MFEM with MFEM_DEBUG enabled'), + ('mfem-build-miniapps', None, 'Build MFEM miniapps'), + ('hypre-prefix=', None, 'Specify location of hypre. ' + + 'libHYPRE.so must exist under /lib'), + ('metis-prefix=', None, 'Specify location of metis. ' + + 'libmetis.so must exist under /lib'), + ('git-sshclone', None, 'Use SSH for git clone. ' + + 'Try this if the default git clone over https fails (needs a GitHub account and SSH key setup)'), + ('swig', None, 'Run Swig and exit'), + ('skip-swig', None, + 'Skip running swig (used when the wrapper is already generated for the MFEM C++ library to be used)'), + ('ext-only', None, 'Build metis, hypre, mfem(C++) only'), + ('skip-ext', None, 'Skip building metis, hypre, mfem(C++)'), + ('build-only', None, 'Skip final install stage to prefix'), + ('CC=', None, 'c compiler'), + ('CXX=', None, 'c++ compiler'), + ('MPICC=', None, 'mpi c compiler'), + ('MPICXX=', None, 'mpi c++ compiler'), + ('unverifiedSSL', None, 'use unverified SSL context for downloading'), + ('with-cuda', None, 'enable cuda'), + ('with-cuda-hypre', None, 'enable cuda in hypre'), + ('cuda-arch=', None, 'set cuda compute capability. 
e.g., for A100, set to 80'), + ('with-metis64', None, 'use 64bit int in metis'), + ('with-pumi', None, 'enable pumi (parallel only)'), + ('pumi-prefix=', None, 'Specify location of pumi'), + ('with-suitesparse', None, + 'build MFEM with suitesparse (MFEM_USE_SUITESPARSE=YES) (parallel only)'), + ('suitesparse-prefix=', None, + 'Specify location of suitesparse (=SuiteSparse_DIR)'), + ('with-libceed', None, 'enable libceed'), + ('libceed-prefix=', None, 'Specify location of libceed'), + ('libceed-only', None, 'Build libceed only'), + ('gslib-prefix=', None, 'Specify location of gslib'), + ('with-gslib', None, 'enable gslib'), + ('gslib-only', None, 'Build gslib only'), + ('with-strumpack', None, 'enable strumpack (parallel only)'), + ('strumpack-prefix=', None, 'Specify location of strumpack'), + ('with-lapack', None, 'build MFEM with lapack'), + ('blas-libraries=', None, 'Specify location of Blas library (used to build MFEM)'), + ('lapack-libraries=', None, + 'Specify location of Lapack library (used to build MFEM)'), + ] + +def process_cmd_options(command_obj, cfs): + ''' + called when the install workflow is used + ''' + cc = cfs.pop("CC", "") + if cc != "": + command_obj.cc_command = cc + + cc = cfs.pop("CXX", "") + if cc != "": + command_obj.cxx_command = cc + + cc = cfs.pop("MPICC", "") + if cc != "": + command_obj.mpicc_command = cc + + cc = cfs.pop("MPICXX", "") + if cc != "": + command_obj.mpicxx_command = cc + + for item in cmd_options: + param, _none, hit = item + attr = "_".join(param.split("-")) + + if param.endswith("="): + param = param[:-1] + attr = attr[:-1] + value = cfs.pop(param, "") + if value != "": + if not hasattr(command_obj, attr): + assert False, str(command_obj) + " does not have " + attr + setattr(command_obj, attr, value) + else: + value = cfs.pop(param, "No") + if not hasattr(command_obj, attr): + assert False, str(command_obj) + " does not have " + attr + + if value.upper() in ("YES", "TRUE", "1"): + setattr(command_obj, attr, True) + else: + setattr(command_obj, attr, False) + +def process_setup_options(command_obj, args): + for item in args: + if item.startswith('--'): + item = item[2:] + if item.startswith('-'): + item = item[1:] + + if len(item.split('='))==2: + param = item.split('=')[0] + value = item.split('=')[1] + else: + param = item.strip() + value = True + attr = "_".join(param.split("-")) + + setattr(command_obj, attr, value) + +def configure_install(self): + ''' + called when the install workflow is used + + ''' + print("!!!!!!!!") + print("!!!!!!!! setting up build global configuration parameters") + print("!!!!!!!!") + + if sys.argv[0] == 'setup.py' and sys.argv[1] == 'install': + print("!!!!!!!! command-line input (setup.py install): ", sys.argv) + process_setup_options(self, sys.argv[2:]) + else: + print("!!!!!!!! 
command-line input (pip): ", bglb.cfs) + process_cmd_options(self, bglb.cfs) + + bglb.verbose = bool(self.vv) if not bglb.verbose else bglb.verbose + if bglb.dry_run: + bglb.verbose = True + + bglb.git_sshclone = bool(self.git_sshclone) + + bglb.mfem_source = abspath(self.mfem_source) + + bglb.skip_ext = bool(self.skip_ext) + bglb.skip_install = bool(self.build_only) + bglb.skip_swig = bool(self.skip_swig) + + bglb.swig_only = bool(self.swig) + bglb.ext_only = bool(self.ext_only) + + bglb.metis_64 = bool(self.with_metis64) + bglb.enable_pumi = bool(self.with_pumi) + bglb.enable_strumpack = bool(self.with_strumpack) + bglb.enable_cuda = bool(self.with_cuda) + bglb.enable_cuda_hypre = bool(self.with_cuda_hypre) + if self.cuda_arch is not None: + bglb.cuda_arch = self.cuda_arch + bglb.enable_libceed = bool(self.with_libceed) + bglb.libceed_only = bool(self.libceed_only) + bglb.enable_gslib = bool(self.with_gslib) + bglb.gslib_only = bool(self.gslib_only) + bglb.enable_suitesparse = bool(self.with_suitesparse) + bglb.enable_lapack = bool(self.with_lapack) + + bglb.build_parallel = bool(self.with_parallel) # controlls PyMFEM parallel + bglb.build_serial = not bool(self.no_serial) + + bglb.clean_swig = True + bglb.run_swig = True + + bglb.mfem_debug = bool(self.mfem_debug) + bglb.mfem_build_miniapps = bool(self.mfem_build_miniapps) + + if bglb.build_serial: + bglb.build_serial = (not bglb.swig_only and not bglb.ext_only) + + if bglb.build_parallel: + try: + import mpi4py + except ImportError: + assert False, "Can not import mpi4py" + + if self.mfem_prefix != '': + bglb.mfem_prefix = abspath(self.mfem_prefix) + bglb.mfems_prefix = abspath(self.mfem_prefix) + bglb.mfemp_prefix = abspath(self.mfem_prefix) + if self.mfems_prefix != '': + bglb.mfems_prefix = abspath(self.mfems_prefix) + if self.mfemp_prefix != '': + bglb.mfemp_prefix = abspath(self.mfemp_prefix) + + check = find_libpath_from_prefix('mfem', bglb.mfems_prefix) + assert check != '', "libmfem.so is not found in the specified /lib" + check = find_libpath_from_prefix('mfem', bglb.mfemp_prefix) + assert check != '', "libmfem.so is not found in the specified /lib" + + bglb.build_mfem = False + hypre_prefix = bglb.mfem_prefix + metis_prefix = bglb.mfem_prefix + + if bglb.swig_only: + bglb.clean_swig = False + + else: + bglb.build_mfem = True + bglb.build_mfemp = bglb.build_parallel + bglb.build_hypre = bglb.build_parallel + bglb.build_metis = bglb.build_parallel or bglb.enable_suitesparse + + print("!!!!! 
ext_prefix", bglb.ext_prefix) + if bglb.ext_prefix == '': + bglb.ext_prefix = external_install_prefix(bglb.prefix) + bglb.hypre_prefix = os.path.join(bglb.ext_prefix) + bglb.metis_prefix = os.path.join(bglb.ext_prefix) + + bglb.mfem_prefix = bglb.ext_prefix + bglb.mfems_prefix = os.path.join(bglb.ext_prefix, 'ser') + bglb.mfemp_prefix = os.path.join(bglb.ext_prefix, 'par') + # enable_gslib = True + + if self.mfem_branch != '': + bglb.mfem_branch = self.mfem_branch + + if self.hypre_prefix != '': + check = find_libpath_from_prefix('HYPRE', self.hypre_prefix) + assert check != '', "libHYPRE.so is not found in the specified /lib or lib64" + hypre_prefix = os.path.expanduser(self.hypre_prefix) + build_hypre = False + + if self.metis_prefix != '': + check = find_libpath_from_prefix('metis', self.metis_prefix) + assert check != '', "libmetis.so is not found in the specified /lib or lib64" + bglb.metis_prefix = os.path.expanduser(self.metis_prefix) + bglb.build_metis = False + + if bglb.enable_libceed or bglb.libceed_only: + if self.libceed_prefix != '': + bglb.libceed_prefix = os.path.expanduser(self.libceed_prefix) + bglb.build_libceed = False + else: + bglb.libceed_prefix = bglb.mfem_prefix + bglb.build_libceed = True + + if bglb.enable_gslib or bglb.gslib_only: + if self.gslib_prefix != '': + bglb.build_gslib = False + bglb.gslibs_prefix = os.path.expanduser(self.gslib_prefix) + bglb.gslibp_prefix = os.path.expanduser(self.gslib_prefix) + else: + bglb.gslibs_prefix = bglb.mfems_prefix + bglb.gslibp_prefix = bglb.mfemp_prefix + bglb.build_gslib = True + + if bglb.enable_suitesparse and self.suitesparse_prefix != '': + bglb.suitesparse_prefix = self.suitesparse_prefix + + if self.pumi_prefix != '': + bglb.pumi_prefix = abspath(self.pumi_prefix) + else: + bglb.pumi_prefix = bglb.mfem_prefix + + if self.strumpack_prefix != '': + bglb.strumpack_prefix = abspath(self.strumpack_prefix) + else: + bglb.strumpack_prefix = bglb.mfem_prefix + + if bglb.enable_cuda: + nvcc = find_command('nvcc') + bglb.cuda_prefix = os.path.dirname(os.path.dirname(nvcc)) + + + if self.CC != '': + bglb.cc_command = self.CC + if self.CXX != '': + bglb.cxx_command = self.CXX + if self.MPICC != '': + bglb.mpicc_command = self.MPICC + if self.MPICXX != '': + bglb.mpicxx_command = self.MPICXX + + if self.blas_libraries != "": + bglb.blas_libraries = self.blas_libraries + if self.lapack_libraries != "": + bglb.lapack_libraries = self.lapack_libraries + + if bglb.skip_ext: + bglb.build_metis = False + bglb.build_hypre = False + bglb.build_mfem = False + bglb.build_mfemp = False + bglb.build_libceed = False + bglb.build_gslib = False + + if bglb.skip_swig: + bglb.clean_swig = False + bglb.run_swig = False + + if bglb.swig_only: + bglb.build_serial = False + bglb.clean_swig = False + + if bglb.ext_only: + bglb.clean_swig = False + bglb.run_swig = False + bglb.build_serial = False + bglb.build_parallel = False + bglb.skip_install = True + + if bglb.libceed_only: + bglb.clean_swig = False + bglb.run_swig = False + bglb.build_mfem = False + bglb.build_mfemp = False + bglb.build_metis = False + bglb.build_hypre = False + bglb.build_gslib = False + bglb.build_serial = False + bglb.build_parallel = False + bglb.build_libceed = True + bglb.skip_install = True + + if bglb.gslib_only: + bglb.clean_swig = False + bglb.run_swig = False + bglb.build_mfem = False + bglb.build_mfemp = False + bglb.build_metis = False + bglb.build_hypre = False + bglb.build_serial = False + bglb.build_libceed = False + bglb.build_gslib = True + bglb.skip_install 
= True + + bglb.is_configured = True + +def configure_bdist(self): + ''' + called when bdist workflow is used + ''' + bglb.dry_run = bool(self.dry_run) or bglb.dry_run + + bglb.prefix = abspath(self.bdist_dir) + + bglb.build_parallel = False + + if self.skip_build == 1: + bglb.build_mfem = False + bglb.build_serial = False + bglb.run_swig = False + else: + bglb.build_mfem = True + bglb.build_serial = True + # build_gslib = True + bglb.run_swig = True + + bglb.is_configured = True + bglb.do_bdist_wheel = True + + # mfem_source = './external/mfem' + bglb.ext_prefix = os.path.join(bglb.prefix, 'mfem', 'external') + print("!!!!! ext_prefix(bdist)", bglb.ext_prefix) + bglb.hypre_prefix = bglb.ext_prefix + bglb.metis_prefix = bglb.ext_prefix + + bglb.mfem_prefix = bglb.ext_prefix + bglb.mfems_prefix = os.path.join(bglb.ext_prefix, 'ser') + bglb.mfemp_prefix = os.path.join(bglb.ext_prefix, 'par') + + bglb.mfem_build_miniapps = False + +configure_build = configure_install +#configure_build = configure_bdist diff --git a/_build_system/build_consts.py b/_build_system/build_consts.py new file mode 100644 index 00000000..78e2d837 --- /dev/null +++ b/_build_system/build_consts.py @@ -0,0 +1,101 @@ +# ---------------------------------------------------------------------------------------- +# Global build constant parameters +# ---------------------------------------------------------------------------------------- +from sys import platform +import os +from shutil import which as find_command +from collections import namedtuple + +__all__ = ["swig_command", "rootdir", "extdir", + "REPOS", "dylibext", "osx_sysroot"] + +# ---------------------------------------------------------------------------------------- +# package directory +# ---------------------------------------------------------------------------------------- +rootdir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "..") +extdir = os.path.join(rootdir, 'external') +if not os.path.exists(extdir): + os.mkdir(os.path.join(rootdir, 'external')) + +# ---------------------------------------------------------------------------------------- +# Platform dependency +# ---------------------------------------------------------------------------------------- + +osx_sysroot = '' +dylibext = '.so' + +if platform == "linux" or platform == "linux2": + dylibext = '.so' + +elif platform == "darwin": + # OS X + dylibext = '.dylib' + import sysconfig + for i, x in enumerate(sysconfig.get_config_vars()['CFLAGS'].split()): + if x == '-isysroot': + osx_sysroot = sysconfig.get_config_vars()['CFLAGS'].split()[i+1] + break + +elif platform == "win32": + # Windows... + assert False, "Windows is not supported yet. 
Contribution is welcome" + +# ---------------------------------------------------------------------------------------- +# SWIG +# ---------------------------------------------------------------------------------------- + +swig_command = (find_command('swig') if os.getenv("SWIG") is None + else os.getenv("SWIG")) +if swig_command is None: + assert False, "SWIG is not installed (hint: pip install swig)" + +# ---------------------------------------------------------------------------------------- +# Constants +# ---------------------------------------------------------------------------------------- + +release = namedtuple('Release', ['version', 'hash', 'tarball']) +REPOS = dict( + mfem=dict( + url="https://github.com/mfem/mfem.git", + # version, hash, tarball + releases=[ + release("4.7", "dc9128ef596e84daf1138aa3046b826bba9d259f", None), + release("4.8", "a01719101027383954b69af1777dc828bf795d62", None), + ] + ), + metis=dict( + url="https://github.com/KarypisLab/METIS", + releases=[ + release("5.1.0", "94c03a6e2d1860128c2d0675cbbb86ad4f261256", + "https://github.com/mfem/tpls/raw/gh-pages/metis-5.1.0.tar.gz"), + ] + ), + gklib=dict( + url="https://github.com/KarypisLab/GKlib", + releases=[ + release("5.1.1", "a7f8172703cf6e999dd0710eb279bba513da4fec", + "https://github.com/KarypisLab/GKlib/archive/refs/tags/METIS-v5.1.1-DistDGL-0.5.tar.gz"), + ] + ), + libceed=dict( + url="https://github.com/CEED/libCEED.git", + releases=[ + release( + "0.12.0", None, "https://github.com/CEED/libCEED/archive/refs/tags/v0.12.0.tar.gz"), + ] + ), + hypre=dict( + url=None, + releases=[ + release( + "2.28.0", None, "https://github.com/hypre-space/hypre/archive/v2.28.0.tar.gz"), + ] + ), + gslib=dict( + url=None, + releases=[ + release( + "1.0.8", None, "https://github.com/Nek5000/gslib/archive/refs/tags/v1.0.8.tar.gz"), + ] + ), +) diff --git a/_build_system/build_globals.py b/_build_system/build_globals.py new file mode 100644 index 00000000..0ec4c9b2 --- /dev/null +++ b/_build_system/build_globals.py @@ -0,0 +1,84 @@ +# ---------------------------------------------------------------------------------------- +# Global build parameters +# ---------------------------------------------------------------------------------------- +import os + +is_configured = False +prefix = '' + +verbose = True +git_sshclone = False +swig_only = False +skip_install = False +run_swig = False +clean_swig = False +build_mfem = False +mfem_branch = None +build_mfemp = False +build_metis = False +build_hypre = False +build_libceed = False +build_gslib = False +build_parallel = False +build_serial = False + +ext_prefix = '' +mfems_prefix = '' +mfemp_prefix = '' +mfem_source = os.path.join(os.path.dirname(__file__), "..", "external", "mfem") +metis_prefix = '' +hypre_prefix = '' + +enable_cuda = False +enable_cuda_hypre = False +cuda_prefix = '' +cuda_arch = '' +enable_pumi = False +pumi_prefix = '' +enable_strumpack = False +strumpack_prefix = '' +enable_libceed = False +libceed_prefix = '' +libceed_only = False +enable_gslib = False +gslibs_prefix = '' +gslibp_prefix = '' +gslib_only = False +mfem_debug = False +mfem_build_miniapps = True + +enable_suitesparse = False +suitesparse_prefix = "/usr/" + +enable_lapack = False +blas_libraries = "" +lapack_libraries = "" + +dry_run = False +do_bdist_wheel = False +bdist_wheel_dir = '' + +use_unverifed_SSL = False if os.getenv( + "unverifedSSL") is None else os.getenv("unverifiedSSL") + +use_metis_gklib = False +metis_64 = False + +# 
---------------------------------------------------------------------------------------- +# command line configuration parameters (pip -C) +# ---------------------------------------------------------------------------------------- + +cfs = {} + +# ---------------------------------------------------------------------------------------- +# environment variables. +# ---------------------------------------------------------------------------------------- +cc_command = 'cc' if os.getenv("CC") is None else os.getenv("CC") +cxx_command = 'c++' if os.getenv("CXX") is None else os.getenv("CXX") +mpicc_command = 'mpicc' if os.getenv("MPICC") is None else os.getenv("MPICC") +mpicxx_command = 'mpic++' if os.getenv( + "MPICXX") is None else os.getenv("MPICXX") +cxx11_flag = '-std=c++11' if os.getenv( + "CXX11FLAG") is None else os.getenv("CXX11FLAG") + + diff --git a/_build_system/build_gslib.py b/_build_system/build_gslib.py new file mode 100644 index 00000000..d27e025a --- /dev/null +++ b/_build_system/build_gslib.py @@ -0,0 +1,35 @@ +# ---------------------------------------------------------------------------------------- +# Routines for gslib +# ---------------------------------------------------------------------------------------- +import sys +import os +import re +import subprocess + +import build_globals as bglb +from build_consts import * +from build_utils import * + +__all__ = ['make_gslib'] + +def make_gslib(serial=False): + if bglb.verbose: + print("Building gslib") + + path = os.path.join(extdir, 'gslib') + if not os.path.exists(path): + assert False, "gslib is not downloaded" + + pwd = chdir(path) + make_call(['make', 'clean']) + if serial: + command = ['make', 'CC=' + bglb.cc_command, 'MPI=0', 'CFLAGS=-fPIC'] + make_call(command) + command = ['make', 'MPI=0', 'DESTDIR=' + bglb.gslibs_prefix] + make_call(command) + else: + command = ['make', 'CC=' + bglb.mpicc_command, 'CFLAGS=-O2 -fPIC'] + make_call(command) + command = ['make', 'DESTDIR=' + bglb.gslibp_prefix] + make_call(command) + os.chdir(pwd) diff --git a/_build_system/build_hypre.py b/_build_system/build_hypre.py new file mode 100644 index 00000000..ca3a5e02 --- /dev/null +++ b/_build_system/build_hypre.py @@ -0,0 +1,58 @@ +# ---------------------------------------------------------------------------------------- +# Routines for hypre +# ---------------------------------------------------------------------------------------- + +import sys +import os +import re +import subprocess + +__all__ = ["cmake_make_hypre"] + +from build_utils import * +from build_consts import * + +import build_globals as bglb + + +def cmake_make_hypre(): + ''' + build hypre + ''' + if bglb.verbose: + print("Building hypre") + + cmbuild = 'cmbuild' + path = os.path.join(extdir, 'hypre', 'src', cmbuild) + if os.path.exists(path): + print("working directory already exists!") + else: + os.makedirs(path) + + pwd = chdir(path) + + cmake_opts = {'DBUILD_SHARED_LIBS': '1', + 'DHYPRE_INSTALL_PREFIX': bglb.hypre_prefix, + 'DHYPRE_ENABLE_SHARED': '1', + 'DCMAKE_C_FLAGS': '-fPIC', + 'DCMAKE_INSTALL_PREFIX': bglb.hypre_prefix, + 'DCMAKE_INSTALL_NAME_DIR': "@rpath", } + if bglb.verbose: + cmake_opts['DCMAKE_VERBOSE_MAKEFILE'] = '1' + + if bglb.enable_cuda and bglb.enable_cuda_hypre: + # in this case, setting CMAKE_C_COMPILER + # causes "mpi.h" not found error. 
For now, letting CMAKE + # to find MPI + cmake_opts['DHYPRE_WITH_CUDA'] = '1' + if bglb.cuda_arch != '': + cmake_opts['DCMAKE_CUDA_ARCHITECTURES'] = bglb.cuda_arch + else: + cmake_opts['DCMAKE_C_COMPILER'] = bglb.mpicc_command + + cmake('..', **cmake_opts) + + make('hypre') + make_install('hypre') + + os.chdir(pwd) diff --git a/_build_system/build_libceed.py b/_build_system/build_libceed.py new file mode 100644 index 00000000..6574e182 --- /dev/null +++ b/_build_system/build_libceed.py @@ -0,0 +1,35 @@ +# ---------------------------------------------------------------------------------------- +# Routines for libceed +# ---------------------------------------------------------------------------------------- +import sys +import os +import re +import subprocess + +import build_globals as bglb +from build_consts import * +from build_utils import * + +__all__ = ["make_libceed"] + +def make_libceed(serial=False): + if bglb.verbose: + print("Building libceed") + + path = os.path.join(extdir, 'libceed') + if not os.path.exists(path): + assert False, "libceed is not downloaded" + + pwd = chdir(path) + try: + make_call(['make', 'clean']) + except: + pass + + if bglb.enable_cuda: + command = ['make', 'configure', 'CUDA_DIR='+bglb.cuda_prefix] + make_call(command) + + make('libceed') + make_install('libceed', prefix=bglb.libceed_prefix) + os.chdir(pwd) diff --git a/_build_system/build_metis.py b/_build_system/build_metis.py new file mode 100644 index 00000000..d476587a --- /dev/null +++ b/_build_system/build_metis.py @@ -0,0 +1,157 @@ +# ---------------------------------------------------------------------------------------- +# Routines for metis +# ---------------------------------------------------------------------------------------- +import sys +import os +import re +import subprocess +from sys import platform + +__all__ = ["make_metis_gklib", "make_metis"] + +from build_utils import * +from build_consts import * + +import build_globals as bglb + + +def make_metis_gklib(use_int64=False, use_real64=False): + ''' + build GKlib/metis + ''' + + ''' + build/install GKlib + ''' + if bglb.verbose: + print("Building gklib") + + path = os.path.join(extdir, 'gklib') + if not bglb.dry_run and not os.path.exists(path): + assert False, "gklib is not downloaded" + + path = os.path.join(path, 'cmbuild') + if os.path.exists(path): + print("working directory already exists!") + else: + os.makedirs(path) + pwd = chdir(path) + + cmake_opts = {'DBUILD_SHARED_LIBS': '1', + 'DCMAKE_INSTALL_PREFIX': metis_prefix} + if verbose: + cmake_opts['DCMAKE_VERBOSE_MAKEFILE'] = '1' + + cmake('..', **cmake_opts) + make('gklib') + make_install('gklib') + os.chdir(pwd) + + ''' + build/install metis + ''' + path = os.path.join(extdir, 'metis') + if not bglb.dry_run and not os.path.exists(path): + assert False, "metis is not downloaded" + elif not os.path.exists(path): + os.makedirs(path) + os.makedirs(os.path.join(path, 'build')) + + pwd = chdir(path) + + gklibpath = os.path.dirname(find_libpath_from_prefix( + 'GKlib', bglb.metis_prefix)) + + options = ['gklib_path='+bglb.metis_prefix] + if use_int64: + options.append('i64=1') + + if use_real64: + options.append('r64=1') + + command = ['make', 'config', 'shared=1'] + options + command = command + ['prefix=' + bglb.metis_prefix, 'cc=' + bglb.cc_command] + make_call(command) + + chdir('build') + cmake_opts = {'DGKLIB_PATH': bglb.metis_prefix, + 'DSHARED': '1', + 'DCMAKE_C_COMPILER': bglb.cc_command, + 'DCMAKE_C_STANDARD_LIBRARIES': '-lGKlib', + 'DCMAKE_INSTALL_RPATH': "@loader_path", + 
'DCMAKE_BUILD_WITH_INSTALL_RPATH': '1', + 'DCMAKE_INSTALL_PREFIX': bglb.metis_prefix} + if bglb.verbose: + cmake_opts['DCMAKE_VERBOSE_MAKEFILE'] = '1' + + cmake('..', **cmake_opts) + chdir(path) + make('metis') + make_install('metis') + + if platform == "darwin": + command = ['install_name_tool', + '-id', + os.path.join("@rpath", 'libGKlib.dylib'), +# os.path.join(bglb.metis_prefix, 'lib', 'libGKlib.dylib'), + os.path.join(bglb.metis_prefix, 'lib', 'libGKlib.dylib'), ] + make_call(command) + command = ['install_name_tool', + '-id', + os.path.join("@rpath", 'libmetis.dylib'), + os.path.join(bglb.metis_prefix, 'lib', 'libmetis.dylib'), ] + make_call(command) + os.chdir(pwd) + + +def make_metis(use_int64=False, use_real64=False): + ''' + build metis + ''' + if bglb.verbose: + print("Building metis") + + path = os.path.join(extdir, 'metis') + if not os.path.exists(path): + assert False, "metis is not downloaded" + + pwd = chdir(path) + + if use_int64: + pattern_int = "#define IDXTYPEWIDTH 32" + replace_int = "#define IDXTYPEWIDTH 64" + else: + pattern_int = "#define IDXTYPEWIDTH 64" + replace_int = "#define IDXTYPEWIDTH 32" + with open("include/metis.h", "r") as metis_header_fid: + metis_header_lines = metis_header_fid.readlines() + with open("include/metis.h", "w") as metis_header_fid: + for line in metis_header_lines: + metis_header_fid.write(re.sub(pattern_int, replace_int, line)) + + if use_real64: + pattern_real = "#define REALTYPEWIDTH 32" + replace_real = "#define REALTYPEWIDTH 64" + else: + pattern_real = "#define REALTYPEWIDTH 64" + replace_real = "#define REALTYPEWIDTH 32" + with open("include/metis.h", "r") as metis_header_fid: + metis_header_lines = metis_header_fid.readlines() + with open("include/metis.h", "w") as metis_header_fid: + for line in metis_header_lines: + metis_header_fid.write(re.sub(pattern_real, replace_real, line)) + + command = ['make', 'config', 'shared=1', + 'prefix=' + bglb.metis_prefix, + 'cc=' + bglb.cc_command] + make_call(command, env={'CMAKE_POLICY_VERSION_MINIMUM': '3.5'}) + make('metis') + make_install('metis') + + if platform == "darwin": + command = ['install_name_tool', + '-id', + os.path.join('@rpath', 'libmetis.dylib'), + os.path.join(bglb.metis_prefix, 'lib', 'libmetis.dylib'), ] + make_call(command) + os.chdir(pwd) diff --git a/_build_system/build_mfem.py b/_build_system/build_mfem.py new file mode 100644 index 00000000..a7c76b95 --- /dev/null +++ b/_build_system/build_mfem.py @@ -0,0 +1,186 @@ +# ---------------------------------------------------------------------------------------- +# Routines for PyMFEM Wrapper Generation/Compile +# ---------------------------------------------------------------------------------------- +import sys +import os +import re +import subprocess + +__all__ = ["cmake_make_mfem"] + +from build_consts import * +from build_utils import * + +import build_globals as bglb + + +def cmake_make_mfem(serial=True): + ''' + build MFEM + ''' + cmbuild = 'cmbuild_ser' if serial else 'cmbuild_par' + path = os.path.join(extdir, 'mfem', cmbuild) + if os.path.exists(path): + print("working directory already exists!") + else: + os.makedirs(path) + + ldflags = os.getenv('LDFLAGS') if os.getenv('LDFLAGS') is not None else '' + metisflags = '' + hypreflags = '' + + rpaths = [] + if sys.platform in ("linux", "linux2"): + rpaths_origin = "$ORIGIN" + elif sys.platform == "darwin": + rpaths_origin = "@loader_path" + + def add_rpath(p, dest): + p = os.path.join(rpaths_origin, os.path.relpath(p, dest)) + + if not p in rpaths: + rpaths.append(p) 
+ + cmake_opts = {'DBUILD_SHARED_LIBS': '1', + 'DMFEM_ENABLE_EXAMPLES': '1', + 'DMFEM_ENABLE_MINIAPPS': '0', + 'DCMAKE_SHARED_LINKER_FLAGS': ldflags, + 'DMFEM_USE_ZLIB': '1', + 'DCMAKE_CXX_FLAGS': bglb.cxx11_flag, + 'DCMAKE_BUILD_WITH_INSTALL_RPATH': '1'} + + if sys.platform == 'darwin': + cmake_opts["DCMAKE_MACOSX_RPATH"] = 'YES' + cmake_opts["DCMAKE_INSTALL_NAME_DIR"] = '@rpath' + + if bglb.mfem_debug: + cmake_opts['DMFEM_DEBUG'] = 'YES' + + if bglb.mfem_build_miniapps: + cmake_opts['DMFEM_ENABLE_MINIAPPS'] = '1' + + if bglb.verbose: + cmake_opts['DCMAKE_VERBOSE_MAKEFILE'] = '1' + + if serial: + ex_loc = os.path.join(bglb.mfems_prefix, "examples") + cmake_opts['DCMAKE_CXX_COMPILER'] = bglb.cxx_command + cmake_opts['DMFEM_USE_EXCEPTIONS'] = '1' + cmake_opts['DCMAKE_INSTALL_PREFIX'] = bglb.mfems_prefix + + add_rpath(os.path.join(bglb.mfems_prefix, 'lib'), ex_loc) + if bglb.enable_suitesparse: + enable_metis = True + else: + enable_metis = False + #assert False, rpaths + else: + ex_loc = os.path.join(bglb.mfemp_prefix, "examples") + cmake_opts['DCMAKE_CXX_COMPILER'] = bglb.mpicxx_command + cmake_opts['DMFEM_USE_EXCEPTIONS'] = '0' + cmake_opts['DCMAKE_INSTALL_PREFIX'] = bglb.mfemp_prefix + cmake_opts['DMFEM_USE_MPI'] = '1' + cmake_opts['DHYPRE_DIR'] = bglb.hypre_prefix + cmake_opts['DHYPRE_INCLUDE_DIRS'] = os.path.join( + bglb.hypre_prefix, "include") + + add_rpath(os.path.join(bglb.mfemp_prefix, 'lib'), ex_loc) + + hyprelibpath = os.path.dirname( + find_libpath_from_prefix( + 'HYPRE', bglb.hypre_prefix)) + + add_rpath(hyprelibpath, ex_loc) + + hypreflags = "-L" + hyprelibpath + " -lHYPRE " + + if bglb.enable_strumpack: + cmake_opts['DMFEM_USE_STRUMPACK'] = '1' + cmake_opts['DSTRUMPACK_DIR'] = bglb.strumpack_prefix + libpath = os.path.dirname( + find_libpath_from_prefix("STRUMPACK", bglb.strumpack_prefix)) + add_rpath(libpath, ex_loc) + if bglb.enable_pumi: + cmake_opts['DMFEM_USE_PUMI'] = '1' + cmake_opts['DPUMI_DIR'] = bglb.pumi_prefix + libpath = os.path.dirname( + find_libpath_from_prefix("pumi", bglb.strumpack_prefix)) + add_rpath(libpath, ex_loc) + enable_metis = True + + if enable_metis: + cmake_opts['DMFEM_USE_METIS_5'] = '1' + cmake_opts['DMETIS_DIR'] = bglb.metis_prefix + cmake_opts['DMETIS_INCLUDE_DIRS'] = os.path.join( + bglb.metis_prefix, "include") + metislibpath = os.path.dirname( + find_libpath_from_prefix( + 'metis', bglb.metis_prefix)) + add_rpath(metislibpath, ex_loc) + + if bglb.use_metis_gklib: + metisflags = "-L" + metislibpath + " -lmetis -lGKlib " + else: + metisflags = "-L" + metislibpath + " -lmetis " + + if ldflags != '': + cmake_opts['DCMAKE_SHARED_LINKER_FLAGS'] = ldflags + cmake_opts['DCMAKE_EXE_LINKER_FLAGS'] = ldflags + + if metisflags != '': + cmake_opts['DMETIS_LIBRARIES'] = metisflags + if hypreflags != '': + cmake_opts['DHYPRE_LIBRARIES'] = hypreflags + + if bglb.enable_cuda: + cmake_opts['DMFEM_USE_CUDA'] = '1' + if bglb.cuda_arch != '': + cmake_opts['DCMAKE_CUDA_ARCHITECTURES'] = bglb.cuda_arch + + if bglb.enable_libceed: + cmake_opts['DMFEM_USE_CEED'] = '1' + cmake_opts['DCEED_DIR'] = bglb.libceed_prefix + libpath = os.path.dirname( + find_libpath_from_prefix("ceed", bglb.libceed_prefix)) + add_rpath(libpath, ex_loc) + + if bglb.enable_gslib: + if serial: + cmake_opts['DMFEM_USE_GSLIB'] = '1' + cmake_opts['DGSLIB_DIR'] = bglb.gslibs_prefix + else: + cmake_opts['DMFEM_USE_GSLIB'] = '1' + cmake_opts['DGSLIB_DIR'] = bglb.gslibp_prefix + + if bglb.enable_suitesparse: + cmake_opts['DMFEM_USE_SUITESPARSE'] = '1' + if bglb.suitesparse_prefix != '': + 
cmake_opts['DSuiteSparse_DIR'] = bglb.suitesparse_prefix + + if bglb.enable_lapack: + cmake_opts['DMFEM_USE_LAPACK'] = '1' + if bglb.blas_libraries != "": + cmake_opts['DBLAS_LIBRARIES'] = bglb.blas_libraries + if bglb.lapack_libraries != "": + cmake_opts['DLAPACK_LIBRARIES'] = bglb.lapack_libraries + + cmake_opts['DCMAKE_INSTALL_RPATH'] = ";".join(rpaths) + + pwd = chdir(path) + cmake('..', **cmake_opts) + + txt = 'serial' if serial else 'parallel' + + make('mfem_' + txt) + make_install('mfem_' + txt) + + from shutil import copytree, rmtree + + print("copying mesh data for testing", "../data", + cmake_opts['DCMAKE_INSTALL_PREFIX']) + path = os.path.join(cmake_opts['DCMAKE_INSTALL_PREFIX'], "data") + if os.path.exists(path): + rmtree(path) + copytree("../data", path) + + os.chdir(pwd) diff --git a/_build_system/build_pymfem.py b/_build_system/build_pymfem.py new file mode 100644 index 00000000..c0f7e1c8 --- /dev/null +++ b/_build_system/build_pymfem.py @@ -0,0 +1,316 @@ +# ---------------------------------------------------------------------------------------- +# Routines for PyMFEM Wrapper Generation/Compile +# ---------------------------------------------------------------------------------------- +import sys +import os +import re +import subprocess + +__all__ = ["write_setup_local", "generate_wrapper", + "clean_wrapper", "make_mfem_wrapper"] + +from build_utils import * +from build_consts import * +import build_globals as bglb + + +def write_setup_local(): + ''' + create setup_local.py. parameters written here will be read + by setup.py in mfem._ser and mfem._par + ''' + mfemser = bglb.mfems_prefix + mfempar = bglb.mfemp_prefix + + hyprelibpath = os.path.dirname( + find_libpath_from_prefix('HYPRE', bglb.hypre_prefix)) + metislibpath = os.path.dirname( + find_libpath_from_prefix('metis', bglb.metis_prefix)) + + mfems_tpl = read_mfem_tplflags(bglb.mfems_prefix) + mfemp_tpl = read_mfem_tplflags( + bglb.mfemp_prefix) if bglb.build_parallel else '' + + print(mfems_tpl, mfemp_tpl) + + params = {'cxx_ser': bglb.cxx_command, + 'cc_ser': bglb.cc_command, + 'cxx_par': bglb.mpicxx_command, + 'cc_par': bglb.mpicc_command, + 'whole_archive': '--whole-archive', + 'no_whole_archive': '--no-whole-archive', + 'nocompactunwind': '', + 'swigflag': '-Wall -c++ -python -fastproxy -olddefs -keyword', + 'hypreinc': os.path.join(bglb.hypre_prefix, 'include'), + 'hyprelib': hyprelibpath, + 'metisinc': os.path.join(bglb.metis_prefix, 'include'), + 'metis5lib': metislibpath, + 'numpyinc': get_numpy_inc(), + 'mpi4pyinc': '', + 'mfembuilddir': os.path.join(mfempar, 'include'), + 'mfemincdir': os.path.join(mfempar, 'include', 'mfem'), + 'mfemlnkdir': os.path.join(mfempar, 'lib'), + 'mfemserbuilddir': os.path.join(mfemser, 'include'), + 'mfemserincdir': os.path.join(mfemser, 'include', 'mfem'), + 'mfemserlnkdir': os.path.join(mfemser, 'lib'), + 'mfemsrcdir': os.path.join(bglb.mfem_source), + 'mfemstpl': mfems_tpl, + 'mfemptpl': mfemp_tpl, + 'add_pumi': '', + 'add_strumpack': '', + 'add_cuda': '', + 'add_libceed': '', + 'add_suitesparse': '', + 'add_gslib': '', + 'add_gslibp': '', + 'add_gslibs': '', + 'libceedinc': os.path.join(bglb.libceed_prefix, 'include'), + 'gslibsinc': os.path.join(bglb.gslibs_prefix, 'include'), + 'gslibpinc': os.path.join(bglb.gslibp_prefix, 'include'), + 'cxx11flag': bglb.cxx11_flag, + 'build_mfem': '1' if bglb.build_mfem else '0', + 'bdist_wheel_dir': bglb.bdist_wheel_dir, + } + + if bglb.build_parallel: + params['mpi4pyinc'] = get_mpi4py_inc() + + def add_extra(xxx, inc_sub=None): + 
params['add_' + xxx] = '1' + ex_prefix = getattr(bglb, xxx + '_prefix') + if inc_sub is None: + params[xxx + + 'inc'] = os.path.join(ex_prefix, 'include') + else: + params[xxx + + 'inc'] = os.path.join(ex_prefix, 'include', inc_sub) + + params[xxx + 'lib'] = os.path.join(ex_prefix, 'lib') + + if bglb.enable_pumi: + add_extra('pumi') + if bglb.enable_strumpack: + add_extra('strumpack') + if bglb.enable_cuda: + add_extra('cuda') + if bglb.enable_libceed: + add_extra('libceed') + if bglb.enable_suitesparse: + add_extra('suitesparse', inc_sub='suitesparse') + if bglb.enable_gslib: + add_extra('gslibs') + if bglb.enable_gslib: + add_extra('gslibp') + + pwd = chdir(rootdir) + + fid = open('setup_local.py', 'w') + fid.write("# setup_local.py \n") + fid.write("# generated from setup.py\n") + fid.write("# do not edit this directly\n") + + for key, value in params.items(): + text = key.lower() + ' = "' + value + '"' + fid.write(text + "\n") + fid.close() + + os.chdir(pwd) + + +def generate_wrapper(): + ''' + run swig. + ''' + # this should work as far as we are in the same directory ? + from multiprocessing import Pool, cpu_count + import build_globals as bglb + + if bglb.dry_run or bglb.verbose: + print("generating SWIG wrapper") + print("using MFEM source", os.path.abspath(bglb.mfem_source)) + if not os.path.exists(os.path.abspath(bglb.mfem_source)): + assert False, "MFEM source directory. Use --mfem-source=" + + def ifiles(): + ifiles = os.listdir() + ifiles = [x for x in ifiles if x.endswith('.i')] + ifiles = [x for x in ifiles if not x.startswith('#')] + ifiles = [x for x in ifiles if not x.startswith('.')] + return ifiles + + def check_new(ifile): + wfile = ifile[:-2] + '_wrap.cxx' + if not os.path.exists(wfile): + return True + return os.path.getmtime(ifile) > os.path.getmtime(wfile) + + def update_integrator_exts(): + pwd = chdir(os.path.join(rootdir, 'mfem', 'common')) + command1 = [sys.executable, "generate_lininteg_ext.py"] + command2 = [sys.executable, "generate_bilininteg_ext.py"] + make_call(command1) + make_call(command2) + os.chdir(pwd) + + def update_header_exists(mfem_source): + print("updating the list of existing headers") + list_of_headers = [] + L = len(mfem_source.split(os.sep)) + for (dirpath, dirnames, filenames) in os.walk(mfem_source): + for filename in filenames: + if filename.endswith('.hpp'): + dirs = dirpath.split(os.sep)[L:] + dirs.append(filename[:-4]) + tmp = '_'.join(dirs) + xx = re.split('_|-', tmp) + new_name = 'FILE_EXISTS_'+'_'.join([x.upper() for x in xx]) + if new_name not in list_of_headers: + list_of_headers.append(new_name) + + pwd = chdir(os.path.join(rootdir, 'mfem', 'common')) + fid = open('existing_mfem_headers.i', 'w') + for x in list_of_headers: + fid.write("#define " + x + "\n") + fid.close() + os.chdir(pwd) + + mfemser = bglb.mfems_prefix + mfempar = bglb.mfemp_prefix + + update_header_exists(bglb.mfem_source) + + swigflag = '-Wall -c++ -python -fastproxy -olddefs -keyword'.split(' ') + + pwd = chdir(os.path.join(rootdir, 'mfem', '_ser')) + + serflag = ['-I' + os.path.join(mfemser, 'include'), + '-I' + os.path.join(mfemser, 'include', 'mfem'), + '-I' + os.path.abspath(bglb.mfem_source)] + if bglb.enable_suitesparse: + serflag.append('-I' + os.path.join(bglb.suitesparse_prefix, + 'include', 'suitesparse')) + + for filename in ['lininteg.i', 'bilininteg.i']: + command = [swig_command] + swigflag + serflag + [filename] + make_call(command) + update_integrator_exts() + + commands = [] + for filename in ifiles(): + if not check_new(filename): + continue 
+ command = [swig_command] + swigflag + serflag + [filename] + commands.append(command) + + mp_pool = Pool(max((cpu_count() - 1, 1))) + with mp_pool: + mp_pool.map(subprocess.run, commands) + + if not bglb.build_parallel: + os.chdir(pwd) + return + + chdir(os.path.join(rootdir, 'mfem', '_par')) + + parflag = ['-I' + os.path.join(mfempar, 'include'), + '-I' + os.path.join(mfempar, 'include', 'mfem'), + '-I' + os.path.abspath(bglb.mfem_source), + '-I' + os.path.join(bglb.hypre_prefix, 'include'), + '-I' + os.path.join(bglb.metis_prefix, 'include'), + '-I' + get_mpi4py_inc()] + + if bglb.enable_pumi: + parflag.append('-I' + os.path.join(bglb.pumi_prefix, 'include')) + if bglb.enable_strumpack: + parflag.append('-I' + os.path.join(bglb.strumpack_prefix, 'include')) + if bglb.enable_suitesparse: + parflag.append('-I' + os.path.join(bglb.suitesparse_prefix, + 'include', 'suitesparse')) + + commands = [] + for filename in ifiles(): + if filename == 'strumpack.i' and not bglb.enable_strumpack: + continue + if not check_new(filename): + continue + command = [swig_command] + swigflag + parflag + [filename] + commands.append(command) + + mp_pool = Pool(max((cpu_count() - 1, 1))) + with mp_pool: + mp_pool.map(subprocess.run, commands) + + os.chdir(pwd) + + +def clean_wrapper(): + from pathlib import Path + + # serial + pwd = chdir(os.path.join(rootdir, 'mfem', '_ser')) + wfiles = [x for x in os.listdir() if x.endswith('_wrap.cxx')] + + print(os.getcwd(), wfiles) + remove_files(wfiles) + + wfiles = [x for x in os.listdir() if x.endswith('_wrap.h')] + remove_files(wfiles) + + wfiles = [x for x in os.listdir() if x.endswith('.py')] + wfiles.remove("__init__.py") + wfiles.remove("setup.py") + wfiles.remove("tmop_modules.py") + remove_files(wfiles) + + ifiles = [x for x in os.listdir() if x.endswith('.i')] + for x in ifiles: + Path(x).touch() + + # parallel + chdir(os.path.join(rootdir, 'mfem', '_par')) + wfiles = [x for x in os.listdir() if x.endswith('_wrap.cxx')] + + remove_files(wfiles) + wfiles = [x for x in os.listdir() if x.endswith('_wrap.h')] + remove_files(wfiles) + + wfiles = [x for x in os.listdir() if x.endswith('.py')] + wfiles.remove("__init__.py") + wfiles.remove("setup.py") + wfiles.remove("tmop_modules.py") + remove_files(wfiles) + + ifiles = [x for x in os.listdir() if x.endswith('.i')] + for x in ifiles: + Path(x).touch() + + chdir(pwd) + + +def make_mfem_wrapper(serial=True): + ''' + compile PyMFEM wrapper code + ''' + from multiprocessing import cpu_count + import build_globals as bglb + + if bglb.dry_run or bglb.verbose: + print("compiling wrapper code, serial=" + str(serial)) + if not os.path.exists(os.path.abspath(bglb.mfem_source)): + assert False, "MFEM source directory. 
Use --mfem-source=" + + record_mfem_sha(bglb.mfem_source) + + write_setup_local() + + if serial: + pwd = chdir(os.path.join(rootdir, 'mfem', '_ser')) + else: + pwd = chdir(os.path.join(rootdir, 'mfem', '_par')) + + python = sys.executable + command = [python, 'setup.py', 'build_ext', '--inplace', '--parallel', + str(max((cpu_count() - 1, 1)))] + make_call(command, force_verbose=True) + + os.chdir(pwd) diff --git a/_build_system/build_utils.py b/_build_system/build_utils.py new file mode 100644 index 00000000..6197eb80 --- /dev/null +++ b/_build_system/build_utils.py @@ -0,0 +1,335 @@ +""" +Helper functions for setup.py +""" + +import os +import sys +import configparser +from urllib import request +import itertools +import site +import re +import subprocess +import multiprocessing +import ssl +import tarfile +from collections import namedtuple +from shutil import which as find_command + +__all__ = ["read_mfem_tplflags", "abspath", "external_install_prefix", + "make_call", "chdir", "remove_files", + "make", "make_install", "download", "gitclone", + "record_mfem_sha", "cmake", + "get_numpy_inc", "get_mpi4py_inc", "find_libpath_from_prefix", + "clean_so", ] + +# ---------------------------------------------------------------------------------------- +# global constant and variabls for build-process +# ---------------------------------------------------------------------------------------- +from build_consts import * +import build_globals as bglb + +# ---------------------------------------------------------------------------------------- +# Metadata +# ---------------------------------------------------------------------------------------- + + +def read_mfem_tplflags(prefix): + filename = os.path.join(prefix, 'share', 'mfem', 'config.mk') + if not os.path.exists(filename): + print("NOTE: " + filename + " does not exist.") + print("returning empty string") + return "" + + config = configparser.ConfigParser() + with open(filename) as fp: + config.read_file(itertools.chain(['[global]'], fp), source=filename) + flags = dict(config.items('global'))['mfem_tplflags'] + return flags + +# ---------------------------------------------------------------------------------------- +# Utilities +# ---------------------------------------------------------------------------------------- + + +def abspath(path): + return os.path.abspath(os.path.expanduser(path)) + + +def external_install_prefix(prefix, verbose=True): + + if hasattr(site, "getusersitepackages"): + usersite = site.getusersitepackages() + else: + usersite = site.USER_SITE + + if verbose: + print("running external_install_prefix with the following parameters") + print(" sys.argv :", sys.argv) + print(" sys.prefix :", sys.prefix) + print(" usersite :", usersite) + print(" prefix :", prefix) + + if '--user' in sys.argv: + path = usersite + if not os.path.exists(path): + os.makedirs(path) + path = os.path.join(path, 'mfem', 'external') + return path + + else: + # when prefix is given...let's borrow pip._internal to find the location ;D + import pip._internal.locations + path = pip._internal.locations.get_scheme( + "mfem", prefix=prefix).purelib + if not os.path.exists(path): + os.makedirs(path) + path = os.path.join(path, 'mfem', 'external') + return path + + +def make_call(command, target='', force_verbose=False, env=None): + ''' + call command + ''' + print("current working dir", os.getcwd()) + print("calling ... 
" + " ".join(command)) + + if bglb.dry_run: + return + kwargs = {'universal_newlines': True, 'env': env} + if env is not None: + env.update(os.environ) + + myverbose = bglb.verbose or force_verbose + if not myverbose: + kwargs['stdout'] = subprocess.DEVNULL + kwargs['stderr'] = subprocess.DEVNULL + + p = subprocess.Popen(command, **kwargs) + p.communicate() + if p.returncode != 0: + if target == '': + target = " ".join(command) + print("Failed when calling command: " + target) + raise subprocess.CalledProcessError(p.returncode, + " ".join(command)) + + +def chdir(path): + ''' + change directory to `path`; returns the previous directory + ''' + pwd = os.getcwd() + os.chdir(path) + if bglb.verbose: + print("Moving to a directory : " + path) + return pwd + + +def remove_files(files): + for f in files: + if bglb.verbose: + print("Removing : " + f) + if bglb.dry_run: + continue + os.remove(f) + + +def make(target): + ''' + make : add -j option automatically + ''' + command = ['make', '-j', str(max((multiprocessing.cpu_count() - 1, 1)))] + make_call(command, target=target, force_verbose=True) + + +def make_install(target, prefix=None): + ''' + make install + ''' + command = ['make', 'install'] + if prefix is not None: + command.append('prefix='+prefix) + make_call(command, target=target) + + +def download(xxx): + ''' + download tar.gz from somewhere. xxx is name. + url is given by repos above + ''' + + if os.path.exists(os.path.join(extdir, xxx)): + print("Download " + xxx + " skipped. Use clean --all-exts if needed") + return + # Get the tarball for the latest release + url = REPOS[xxx]["releases"][-1].tarball + if url is None: + raise RuntimeError(f"Could not find tarball URL for {xxx}") + print("Downloading :", url) + + if bglb.use_unverifed_SSL: + ssl._create_default_https_context = ssl._create_unverified_context + + ftpstream = request.urlopen(url) + targz = tarfile.open(fileobj=ftpstream, mode="r|gz") + targz.extractall(path=extdir) + os.rename(os.path.join(extdir, targz.getnames()[0].split('/')[0]), + os.path.join(extdir, xxx)) + + +def gitclone(xxx, use_sha=False, branch='master'): + cwd = os.getcwd() + repo_xxx = os.path.join(extdir, xxx) + if os.path.exists(repo_xxx): + os.chdir(repo_xxx) + command = ['git', 'checkout', branch] + make_call(command) + command = ['git', 'pull'] + make_call(command) + else: + repo = REPOS[xxx]["url"] + if bglb.git_sshclone: + repo = repo.replace("https://github.com/", "git@github.com:") + + os.chdir(extdir) + command = ['git', 'clone', repo, xxx] + make_call(command) + + if not bglb.dry_run: + if not os.path.exists(repo_xxx): + print(repo_xxx + " does not exist. Check if git clone worked") + os.chdir(repo_xxx) + + if use_sha: + sha = REPOS[xxx]["releases"][-1].hash + command = ['git', 'checkout', sha] + else: + command = ['git', 'checkout', branch] + make_call(command) + os.chdir(cwd) + + +def record_mfem_sha(mfem_source): + pwd = chdir(mfem_source) + command = ['git', 'rev-parse', 'HEAD'] + try: + sha = subprocess.run( + command, capture_output=True).stdout.decode().strip() + except subprocess.CalledProcessError: + print("subprocess failed to read sha...continuing w/o recording SHA") + sha = None + except BaseException: + print("subprocess failed to read sha...continuing w/o recording SHA") + sha = None + + chdir(pwd) + + sha_file = os.path.join('mfem', '__sha__.py') + fid = open(sha_file, 'w') + if sha is not None: + fid.write('mfem = "' + sha + '"') + fid.close() + + +def cmake(path, **kwargs): + ''' + run cmake. 
must be called in the target directory + ''' + command = ['cmake', path] + for key, value in kwargs.items(): + command.append('-' + key + '=' + value) + + if osx_sysroot != '': + command.append('-DCMAKE_OSX_SYSROOT=' + osx_sysroot) + make_call(command) + + + +def get_numpy_inc(): + + python = sys.executable + command = [python, "-c", "import numpy;print(numpy.get_include())"] + + try: + numpyinc = subprocess.run( + command, capture_output=True).stdout.decode().strip() + + except subprocess.CalledProcessError: + assert False, "can not check numpy include directory" + except BaseException: + assert False, "can not check numpy include directory" + + return numpyinc + + +def get_mpi4py_inc(): + + python = sys.executable + command = [python, "-c", "import mpi4py;print(mpi4py.get_include())"] + + try: + mpi4pyinc = subprocess.run( + command, capture_output=True).stdout.decode().strip() + except subprocess.CalledProcessError: + assert False, "can not check numpy include directory" + except BaseException: + assert False, "can not check numpy include directory" + + return mpi4pyinc + + +def find_libpath_from_prefix(lib, prefix0): + + prefix0 = os.path.expanduser(prefix0) + prefix0 = abspath(prefix0) + + soname = 'lib' + lib + dylibext + aname = 'lib' + lib + '.a' + + path = os.path.join(prefix0, 'lib', soname) + if os.path.exists(path): + return path + else: + path = os.path.join(prefix0, 'lib64', soname) + if os.path.exists(path): + return path + + path = os.path.join(prefix0, 'lib', aname) + if os.path.exists(path): + return path + else: + path = os.path.join(prefix0, 'lib64', aname) + if os.path.exists(path): + return path + print("Can not find library by find_libpath_from_prefix (continue)", lib, prefix0) + + return '' + +def clean_so(all=None): + python = sys.executable + command = [python, "setup.py", "clean"] + if all == 1: + command.append("--all") + + pwd = chdir(os.path.join(rootdir, 'mfem', '_ser')) + for f in os.listdir(): + if f.endswith('.so'): + os.remove(f) + if f.endswith('.dylib'): + os.remove(f) + make_call(command) + + chdir(os.path.join(rootdir, 'mfem', '_par')) + for f in os.listdir(): + if f.endswith('.so'): + os.remove(f) + if f.endswith('.dylib'): + os.remove(f) + make_call(command) + + chdir(pwd) + + + diff --git a/mfem/_par/setup.py b/mfem/_par/setup.py index 5d69122f..bf26ffcf 100644 --- a/mfem/_par/setup.py +++ b/mfem/_par/setup.py @@ -7,19 +7,15 @@ import sys import os -print('building paralel version') - # this remove *.py in this directory to be imported from setuptools +# Github workflow (next import) fails without this, because it loads +# array.py in current directoy sys.path.remove(os.path.abspath(os.path.dirname(sys.argv[0]))) from distutils.core import Extension, setup -# first load variables from PyMFEM_ROOT/setup_local.py - - ddd = os.path.dirname(os.path.abspath(os.path.realpath(__file__))) root = os.path.abspath(os.path.join(ddd, '..', '..')) - def get_version(): # read version number from __init__.py path = os.path.join(os.path.dirname(os.path.abspath(os.path.realpath(__file__))), @@ -32,8 +28,8 @@ def get_version(): version = eval(x.split('=')[-1].strip()) return version - def get_extensions(): + # first load variables from PyMFEM_ROOT/setup_local.py sys.path.insert(0, root) try: from setup_local import (mfembuilddir, mfemincdir, mfemsrcdir, mfemlnkdir, @@ -42,14 +38,13 @@ def get_extensions(): cc_par, cxx_par, cxx11flag, add_pumi, add_cuda, add_libceed, add_strumpack, - add_suitesparse, add_gslibp) + add_suitesparse, add_gslibp, bdist_wheel_dir) 
include_dirs = [mfembuilddir, mfemincdir, mfemsrcdir, numpyinc, mpi4pyinc, hypreinc, metisinc] - library_dirs = [mfemlnkdir, hyprelib, metis5lib, ] - + library_dirs = [mfemlnkdir,] except ImportError: if 'clean' not in sys.argv: raise @@ -67,7 +62,7 @@ def get_extensions(): cxx11flag = '' build_mfem = '0' - libraries = ['mfem', 'HYPRE', 'metis'] + libraries = ['mfem',] # remove current directory from path print("__file__", os.path.abspath(__file__)) @@ -77,7 +72,6 @@ def get_extensions(): x) == os.path.dirname(os.path.abspath(__file__))] for x in items: sys.path.remove(x) - print("sys path", sys.path) # this forces to use compiler written in setup_local.py if cc_par != '': @@ -124,15 +118,12 @@ def get_extensions(): from setup_local import puminc, pumilib modules.append("pumi") include_dirs.append(pumiinc) - library_dirs.append(pumilib) if add_strumpack == '1': from setup_local import strumpackinc, strumpacklib modules.append("strumpack") if strumpackinc != "": include_dirs.append(strumpackinc) - if strumpacklib != "": - library_dirs.append(strumpacklib) if add_cuda == '1': from setup_local import cudainc @@ -168,9 +159,13 @@ def get_extensions(): macros = [('TARGET_PY3', '1'), ('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION')] - if build_mfem == "1": - runtime_library_dirs = library_dirs[:] - runtime_library_dirs[0] = "$ORIGIN/../external/par/lib" + runtime_library_dirs = [x for x in library_dirs if x.find(bdist_wheel_dir) == -1] + if build_mfem == "1" and sys.platform in ("linux", "linux2"): + runtime_library_dirs.append("$ORIGIN/../external/par/lib") + runtime_library_dirs.append("$ORIGIN/../external/lib") + elif build_mfem == "1" and sys.platform == "darwin": + runtime_library_dirs.append("@loader_path/../external/par/lib") + runtime_library_dirs.append("@loader_path/../external/lib") else: runtime_library_dirs = library_dirs diff --git a/mfem/_ser/setup.py b/mfem/_ser/setup.py index 40673554..faa76afc 100644 --- a/mfem/_ser/setup.py +++ b/mfem/_ser/setup.py @@ -4,20 +4,18 @@ Serial version setup file """ - -# first load variables from PyMFEM_ROOT/setup_local.py import sys import os # this remove *.py in this directory to be imported from setuptools +# Github workflow (next import) fails without this, because it loads +# array.py in current directoy sys.path.remove(os.path.abspath(os.path.dirname(sys.argv[0]))) from distutils.core import Extension, setup - ddd = os.path.dirname(os.path.abspath(os.path.realpath(__file__))) root = os.path.abspath(os.path.join(ddd, '..', '..')) - def get_version(): # read version number from __init__.py path = os.path.join(os.path.dirname(os.path.abspath(os.path.realpath(__file__))), @@ -32,13 +30,15 @@ def get_version(): def get_extensions(): + # first load variables from PyMFEM_ROOT/setup_local.py sys.path.insert(0, root) try: from setup_local import (mfemserbuilddir, mfemserincdir, mfemsrcdir, mfemserlnkdir, mfemstpl, build_mfem, numpyinc, cc_ser, cxx_ser, cxx11flag, - add_cuda, add_libceed, add_suitesparse, add_gslibs,) + add_cuda, add_libceed, add_suitesparse, add_gslibs, + bdist_wheel_dir) include_dirs = [mfemserbuilddir, mfemserincdir, mfemsrcdir, numpyinc,] library_dirs = [mfemserlnkdir, ] @@ -62,14 +62,14 @@ def get_extensions(): libraries = ['mfem'] # remove current directory from path - #print("__file__", os.path.abspath(__file__)) + # print("__file__", os.path.abspath(__file__)) if '' in sys.path: sys.path.remove('') items = [x for x in sys.path if os.path.abspath( x) == os.path.dirname(os.path.abspath(__file__))] for x in items: 
sys.path.remove(x) - #print("sys path", sys.path) + # print("sys path", sys.path) # this forces to use compiler written in setup_local.py if cc_ser != '': @@ -108,7 +108,6 @@ def get_extensions(): "hyperbolic", "complex_densemat", "complexstaticcond", "complexweakform"] - if add_cuda == '1': from setup_local import cudainc include_dirs.append(cudainc) @@ -139,9 +138,12 @@ def get_extensions(): macros = [('TARGET_PY3', '1'), ('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION')] - if build_mfem == "1": - runtime_library_dirs = library_dirs[:] - runtime_library_dirs[0] = "$ORIGIN/../external/ser/lib" + runtime_library_dirs = [ + x for x in library_dirs if x.find(bdist_wheel_dir) == -1] + if build_mfem == "1" and sys.platform in ("linux", "linux2"): + runtime_library_dirs.append("$ORIGIN/../external/ser/lib") + elif build_mfem == "1" and sys.platform == "darwin": + runtime_library_dirs.append("@loader_path/../external/ser/lib") else: runtime_library_dirs = library_dirs diff --git a/mfem/common/numba_coefficient.i b/mfem/common/numba_coefficient.i index c9673542..5687d3a6 100644 --- a/mfem/common/numba_coefficient.i +++ b/mfem/common/numba_coefficient.i @@ -1267,9 +1267,10 @@ def _scalar(func, td=False, params=None, complex=False, dependency=None, print("(DEBUG) generated caller sig:\n", caller_sig) print("(DEBUG) generated caller function:\n", caller_txt) - exec(caller_txt, globals(), locals()) + ll = {} + exec(caller_txt, globals(), ll) caller_params = {"inner_func": ff, "carray":carray, "farray":farray} - caller_func = _copy_func_and_apply_params(locals()["_caller"], caller_params) + caller_func = _copy_func_and_apply_params(ll["_caller"], caller_params) ff = cfunc(caller_sig)(caller_func) if complex: @@ -1366,7 +1367,8 @@ def _vector(func, vdim=None, shape=None, td=False, params=None, print("(DEBUG) generated caller sig:\n", caller_sig) print("(DEBUG) generated caller function:\n", caller_txt) - exec(caller_txt, globals(), locals()) + ll = {} + exec(caller_txt, globals(), ll) caller_params = {"inner_func": ff, "np":np, "shape":shape, "carray":carray, "farray":farray} @@ -1374,7 +1376,7 @@ def _vector(func, vdim=None, shape=None, td=False, params=None, if vdim is not None: caller_params["vdim"] = vdim - caller_func = _copy_func_and_apply_params(locals()["_caller"], caller_params) + caller_func = _copy_func_and_apply_params(ll["_caller"], caller_params) ff = cfunc(caller_sig)(caller_func) if complex: @@ -1477,11 +1479,12 @@ def _matrix(func, height=None, width=None, shape=None, td=False, params=None, print("(DEBUG) generated caller sig:\n", caller_sig) print("(DEBUG) generated caller function:\n", caller_txt) - exec(caller_txt, globals(), locals()) + ll = {} + exec(caller_txt, globals(), ll) caller_params = {"inner_func": ff, "np":np, "shape":shape, "carray":carray, "farray":farray} - caller_func = _copy_func_and_apply_params(locals()["_caller"], caller_params) + caller_func = _copy_func_and_apply_params(ll["_caller"], caller_params) ff = cfunc(caller_sig)(caller_func) if complex: diff --git a/pyproject.toml b/pyproject.toml index e204e795..9d22a1c4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "mfem" -license = {file = "LICENSE"} +license = "BSD-3-Clause" description = "MFEM + PyMFEM (finite element method library)" dynamic = ["version"] readme = "README.md" @@ -11,11 +11,11 @@ classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Topic :: Scientific/Engineering :: Physics", - "Programming Language :: Python :: 
3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] requires-python = ">=3.8" dependencies = [ @@ -24,19 +24,23 @@ dependencies = [ "scipy", ] +[project.optional-dependencies] +MPI = ["mpi4py"] [project.urls] "Homepage" = "http://mfem.org" -"Download" = "https://github.com/mfem" - +"Download" = "https://github.com/pymfem" [build-system] requires = [ - "setuptools", - "cmake", + "setuptools>=80.0.1", + "pip>=25.1.0", + "numpy>=2.0.0", + "cmake>=4.0.0", "swig>=4.3", ] -build-backend = "setuptools.build_meta" +build-backend = "backend" +backend-path = ["_build_system"] [tool.setuptools.dynamic] version = { attr = "mfem.__version__" } diff --git a/setup.py b/setup.py index a34090df..7850f5be 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,6 @@ """ MFEM + PyMFEM (finite element method library) """ -from sys import platform import sys import os import site @@ -10,6 +9,7 @@ import subprocess from shutil import which as find_command +from sys import platform import multiprocessing from multiprocessing import Pool # force fork instead of spawn on MacOS to avoid race condition on mfem/__init__.py @@ -22,894 +22,38 @@ from setuptools.command.install_egg_info import install_egg_info as _install_egg_info from setuptools.command.install_lib import install_lib as _install_lib from setuptools.command.install_scripts import install_scripts as _install_scripts - +from setuptools.command.bdist_wheel import bdist_wheel as _bdist_wheel from distutils.command.clean import clean as _clean -sys.path.insert(0, os.path.dirname(__file__)) -from setuputils import ( - read_mfem_tplflags, abspath, external_install_prefix, - make_call, chdir, remove_files, download, gitclone, - record_mfem_sha, get_numpy_inc, get_mpi4py_inc, find_libpath_from_prefix, - cmake_make_hypre, make_metis, make_libceed, make_gslib, cmake_make_mfem, -) - -# ---------------------------------------------------------------------------------------- -# Constants -# ---------------------------------------------------------------------------------------- - -rootdir = os.path.abspath(os.path.dirname(__file__)) -extdir = os.path.join(rootdir, 'external') -if not os.path.exists(extdir): - os.mkdir(os.path.join(rootdir, 'external')) - -osx_sysroot = '' - -haveWheel = False - -if platform == "linux" or platform == "linux2": - dylibext = '.so' - # Wheel build only works on linux because of chrpath command - try: - from wheel.bdist_wheel import bdist_wheel as _bdist_wheel - haveWheel = True - except ImportError: - print("Skipping wheel build; wheel not installed.") -elif platform == "darwin": - # OS X - dylibext = '.dylib' - import sysconfig - for i, x in enumerate(sysconfig.get_config_vars()['CFLAGS'].split()): - if x == '-isysroot': - osx_sysroot = sysconfig.get_config_vars()['CFLAGS'].split()[i+1] - break - -elif platform == "win32": - # Windows... - assert False, "Windows is not supported yet. 
Contribution is welcome" +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "_build_system")) -use_metis_gklib = False - -cc_command = 'cc' if os.getenv("CC") is None else os.getenv("CC") -cxx_command = 'c++' if os.getenv("CC") is None else os.getenv("CXX") -mpicc_command = 'mpicc' if os.getenv("MPICC") is None else os.getenv("MPICC") -mpicxx_command = 'mpic++' if os.getenv( - "MPICXX") is None else os.getenv("MPICXX") -cxx11_flag = '-std=c++11' if os.getenv( - "CXX11FLAG") is None else os.getenv("CXX11FLAG") - -use_unverifed_SSL = False if os.getenv( - "unverifedSSL") is None else os.getenv("unverifiedSSL") - -swig_command = (find_command('swig') if os.getenv("SWIG") is None - else os.getenv("SWIG")) -if swig_command is None: - assert False, "SWIG is not installed (hint: pip install swig)" - -if haveWheel: - class BdistWheel(_bdist_wheel): - ''' - Wheel build performs SWIG + Serial in Default. - --skip-build option skip building entirely. - ''' - - def finalize_options(self): - def _has_ext_modules(): - return True - self.distribution.has_ext_modules = _has_ext_modules - _bdist_wheel.finalize_options(self) - - def run(self): - print("!!!!! Entering BdistWheel::Run") - - if not is_configured: - print('running config') - configure_bdist(self) - print_config() - self.run_command("build") - _bdist_wheel.run(self) +import build_globals as bglb +from build_mfem import * +from build_metis import * +from build_hypre import * +from build_pymfem import * +from build_libceed import * +from build_gslib import * +from build_config import * +from build_consts import * +from build_utils import * # ---------------------------------------------------------------------------------------- -# Global variables +# Constants # ---------------------------------------------------------------------------------------- -is_configured = False -prefix = '' - -verbose = -1 -git_sshclone = False -swig_only = False -skip_install = False -run_swig = False -clean_swig = False -build_mfem = False -mfem_branch = None -build_mfemp = False -build_metis = False -build_hypre = False -build_libceed = False -build_gslib = False -build_parallel = False -build_serial = False - -ext_prefix = '' -mfems_prefix = '' -mfemp_prefix = '' -mfem_source = os.path.join(os.path.dirname(__file__), "external", "mfem") -metis_prefix = '' -hypre_prefix = '' - -enable_cuda = False -enable_cuda_hypre = False -cuda_prefix = '' -cuda_arch = '' -enable_pumi = False -pumi_prefix = '' -enable_strumpack = False -strumpack_prefix = '' -enable_libceed = False -libceed_prefix = '' -libceed_only = False -enable_gslib = False -gslibs_prefix = '' -gslibp_prefix = '' -gslib_only = False -mfem_debug = False -mfem_build_miniapps = True - -enable_suitesparse = False -suitesparse_prefix = "/usr/" - -enable_lapack = False -blas_libraries = "" -lapack_libraries = "" - -dry_run = -1 -do_bdist_wheel = False - - - -def write_setup_local(): - ''' - create setup_local.py. 
parameters written here will be read - by setup.py in mfem._ser and mfem._par - ''' - mfemser = mfems_prefix - mfempar = mfemp_prefix - - hyprelibpath = os.path.dirname( - find_libpath_from_prefix('HYPRE', hypre_prefix)) - metislibpath = os.path.dirname( - find_libpath_from_prefix('metis', metis_prefix)) - - mfems_tpl = read_mfem_tplflags(mfems_prefix) - mfemp_tpl = read_mfem_tplflags(mfemp_prefix) if build_parallel else '' - - print(mfems_tpl, mfemp_tpl) - - params = {'cxx_ser': cxx_command, - 'cc_ser': cc_command, - 'cxx_par': mpicxx_command, - 'cc_par': mpicc_command, - 'whole_archive': '--whole-archive', - 'no_whole_archive': '--no-whole-archive', - 'nocompactunwind': '', - 'swigflag': '-Wall -c++ -python -fastproxy -olddefs -keyword', - 'hypreinc': os.path.join(hypre_prefix, 'include'), - 'hyprelib': hyprelibpath, - 'metisinc': os.path.join(metis_prefix, 'include'), - 'metis5lib': metislibpath, - 'numpyinc': get_numpy_inc(), - 'mpi4pyinc': '', - 'mfembuilddir': os.path.join(mfempar, 'include'), - 'mfemincdir': os.path.join(mfempar, 'include', 'mfem'), - 'mfemlnkdir': os.path.join(mfempar, 'lib'), - 'mfemserbuilddir': os.path.join(mfemser, 'include'), - 'mfemserincdir': os.path.join(mfemser, 'include', 'mfem'), - 'mfemserlnkdir': os.path.join(mfemser, 'lib'), - 'mfemsrcdir': os.path.join(mfem_source), - 'mfemstpl': mfems_tpl, - 'mfemptpl': mfemp_tpl, - 'add_pumi': '', - 'add_strumpack': '', - 'add_cuda': '', - 'add_libceed': '', - 'add_suitesparse': '', - 'add_gslib': '', - 'add_gslibp': '', - 'add_gslibs': '', - 'libceedinc': os.path.join(libceed_prefix, 'include'), - 'gslibsinc': os.path.join(gslibs_prefix, 'include'), - 'gslibpinc': os.path.join(gslibp_prefix, 'include'), - 'cxx11flag': cxx11_flag, - 'build_mfem': '1' if build_mfem else '0' - } - - if build_parallel: - params['mpi4pyinc'] = get_mpi4py_inc() - - def add_extra(xxx, inc_sub=None): - params['add_' + xxx] = '1' - if inc_sub is None: - params[xxx + - 'inc'] = os.path.join(globals()[xxx + - '_prefix'], 'include') - else: - params[xxx + - 'inc'] = os.path.join(globals()[xxx + - '_prefix'], 'include', inc_sub) - - params[xxx + 'lib'] = os.path.join(globals()[xxx + '_prefix'], 'lib') - - if enable_pumi: - add_extra('pumi') - if enable_strumpack: - add_extra('strumpack') - if enable_cuda: - add_extra('cuda') - if enable_libceed: - add_extra('libceed') - if enable_suitesparse: - add_extra('suitesparse', inc_sub='suitesparse') - if enable_gslib: - add_extra('gslibs') - if enable_gslib: - add_extra('gslibp') - - pwd = chdir(rootdir) - - fid = open('setup_local.py', 'w') - fid.write("# setup_local.py \n") - fid.write("# generated from setup.py\n") - fid.write("# do not edit this directly\n") - - for key, value in params.items(): - text = key.lower() + ' = "' + value + '"' - fid.write(text + "\n") - fid.close() - - os.chdir(pwd) - - -def generate_wrapper(): - ''' - run swig. - ''' - # this should work as far as we are in the same directory ? - if dry_run or verbose: - print("generating SWIG wrapper") - print("using MFEM source", os.path.abspath(mfem_source)) - if not os.path.exists(os.path.abspath(mfem_source)): - assert False, "MFEM source directory. 
Use --mfem-source=" - - def ifiles(): - ifiles = os.listdir() - ifiles = [x for x in ifiles if x.endswith('.i')] - ifiles = [x for x in ifiles if not x.startswith('#')] - ifiles = [x for x in ifiles if not x.startswith('.')] - return ifiles - - def check_new(ifile): - wfile = ifile[:-2] + '_wrap.cxx' - if not os.path.exists(wfile): - return True - return os.path.getmtime(ifile) > os.path.getmtime(wfile) - - def update_integrator_exts(): - pwd = chdir(os.path.join(rootdir, 'mfem', 'common')) - command1 = [sys.executable, "generate_lininteg_ext.py"] - command2 = [sys.executable, "generate_bilininteg_ext.py"] - make_call(command1) - make_call(command2) - os.chdir(pwd) - - def update_header_exists(mfem_source): - print("updating the list of existing headers") - list_of_headers = [] - L = len(mfem_source.split(os.sep)) - for (dirpath, dirnames, filenames) in os.walk(mfem_source): - for filename in filenames: - if filename.endswith('.hpp'): - dirs = dirpath.split(os.sep)[L:] - dirs.append(filename[:-4]) - tmp = '_'.join(dirs) - xx = re.split('_|-', tmp) - new_name = 'FILE_EXISTS_'+'_'.join([x.upper() for x in xx]) - if new_name not in list_of_headers: - list_of_headers.append(new_name) - - pwd = chdir(os.path.join(rootdir, 'mfem', 'common')) - fid = open('existing_mfem_headers.i', 'w') - for x in list_of_headers: - fid.write("#define " + x + "\n") - fid.close() - os.chdir(pwd) - - mfemser = mfems_prefix - mfempar = mfemp_prefix - - update_header_exists(mfem_source) - - swigflag = '-Wall -c++ -python -fastproxy -olddefs -keyword'.split(' ') - - pwd = chdir(os.path.join(rootdir, 'mfem', '_ser')) - - serflag = ['-I' + os.path.join(mfemser, 'include'), - '-I' + os.path.join(mfemser, 'include', 'mfem'), - '-I' + os.path.abspath(mfem_source)] - if enable_suitesparse: - serflag.append('-I' + os.path.join(suitesparse_prefix, - 'include', 'suitesparse')) - - for filename in ['lininteg.i', 'bilininteg.i']: - command = [swig_command] + swigflag + serflag + [filename] - make_call(command) - update_integrator_exts() - - commands = [] - for filename in ifiles(): - if not check_new(filename): - continue - command = [swig_command] + swigflag + serflag + [filename] - commands.append(command) - - mp_pool = Pool(max((multiprocessing.cpu_count() - 1, 1))) - with mp_pool: - mp_pool.map(subprocess.run, commands) - - if not build_parallel: - os.chdir(pwd) - return - - chdir(os.path.join(rootdir, 'mfem', '_par')) - - parflag = ['-I' + os.path.join(mfempar, 'include'), - '-I' + os.path.join(mfempar, 'include', 'mfem'), - '-I' + os.path.abspath(mfem_source), - '-I' + os.path.join(hypre_prefix, 'include'), - '-I' + os.path.join(metis_prefix, 'include'), - '-I' + get_mpi4py_inc()] - - if enable_pumi: - parflag.append('-I' + os.path.join(pumi_prefix, 'include')) - if enable_strumpack: - parflag.append('-I' + os.path.join(strumpack_prefix, 'include')) - if enable_suitesparse: - parflag.append('-I' + os.path.join(suitesparse_prefix, - 'include', 'suitesparse')) - - commands = [] - for filename in ifiles(): - if filename == 'strumpack.i' and not enable_strumpack: - continue - if not check_new(filename): - continue - command = [swig_command] + swigflag + parflag + [filename] - commands.append(command) - - mp_pool = Pool(max((multiprocessing.cpu_count() - 1, 1))) - with mp_pool: - mp_pool.map(subprocess.run, commands) - - os.chdir(pwd) - - -def clean_wrapper(): - from pathlib import Path - - # serial - pwd = chdir(os.path.join(rootdir, 'mfem', '_ser')) - wfiles = [x for x in os.listdir() if x.endswith('_wrap.cxx')] - 
remove_files(wfiles) - - wfiles = [x for x in os.listdir() if x.endswith('_wrap.h')] - remove_files(wfiles) - - wfiles = [x for x in os.listdir() if x.endswith('.py')] - wfiles.remove("__init__.py") - wfiles.remove("setup.py") - wfiles.remove("tmop_modules.py") - remove_files(wfiles) - - ifiles = [x for x in os.listdir() if x.endswith('.i')] - for x in ifiles: - Path(x).touch() - - # parallel - chdir(os.path.join(rootdir, 'mfem', '_par')) - wfiles = [x for x in os.listdir() if x.endswith('_wrap.cxx')] - - remove_files(wfiles) - wfiles = [x for x in os.listdir() if x.endswith('_wrap.h')] - remove_files(wfiles) - - wfiles = [x for x in os.listdir() if x.endswith('.py')] - wfiles.remove("__init__.py") - wfiles.remove("setup.py") - wfiles.remove("tmop_modules.py") - remove_files(wfiles) - - ifiles = [x for x in os.listdir() if x.endswith('.i')] - for x in ifiles: - Path(x).touch() - - chdir(pwd) - - -def clean_so(all=None): - python = sys.executable - command = [python, "setup.py", "clean"] - if all == 1: - command.append("--all") - - pwd = chdir(os.path.join(rootdir, 'mfem', '_ser')) - for f in os.listdir(): - if f.endswith('.so'): - os.remove(f) - if f.endswith('.dylib'): - os.remove(f) - make_call(command) - - chdir(os.path.join(rootdir, 'mfem', '_par')) - for f in os.listdir(): - if f.endswith('.so'): - os.remove(f) - if f.endswith('.dylib'): - os.remove(f) - make_call(command) - - chdir(pwd) - - -def make_mfem_wrapper(serial=True): - ''' - compile PyMFEM wrapper code - ''' - if dry_run or verbose: - print("compiling wrapper code, serial=" + str(serial)) - if not os.path.exists(os.path.abspath(mfem_source)): - assert False, "MFEM source directory. Use --mfem-source=" - - record_mfem_sha(mfem_source) - - write_setup_local() - - if serial: - pwd = chdir(os.path.join(rootdir, 'mfem', '_ser')) - else: - pwd = chdir(os.path.join(rootdir, 'mfem', '_par')) - - python = sys.executable - command = [python, 'setup.py', 'build_ext', '--inplace', '--parallel', - str(max((multiprocessing.cpu_count() - 1, 1)))] - make_call(command, force_verbose=True) - os.chdir(pwd) - - -def print_config(): - print("----configuration----") - print(" prefix", prefix) - print(" when needed, the dependency (mfem/hypre/metis) will be installed under " + - ext_prefix) - print(" build mfem : " + ("Yes" if build_mfem else "No")) - print(" build metis : " + ("Yes" if build_metis else "No")) - print(" build hypre : " + ("Yes" if build_hypre else "No")) - print(" build libceed : " + ("Yes" if build_libceed else "No")) - print(" build gslib : " + ("Yes" if build_gslib else "No")) - print(" call SWIG wrapper generator: " + ("Yes" if run_swig else "No")) - print(" build serial wrapper: " + ("Yes" if build_serial else "No")) - print(" build parallel wrapper : " + ("Yes" if build_parallel else "No")) - - print(" hypre prefix", hypre_prefix) - print(" metis prefix", metis_prefix) - print(" c compiler : " + cc_command) - print(" c++ compiler : " + cxx_command) - print(" mpi-c compiler : " + mpicc_command) - print(" mpi-c++ compiler : " + mpicxx_command) - - print(" verbose : " + ("Yes" if verbose else "No")) - print(" SWIG : " + swig_command) - - if blas_libraries != "": - print(" BLAS libraries : " + blas_libraries) - if lapack_libraries != "": - print(" Lapack libraries : " + lapack_libraries) - - print("") - - -def configure_install(self): - ''' - called when install workflow is used - ''' - global prefix, dry_run, verbose, ext_prefix, git_sshclone - global clean_swig, run_swig, swig_only, skip_install, skip_swig - global 
build_mfem, build_mfemp, build_parallel, build_serial - - global mfem_branch, mfem_source, mfem_debug, mfem_build_miniapps - global build_metis, build_hypre, build_libceed, build_gslib - - global mfems_prefix, mfemp_prefix, metis_prefix, hypre_prefix - global cc_command, cxx_command, mpicc_command, mpicxx_command - global metis_64 - global enable_cuda, cuda_prefix, enable_cuda_hypre, cuda_arch - global enable_pumi, pumi_prefix - global enable_strumpack, strumpack_prefix - global enable_libceed, libceed_prefix, libceed_only - global enable_gslib, gslibs_prefix, gslibp_prefix, gslib_only - global enable_suitesparse, suitesparse_prefix - global enable_lapack, blas_libraries, lapack_libraries - - verbose = bool(self.vv) if verbose == -1 else verbose - dry_run = bool(self.dry_run) if dry_run == -1 else dry_run - if dry_run: - verbose = True - - git_sshclone = bool(self.git_sshclone) - - prefix = abspath(self.prefix) - mfem_source = abspath(self.mfem_source) - - skip_ext = bool(self.skip_ext) - skip_install = bool(self.build_only) - skip_swig = bool(self.skip_swig) - - swig_only = bool(self.swig) - ext_only = bool(self.ext_only) - - metis_64 = bool(self.with_metis64) - enable_pumi = bool(self.with_pumi) - enable_strumpack = bool(self.with_strumpack) - enable_cuda = bool(self.with_cuda) - enable_cuda_hypre = bool(self.with_cuda_hypre) - if self.cuda_arch is not None: - cuda_arch = self.cuda_arch - enable_libceed = bool(self.with_libceed) - libceed_only = bool(self.libceed_only) - enable_gslib = bool(self.with_gslib) - gslib_only = bool(self.gslib_only) - enable_suitesparse = bool(self.with_suitesparse) - enable_lapack = bool(self.with_lapack) - - build_parallel = bool(self.with_parallel) # controlls PyMFEM parallel - build_serial = not bool(self.no_serial) - - clean_swig = True - run_swig = True - - mfem_debug = bool(self.mfem_debug) - mfem_build_miniapps = bool(self.mfem_build_miniapps) - - if build_serial: - build_serial = (not swig_only and not ext_only) - - if build_parallel: - try: - import mpi4py - except ImportError: - assert False, "Can not import mpi4py" - - if self.mfem_prefix != '': - mfem_prefix = abspath(self.mfem_prefix) - mfems_prefix = abspath(self.mfem_prefix) - mfemp_prefix = abspath(self.mfem_prefix) - if self.mfems_prefix != '': - mfems_prefix = abspath(self.mfems_prefix) - if self.mfemp_prefix != '': - mfemp_prefix = abspath(self.mfemp_prefix) - - check = find_libpath_from_prefix('mfem', mfems_prefix) - assert check != '', "libmfem.so is not found in the specified /lib" - check = find_libpath_from_prefix('mfem', mfemp_prefix) - assert check != '', "libmfem.so is not found in the specified /lib" - - build_mfem = False - hypre_prefix = mfem_prefix - metis_prefix = mfem_prefix - - if swig_only: - clean_swig = False - - else: - build_mfem = True - build_mfemp = build_parallel - build_hypre = build_parallel - build_metis = build_parallel or enable_suitesparse - - print("ext_prefix", ext_prefix) - if ext_prefix == '': - ext_prefix = external_install_prefix(prefix) - hypre_prefix = os.path.join(ext_prefix) - metis_prefix = os.path.join(ext_prefix) - - mfem_prefix = ext_prefix - mfems_prefix = os.path.join(ext_prefix, 'ser') - mfemp_prefix = os.path.join(ext_prefix, 'par') - # enable_gslib = True - - if self.mfem_branch != '': - mfem_branch = self.mfem_branch - - if self.hypre_prefix != '': - check = find_libpath_from_prefix('HYPRE', self.hypre_prefix) - assert check != '', "libHYPRE.so is not found in the specified /lib or lib64" - hypre_prefix = 
os.path.expanduser(self.hypre_prefix) - build_hypre = False - - if self.metis_prefix != '': - check = find_libpath_from_prefix('metis', self.metis_prefix) - assert check != '', "libmetis.so is not found in the specified /lib or lib64" - metis_prefix = os.path.expanduser(self.metis_prefix) - build_metis = False - - if enable_libceed or libceed_only: - if self.libceed_prefix != '': - libceed_prefix = os.path.expanduser(self.libceed_prefix) - build_libceed = False - else: - libceed_prefix = mfem_prefix - build_libceed = True - - if enable_gslib or gslib_only: - if self.gslib_prefix != '': - build_gslib = False - gslibs_prefix = os.path.expanduser(self.gslib_prefix) - gslibp_prefix = os.path.expanduser(self.gslib_prefix) - else: - gslibs_prefix = mfems_prefix - gslibp_prefix = mfemp_prefix - build_gslib = True - - if enable_suitesparse and self.suitesparse_prefix != '': - suitesparse_prefix = self.suitesparse_prefix - - if self.pumi_prefix != '': - pumi_prefix = abspath(self.pumi_prefix) - else: - pumi_prefix = mfem_prefix - - if self.strumpack_prefix != '': - strumpack_prefix = abspath(self.strumpack_prefix) - else: - strumpack_prefix = mfem_prefix - - if enable_cuda: - nvcc = find_command('nvcc') - cuda_prefix = os.path.dirname(os.path.dirname(nvcc)) - - if self.CC != '': - cc_command = self.CC - if self.CXX != '': - cxx_command = self.CXX - if self.MPICC != '': - mpicc_command = self.MPICC - if self.MPICXX != '': - mpicxx_command = self.MPICXX - - if self.blas_libraries != "": - blas_libraries = self.blas_libraries - if self.lapack_libraries != "": - lapack_libraries = self.lapack_libraries - - if skip_ext: - build_metis = False - build_hypre = False - build_mfem = False - build_mfemp = False - build_libceed = False - build_gslib = False - - if self.skip_swig: - clean_swig = False - run_swig = False - - if swig_only: - build_serial = False - clean_swig = False - - if ext_only: - clean_swig = False - run_swig = False - build_serial = False - build_parallel = False - skip_install = True - - if libceed_only: - clean_swig = False - run_swig = False - build_mfem = False - build_mfemp = False - build_metis = False - build_hypre = False - build_gslib = False - build_serial = False - build_parallel = False - build_libceed = True - skip_install = True - - if gslib_only: - clean_swig = False - run_swig = False - build_mfem = False - build_mfemp = False - build_metis = False - build_hypre = False - build_serial = False - build_libceed = False - build_gslib = True - skip_install = True - - global is_configured - is_configured = True - - -def configure_bdist(self): - ''' - called when bdist workflow is used - ''' - global prefix, dry_run, verbose, run_swig - global build_mfem, build_parallel, build_serial, build_gslib - global mfem_branch, mfem_source, mfem_build_miniapps - global mfems_prefix, mfemp_prefix, hypre_prefix, metis_prefix, ext_prefix - - global cc_command, cxx_command, mpicc_command, mpicxx_command - global enable_pumi, pumi_prefix - global enable_strumpack, strumpack_prefix - global do_bdist_wheel - dry_run = bool(self.dry_run) if dry_run == -1 else dry_run - verbose = bool(self.verbose) if verbose == -1 else verbose - - prefix = abspath(self.bdist_dir) - - build_parallel = False - - if self.skip_build == 1: - build_mfem = False - build_serial = False - run_swig = False - else: - build_mfem = True - build_serial = True - # build_gslib = True - run_swig = True - - global is_configured - is_configured = True - do_bdist_wheel = True - - # mfem_source = './external/mfem' - ext_prefix = 
os.path.join(prefix, 'mfem', 'external') - print("ext_prefix(bdist)", ext_prefix) - hypre_prefix = ext_prefix - metis_prefix = ext_prefix - - mfem_prefix = ext_prefix - mfems_prefix = os.path.join(ext_prefix, 'ser') - mfemp_prefix = os.path.join(ext_prefix, 'par') - - mfem_build_miniapps = False - class Install(_install): ''' called when pyton setup.py install ''' - user_options = _install.user_options + [ - ('vv', None, 'More verbose output (CMAKE_VERBOSE_MAKEFILE etc)'), - ('with-parallel', None, 'Installed both serial and parallel version'), - ('no-serial', None, 'Skip building the serial wrapper'), - ('mfem-prefix=', None, 'Specify locaiton of mfem' + - 'libmfem.so must exits under /lib. ' + - 'This mode uses clean-swig + run-swig, unless mfem-prefix-no-swig is on'), - ('mfemp-prefix=', None, 'Specify locaiton of parallel mfem ' + - 'libmfem.so must exits under /lib. ' + - 'Need to use it with mfem-prefix'), - ('mfems-prefix=', None, 'Specify locaiton of serial mfem ' + - 'libmfem.so must exits under /lib. ', - 'Need to use it with mfem-prefix'), - ('mfem-branch=', None, 'Specify branch of mfem' + - 'MFEM is cloned and built using the specfied branch '), - ('mfem-source=', None, 'Specify mfem source location' + - 'MFEM source directory. Required to run-swig '), - ('mfem-debug', None, 'Build MFME with MFEM_DEBUG enabled'), - ('mfem-build-miniapps', None, 'build MFME Miniapps'), - ('hypre-prefix=', None, 'Specify locaiton of hypre' + - 'libHYPRE.so must exits under /lib'), - ('metis-prefix=', None, 'Specify locaiton of metis' + - 'libmetis.so must exits under /lib'), - ('git-sshclone', None, 'Use SSH for git clone', - 'try if default git clone using https fails (need Github account and setting for SSH)'), - ('swig', None, 'Run Swig and exit'), - ('skip-swig', None, - 'Skip running swig (used when wrapper is generated for the MFEM C++ library to be used'), - ('ext-only', None, 'Build metis, hypre, mfem(C++) only'), - ('skip-ext', None, 'Skip building metis, hypre, mfem(C++) only'), - ('build-only', None, 'Skip final install stage to prefix'), - ('CC=', None, 'c compiler'), - ('CXX=', None, 'c++ compiler'), - ('MPICC=', None, 'mpic compiler'), - ('MPICXX=', None, 'mpic++ compiler'), - ('unverifiedSSL', None, 'use unverified SSL context for downloading'), - ('with-cuda', None, 'enable cuda'), - ('with-cuda-hypre', None, 'enable cuda in hypre'), - ('cuda-arch=', None, 'set cuda compute capability. 
Ex if A100, set to 80'), - ('with-metis64', None, 'use 64bit int in metis'), - ('with-pumi', None, 'enable pumi (parallel only)'), - ('pumi-prefix=', None, 'Specify locaiton of pumi'), - ('with-suitesparse', None, - 'build MFEM with suitesparse (MFEM_USE_SUITESPARSE=YES) (parallel only)'), - ('suitesparse-prefix=', None, - 'Specify locaiton of suitesparse (=SuiteSparse_DIR)'), - ('with-libceed', None, 'enable libceed'), - ('libceed-prefix=', None, 'Specify locaiton of libceed'), - ('libceed-only', None, 'Build libceed only'), - ('gslib-prefix=', None, 'Specify locaiton of gslib'), - ('with-gslib', None, 'enable gslib'), - ('gslib-only', None, 'Build gslib only'), - ('with-strumpack', None, 'enable strumpack (parallel only)'), - ('strumpack-prefix=', None, 'Specify locaiton of strumpack'), - ('with-lapack', None, 'build MFEM with lapack'), - ('blas-libraries=', None, 'Specify locaiton of Blas library (used to build MFEM)'), - ('lapack-libraries=', None, - 'Specify locaiton of Lapack library (used to build MFEM)'), - ] + user_options = _install.user_options + cmd_options def initialize_options(self): _install.initialize_options(self) - self.swig = False - self.skip_swig = False - self.ext_only = False - - self.git_sshclone = False - self.skip_ext = False - self.with_parallel = False - self.build_only = False - self.no_serial = False - self.mfem_prefix = '' - self.mfems_prefix = '' - self.mfemp_prefix = '' - self.mfem_source = mfem_source - self.mfem_branch = '' - self.mfem_debug = False - self.mfem_build_miniapps = False - self.metis_prefix = '' - self.hypre_prefix = '' - - self.with_cuda = False - self.with_cuda_hypre = False - self.cuda_arch = None - self.with_metis64 = False - - self.with_pumi = False - self.pumi_prefix = '' - - self.with_strumpack = False - self.strumpack_prefix = '' - - self.with_suitesparse = False - self.suitesparse_prefix = '' - - self.with_lapack = False - self.blas_libraries = "" - self.lapack_libraries = "" - - self.with_libceed = False - self.libceed_prefix = '' - self.libceed_only = False - - self.with_gslib = False - self.gslib_prefix = '' - self.gslib_only = False - - self.CC = '' - self.CXX = '' - self.MPICC = '' - self.MPICXX = '' - self.vv = False - - self.unverifiedSSL = False + initialize_cmd_options(self) def finalize_options(self): - if (bool(self.ext_only) and bool(self.skip_ext)): assert False, "skip-ext and ext-only can not use together" @@ -921,18 +65,17 @@ def finalize_options(self): self.prefix = os.path.expanduser(self.prefix) prefix = self.prefix - global verbose - verbose = bool(self.vv) + bglb.verbose = bool(self.vv) if given_prefix: # global ext_prefix - self.prefix = abspath(prefix) + self.prefix = abspath(bglb.prefix) # ext_prefix = abspath(prefix) else: if '--user' in sys.argv: path = site.getusersitepackages() if not os.path.exists(path): try: - print("attempting to make a --user directory", path) + print("!!!!! attempting to make a --user directory", path) os.makedirs(path) except BaseException: pass @@ -949,23 +92,57 @@ def finalize_options(self): self.user = 0 _install.finalize_options(self) - global use_unverifed_SSL - use_unverifed_SSL = self.unverifiedSSL + bglb.use_unverifed_SSL = self.unverifiedSSL - if verbose: - print("prefix is :", self.prefix) + if bglb.verbose: + print("!!!!! prefix is :", self.prefix) def run(self): - if not is_configured: - configure_install(self) + if not bglb.is_configured: + print('!!!!! 
Running config (install)') + bglb.prefix = abspath(self.prefix) + configure_build(self) print_config() - if swig_only: + if bglb.swig_only: self.run_command("build") else: _install.run(self) +class BdistWheel(_bdist_wheel): + ''' + Wheel build performs SWIG + Serial in Default. + --skip-build option skip building entirely. + ''' + user_options = _bdist_wheel.user_options + cmd_options + + def initialize_options(self): + _bdist_wheel.initialize_options(self) + initialize_cmd_options(self) + + def finalize_options(self): + def _has_ext_modules(): + return True + self.distribution.has_ext_modules = _has_ext_modules + _bdist_wheel.finalize_options(self) + + def run(self): + print("!!!!! Entering BdistWheel::Run") + import build_globals as bglb + + if not bglb.is_configured: + print('!!!!! Running config (bdist wheel)') + bglb.prefix = abspath(self.bdist_dir) + bglb.ext_prefix = os.path.join(bglb.prefix, 'mfem', 'external') + bglb.bdist_wheel_dir = abspath(self.bdist_dir) + bglb.do_bdist_wheel = True + configure_build(self) + print_config() + self.run_command("build") + _bdist_wheel.run(self) + + class BuildPy(_build_py): ''' Called when python setup.py build_py @@ -979,59 +156,62 @@ def finalize_options(self): _build_py.finalize_options(self) def run(self): - if not swig_only: - if build_metis: - if use_metis_gklib: + if not bglb.swig_only: + if bglb.build_metis: + if bglb.use_metis_gklib: gitclone('gklib', use_sha=True) gitclone('metis', use_sha=True) - make_metis(use_int64=metis_64, use_real64=metis_64) + make_metis(use_int64=bglb.metis_64, + use_real64=bglb.metis_64) else: download('metis') - make_metis(use_int64=metis_64, use_real64=metis_64) + make_metis(use_int64=bglb.metis_64, + use_real64=bglb.metis_64) - if build_hypre: + if bglb.build_hypre: download('hypre') cmake_make_hypre() - if build_libceed: + if bglb.build_libceed: download('libceed') make_libceed() - if build_gslib: + if bglb.build_gslib: download('gslib') make_gslib(serial=True) - if build_hypre: + if bglb.build_hypre: make_gslib() mfem_downloaded = False - if build_mfem: - gitclone('mfem', use_sha=True) if mfem_branch is None else gitclone( - 'mfem', branch=mfem_branch) + if bglb.build_mfem: + gitclone('mfem', use_sha=True) if bglb.mfem_branch is None else gitclone( + 'mfem', branch=bglb.mfem_branch) mfem_downloaded = True cmake_make_mfem(serial=True) - if build_mfemp: + if bglb.build_mfemp: if not mfem_downloaded: - gitclone('mfem', use_sha=True) if mfem_branch is None else gitclone( - 'mfem', branch=mfem_branch) + gitclone('mfem', use_sha=True) if bglb.mfem_branch is None else gitclone( + 'mfem', branch=bglb.mfem_branch) cmake_make_mfem(serial=False) - if clean_swig: + if bglb.clean_swig: clean_wrapper() - if run_swig: + if bglb.run_swig: generate_wrapper() - if swig_only: + if bglb.swig_only: return - if build_serial: + if bglb.build_serial: make_mfem_wrapper(serial=True) - if build_parallel: + if bglb.build_parallel: make_mfem_wrapper(serial=False) - if not skip_install: + if not bglb.skip_install: _build_py.run(self) else: sys.exit() + class InstallLib(_install_lib): def finalize_options(self): _install_lib.finalize_options(self) @@ -1042,7 +222,7 @@ def finalize_options(self): class InstallEggInfo(_install_egg_info): def run(self): - if not dry_run: + if not bglb.dry_run: _install_egg_info.run(self) else: print("skipping regular install_egg_info") @@ -1050,7 +230,7 @@ def run(self): class InstallScripts(_install_scripts): def run(self): - if not dry_run: + if not bglb.dry_run: _install_scripts.run(self) else: 
print("skipping regular install_scripts") @@ -1079,9 +259,8 @@ def initialize_options(self): self.all_exts = False def run(self): - global dry_run, verbose - dry_run = self.dry_run - verbose = bool(self.verbose) + bglb.dry_run = self.dry_run + bglb.verbose = bool(self.verbose) os.chdir(extdir) @@ -1125,9 +304,8 @@ def run(self): 'install_lib': InstallLib, 'install_egg_info': InstallEggInfo, 'install_scripts': InstallScripts, - 'clean': Clean} - if haveWheel: - cmdclass['bdist_wheel'] = BdistWheel + 'clean': Clean, + 'bdist_wheel': BdistWheel} setup( cmdclass=cmdclass, diff --git a/setuputils.py b/setuputils.py deleted file mode 100644 index 61641873..00000000 --- a/setuputils.py +++ /dev/null @@ -1,721 +0,0 @@ -""" -Helper functions for setup.py -""" - -import os -import sys -import configparser -from urllib import request -import itertools -import site -import subprocess -import multiprocessing -import ssl -import tarfile -from collections import namedtuple - -# ---------------------------------------------------------------------------------------- -# Constants -# ---------------------------------------------------------------------------------------- - -release = namedtuple('Release', ['version', 'hash', 'tarball']) -REPOS = dict( - mfem = dict( - url = "https://github.com/mfem/mfem.git", - # version, hash, tarball - releases = [ - release("4.7", "dc9128ef596e84daf1138aa3046b826bba9d259f", None), - release("4.8", "a01719101027383954b69af1777dc828bf795d62", None), - ] - ), - metis = dict( - url = "https://github.com/KarypisLab/METIS", - releases = [ - release("5.1.0", "94c03a6e2d1860128c2d0675cbbb86ad4f261256", - "https://github.com/mfem/tpls/raw/gh-pages/metis-5.1.0.tar.gz"), - ] - ), - gklib = dict( - url = "https://github.com/KarypisLab/GKlib", - releases = [ - release("5.1.1", "a7f8172703cf6e999dd0710eb279bba513da4fec", - "https://github.com/KarypisLab/GKlib/archive/refs/tags/METIS-v5.1.1-DistDGL-0.5.tar.gz"), - ] - ), - libceed = dict( - url = "https://github.com/CEED/libCEED.git", - releases = [ - release("0.12.0", None, "https://github.com/CEED/libCEED/archive/refs/tags/v0.12.0.tar.gz"), - ] - ), - hypre = dict( - url = None, - releases = [ - release("2.28.0", None, "https://github.com/hypre-space/hypre/archive/v2.28.0.tar.gz"), - ] - ), -) - -# ---------------------------------------------------------------------------------------- -# Metadata -# ---------------------------------------------------------------------------------------- - -def read_mfem_tplflags(prefix): - filename = os.path.join(prefix, 'share', 'mfem', 'config.mk') - if not os.path.exists(filename): - print("NOTE: " + filename + " does not exist.") - print("returning empty string") - return "" - - config = configparser.ConfigParser() - with open(filename) as fp: - config.read_file(itertools.chain(['[global]'], fp), source=filename) - flags = dict(config.items('global'))['mfem_tplflags'] - return flags - -# ---------------------------------------------------------------------------------------- -# Utilities -# ---------------------------------------------------------------------------------------- - -def abspath(path): - return os.path.abspath(os.path.expanduser(path)) - - -def external_install_prefix(prefix, verbose=True): - - if hasattr(site, "getusersitepackages"): - usersite = site.getusersitepackages() - else: - usersite = site.USER_SITE - - if verbose: - print("running external_install_prefix with the following parameters") - print(" sys.argv :", sys.argv) - print(" 
sys.prefix :", sys.prefix) - print(" usersite :", usersite) - print(" prefix :", prefix) - - if '--user' in sys.argv: - path = usersite - if not os.path.exists(path): - os.makedirs(path) - path = os.path.join(path, 'mfem', 'external') - return path - - else: - # when prefix is given...let's borrow pip._internal to find the location ;D - import pip._internal.locations - path = pip._internal.locations.get_scheme( - "mfem", prefix=prefix).purelib - if not os.path.exists(path): - os.makedirs(path) - path = os.path.join(path, 'mfem', 'external') - return path - - -def make_call(command, target='', force_verbose=False, env=None): - ''' - call command - ''' - print("calling ... " + " ".join(command)) - - if dry_run: - return - kwargs = {'universal_newlines': True, 'env': env} - if env is not None: - env.update(os.environ) - - myverbose = verbose or force_verbose - if not myverbose: - kwargs['stdout'] = subprocess.DEVNULL - kwargs['stderr'] = subprocess.DEVNULL - - p = subprocess.Popen(command, **kwargs) - p.communicate() - if p.returncode != 0: - if target == '': - target = " ".join(command) - print("Failed when calling command: " + target) - raise subprocess.CalledProcessError(p.returncode, - " ".join(command)) - - -def chdir(path): - ''' - change directory to `path`; returns the previous directory - ''' - pwd = os.getcwd() - os.chdir(path) - if verbose: - print("Moving to a directory : " + path) - return pwd - - -def remove_files(files): - for f in files: - if verbose: - print("Removing : " + f) - if dry_run: - continue - os.remove(f) - - -def make(target): - ''' - make : add -j option automatically - ''' - command = ['make', '-j', str(max((multiprocessing.cpu_count() - 1, 1)))] - make_call(command, target=target, force_verbose=True) - - -def make_install(target, prefix=None): - ''' - make install - ''' - command = ['make', 'install'] - if prefix is not None: - command.append('prefix='+prefix) - make_call(command, target=target) - - -def download(xxx): - ''' - download tar.gz from somewhere. xxx is name. - url is given by repos above - ''' - - if os.path.exists(os.path.join(extdir, xxx)): - print("Download " + xxx + " skipped. Use clean --all-exts if needed") - return - # Get the tarball for the latest release - url = REPOS[xxx]["releases"][-1].tarball - if url is None: - raise RuntimeError(f"Could not find tarball URL for {xxx}") - print("Downloading :", url) - - if use_unverifed_SSL: - ssl._create_default_https_context = ssl._create_unverified_context - - ftpstream = request.urlopen(url) - targz = tarfile.open(fileobj=ftpstream, mode="r|gz") - targz.extractall(path=extdir) - os.rename(os.path.join(extdir, targz.getnames()[0].split('/')[0]), - os.path.join(extdir, xxx)) - - -def gitclone(xxx, use_sha=False, branch='master'): - cwd = os.getcwd() - repo_xxx = os.path.join(extdir, xxx) - if os.path.exists(repo_xxx): - os.chdir(repo_xxx) - command = ['git', 'checkout', branch] - make_call(command) - command = ['git', 'pull'] - make_call(command) - else: - repo = REPOS[xxx]["url"] - if git_sshclone: - repo = repo.replace("https://github.com/", "git@github.com:") - - os.chdir(extdir) - command = ['git', 'clone', repo, xxx] - make_call(command) - - if not dry_run: - if not os.path.exists(repo_xxx): - print(repo_xxx + " does not exist. 
Check if git clone worked") - os.chdir(repo_xxx) - - if use_sha: - sha = REPOS[xxx]["releases"][-1].hash - command = ['git', 'checkout', sha] - else: - command = ['git', 'checkout', branch] - make_call(command) - os.chdir(cwd) - - -def record_mfem_sha(mfem_source): - pwd = chdir(mfem_source) - command = ['git', 'rev-parse', 'HEAD'] - try: - sha = subprocess.run( - command, capture_output=True).stdout.decode().strip() - except subprocess.CalledProcessError: - print("subprocess failed to read sha...continuing w/o recording SHA") - sha = None - except BaseException: - print("subprocess failed to read sha...continuing w/o recording SHA") - sha = None - - chdir(pwd) - - sha_file = os.path.join('mfem', '__sha__.py') - fid = open(sha_file, 'w') - if sha is not None: - fid.write('mfem = "' + sha + '"') - fid.close() - - -def cmake(path, **kwargs): - ''' - run cmake. must be called in the target directory - ''' - command = ['cmake', path] - for key, value in kwargs.items(): - command.append('-' + key + '=' + value) - - if osx_sysroot != '': - command.append('-DCMAKE_OSX_SYSROOT=' + osx_sysroot) - make_call(command) - - -def get_numpy_inc(): - command = ["python", "-c", "import numpy;print(numpy.get_include())"] - try: - numpyinc = subprocess.run( - command, capture_output=True).stdout.decode().strip() - except subprocess.CalledProcessError: - assert False, "can not check numpy include directory" - except BaseException: - assert False, "can not check numpy include directory" - return numpyinc - - -def get_mpi4py_inc(): - command = ["python", "-c", "import mpi4py;print(mpi4py.get_include())"] - try: - mpi4pyinc = subprocess.run( - command, capture_output=True).stdout.decode().strip() - except subprocess.CalledProcessError: - assert False, "can not check numpy include directory" - except BaseException: - assert False, "can not check numpy include directory" - return mpi4pyinc - - -def find_libpath_from_prefix(lib, prefix0): - - prefix0 = os.path.expanduser(prefix0) - prefix0 = abspath(prefix0) - - soname = 'lib' + lib + dylibext - aname = 'lib' + lib + '.a' - - path = os.path.join(prefix0, 'lib', soname) - if os.path.exists(path): - return path - else: - path = os.path.join(prefix0, 'lib64', soname) - if os.path.exists(path): - return path - - path = os.path.join(prefix0, 'lib', aname) - if os.path.exists(path): - return path - else: - path = os.path.join(prefix0, 'lib64', aname) - if os.path.exists(path): - return path - print("Can not find library by find_libpath_from_prefix (continue)", lib, prefix0) - - return '' - -# ---------------------------------------------------------------------------------------- -# Build libraries -# ---------------------------------------------------------------------------------------- - -def cmake_make_hypre(): - ''' - build hypre - ''' - if verbose: - print("Building hypre") - - cmbuild = 'cmbuild' - path = os.path.join(extdir, 'hypre', 'src', cmbuild) - if os.path.exists(path): - print("working directory already exists!") - else: - os.makedirs(path) - - pwd = chdir(path) - - cmake_opts = {'DBUILD_SHARED_LIBS': '1', - 'DHYPRE_INSTALL_PREFIX': hypre_prefix, - 'DHYPRE_ENABLE_SHARED': '1', - 'DCMAKE_C_FLAGS': '-fPIC', - 'DCMAKE_INSTALL_PREFIX': hypre_prefix, - 'DCMAKE_INSTALL_NAME_DIR': os.path.join(hypre_prefix, 'lib'), } - if verbose: - cmake_opts['DCMAKE_VERBOSE_MAKEFILE'] = '1' - - if enable_cuda and enable_cuda_hypre: - # in this case, settitng CMAKE_C_COMPILER - # causes "mpi.h" not found error. 
For now, letting CMAKE
-        # to find MPI
-        cmake_opts['DHYPRE_WITH_CUDA'] = '1'
-        if cuda_arch != '':
-            cmake_opts['DCMAKE_CUDA_ARCHITECTURES'] = cuda_arch
-    else:
-        cmake_opts['DCMAKE_C_COMPILER'] = mpicc_command
-
-    cmake('..', **cmake_opts)
-
-    make('hypre')
-    make_install('hypre')
-
-    os.chdir(pwd)
-
-
-def make_metis_gklib(use_int64=False, use_real64=False):
-    '''
-    build GKlib/metis
-    '''
-
-    '''
-    build/install GKlib
-    '''
-    if verbose:
-        print("Building gklib")
-
-    path = os.path.join(extdir, 'gklib')
-    if not dry_run and not os.path.exists(path):
-        assert False, "gklib is not downloaded"
-
-    path = os.path.join(path, 'cmbuild')
-    if os.path.exists(path):
-        print("working directory already exists!")
-    else:
-        os.makedirs(path)
-    pwd = chdir(path)
-
-    cmake_opts = {'DBUILD_SHARED_LIBS': '1',
-                  'DCMAKE_INSTALL_PREFIX': metis_prefix}
-    if verbose:
-        cmake_opts['DCMAKE_VERBOSE_MAKEFILE'] = '1'
-
-    cmake('..', **cmake_opts)
-    make('gklib')
-    make_install('gklib')
-    os.chdir(pwd)
-
-    '''
-    build/install metis
-    '''
-    path = os.path.join(extdir, 'metis')
-    if not dry_run and not os.path.exists(path):
-        assert False, "metis is not downloaded"
-    elif not os.path.exists(path):
-        os.makedirs(path)
-        os.makedirs(os.path.join(path, 'build'))
-
-    pwd = chdir(path)
-
-    gklibpath = os.path.dirname(find_libpath_from_prefix(
-        'GKlib', metis_prefix))
-
-    options = ['gklib_path='+metis_prefix]
-    if use_int64:
-        options.append('i64=1')
-
-    if use_real64:
-        options.append('r64=1')
-
-    command = ['make', 'config', 'shared=1'] + options
-    command = command + ['prefix=' + metis_prefix, 'cc=' + cc_command]
-    make_call(command)
-
-    chdir('build')
-    cmake_opts = {'DGKLIB_PATH': metis_prefix,
-                  'DSHARED': '1',
-                  'DCMAKE_C_COMPILER': cc_command,
-                  'DCMAKE_C_STANDARD_LIBRARIES': '-lGKlib',
-                  'DCMAKE_INSTALL_RPATH': gklibpath,
-                  'DCMAKE_BUILD_WITH_INSTALL_RPATH': '1',
-                  'DCMAKE_INSTALL_PREFIX': metis_prefix}
-    if verbose:
-        cmake_opts['DCMAKE_VERBOSE_MAKEFILE'] = '1'
-
-    cmake('..', **cmake_opts)
-    chdir(path)
-    make('metis')
-    make_install('metis')
-
-    if platform == "darwin":
-        command = ['install_name_tool',
-                   '-id',
-                   os.path.join(metis_prefix, 'lib', 'libGKlib.dylib'),
-                   os.path.join(metis_prefix, 'lib', 'libGKlib.dylib'), ]
-        make_call(command)
-        command = ['install_name_tool',
-                   '-id',
-                   os.path.join(metis_prefix, 'lib', 'libmetis.dylib'),
-                   os.path.join(metis_prefix, 'lib', 'libmetis.dylib'), ]
-        make_call(command)
-    os.chdir(pwd)
-
-
-def make_metis(use_int64=False, use_real64=False):
-    '''
-    build metis
-    '''
-    if verbose:
-        print("Building metis")
-
-    path = os.path.join(extdir, 'metis')
-    if not os.path.exists(path):
-        assert False, "metis is not downloaded"
-
-    pwd = chdir(path)
-
-    if use_int64:
-        pattern_int = "#define IDXTYPEWIDTH 32"
-        replace_int = "#define IDXTYPEWIDTH 64"
-    else:
-        pattern_int = "#define IDXTYPEWIDTH 64"
-        replace_int = "#define IDXTYPEWIDTH 32"
-    with open("include/metis.h", "r") as metis_header_fid:
-        metis_header_lines = metis_header_fid.readlines()
-    with open("include/metis.h", "w") as metis_header_fid:
-        for line in metis_header_lines:
-            metis_header_fid.write(re.sub(pattern_int, replace_int, line))
-
-    if use_real64:
-        pattern_real = "#define REALTYPEWIDTH 32"
-        replace_real = "#define REALTYPEWIDTH 64"
-    else:
-        pattern_real = "#define REALTYPEWIDTH 64"
-        replace_real = "#define REALTYPEWIDTH 32"
-    with open("include/metis.h", "r") as metis_header_fid:
-        metis_header_lines = metis_header_fid.readlines()
-    with open("include/metis.h", "w") as metis_header_fid:
-        for line in metis_header_lines:
-            metis_header_fid.write(re.sub(pattern_real, replace_real, line))
-
-    command = ['make', 'config', 'shared=1',
-               'prefix=' + metis_prefix,
-               'cc=' + cc_command]
-    make_call(command, env={'CMAKE_POLICY_VERSION_MINIMUM': '3.5'})
-    make('metis')
-    make_install('metis')
-
-    if platform == "darwin":
-        command = ['install_name_tool',
-                   '-id',
-                   os.path.join(metis_prefix, 'lib', 'libmetis.dylib'),
-                   os.path.join(metis_prefix, 'lib', 'libmetis.dylib'), ]
-        make_call(command)
-    os.chdir(pwd)
-
-
-def make_libceed(serial=False):
-    if verbose:
-        print("Building libceed")
-
-    path = os.path.join(extdir, 'libceed')
-    if not os.path.exists(path):
-        assert False, "libceed is not downloaded"
-
-    pwd = chdir(path)
-    try:
-        make_call(['make', 'clean'])
-    except:
-        pass
-
-    if enable_cuda:
-        command = ['make', 'configure', 'CUDA_DIR='+cuda_prefix]
-        make_call(command)
-
-    make('libceed')
-    make_install('libceed', prefix=libceed_prefix)
-    os.chdir(pwd)
-
-
-def make_gslib(serial=False):
-    if verbose:
-        print("Building gslib")
-
-    path = os.path.join(extdir, 'gslib')
-    if not os.path.exists(path):
-        assert False, "gslib is not downloaded"
-
-    pwd = chdir(path)
-    make_call(['make', 'clean'])
-    if serial:
-        command = ['make', 'CC=' + cc_command, 'MPI=0', 'CFLAGS=-fPIC']
-        make_call(command)
-        command = ['make', 'MPI=0', 'DESTDIR=' + gslibs_prefix]
-        make_call(command)
-    else:
-        command = ['make', 'CC=' + mpicc_command, 'CFLAGS=-O2 -fPIC']
-        make_call(command)
-        command = ['make', 'DESTDIR=' + gslibp_prefix]
-        make_call(command)
-    os.chdir(pwd)
-
-
-def cmake_make_mfem(serial=True):
-    '''
-    build MFEM
-    '''
-    cmbuild = 'cmbuild_ser' if serial else 'cmbuild_par'
-    path = os.path.join(extdir, 'mfem', cmbuild)
-    if os.path.exists(path):
-        print("working directory already exists!")
-    else:
-        os.makedirs(path)
-
-    ldflags = os.getenv('LDFLAGS') if os.getenv('LDFLAGS') is not None else ''
-    metisflags = ''
-    hypreflags = ''
-
-    rpaths = []
-
-    def add_rpath(p):
-        if not p in rpaths:
-            rpaths.append(p)
-
-    cmake_opts = {'DBUILD_SHARED_LIBS': '1',
-                  'DMFEM_ENABLE_EXAMPLES': '1',
-                  'DMFEM_ENABLE_MINIAPPS': '0',
-                  'DCMAKE_SHARED_LINKER_FLAGS': ldflags,
-                  'DMFEM_USE_ZLIB': '1',
-                  'DCMAKE_CXX_FLAGS': cxx11_flag,
-                  'DCMAKE_BUILD_WITH_INSTALL_RPATH': '1'}
-
-    if mfem_debug:
-        cmake_opts['DMFEM_DEBUG'] = 'YES'
-
-    if mfem_build_miniapps:
-        cmake_opts['DMFEM_ENABLE_MINIAPPS'] = '1'
-
-    if verbose:
-        cmake_opts['DCMAKE_VERBOSE_MAKEFILE'] = '1'
-
-    if serial:
-        cmake_opts['DCMAKE_CXX_COMPILER'] = cxx_command
-        cmake_opts['DMFEM_USE_EXCEPTIONS'] = '1'
-        cmake_opts['DCMAKE_INSTALL_PREFIX'] = mfems_prefix
-
-        add_rpath(os.path.join(mfems_prefix, 'lib'))
-        if enable_suitesparse:
-            enable_metis = True
-        else:
-            enable_metis = False
-    else:
-        cmake_opts['DCMAKE_CXX_COMPILER'] = mpicxx_command
-        cmake_opts['DMFEM_USE_EXCEPTIONS'] = '0'
-        cmake_opts['DCMAKE_INSTALL_PREFIX'] = mfemp_prefix
-        cmake_opts['DMFEM_USE_MPI'] = '1'
-        cmake_opts['DHYPRE_DIR'] = hypre_prefix
-        cmake_opts['DHYPRE_INCLUDE_DIRS'] = os.path.join(
-            hypre_prefix, "include")
-
-        add_rpath(os.path.join(mfemp_prefix, 'lib'))
-
-        hyprelibpath = os.path.dirname(
-            find_libpath_from_prefix(
-                'HYPRE', hypre_prefix))
-
-        add_rpath(hyprelibpath)
-
-        hypreflags = "-L" + hyprelibpath + " -lHYPRE "
-
-        if enable_strumpack:
-            cmake_opts['DMFEM_USE_STRUMPACK'] = '1'
-            cmake_opts['DSTRUMPACK_DIR'] = strumpack_prefix
-            libpath = os.path.dirname(
-                find_libpath_from_prefix("STRUMPACK", strumpack_prefix))
-            add_rpath(libpath)
-        if enable_pumi:
-            cmake_opts['DMFEM_USE_PUMI'] = '1'
-            cmake_opts['DPUMI_DIR'] = pumi_prefix
-            libpath = os.path.dirname(
-                find_libpath_from_prefix("pumi", strumpack_prefix))
-            add_rpath(libpath)
-        enable_metis = True
-
-    if enable_metis:
-        cmake_opts['DMFEM_USE_METIS_5'] = '1'
-        cmake_opts['DMETIS_DIR'] = metis_prefix
-        cmake_opts['DMETIS_INCLUDE_DIRS'] = os.path.join(
-            metis_prefix, "include")
-        metislibpath = os.path.dirname(
-            find_libpath_from_prefix(
-                'metis', metis_prefix))
-        add_rpath(metislibpath)
-
-        if use_metis_gklib:
-            metisflags = "-L" + metislibpath + " -lmetis -lGKlib "
-        else:
-            metisflags = "-L" + metislibpath + " -lmetis "
-
-    if ldflags != '':
-        cmake_opts['DCMAKE_SHARED_LINKER_FLAGS'] = ldflags
-        cmake_opts['DCMAKE_EXE_LINKER_FLAGS'] = ldflags
-
-    if metisflags != '':
-        cmake_opts['DMETIS_LIBRARIES'] = metisflags
-    if hypreflags != '':
-        cmake_opts['DHYPRE_LIBRARIES'] = hypreflags
-
-    if enable_cuda:
-        cmake_opts['DMFEM_USE_CUDA'] = '1'
-        if cuda_arch != '':
-            cmake_opts['DCMAKE_CUDA_ARCHITECTURES'] = cuda_arch
-
-    if enable_libceed:
-        cmake_opts['DMFEM_USE_CEED'] = '1'
-        cmake_opts['DCEED_DIR'] = libceed_prefix
-        libpath = os.path.dirname(
-            find_libpath_from_prefix("ceed", libceed_prefix))
-        add_rpath(libpath)
-
-    if enable_gslib:
-        if serial:
-            cmake_opts['DMFEM_USE_GSLIB'] = '1'
-            cmake_opts['DGSLIB_DIR'] = gslibs_prefix
-        else:
-            cmake_opts['DMFEM_USE_GSLIB'] = '1'
-            cmake_opts['DGSLIB_DIR'] = gslibp_prefix
-
-    if enable_suitesparse:
-        cmake_opts['DMFEM_USE_SUITESPARSE'] = '1'
-        if suitesparse_prefix != '':
-            cmake_opts['DSuiteSparse_DIR'] = suitesparse_prefix
-
-    if enable_lapack:
-        cmake_opts['DMFEM_USE_LAPACK'] = '1'
-        if blas_libraries != "":
-            cmake_opts['DBLAS_LIBRARIES'] = blas_libraries
-        if lapack_libraries != "":
-            cmake_opts['DLAPACK_LIBRARIES'] = lapack_libraries
-
-    cmake_opts['DCMAKE_INSTALL_RPATH'] = ":".join(rpaths)
-
-    pwd = chdir(path)
-    cmake('..', **cmake_opts)
-
-    txt = 'serial' if serial else 'parallel'
-
-    make('mfem_' + txt)
-    make_install('mfem_' + txt)
-
-    from shutil import copytree, rmtree
-
-    print("copying mesh data for testing", "../data",
-          cmake_opts['DCMAKE_INSTALL_PREFIX'])
-    path = os.path.join(cmake_opts['DCMAKE_INSTALL_PREFIX'], "data")
-    if os.path.exists(path):
-        rmtree(path)
-    copytree("../data", path)
-
-    if do_bdist_wheel:
-        ex_dir = os.path.join(cmake_opts['DCMAKE_INSTALL_PREFIX'], "examples")
-        for x in os.listdir(ex_dir):
-            path = os.path.join(ex_dir, x)
-            command = ['chrpath', '-r', "$ORIGIN/../lib", path]
-            make_call(command, force_verbose=True)
-
-    os.chdir(pwd)