Skip to content
Snippets Groups Projects
Commit 36d02f60 authored by easybuild's avatar easybuild
Browse files

fix

parent abfe323f
Branches
No related tags found
1 merge request!5fix
##
# Copyright 2009-2020 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for installing ANSYS Electromagnetics
@author: Alexi Rivera (Chalmers University of Technology)
@author: Mikael OEhman (Chalmers University of Technology)
"""
import os
import glob
from easybuild.easyblocks.generic.packedbinary import PackedBinary
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.run import run_cmd
class EB_ANSYSEM(PackedBinary):
    """Support for installing Ansys Electromagnetics."""

    def __init__(self, *args, **kwargs):
        """Initialize Ansys Electromagnetics specific variables."""
        super(EB_ANSYSEM, self).__init__(*args, **kwargs)
        # path to the generated silent-install properties ("replay") file, set in configure_step
        self.replayfile = None

    def _install_subdir(self):
        """
        Return the single 'AnsysEM*/Linux*/' subdirectory of the installation directory,
        as a path relative to the installation directory.

        :raises EasyBuildError: if zero or multiple matching subdirectories are found
        """
        idirs = glob.glob(os.path.join(self.installdir, 'AnsysEM*/Linux*/'))
        if len(idirs) != 1:
            raise EasyBuildError("Failed to locate single install subdirectory AnsysEM*/Linux*/")
        return os.path.relpath(idirs[0], self.installdir)

    def configure_step(self):
        """
        Configure Ansys Electromagnetics installation:
        compose the properties file used to drive the silent installer.
        """
        licserv = os.getenv('EB_ANSYS_EM_LICENSE_SERVER')
        if not licserv:
            # without this check, licserv.split() below would fail with a raw AttributeError
            raise EasyBuildError("$EB_ANSYS_EM_LICENSE_SERVER is not set; "
                                 "it must list one or more (comma-separated) license servers")
        licport = os.getenv('EB_ANSYS_EM_LICENSE_SERVER_PORT')
        if not licport:
            # without this check, the literal string 'None' would end up as tcpPort in the properties file
            raise EasyBuildError("$EB_ANSYS_EM_LICENSE_SERVER_PORT is not set; "
                                 "it must specify the license server TCP port")
        # the installer supports up to 3 license servers; unused slots stay empty
        licservers = ['', '', '']
        licservs = licserv.split(',')
        servercount = len(licservs)
        licservers[:servercount] = licservs
        try:
            self.replayfile = os.path.join(self.builddir, "installer.properties")
            txt = '\n'.join([
                "-W Agree.selection=1",
                "-P installLocation=\"%s\"" % self.installdir,
                "-W TempDirectory.tempPath=\"/tmp\"",
                "-W TempDirectory.ChangeTempPermission=\"0\"",
                "-W LibraryOption.libraryOption=0",
                "-W LibraryOption.libraryPath=\"\"",
                "-W LicenseOption.licenseOption=2",
                "-W LicenseOption.licenseFileName=\"\"",
                "-W LicenseOption.serverCount=%s" % servercount,
                "-W LicenseOption.serverName1=\"%s\"" % licservers[0],
                "-W LicenseOption.serverName2=\"%s\"" % licservers[1],
                "-W LicenseOption.serverName3=\"%s\"" % licservers[2],
                "-W LicenseOption.tcpPort=%s" % licport,
            ])
            with open(self.replayfile, "w") as f:
                f.write(txt)
        except IOError as err:
            raise EasyBuildError("Failed to create install properties file used for replaying installation: %s", err)

    def install_step(self):
        """Install Ansys Electromagnetics using its setup tool, driven by the replay file."""
        cmd = "./Linux/AnsysEM/disk1/setup.exe -options \"%s\" -silent" % (self.replayfile)
        run_cmd(cmd, log_all=True, simple=True)

    def make_module_extra(self):
        """Extra module entries for Ansys Electromagnetics: add the versioned install subdir to $PATH."""
        subdir = self._install_subdir()
        txt = super(EB_ANSYSEM, self).make_module_extra()
        txt += self.module_generator.prepend_paths('PATH', subdir)
        # Not sure if these are needed;
        # txt += self.module_generator.prepend_paths('LD_LIBRARY_PATH',
        #     [os.path.join(ansysdir, 'mainwin540', 'Linux64', 'mw', 'lib-amd64_linux_optimized')])
        # txt += self.module_generator.prepend_paths('LIBRARY_PATH',
        #     [os.path.join('ansysdir', 'mainwin540', 'Linux64', 'mw', 'lib-amd64_linux_optimized')])
        return txt

    def sanity_check_step(self):
        """Custom sanity check for Ansys Electromagnetics: 'ansysedt' binary must be present."""
        subdir = self._install_subdir()
        custom_paths = {
            'files': [os.path.join(subdir, 'ansysedt')],
            'dirs': [subdir],
        }
        super(EB_ANSYSEM, self).sanity_check_step(custom_paths=custom_paths)
##
# Copyright 2021-2023 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Support for building and installing AOMP - AMD OpenMP compiler, implemented as
an EasyBlock
@author: Jorgen Nordmoen (University Center for Information Technology - UiO)
"""
import os
from easybuild.easyblocks.generic.binary import Binary
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.build_log import EasyBuildError, print_warning
from easybuild.tools.config import build_option
from easybuild.tools.filetools import move_file, remove_file
from easybuild.tools.modules import get_software_root
from easybuild.tools.systemtools import AARCH64, POWER, X86_64
from easybuild.tools.systemtools import get_cpu_architecture, get_shared_lib_ext
AOMP_ALL_COMPONENTS = ['roct', 'rocr', 'project', 'libdevice', 'openmp',
'extras', 'pgmath', 'flang', 'flang_runtime', 'comgr',
'rocminfo', 'vdi', 'hipvdi', 'ocl', 'rocdbgapi',
'rocgdb', 'roctracer', 'rocprofiler']
AOMP_DEFAULT_COMPONENTS = ['roct', 'rocr', 'project', 'libdevice', 'openmp',
'extras', 'pgmath', 'flang', 'flang_runtime',
'comgr', 'rocminfo']
AOMP_X86_COMPONENTS = ['vdi', 'hipvdi', 'ocl']
AOMP_DBG_COMPONENTS = ['rocdbgapi', 'rocgdb']
AOMP_PROF_COMPONENTS = ['roctracer', 'rocprofiler']
class EB_AOMP(Binary):
    """Support for installing AOMP"""

    @staticmethod
    def extra_options():
        """Define custom easyconfig parameters for AOMP."""
        extra_vars = Binary.extra_options()
        extra_vars.update({
            'components': [None, "AOMP components to build. Possible components: " +
                           ', '.join(AOMP_ALL_COMPONENTS), CUSTOM],
        })
        return extra_vars

    def __init__(self, *args, **kwargs):
        """Initialize custom class variables for AOMP."""
        super(EB_AOMP, self).__init__(*args, **kwargs)
        self.cfg['extract_sources'] = True
        self.cfg['dontcreateinstalldir'] = True

    def configure_step(self):
        """Configure AOMP build and let 'Binary' install"""
        # Setup install command
        self.cfg['install_cmd'] = './aomp/bin/build_aomp.sh'
        # Setup 'preinstallopts'
        version_major = self.version.split('-')[0]
        install_options = [
            'AOMP={!s}'.format(self.installdir),
            'AOMP_REPOS="{!s}/aomp{!s}"'.format(self.builddir, version_major),
            'AOMP_CMAKE={!s}/bin/cmake'.format(get_software_root('CMake')),
            'AOMP_CHECK_GIT_BRANCH=0',
            'AOMP_APPLY_ROCM_PATCHES=0',
            'AOMP_STANDALONE_BUILD=1',
        ]
        if self.cfg['parallel']:
            install_options.append(
                'NUM_THREADS={!s}'.format(self.cfg['parallel']))
        else:
            install_options.append('NUM_THREADS=1')
        # Check if CUDA is loaded and alternatively build CUDA backend
        if get_software_root('CUDA') or get_software_root('CUDAcore'):
            cuda_root = get_software_root('CUDA') or get_software_root('CUDAcore')
            install_options.append('AOMP_BUILD_CUDA=1')
            install_options.append('CUDA="{!s}"'.format(cuda_root))
            # list of CUDA compute capabilities to use can be specified in two ways (where (2) overrules (1)):
            # (1) in the easyconfig file, via the custom cuda_compute_capabilities;
            # (2) in the EasyBuild configuration, via --cuda-compute-capabilities configuration option;
            ec_cuda_cc = self.cfg['cuda_compute_capabilities']
            cfg_cuda_cc = build_option('cuda_compute_capabilities')
            cuda_cc = cfg_cuda_cc or ec_cuda_cc or []
            if cfg_cuda_cc and ec_cuda_cc:
                warning_msg = "cuda_compute_capabilities specified in easyconfig (%s) are overruled by " % ec_cuda_cc
                warning_msg += "--cuda-compute-capabilities configuration option (%s)" % cfg_cuda_cc
                print_warning(warning_msg)
            if not cuda_cc:
                raise EasyBuildError("CUDA module was loaded, "
                                     "indicating a build with CUDA, "
                                     "but no CUDA compute capability "
                                     "was specified!")
            # Convert '7.0' to '70' format
            cuda_cc = [cc.replace('.', '') for cc in cuda_cc]
            cuda_str = ",".join(cuda_cc)
            install_options.append('NVPTXGPUS="{!s}"'.format(cuda_str))
        else:
            # Explicitly disable CUDA
            install_options.append('AOMP_BUILD_CUDA=0')
        # Combine install instructions above into 'preinstallopts'
        self.cfg['preinstallopts'] = ' '.join(install_options)
        # Setup components for install
        components = self.cfg.get('components', None)
        # If no components were defined we use the default
        if not components:
            components = AOMP_DEFAULT_COMPONENTS
        # NOTE: The following has not been tested properly and is therefore
        # removed
        #
        # Add X86 components if correct architecture
        # if get_cpu_architecture() == X86_64:
        #     components.extend(AOMP_X86_COMPONENTS)
        # Only build selected components
        self.cfg['installopts'] = 'select ' + ' '.join(components)

    def post_install_step(self):
        """Remove the symlink the AOMP installer leaves as install dir, and move the real install in place."""
        super(EB_AOMP, self).post_install_step()
        # The install script will create a symbolic link as the install
        # directory, this creates problems for EB as it won't remove the
        # symlink. To remedy this we remove the link here and rename the actual
        # install directory created by the AOMP install script
        if os.path.islink(self.installdir):
            remove_file(self.installdir)
        else:
            err_str = "Expected '{!s}' to be a symbolic link" \
                " that needed to be removed, but it wasn't!"
            raise EasyBuildError(err_str.format(self.installdir))
        # Move the actual directory containing the install
        install_name = '{!s}_{!s}'.format(os.path.basename(self.installdir),
                                          self.version)
        actual_install = os.path.join(os.path.dirname(self.installdir),
                                      install_name)
        if os.path.exists(actual_install) and os.path.isdir(actual_install):
            move_file(actual_install, self.installdir)
        else:
            err_str = "Tried to move '{!s}' to '{!s}'," \
                " but it either doesn't exist" \
                " or isn't a directory!"
            raise EasyBuildError(err_str.format(actual_install,
                                                self.installdir))

    def sanity_check_step(self):
        """Custom sanity check for AOMP"""
        shlib_ext = get_shared_lib_ext()
        arch = get_cpu_architecture()
        # Check architecture explicitly since Clang uses potentially
        # different names
        arch_map = {
            X86_64: 'x86_64',
            POWER: 'ppc64',
            AARCH64: 'aarch64',
        }
        if arch in arch_map:
            arch = arch_map[arch]
        else:
            print_warning("Unknown CPU architecture (%s) for OpenMP offloading!" % arch)
        custom_paths = {
            'files': [
                "amdgcn/bitcode/hip.bc", "amdgcn/bitcode/opencl.bc", "bin/aompcc",
                "bin/aompversion", "bin/clang", "bin/flang", "bin/ld.lld", "bin/llvm-config",
                "bin/mygpu", "bin/opt", "bin/rocminfo", "include/amd_comgr.h",
                "include/hsa/amd_hsa_common.h", "include/hsa/hsa.h", "include/omp.h",
                "include/omp_lib.h", "lib/libclang.%s" % shlib_ext, "lib/libflang.%s" % shlib_ext,
                "lib/libomp.%s" % shlib_ext, "lib/libomptarget.rtl.amdgpu.%s" % shlib_ext,
                "lib/libomptarget.rtl.%s.%s" % (arch, shlib_ext), "lib/libomptarget.%s" % shlib_ext,
            ],
            'dirs': ["amdgcn", "include/clang", "include/hsa", "include/llvm"],
        }
        # If we are building with CUDA support we need to check if it was built properly
        if get_software_root('CUDA') or get_software_root('CUDAcore'):
            custom_paths['files'].append("lib/libomptarget.rtl.cuda.%s" % shlib_ext)
        custom_commands = [
            'aompcc --help', 'clang --help', 'clang++ --help', 'flang --help',
            'llvm-config --cxxflags',
        ]
        super(EB_AOMP, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
This diff is collapsed.
##
# Copyright 2013-2022 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing IMOD, implemented as an easyblock
@author: Benjamin Roberts (Landcare Research NZ Ltd)
"""
import os
import shutil
from easybuild.easyblocks.generic.binary import Binary
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import remove_dir
from easybuild.tools.run import run_cmd
class EB_IMOD(Binary):
    """Support for building/installing IMOD."""

    def install_step(self):
        """Install IMOD using install script."""
        # -dir: Choose location of installation directory
        # -skip: do not attempt to deploy resource files in /etc
        # -yes: do not prompt for confirmation
        script = '{0}_{1}{2}.sh'.format(self.name.lower(), self.version, self.cfg['versionsuffix'])
        # NOTE(review): '-script {1}' passes the install dir as the script location — presumably
        # where the installer drops its helper scripts; confirm against the IMOD installer docs
        cmd = "bash {0} -dir {1} -script {1} -skip -yes".format(script, self.installdir)
        run_cmd(cmd, log_all=True, simple=True)
        # The assumption by the install script is that installdir will be something
        # like /usr/local. So it creates, within the specified install location, a
        # number of additional directories within which to install IMOD. We will,
        # therefore, move the contents of these directories up and throw away the
        # directories themselves. Doing so apparently is not a problem so long as
        # IMOD_DIR is correctly set in the module.
        link_to_remove = os.path.join(self.installdir, self.name)
        dir_to_remove = os.path.join(self.installdir, "{0}_{1}".format(self.name.lower(), self.version))
        try:
            for entry in os.listdir(dir_to_remove):
                shutil.move(os.path.join(dir_to_remove, entry), self.installdir)
            if os.path.realpath(link_to_remove) != os.path.realpath(dir_to_remove):
                raise EasyBuildError("Something went wrong: %s doesn't point to %s", link_to_remove, dir_to_remove)
            remove_dir(dir_to_remove)
            os.remove(link_to_remove)
        except OSError as err:
            raise EasyBuildError("Failed to clean up install dir: %s", err)

    def sanity_check_step(self):
        """Custom sanity check for IMOD."""
        custom_paths = {
            # check for both the bash and csh environment scripts (the original listed
            # 'IMOD-linux.sh' twice; the second entry was clearly meant to be the csh variant,
            # cfr. the messages emitted in make_module_extra)
            'files': ['bin/imod', 'IMOD-linux.sh', 'IMOD-linux.csh', 'installIMOD'],
            'dirs': ['lib'],
        }
        super(EB_IMOD, self).sanity_check_step(custom_paths=custom_paths)

    def make_module_extra(self):
        """Define IMOD specific variables in generated module file."""
        txt = super(EB_IMOD, self).make_module_extra()
        txt += self.module_generator.set_environment('IMOD_DIR', self.installdir)
        txt += self.module_generator.set_environment('IMOD_PLUGIN_DIR',
                                                     os.path.join(self.installdir, 'lib', 'imodplug'))
        txt += self.module_generator.set_environment('IMOD_QTLIBDIR', os.path.join(self.installdir, 'qtlib'))
        if os.getenv('JAVA_HOME') is None:
            raise EasyBuildError("$JAVA_HOME is not defined for some reason -- check environment")
        else:
            txt += self.module_generator.set_environment('IMOD_JAVADIR', os.getenv('JAVA_HOME'))
        txt += self.module_generator.set_environment('FOR_DISABLE_STACK_TRACE', '1')
        txt += self.module_generator.set_alias('subm', "submfg $* &")
        txt += self.module_generator.msg_on_load("Please set the environment variable $IMOD_CALIB_DIR if appropriate.\n")
        # fixed: the bash message was missing its closing quote (cfr. the csh message below)
        txt += self.module_generator.msg_on_load("bash users run: 'source $EBROOTIMOD/IMOD-linux.sh'\n")
        txt += self.module_generator.msg_on_load("csh users run: 'source $EBROOTIMOD/IMOD-linux.csh'\n")
        return txt
##
# Copyright 2015-2023 Bart Oldeman
# Copyright 2016-2023 Forschungszentrum Juelich
#
# This file is triple-licensed under GPLv2 (see below), MIT, and
# BSD three-clause licenses.
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for installing NVIDIA HPC SDK compilers, based on the easyblock for PGI compilers
@author: Bart Oldeman (McGill University, Calcul Quebec, Compute Canada)
@author: Damian Alvarez (Forschungszentrum Juelich)
@author: Andreas Herten (Forschungszentrum Juelich)
"""
import os
import fileinput
import re
import stat
import sys
import platform
from distutils.version import LooseVersion
from easybuild.easyblocks.generic.packedbinary import PackedBinary
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.filetools import adjust_permissions, write_file
from easybuild.tools.run import run_cmd
from easybuild.tools.modules import get_software_root, get_software_version
from easybuild.tools.config import build_option
from easybuild.tools.build_log import EasyBuildError, print_warning
# contents for siterc file to make PGI/NVHPC pick up $LIBRARY_PATH
# cfr. https://www.pgroup.com/support/link.htm#lib_path_ldflags
SITERC_LIBRARY_PATH = """
# get the value of the environment variable LIBRARY_PATH
variable LIBRARY_PATH is environment(LIBRARY_PATH);
# split this value at colons, separate by -L, prepend 1st one by -L
variable library_path is
default($if($LIBRARY_PATH,-L$replace($LIBRARY_PATH,":", -L)));
# add the -L arguments to the link line
append LDLIBARGS=$library_path;
# also include the location where libm & co live on Debian-based systems
# cfr. https://github.com/easybuilders/easybuild-easyblocks/pull/919
append LDLIBARGS=-L/usr/lib/x86_64-linux-gnu;
"""
class EB_NVHPC(PackedBinary):
    """
    Support for installing the NVIDIA HPC SDK (NVHPC) compilers
    """

    @staticmethod
    def extra_options():
        """Define custom easyconfig parameters for NVHPC."""
        extra_vars = {
            'default_cuda_version': [None, "CUDA Version to be used as default (10.2 or 11.0 or ...)", CUSTOM],
            'module_add_cuda': [False, "Add NVHPC's CUDA to module", CUSTOM],
            'module_add_math_libs': [False, "Add NVHPC's math libraries to module", CUSTOM],
            'module_add_nccl': [False, "Add NVHPC's NCCL library to module", CUSTOM],
            'module_add_nvshmem': [False, "Add NVHPC's NVSHMEM library to module", CUSTOM],
            'module_add_profilers': [False, "Add NVHPC's NVIDIA Profilers to module", CUSTOM],
            'module_byo_compilers': [False, "BYO Compilers: Remove compilers from module", CUSTOM],
            'module_nvhpc_own_mpi': [False, "Add NVHPC's packaged OpenMPI to module", CUSTOM]
        }
        return PackedBinary.extra_options(extra_vars)

    def __init__(self, *args, **kwargs):
        """Easyblock constructor, define custom class variables specific to NVHPC."""
        super(EB_NVHPC, self).__init__(*args, **kwargs)
        # Ideally we should be using something like `easybuild.tools.systemtools.get_cpu_architecture` here, however,
        # on `ppc64le` systems this function returns `POWER` instead of `ppc64le`. Since this path needs to reflect
        # `arch` (https://easybuild.readthedocs.io/en/latest/version-specific/easyconfig_templates.html) the same
        # procedure from `templates.py` was reused here:
        architecture = 'Linux_%s' % platform.uname()[4]
        self.nvhpc_install_subdir = os.path.join(architecture, self.version)

    def install_step(self):
        """Install by running install command."""
        # EULA for NVHPC must be accepted via --accept-eula-for EasyBuild configuration option,
        # or via 'accept_eula = True' in easyconfig file
        self.check_accepted_eula(more_info='https://docs.nvidia.com/hpc-sdk/eula/index.html')
        default_cuda_version = self.cfg['default_cuda_version']
        if default_cuda_version is None:
            # fall back to the major.minor version of a loaded CUDA module, if any
            module_cuda_version_full = get_software_version('CUDA')
            if module_cuda_version_full is not None:
                default_cuda_version = '.'.join(module_cuda_version_full.split('.')[:2])
            else:
                error_msg = "A default CUDA version is needed for installation of NVHPC. "
                error_msg += "It can not be determined automatically and needs to be added manually. "
                error_msg += "You can edit the easyconfig file, "
                error_msg += "or use 'eb --try-amend=default_cuda_version=<version>'."
                raise EasyBuildError(error_msg)
        # Parse default_compute_capability from different sources (CLI has priority)
        ec_default_compute_capability = self.cfg['cuda_compute_capabilities']
        cfg_default_compute_capability = build_option('cuda_compute_capabilities')
        if cfg_default_compute_capability is not None:
            default_compute_capability = cfg_default_compute_capability
        elif ec_default_compute_capability:
            default_compute_capability = ec_default_compute_capability
        else:
            # message fragments now carry separating spaces (used to read "NVHPC.Please" / "'7.0',or")
            error_msg = "A default Compute Capability is needed for installation of NVHPC. "
            error_msg += "Please provide it either in the easyconfig file like 'cuda_compute_capabilities=\"7.0\"', "
            error_msg += "or use 'eb --cuda-compute-capabilities=7.0' from the command line."
            raise EasyBuildError(error_msg)
        # Extract first element of default_compute_capability list, if it is a list
        if isinstance(default_compute_capability, list):
            _before_default_compute_capability = default_compute_capability
            default_compute_capability = _before_default_compute_capability[0]
            if len(_before_default_compute_capability) > 1:
                warning_msg = "Replaced list of compute capabilities {} ".format(_before_default_compute_capability)
                warning_msg += "with first element of list: {}".format(default_compute_capability)
                print_warning(warning_msg)
        # Remove dot-divider for CC; error out if it is not a string
        if isinstance(default_compute_capability, str):
            default_compute_capability = default_compute_capability.replace('.', '')
        else:
            raise EasyBuildError("Unexpected non-string value encountered for compute capability: %s",
                                 default_compute_capability)
        nvhpc_env_vars = {
            'NVHPC_INSTALL_DIR': self.installdir,
            'NVHPC_SILENT': 'true',
            'NVHPC_DEFAULT_CUDA': str(default_cuda_version),  # 10.2, 11.0
            'NVHPC_STDPAR_CUDACC': str(default_compute_capability),  # 70, 80; single value, no list!
        }
        cmd = "%s ./install" % ' '.join(['%s=%s' % x for x in sorted(nvhpc_env_vars.items())])
        run_cmd(cmd, log_all=True, simple=True)
        # make sure localrc uses GCC in PATH, not always the system GCC, and does not use a system g77 but gfortran
        install_abs_subdir = os.path.join(self.installdir, self.nvhpc_install_subdir)
        compilers_subdir = os.path.join(install_abs_subdir, "compilers")
        makelocalrc_folder = os.path.join(compilers_subdir, "bin")
        makelocalrc_filename = os.path.join(compilers_subdir, "bin", "makelocalrc")
        for line in fileinput.input(makelocalrc_filename, inplace=True, backup='.orig'):
            line = re.sub(r"^PATH=/", r"#PATH=/", line)
            sys.stdout.write(line)
        if LooseVersion(self.version) >= LooseVersion('22.9'):
            cmd = "%s -x %s" % (makelocalrc_filename, makelocalrc_folder)
        else:
            cmd = "%s -x %s -g77 /" % (makelocalrc_filename, makelocalrc_folder)
        run_cmd(cmd, log_all=True, simple=True)
        # If an OS libnuma is NOT found, makelocalrc creates symbolic links to libpgnuma.so
        # If we use the EB libnuma, delete those symbolic links to ensure they are not used
        if get_software_root("numactl"):
            for filename in ["libnuma.so", "libnuma.so.1"]:
                path = os.path.join(compilers_subdir, "lib", filename)
                if os.path.islink(path):
                    os.remove(path)
        if LooseVersion(self.version) < LooseVersion('21.3'):
            # install (or update) siterc file to make NVHPC consider $LIBRARY_PATH
            siterc_path = os.path.join(compilers_subdir, 'bin', 'siterc')
            write_file(siterc_path, SITERC_LIBRARY_PATH, append=True)
            self.log.info("Appended instructions to pick up $LIBRARY_PATH to siterc file at %s: %s",
                          siterc_path, SITERC_LIBRARY_PATH)
        # The cuda nvvp tar file has broken permissions
        adjust_permissions(self.installdir, stat.S_IWUSR, add=True, onlydirs=True)

    def sanity_check_step(self):
        """Custom sanity check for NVHPC"""
        prefix = self.nvhpc_install_subdir
        compiler_names = ['nvc', 'nvc++', 'nvfortran']
        files = [os.path.join(prefix, 'compilers', 'bin', x) for x in compiler_names]
        if LooseVersion(self.version) < LooseVersion('21.3'):
            files.append(os.path.join(prefix, 'compilers', 'bin', 'siterc'))
        custom_paths = {
            'files': files,
            'dirs': [os.path.join(prefix, 'compilers', 'bin'), os.path.join(prefix, 'compilers', 'lib'),
                     os.path.join(prefix, 'compilers', 'include'), os.path.join(prefix, 'compilers', 'man')]
        }
        custom_commands = ["%s -v" % compiler for compiler in compiler_names]
        super(EB_NVHPC, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)

    def _nvhpc_extended_components(self, dirs, basepath, env_vars_dirs):
        """
        Extends `dirs` dict of key:environment_variables, value:list_of_directories with additional vars and dirs.
        The dictionary key for a new env var will be created if it doesn't exist.
        Also, the relative path specified in the `env_vars_dirs` dict is absolutized with the `basepath` prefix.
        """
        for env_var, folders in sorted(env_vars_dirs.items()):
            if env_var not in dirs:
                dirs[env_var] = []
            if not isinstance(folders, list):
                folders = [folders]
            for folder in folders:
                dirs[env_var].append(os.path.join(basepath, folder))

    def make_module_req_guess(self):
        """Prefix subdirectories in NVHPC install dir considered for environment variables defined in module file."""
        dirs = super(EB_NVHPC, self).make_module_req_guess()
        for key in dirs:
            dirs[key] = [os.path.join(self.nvhpc_install_subdir, 'compilers', d) for d in dirs[key]]
        # $CPATH should not be defined in module for NVHPC, it causes problems
        # cfr. https://github.com/easybuilders/easybuild-easyblocks/issues/830
        if 'CPATH' in dirs:
            self.log.info("Removing $CPATH entry: %s", dirs['CPATH'])
            del dirs['CPATH']
        # EasyBlock option parsing follows:
        # BYO Compilers:
        # Use NVHPC's libraries and tools with other, external compilers
        if self.cfg['module_byo_compilers']:
            if 'PATH' in dirs:
                del dirs["PATH"]
        # Own MPI:
        # NVHPC is shipped with a compiled OpenMPI installation
        # Enable it by setting according environment variables
        if self.cfg['module_nvhpc_own_mpi']:
            self.nvhpc_mpi_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "mpi")
            env_vars_dirs = {
                'PATH': 'bin',
                'CPATH': 'include',
                'LD_LIBRARY_PATH': 'lib'
            }
            self._nvhpc_extended_components(dirs, self.nvhpc_mpi_basedir, env_vars_dirs)
        # Math Libraries:
        # NVHPC is shipped with math libraries (in a dedicated folder)
        # Enable them by setting according environment variables
        if self.cfg['module_add_math_libs']:
            self.nvhpc_math_basedir = os.path.join(self.nvhpc_install_subdir, "math_libs")
            env_vars_dirs = {
                'CPATH': 'include',
                'LD_LIBRARY_PATH': 'lib64'
            }
            self._nvhpc_extended_components(dirs, self.nvhpc_math_basedir, env_vars_dirs)
        # GPU Profilers:
        # NVHPC is shipped with NVIDIA's GPU profilers (Nsight Compute/Nsight Systems)
        # Enable them by setting the according environment variables
        if self.cfg['module_add_profilers']:
            self.nvhpc_profilers_basedir = os.path.join(self.nvhpc_install_subdir, "profilers")
            env_vars_dirs = {
                'PATH': ['Nsight_Compute', 'Nsight_Systems/bin']
            }
            self._nvhpc_extended_components(dirs, self.nvhpc_profilers_basedir, env_vars_dirs)
        # NCCL:
        # NVHPC is shipped with NCCL
        # Enable it by setting the according environment variables
        if self.cfg['module_add_nccl']:
            self.nvhpc_nccl_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "nccl")
            env_vars_dirs = {
                'CPATH': 'include',
                'LD_LIBRARY_PATH': 'lib'
            }
            self._nvhpc_extended_components(dirs, self.nvhpc_nccl_basedir, env_vars_dirs)
        # NVSHMEM:
        # NVHPC is shipped with NVSHMEM
        # Enable it by setting the according environment variables
        if self.cfg['module_add_nvshmem']:
            self.nvhpc_nvshmem_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "nvshmem")
            env_vars_dirs = {
                'CPATH': 'include',
                'LD_LIBRARY_PATH': 'lib'
            }
            self._nvhpc_extended_components(dirs, self.nvhpc_nvshmem_basedir, env_vars_dirs)
        # CUDA:
        # NVHPC is shipped with CUDA (possibly multiple versions)
        # Rather use this CUDA than an external CUDA (via $CUDA_HOME) by setting according environment variables
        if self.cfg['module_add_cuda']:
            self.nvhpc_cuda_basedir = os.path.join(self.nvhpc_install_subdir, "cuda")
            env_vars_dirs = {
                'PATH': 'bin',
                'LD_LIBRARY_PATH': 'lib64',
                'CPATH': 'include'
            }
            self._nvhpc_extended_components(dirs, self.nvhpc_cuda_basedir, env_vars_dirs)
        return dirs

    def make_module_extra(self):
        """Add environment variable for NVHPC location"""
        txt = super(EB_NVHPC, self).make_module_extra()
        txt += self.module_generator.set_environment('NVHPC', self.installdir)
        if LooseVersion(self.version) >= LooseVersion('22.7'):
            # NVHPC 22.7+ requires the variable NVHPC_CUDA_HOME for external CUDA. CUDA_HOME has been deprecated.
            if not self.cfg['module_add_cuda'] and get_software_root('CUDA'):
                txt += self.module_generator.set_environment('NVHPC_CUDA_HOME', os.getenv('CUDA_HOME'))
        return txt
This diff is collapsed.
This diff is collapsed.
##
# Copyright 2009-2022 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing RELION, implemented as an easyblock
@author: Jasper Grimm (University of York)
"""
from easybuild.easyblocks.generic.cmakemake import CMakeMake
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.modules import get_software_root
from easybuild.tools.config import build_option
from easybuild.tools.build_log import EasyBuildError, print_warning
class EB_RELION(CMakeMake):
    """Support for building/installing RELION."""

    @staticmethod
    def extra_options():
        """Define custom easyconfig parameters for RELION."""
        extra_vars = CMakeMake.extra_options()
        extra_vars.update({
            'cuda_texture': [False, "Enable cuda texture", CUSTOM],
            'default_cuda_capability': [None, "Default CUDA capability for building RELION, e.g. '8.6'", CUSTOM],
            'doubleprec_cpu': [True, "Enable double precision (CPU)", CUSTOM],
            'doubleprec_gpu': [False, "Enable double precision (GPU)", CUSTOM],
            'disable_gui': [False, "Build without GUI", CUSTOM],
            'use_mkl': [True, "Use MKL for FFT (if MKL is a dependency)", CUSTOM],
        })
        return extra_vars

    def configure_step(self, *args, **kwargs):
        """Custom configure step for RELION"""
        # configure some default options
        self.cfg.update('configopts', '-DCMAKE_SHARED_LINKER="$LIBS"')
        self.cfg.update('configopts', '-DMPI_INCLUDE_PATH="$MPI_INC_DIR"')
        gui_deps = get_software_root('FLTK') and get_software_root('X11')
        if self.cfg['disable_gui'] or not gui_deps:
            if not gui_deps:
                print_warning("Missing dependencies for the GUI (FLTK and X11 are required). Building without GUI.")
            self.cfg.update('configopts', '-DGUI=OFF')
        if get_software_root('MKL') and self.cfg['use_mkl']:
            self.cfg.update('configopts', '-DMKLFFT=ON')
        # check if CUDA is present
        if get_software_root('CUDA'):
            self.cfg.update('configopts', '-DCUDA=ON')
            # check cuda_compute_capabilities
            cuda_cc = self.cfg['cuda_compute_capabilities'] or build_option('cuda_compute_capabilities') or []
            if not cuda_cc:
                raise EasyBuildError("Can't build RELION with CUDA support without"
                                     " specifying 'cuda-compute-capabilities'")
            self.cfg.update('configopts', '-DCUDA_ARCH="%s"' % ' '.join(cuda_cc))
            # check default_cuda_capability;
            # compare capabilities numerically, since lexicographic min() would rank e.g. '10.0' below '8.6'
            default_cc = self.cfg['default_cuda_capability']
            if not default_cc:
                default_cc = min(cuda_cc, key=lambda cc: [int(v) for v in cc.split('.')])
                print_warning("No default CUDA capability defined! "
                              "Using '%s' taken as minimum from 'cuda_compute_capabilities'" % default_cc)
            self.cfg.update('configopts', '-DDEFAULT_CUDA_ARCH="%s"' % default_cc)
            if self.cfg['cuda_texture']:
                self.cfg.update('configopts', '-DCUDA_TEXTURE=ON')
            if not self.cfg['doubleprec_cpu']:
                self.cfg.update('configopts', '-DDoublePrec_CPU=OFF')
            if self.cfg['doubleprec_gpu']:
                self.log.warning("Enabling GPU double precision is not recommended")
                self.cfg.update('configopts', '-DDoublePrec_ACC=ON')
            else:
                self.cfg.update('configopts', '-DDoublePrec_ACC=OFF')
        else:
            # CPU build
            self.cfg.update('configopts', '-DALTCPU=ON')
            if self.cfg['doubleprec_cpu']:
                self.cfg.update('configopts', '-DDoublePrec_CPU=ON')
            else:
                self.cfg.update('configopts', '-DDoublePrec_CPU=OFF')
        super(EB_RELION, self).configure_step(*args, **kwargs)

    def install_step(self, *args, **kwargs):
        """Custom install step for RELION"""
        self.cfg['install_cmd'] = 'make -j %s install' % self.cfg['parallel']
        super(EB_RELION, self).install_step(*args, **kwargs)

    def sanity_check_step(self):
        """Custom sanity check step for RELION."""
        custom_paths = {
            'files': ['bin/relion%s' % x for x in ['', '_autopick', '_batchrun', '_batchrun_mpi']],
            'dirs': [],
        }
        custom_commands = ['relion --help', 'relion --version']
        super(EB_RELION, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
This diff is collapsed.
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment