Commit 7c2894fd authored by Ondrej Vysocky

some small changes to make the code easier to compile

parent 74e040be
@@ -5,17 +5,20 @@ MERIC_LIBS_COMMON="-L$HOME/OpenFOAM/meric/lib/ -lmericmpi "
MERIC_LIBS_TAURUS="-lhdeem -lfreeipmi -lcpufreq -lx86_adapt -L/sw/taurus/libraries/papi/5.4.1/lib/ -lpapi"
MERIC_LIBS_SALOMON="-L$HOME/readex/MERIC/x86adaptGCC/x86_adapt/build/ -lx86_adapt"
export READEX_INCLUDE="-I/scratch/p_readex/OPENFOAM/meric/include "
export READEX_DEFINE=" " #-DENDKILL"
#export READEX_LIBS=$MERIC_LIBS_COMMON$MERIC_LIBS_TAURUS
export FM_DIR=~/OpenFOAM
#export FM_DIR=/scratch/p_readex/OPENFOAM/OpenFOAM-basic
# SELECT GCC #################################################################
# GCC compiler
cp ~/OpenFOAM/OpenFOAM-v1612+/etc/bashrc-GCC6 ~/OpenFOAM/OpenFOAM-v1612+/etc/bashrc
cp $FM_DIR/OpenFOAM-v1612+/etc/bashrc-GCC6 $FM_DIR/OpenFOAM-v1612+/etc/bashrc
# SCOREP flags
# #1 autofilter
# --mpp=mpi
cp ~/OpenFOAM/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++SCOREP ~/OpenFOAM/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++
cp $FM_DIR/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++SCOREP $FM_DIR/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++
# #2 dyndetect
# --online-access --user --thread=none --mpp=mpi
#cp ~/OpenFOAM/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++SCOREPmanual ~/OpenFOAM/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++
@@ -42,7 +45,7 @@ module load zlib
module load flex
module load cmake
source ~/OpenFOAM/OpenFOAM-v1612+/etc/bashrc
source $FM_DIR/OpenFOAM-v1612+/etc/bashrc
export WM_NCOMPPROCS=24
export BOOST_ARCH_PATH=$BOOST_ROOT
#!/bin/sh
# SELECT MACHINE ##############################################################
MERIC_LIBS_COMMON="-L$HOME/OpenFOAM/meric/lib/ -lmericmpi "
MERIC_LIBS_TAURUS="-lhdeem -lfreeipmi -lcpufreq -lx86_adapt -L/sw/taurus/libraries/papi/5.4.1/lib/ -lpapi"
MERIC_LIBS_SALOMON="-L$HOME/readex/MERIC/x86adaptGCC/x86_adapt/build/ -lx86_adapt"
export READEX_INCLUDE="-I/scratch/p_readex/OPENFOAM/meric/include "
export READEX_DEFINE="-DUSE_SCOREP " #-DENDKILL"
#export READEX_LIBS=$MERIC_LIBS_COMMON$MERIC_LIBS_TAURUS
export READEX_FILTER="/scratch/p_readex/OPENFOAM/OpenFOAM-filtered/OpenFOAM-v1612+/scorep.filt "
export FM_DIR=~/OpenFOAM
#export FM_DIR=/scratch/p_readex/OPENFOAM/OpenFOAM-filtered
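# Sanity-check sketch (not in the original script): the MERIC install and the OpenFOAM
# tree referenced above should exist before anything below is copied or sourced, e.g.
#   ls $HOME/OpenFOAM/meric/lib/libmericmpi.* $FM_DIR/OpenFOAM-v1612+/etc/bashrc-GCC6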
# SELECT GCC #################################################################
# GCC compiler
#cp ~/OpenFOAM/OpenFOAM-v1612+/etc/bashrc-GCC6 ~/OpenFOAM/OpenFOAM-v1612+/etc/bashrc
cp $FM_DIR/OpenFOAM-v1612+/etc/bashrc-GCC6 $FM_DIR/OpenFOAM-v1612+/etc/bashrc
# SCOREP flags
# #1 autofilter
# --mpp=mpi
#cp ~/OpenFOAM/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++SCOREP ~/OpenFOAM/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++
# #2 dyndetect
# --online-access --user --thread=none --mpp=mpi
cp $FM_DIR/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++SCOREPfiltered $FM_DIR/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++
# compilation with a filter file - according to Andreas' script
# --noopenmp --instrument-filter=$(HOME)/OpenFOAM/exp/scorep.filt --online-access --user
#cp ~/OpenFOAM/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++AUTOFILTER ~/OpenFOAM/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++
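# Verification sketch (assumes the copy above succeeded): the active wmake rule should
# now invoke the Score-P wrapper, which can be checked with
#   grep '^CC' $FM_DIR/OpenFOAM-v1612+/wmake/rules/linux64Gcc/c++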
# LOAD MODULES and SET ENV ####################################################
module purge
module use /projects/p_readex/modules
module load readex/ci_readex_bullxmpi1.2.8.4_gcc6.3.0
#module load scorep-uncore
#module switch papi/5.4.3 papi/5.4.1
# OpenFoam modules
module load zlib
module load flex
module load cmake
source $FM_DIR/OpenFOAM-v1612+/etc/bashrc
export WM_NCOMPPROCS=24
export BOOST_ARCH_PATH=$BOOST_ROOT
# Meric modules
#module load hdeem
#module load papi/5.4.1
export LD_LIBRARY_PATH+=:/scratch/p_readex/OPENFOAM/meric/lib # MERIC built without OpenMP!
export LD_LIBRARY_PATH+=:/usr/local/lib #x86adapt
export MERIC_NUM_THREADS=0
export MERIC_FREQUENCY=25
export MERIC_UNCORE_FREQUENCY=30
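# Run sketch (assumption: the solvers were linked against libmericmpi via MERIC_LIBS_*
# above); the exported MERIC_* variables are read by the library when the instrumented
# binary starts, e.g.
#   mpirun -n 24 simpleFoam -parallel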
return # << << << <<
# vim /ThirdParty-v1612+/scotch_6.0.3/src/Makefile.inc
# "-shared-intel" to LDFLAGS
BOOST_INC=/sw/taurus/libraries/boost/1.61.0-intel2017.0.020-intelmpi2017/include
BOOST_ARCH_PATH=/home/h6/vysocky/OpenFOAM/ThirdParty-v1612+/platforms/linux64Icc/boost_1_62_0
BOOST_LIB=/sw/taurus/libraries/boost/1.61.0-intel2017.0.020-intelmpi2017/lib
BOOST_ROOT=/sw/taurus/libraries/boost/1.61.0-intel2017.0.020-intelmpi2017
bash-4.1$ export BOOST_ARCH_PATH=/sw/taurus/libraries/boost/1.61.0-intel2017.0.020-intelmpi2017
################################################################################
# OpenFOAM + Intel
# 1
module load intel/2016.2.181
module load mpirt/5.1.2.150
module load zlib
module load flex
module load cmake
module load boost/1.62.0-intel2016.2.181
# 2
# module load intel/2015.3.187
# module load zlib
# module load flex
# module load cmake
# module load boost/1.61.0-intel2015.3.187-python2.7
# 3
# module load intel/2017.0.020
# module load zlib
# module load flex
# module load cmake
# module load boost/1.61.0-intel2017.0.020-intelmpi2017
#source ~/OpenFOAM/OpenFOAM-4.1/etc/bashrc
export MPI_ARCH_FLAGS="-DOMPI_SKIP_MPICXX"
export MPI_ARCH_INC="-isystem $MPI_ROOT/include"
export MPI_ARCH_LIBS="-L$MPI_ROOT/lib -lmpi"
source ~/OpenFOAM/OpenFOAM-v1612+/etc/bashrc
export WM_CXX=mpiicpc
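# quick check (sketch, not part of the original notes): confirm the wrapper and the MPI
# settings resolve before building
mpiicpc -show
echo $MPI_ARCH_INC $MPI_ARCH_LIBS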
exit 0
#################################################################### installation #
srun -N 1 --tasks-per-node=1 --cpus-per-task=24 --exclusive -p haswell --x11 --pty bash
source ~/OpenFOAM/OpenFOAM-v1612+/etc/bashrc
export WM_NCOMPPROCS=22
printenv | grep icpc # if you don't know, set mpiicpc everywhere
export WM_CXX=mpiicpc
# export MPICXX_CXX=mpiicpc
# export MPI_ROOT=$I_MPI_ROOT
# vim $WM_PROJECT_DIR/etc/config/settings.sh
foam
vim etc/bashrc
# export WM_COMPILER=Icc
# export WM_MPLIB=SYSTEMMPI
vim wmake/rules/linux64Icc/c++
# CC = mpiicpc -std=c++11 -fp-trap=common -fp-model precise
./Allwmake
################################################################################
# post-compilation test
foamInstallationTest
mkdir -p $FOAM_RUN
run
cp -r $FOAM_TUTORIALS/incompressible/simpleFoam/pitzDaily ./
cd pitzDaily
blockMesh
simpleFoam
# paraFoam
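# rough check (sketch, not in the original notes): capture the solver output and grep
# for the convergence message
simpleFoam | tee log.simpleFoam
grep -i converged log.simpleFoam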
################################################################################
# run example:
/home/vysocky/OpenFOAM/OpenFOAM-v1612+/tutorials/incompressible/icoFoam/cavity/cavity
mpirun -n 4 icoFoam -parallel
system/blockMeshDict - problem size
system/decomposeParDict - problem decomposition
mpirun -n 4 --bind-to-none icoFoam -parallel
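# full parallel-run sketch (assumes numberOfSubdomains in system/decomposeParDict matches -n)
blockMesh
decomposePar
mpirun -n 4 icoFoam -parallel
reconstructPar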
################################################################################
# source files
# icoFoam
vim applications/solvers/incompressible/icoFoam/icoFoam.C
vim tutorials/incompressible/icoFoam/cavity/cavity/system/fvSolution
# solvers
vim src/OpenFOAM/matrices/lduMatrix/solvers/PCG/PCG.C
vim src/OpenFOAM/matrices/lduMatrix/solvers/PBiCG/PBiCG.C
# matrix
vim src/OpenFOAM/matrices/lduMatrix/lduMatrix/lduMatrix.C
vim src/OpenFOAM/matrices/lduMatrix/lduMatrix/lduMatrixATmul.C
# simpleFoam
vim applications/solvers/incompressible/simpleFoam/UEqn.H
vim applications/solvers/incompressible/simpleFoam/pEqn.H
vim applications/solvers/incompressible/simpleFoam/simpleFoam.C
vim applications/solvers/incompressible/simpleFoam/Make/options # compilation settings
# pitzDaily
vim tutorials/incompressible/simpleFoam/pitzDaily/system/fvSolution
vim tutorials/incompressible/simpleFoam/pitzDaily/system/controlDict
vim tutorials/incompressible/simpleFoam/pitzDaily/system/blockMeshDict
# motorBike
vim tutorials/incompressible/simpleFoam/motorBike/system/fvSolution
################################## motorBike
- GAMG
- PBiCG + DILU
mpiicpc -std=c++11 -fp-trap=common -fp-model precise -DOPENFOAM_PLUS=1612 -Dlinux64 -DWM_ARCH_OPTION=64 -DWM_DP -DWM_LABEL_SIZE=32 -Wall -Wextra -Wnon-virtual-dtor -Wno-unused-parameter -Wno-invalid-offsetof -diag-disable 327,654,1125,2289,2304,11062,11074,11076 -O3 -DNoRepository -I/home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/src/TurbulenceModels/turbulenceModels/lnInclude -I/home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/src/TurbulenceModels/incompressible/lnInclude -I/home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/src/transportModels -I/home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/src/transportModels/incompressible/singlePhaseTransportModel -I/home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/src/finiteVolume/lnInclude -I/home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/src/meshTools/lnInclude -I/home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/src/sampling/lnInclude -I/home/vysocky/meric/include -IlnInclude -I. -I/home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/src/OpenFOAM/lnInclude -I/home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/src/OSspecific/POSIX/lnInclude -fPIC -c simpleFoam.C -o /home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/platforms/linux64IccDPInt32Opt/applications/solvers/incompressible/simpleFoam/simpleFoam.o
################################################################################
# OpenFOAM + gcc
module load gcc/6.2.0
module load zlib
module load flex
module load cmake
module load boost/1.62.0-gnu6.2
export WM_NCOMPPROCS=22
source ~/OpenFOAM/OpenFOAM-v1612+/etc/bashrc
module load boost/1.62.0-gnu6.2
# gcc/6.2.0
# boost/1.62.0-gnu6.2
# bullxmpi/1.2.8.4
module load zlib
module load flex
module load cmake
module load hdeem
################################################################################
openfoam/2.2.2
1) intel/2016.1.150 2) bullxmpi/1.2.8.4 3) gcc/5.3.0 4) openfoam/2.2.2
openfoam/2.3.0(default)
1) intel/2013 2) bullxmpi/1.2.8.4 3) openfoam/2.3.0
openfoam/2.4.0
1) intel/2013 2) bullxmpi/1.2.8.4 3) openfoam/2.4.0
openfoam/4.0
1) intel/2017.0.020 2) intelmpi/2017.0.098 3) openfoam/4.0
# MPI ERROR ####################################################################
bash-4.1$ mpirun -n 4 icoFoam -parallel
--------------------------------------------------------------------------
Not enough processors were found on the local host to meet the requested
binding action:
Local host: taurusi5217
Action requested: bind-to-core
Application name: /home/h6/vysocky/OpenFOAM/OpenFOAM-v1612+/platforms/linux64GccDPInt32Opt/bin/icoFoam
Please revise the request and try again.
--------------------------------------------------------------------------
--------------------------------------------------------------------------
mpirun was unable to start the specified application as it encountered an error
on node taurusi5217. More information may be available above.
--------------------------------------------------------------------------
4 total processes failed to start
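# Workaround sketch for the binding error above: disable core binding (as in the earlier
# "--bind-to-none" example) or request enough cores in the allocation, e.g.
mpirun -n 4 --bind-to-none icoFoam -parallel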
################################################################################
bash-4.1$ srun icoFoam -parallel
--------------------------------------------------------------------------
It looks like orte_init failed for some reason; your parallel process is
likely to abort. There are many reasons that a parallel process can
fail during orte_init; some of which are due to configuration or
environment problems. This failure appears to be an internal failure;
here's some additional information (which may only be relevant to an
Open MPI developer):
PMI2_Job_GetId failed failed
--> Returned value (null) (14) instead of ORTE_SUCCESS
--------------------------------------------------------------------------
--------------------------------------------------------------------------
It looks like orte_init failed for some reason; your parallel process is
likely to abort. There are many reasons that a parallel process can
fail during orte_init; some of which are due to configuration or
environment problems. This failure appears to be an internal failure;
here's some additional information (which may only be relevant to an
Open MPI developer):
orte_ess_init failed
--> Returned value (null) (14) instead of ORTE_SUCCESS
--------------------------------------------------------------------------
--------------------------------------------------------------------------
It looks like MPI_INIT failed for some reason; your parallel process is
likely to abort. There are many reasons that a parallel process can
fail during MPI_INIT; some of which are due to configuration or environment
problems. This failure appears to be an internal failure; here's some
additional information (which may only be relevant to an Open MPI
developer):
ompi_mpi_init: ompi_rte_init failed
--> Returned "(null)" (14) instead of "Success" (0)
--------------------------------------------------------------------------
*** An error occurred in MPI_Init
*** on a NULL communicator
*** MPI_ERRORS_ARE_FATAL (processes in this communicator will now abort,
*** and potentially your MPI job)
[taurusi4160:10295] Local abort before MPI_INIT completed successfully; not able to aggregate error messages, and not able to guarantee that all other processes were killed!
# (the same orte_init / MPI_INIT messages are repeated for each of the 4 ranks)
srun: error: taurusi4160: tasks 0-3: Exited with exit code 1
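# Note on the srun failure above (assumption, not verified here): bullxmpi's ORTE layer
# could not get a job id through SLURM's PMI2 interface; launching with mpirun inside the
# allocation, or selecting the PMI2 plugin explicitly, is the usual workaround, e.g.
srun --mpi=pmi2 -n 4 icoFoam -parallel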
boost/1.49
boost/1.54.0-gnu4.6(default)
boost/1.54.0-gnu4.6.2
boost/1.54.0-intel12.1
boost/1.54.0-pgi13.6
boost/1.55.0-gnu4.8
boost/1.55.0-pgi14.1
boost/1.56.0-gnu4.9.1
boost/1.56.0-intel2015.3.187-python2.7
boost/1.57.0-intel2013-sp1
boost/1.58.0-gnu5.1
boost/1.59.0-intel2015.3.187
boost/1.60.0
boost/1.60.0-gnu5.3
boost/1.60.0-gnu5.3-intelmpi5.1
boost/1.60.0-intel2016.1.150
boost/1.60.0-intel2016.2.181-intelmpi5.1-knc
boost/1.61.0-intel2015.3.187-python2.7
boost/1.61.0-intel2017.0.020-intelmpi2017
boost/1.62.0-gnu6.2
boost/1.62.0-intel2016.2.181
@@ -9,8 +9,8 @@ export READEX_INCLUDE="-I/scratch/p_readex/OPENFOAM/meric/include "
export READEX_DEFINE="-DUSE_SCOREP_MANUAL " #-DENDKILL"
#export READEX_LIBS=$MERIC_LIBS_COMMON$MERIC_LIBS_TAURUS
FM_DIR=~/OpenFOAM
#M_DIR=/scratch/p_readex/OPENFOAM/OpenFOAM-manual
export FM_DIR=~/OpenFOAM
#export FM_DIR=/scratch/p_readex/OPENFOAM/OpenFOAM-manual
# SELECT GCC #################################################################
# GCC compiler
cp $FM_DIR/OpenFOAM-v1612+/etc/bashrc-GCC6 $FM_DIR/OpenFOAM-v1612+/etc/bashrc
@@ -10,8 +10,8 @@ export READEX_DEFINE="-DUSE_SCOREP " #-DENDKILL"
#export READEX_LIBS=$MERIC_LIBS_COMMON$MERIC_LIBS_TAURUS
FM_DIR=~/OpenFOAM
#FM_DIR=/scratch/p_readex/OPENFOAM/OpenFOAM-user
export FM_DIR=~/OpenFOAM
#export FM_DIR=/scratch/p_readex/OPENFOAM/OpenFOAM-user
# SELECT GCC #################################################################
# GCC compiler
#cp ~/OpenFOAM/OpenFOAM-v1612+/etc/bashrc-GCC6 ~/OpenFOAM/OpenFOAM-v1612+/etc/bashrc
@@ -43,7 +43,9 @@ export WM_PROJECT_VERSION=v1612+
# Please set to the appropriate path if the default is not correct.
#
[ $BASH_SOURCE ] && FOAM_INST_DIR=$(\cd ${BASH_SOURCE%/*/*/*} && \pwd -P) || \
FOAM_INST_DIR=$HOME/$WM_PROJECT
#FOAM_INST_DIR=$HOME/$WM_PROJECT
#FOAM_INST_DIR=/scratch/p_readex/OPENFOAM/OpenFOAM-filtered
FOAM_INST_DIR=$FM_DIR
# FOAM_INST_DIR=~$WM_PROJECT
# FOAM_INST_DIR=/opt/$WM_PROJECT
# FOAM_INST_DIR=/usr/local/$WM_PROJECT
@@ -5,7 +5,7 @@ c++WARN = -Wall -Wextra -Wold-style-cast -Wnon-virtual-dtor -Wno-unused-para
# Suppress some warnings for flex++ and CGAL
c++LESSWARN = -Wno-old-style-cast -Wno-unused-local-typedefs -Wno-array-bounds
CC = scorep --thread=none --instrument-filter=$(HOME)/OpenFOAM/OpenFOAM-v1612+/tutorials/incompressible/simpleFoam/motorBike/scorep.filt --online-access --user g++ -std=c++11 -m64
CC = scorep --mpp=mpi --thread=none --instrument-filter=$(READEX_FILTER) --online-access --user g++ -std=c++11 -m64
include $(DEFAULT_RULES)/c++$(WM_COMPILE_OPTION)
#!/bin/bash
#SBATCH -t 0-02:00
#SBATCH --nodes=1
#SBATCH --tasks-per-node=1
#SBATCH --cpus-per-task=24
#SBATCH -A p_readex
# #SBATCH --exclusive
#SBATCH --mem=62000
#SBATCH --mail-type=ALL
#SBATCH --mail-user=ondrej.vysocky@vsb.cz
# interactive
# srun -N 1 --tasks-per-node=1 --cpus-per-task=24 --time 2:00:00 -p interactive --exclusive --x11 --pty bash
# modules
source $HOME/OpenFOAM/FOAMMODULES-ScorepGCC-basic-Taurus.sh
#source /scratch/p_readex/OPENFOAM/OpenFOAM-filtered/FOAMMODULES-ScorepGCC-filtered-Taurus.sh
# cd
#foam
cd $FM_DIR/OpenFOAM-v1612+
depth=1
# clean - compulsory
wcleanLnIncludeAll
wcleanPlatform
rm -rf $FM_DIR/OpenFOAM-v1612+/wmake/platform
# compile
if [ $depth -eq 1 ]
then
./Allwmake
elif [ $depth -eq 2 ]
then
cd src
./Allwmake
cd ..
cd $FM_DIR/OpenFOAM-v1612+/applications/solvers/incompressible/simpleFoam
wclean
wmake
fi
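# Submission sketch (the job-script filename below is hypothetical):
#   sbatch FOAMCOMPILE-Taurus.sh
# or run the same steps interactively after the srun command shown in the header.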