-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathsub_executors_heat3d_mpi_time_stamps_Icelake_4CPUs.sh
40 lines (33 loc) · 1.33 KB
/
sub_executors_heat3d_mpi_time_stamps_Icelake_4CPUs.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
#!/bin/bash
#PJM -L "node=2"
#PJM -L "rscgrp=regular-a"
#PJM -L "elapse=60:00"
#PJM -s
#PJM -g jh220031a
#PJM --mpi proc=4

# Batch job: build (if needed) and run the heat3d-mpi "executors" mini-app
# on 2 nodes / 4 MPI ranks with a 1x1x4 domain decomposition, collecting
# time stamps. Runs twice: once synchronous, once with --is_async 1.

. /etc/profile.d/modules.sh # Initialize module command
module purge

# Load spack (HOME is redirected to the work area that hosts the spack tree)
export HOME=/work/jh220031a/i18048
. "$HOME/spack/share/spack/setup-env.sh"

module load nvidia/23.3 cmake/3.24.0 nvmpi/23.3
export NVLOCALRC=/work/opt/local/x86_64/cores/nvidia/23.3/Linux_x86_64/23.3/compilers/bin/localrc_gcc12.2.0

# Need GPUs to build the code appropriately,
# so compile inside a batch job, wherein GPUs are visible.
if [ ! -d "../build" ]; then
  # Abort on any failed step: an ignored configure/build error would
  # otherwise let the script fall through and launch a stale/missing binary.
  cd ../ || exit 1
  rm -rf build
  mkdir build && cd build || exit 1
  cmake -DCMAKE_CXX_COMPILER=nvc++ -DBACKEND=OPENMP -DCMAKE_CXX_FLAGS="-std=c++20" .. || exit 1
  cmake --build . -j 8 || exit 1
  cd ../wk/ || exit 1
fi

export UCX_MEMTYPE_CACHE=n
export UCX_IB_GPU_DIRECT_RDMA=no
export OMPI_MCA_plm_rsh_agent=/bin/pjrsh
export OMP_NUM_THREADS=36

# Synchronous run (time stamps enabled, diagnostics disabled)
mpiexec -machinefile "$PJM_O_NODEINF" -np "$PJM_MPI_PROC" --bind-to none \
    ../build/mini-apps/heat3d-mpi/executors/heat3d-mpi-executors --px 1 --py 1 --pz 4 --nx 1536 --ny 1536 --nz 384 --nbiter 100 --freq_diag 0 --use_time_stamps 1

# Asynchronous run (same configuration plus --is_async 1)
mpiexec -machinefile "$PJM_O_NODEINF" -np "$PJM_MPI_PROC" --bind-to none \
    ../build/mini-apps/heat3d-mpi/executors/heat3d-mpi-executors --px 1 --py 1 --pz 4 --nx 1536 --ny 1536 --nz 384 --nbiter 100 --freq_diag 0 --use_time_stamps 1 --is_async 1