#!/bin/bash
# SLURM batch script: run the MPAS atmosphere model (v6.3) under HPCToolkit profiling.
#SBATCH --nodes=1 
###SBATCH --tasks-per-node=128
#SBATCH --ntasks=128    # Total number of MPI tasks
#SBATCH -p PESQ1        # Partition (queue) to use
#SBATCH -J MPAS-V6.3   
#SBATCH --time=12:00:00          
# NOTE(review): this directive was written as lowercase '#sbatch', which SLURM
# ignores; kept disabled explicitly (matching the ###SBATCH style above) so the
# current behavior is preserved. Uppercase it to a single '#SBATCH' to enable.
##SBATCH --mem-per-cpu=64000M
#SBATCH --exclusive

# Name of the MPAS executable (expected in the submission directory).
executable=atmosphere_model

# Toolchain: OpenHPC base, swapped to Intel compilers + Intel MPI.
module purge
module load ohpc
module swap gnu9 intel/2021.4.0
module swap openmpi4/4.1.1 impi/2021.4.0
module load hwloc
module load python-3.9.10-gcc-11.2.0-cf2iam6
module load cmake/3.21.3

# Activate the Spack instance that provides the MPAS dependency stack, and
# point its per-user config/cache/tmp areas at the BeeGFS filesystem.
. /mnt/beegfs/eduardo.khamis/usr/local/spack/github/spack_v0.18.1_oneapi/share/spack/setup-env.sh
export SPACK_USER_CONFIG_PATH=/mnt/beegfs/eduardo.khamis/.spack/spack_v0.18.1_oneapi
export SPACK_USER_CACHE_PATH=/mnt/beegfs/eduardo.khamis/.spack/spack_v0.18.1_oneapi/tmp
export TMP=/mnt/beegfs/eduardo.khamis/.spack/spack_v0.18.1_oneapi/tmp
export TMPDIR=/mnt/beegfs/eduardo.khamis/.spack/spack_v0.18.1_oneapi/tmp

export LC_ALL="en_US.UTF-8"

# Load only the runtime dependencies of the MPAS package (the binary itself is
# run from the submission directory, not from Spack).
spack load --only dependencies mpas-model%intel@2021.4.0
spack load --list
# Log the dynamic libraries resolved for the model binary.
# (Bug fix: the original 'which mpas-model%intel@2021.4.0 | xargs ldd' searched
# $PATH for a Spack spec string, which never matches, leaving ldd with no
# argument; inspect the actual executable instead.)
ldd "./${executable}"
module list
lscpu
#env

# Per-run results directory keyed by partition / node count / MPI ranks / job id.
resultdir=results/partition-${SLURM_JOB_PARTITION}/NUMNODES-${SLURM_JOB_NUM_NODES}/MPI-${SLURM_NTASKS}/JOBID-${SLURM_JOBID}

mkdir -p "${resultdir}"

#export MPAS_DYNAMICS_RANKS_PER_NODE=2
#export MPAS_RADIATION_RANKS_PER_NODE=6
#export MALLOCSTATS=1

# Remove the stack-size limit (setting preserved from the original script).
ulimit -s unlimited

# Pure-MPI run: keep OpenMP and MKL threading disabled.
export OMP_NUM_THREADS=1
export MKL_NUM_THREADS=1
#export I_MPI_DEBUG=5
#export MKL_DEBUG_CPU_TYPE=5
export I_MPI_FABRICS=shm:ofi

# Run from the directory the job was submitted from; abort if the cd fails so
# the model is never launched from the wrong directory.
cd "${SLURM_SUBMIT_DIR:-.}" || exit 1
#ls  $SLURM_SUBMIT_DIR
#touch x1.2621442.init.nc 
echo "${SLURM_JOB_NUM_NODES}"

date
echo "mpirun -n $SLURM_NTASKS ./${executable}"
# Launch the model under HPCToolkit's measurement tool (hpcrun), sampling
# CPUTIME plus hardware counters.
# NOTE(review): hpcprof below expects the measurements in
# hpctoolkit-${executable}-measurements-${SLURM_JOBID}; confirm hpcrun appends
# the job id on this system (no explicit -o is given).
mpirun -n "${SLURM_NTASKS}" \
./hpcrun -t -e CPUTIME -e CYCLES -e CACHE-MISSES -e INSTRUCTIONS \
./${executable}
#mpirun -genvall ./${executable} 
#time mpirun -np $SLURM_NTASKS ./${executable} 
#mpirun -np $SLURM_NTASKS -mca btl tcp,self,vader -mca btl_tcp_if_include ib0 --bind-to core --map-by ppr:8:L3cache ./${executable}
#mpirun -np $SLURM_NTASKS -mca btl tcp,self,vader -mca btl_tcp_if_include ib0 -bind-to core -map-by core  ./${executable} 

# Recover program structure (loops, inlining) from the binary for attribution.
./hpcstruct "./${executable}"

# Correlate measurements with source code; -I path ending in '+' is searched
# recursively for source files.
./hpcprof \
-I /mnt/beegfs/eduardo.khamis/mpas/github/MPAS-Model_v6.3_egeon.spack_v0.18.1_oneapi_debug/+ \
-S "${executable}.hpcstruct" "hpctoolkit-${executable}-measurements-${SLURM_JOBID}"

date

#hpctoolkitresultdir=profiling/hpctoolkit/NUMNODES-${SLURM_JOB_NUM_NODES}/MPI-${SLURM_NTASKS}_OMP-${SLURM_CPUS_PER_TASK}_JOBID-${SLURM_JOBID}
hpctoolkitresultdir=profiling/hpctoolkit

# Archive the HPCToolkit database, measurements and structure file.
mkdir -p "${resultdir}/${hpctoolkitresultdir}"
mv "hpctoolkit-${executable}-database-${SLURM_JOBID}"     "${resultdir}/${hpctoolkitresultdir}/"
mv "hpctoolkit-${executable}-measurements-${SLURM_JOBID}" "${resultdir}/${hpctoolkitresultdir}/"
mv "${executable}.hpcstruct"                              "${resultdir}/${hpctoolkitresultdir}/"

# Keep the job log, model configuration and outputs next to the results.
# (Globs are deliberately unquoted: they must expand to the matching files;
# these copies/moves are best-effort.)
cp "slurm-${SLURM_JOBID}.out" "${resultdir}/"
cp log.atmosphere.*           "${resultdir}/"
cp stream*                    "${resultdir}/"
cp namelist.atmosphere        "${resultdir}/"
cp submit_atmosphere.sh       "${resultdir}/"
mv x1.*.init.nc-*.lock        "${resultdir}/"
mv diag*                      "${resultdir}/"
mv histor*                    "${resultdir}/"

# Exit 0 explicitly: a bare 'exit' would propagate the status of the last
# best-effort mv and could mark an otherwise successful job as failed.
exit 0
