diff --git a/scripts/l7_runme2D.sh b/scripts/l7_runme2D.sh
index cc61eeb9..d6858645 100644
--- a/scripts/l7_runme2D.sh
+++ b/scripts/l7_runme2D.sh
@@ -9,7 +9,4 @@
 #SBATCH --constraint=gpu
 #SBATCH --account class04
 
-module load daint-gpu
-module load Julia/1.9.3-CrayGNU-21.09-cuda
-
-srun julia -O3 PorousConvection_2D_xpu.jl
+srun julia --project PorousConvection_2D_xpu.jl
diff --git a/scripts/l7_runme3D.sh b/scripts/l7_runme3D.sh
index ed6bae3a..6e11414d 100644
--- a/scripts/l7_runme3D.sh
+++ b/scripts/l7_runme3D.sh
@@ -9,7 +9,4 @@
 #SBATCH --constraint=gpu
 #SBATCH --account class04
 
-module load daint-gpu
-module load Julia/1.9.3-CrayGNU-21.09-cuda
-
-srun julia -O3 PorousConvection_3D_xpu.jl
+srun julia --project PorousConvection_3D_xpu.jl
diff --git a/scripts/l8_scripts/l8_runme_mpi_daint.sh b/scripts/l8_scripts/l8_runme_mpi_daint.sh
index 53065b49..2efc64a4 100644
--- a/scripts/l8_scripts/l8_runme_mpi_daint.sh
+++ b/scripts/l8_scripts/l8_runme_mpi_daint.sh
@@ -1,9 +1,6 @@
 #!/bin/bash -l
 
-module load daint-gpu
-module load Julia/1.9.3-CrayGNU-21.09-cuda
-
 export MPICH_RDMA_ENABLED_CUDA=0
 export IGG_CUDAAWARE_MPI=0
 
-julia -O3 diffusion_2D_perf_multixpu.jl
+julia --project diffusion_2D_perf_multixpu.jl
diff --git a/scripts/l8_scripts/l8_sbatch_mpi_daint.sh b/scripts/l8_scripts/l8_sbatch_mpi_daint.sh
index 0efff60e..ce7aa65e 100644
--- a/scripts/l8_scripts/l8_sbatch_mpi_daint.sh
+++ b/scripts/l8_scripts/l8_sbatch_mpi_daint.sh
@@ -9,10 +9,7 @@
 #SBATCH --constraint=gpu
 #SBATCH --account class04
 
-module load daint-gpu
-module load Julia/1.9.3-CrayGNU-21.09-cuda
-
 export MPICH_RDMA_ENABLED_CUDA=0
 export IGG_CUDAAWARE_MPI=0
 
-srun -n4 bash -c 'julia -O3 diffusion_2D_perf_multixpu.jl'
+srun -n4 bash -c 'julia --project diffusion_2D_perf_multixpu.jl'
diff --git a/website/software_install.md b/website/software_install.md
index ac014607..ea8e2517 100644
--- a/website/software_install.md
+++ b/website/software_install.md
@@ -504,6 +504,25 @@ salloc -C'gpu' -Aclass04 -N4 -n4 --time=02:00:00
 srun -n4 julia --project
 ```
 
+If you do not want to use an interactive session you can use the `sbatch` command to launch an MPI job remotely on daint. Example of a `sbatch_mpi_daint.sh` you can launch (without need of an allocation) as [`sbatch sbatch_mpi_daint.sh`](https://github.com/eth-vaw-glaciology/course-101-0250-00/blob/main/scripts/l8_scripts/l8_sbatch_mpi_daint.sh):
+```sh
+#!/bin/bash -l
+#SBATCH --job-name="diff2D"
+#SBATCH --output=diff2D.%j.o
+#SBATCH --error=diff2D.%j.e
+#SBATCH --time=00:05:00
+#SBATCH --nodes=4
+#SBATCH --ntasks-per-node=1
+#SBATCH --partition=normal
+#SBATCH --constraint=gpu
+#SBATCH --account class04
+
+srun -n4 bash -c 'julia --project '
+```
+
+\note{The scripts above can be found in the [scripts](https://github.com/eth-vaw-glaciology/course-101-0250-00/blob/main/scripts/l8_scripts/) folder.}
+
+
 #### CUDA-aware MPI on Piz Daint
 
 \warn{There is currently an issue on the Daint software stack with CuDA-aware MPI. For now, make sure **not to run** with CUDA-aware MPI, i.e., having both `MPICH_RDMA_ENABLED_CUDA` and `IGG_CUDAAWARE_MPI` set to 0.}
@@ -527,25 +546,4 @@ julia --project
 Which you then launch using `srun` upon having made it executable (`chmod +x runme_mpi_daint.sh`)
 ```sh
 srun -n4 ./runme_mpi_daint.sh
-```
-
-If you do not want to use an interactive session you can use the `sbatch` command to launch a job remotely on daint. Example of a `sbatch_mpi_daint.sh` you can launch (without need of an allocation) as [`sbatch sbatch_mpi_daint.sh`](https://github.com/eth-vaw-glaciology/course-101-0250-00/blob/main/scripts/l8_scripts/l8_sbatch_mpi_daint.sh):
-```sh
-#!/bin/bash -l
-#SBATCH --job-name="diff2D"
-#SBATCH --output=diff2D.%j.o
-#SBATCH --error=diff2D.%j.e
-#SBATCH --time=00:05:00
-#SBATCH --nodes=4
-#SBATCH --ntasks-per-node=1
-#SBATCH --partition=normal
-#SBATCH --constraint=gpu
-#SBATCH --account class04
-
-export MPICH_RDMA_ENABLED_CUDA=1
-export IGG_CUDAAWARE_MPI=1
-
-srun -n4 bash -c 'julia --project '
-``` -->
-
-\note{The scripts above can be found in the [scripts](https://github.com/eth-vaw-glaciology/course-101-0250-00/blob/main/scripts/l8_scripts/) folder.}
+```-->
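
A quick usage sketch to go with the patch (not part of the diff; `<jobid>` is a placeholder that Slurm fills in via the `%j` in the `--output`/`--error` directives): submitting the updated `sbatch_mpi_daint.sh` and inspecting its output would look roughly like this.

```sh
# Submit the batch script; no interactive allocation is needed.
sbatch sbatch_mpi_daint.sh
# Check the job's state while it is pending or running.
squeue -u $USER
# Once finished, stdout/stderr land in diff2D.<jobid>.o / diff2D.<jobid>.e,
# per the --output/--error directives in the script.
cat diff2D.<jobid>.o
```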