#!/bin/bash -l
#
# RELION 4.0 job submission template for SLURM at the MRC-LMB cluster
# This script is maintained by Takanori Nakane.
#
#SBATCH --job-name=RELION4.0
#SBATCH --error=CtfRefine/job026/run.err
#SBATCH --output=CtfRefine/job026/run.out
#SBATCH --open-mode=append
#SBATCH --time=3-00:00:00       # should be enough for most jobs
#SBATCH --mail-type=FAIL
#SBATCH --partition=cpu
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=112     # grab a full node
#SBATCH --mem=700G

# This folder is created by the job prolog script.
# For some reason, the environment variable is not set properly there,
# so I set it here.
export RELION_SCRATCH_DIR=/ssd/${SLURM_JOB_USER}-${SLURM_JOBID}

source /public/gcc/gcc10_2_0.sh
source /public/compilers/intel-2021.3/setvars.sh  # no tcsh version
export OMPI_CC=icc
export OMPI_CXX=icpc
export PATH=/public/EM/OpenMPI/openmpi-4.0.1/build/bin:$PATH
export LANG=en_US.utf8    # Intel compilers require this
export LC_ALL=en_US.utf8

# We cannot source relion-4.0-dev.csh because this is a bash script.
export PATH=/public/EM/RELION/relion-4.0-dev/build-cpu/bin:$PATH

# SIDESPLITTER
export RELION_EXTERNAL_RECONSTRUCT_EXECUTABLE=/public/EM/SIDESPLITTER/sidesplitter_wrapper.sh
export SIDESPLITTER=/public/EM/SIDESPLITTER/sidesplitter

echo "* RELION 4.0 CPU JOB ENVIRONMENT *"
echo
echo PATH TO mpiexec: `which mpiexec`
echo PATH TO relion_refine_mpi: `which relion_refine_mpi`
echo LIBRARIES FOR relion_refine_mpi:
ldd `which relion_refine_mpi`
echo RELION_EXTERNAL_RECONSTRUCT_EXECUTABLE: $RELION_EXTERNAL_RECONSTRUCT_EXECUTABLE
echo SIDESPLITTER: $SIDESPLITTER
echo

STARTTIME=`date +%s`
echo RUN STARTED AT `date -d@${STARTTIME}`
echo TO KILL THIS JOB, RUN: scancel $SLURM_JOB_ID
echo ------------------------------------------------------------------------------------

# --oversubscribe is intentional, to allow an almost idle rank 0.
# --mca btl_tcp_if_include 10.0.0.0/8 prevents crashes when running on multiple nodes.
# See https://github.com/open-mpi/ompi/issues/6240.
mpirun --mca btl_tcp_if_include 10.0.0.0/8 --oversubscribe -n 4 \
    `which relion_ctf_refine_mpi` \
    --i CtfRefine/job025/particles_ctf_refine.star \
    --f PostProcess/job024/postprocess.star \
    --o CtfRefine/job026/ \
    --fit_defocus --kmin_defocus 30 --fit_mode fpmff \
    --j 24 \
    --pipeline_control CtfRefine/job026/

echo ------------------------------------------------------------------------------------
ENDTIME=`date +%s`
echo RUN FINISHED AT `date -d@${ENDTIME}`
echo ELAPSED WALL CLOCK TIME IN SECONDS: `echo $ENDTIME - $STARTTIME | bc`

exit 0
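# Usage note (a minimal sketch; the script's filename and job IDs below are
# placeholders, not part of the original template). Submit with sbatch and
# monitor with the standard SLURM tools:
#
#   sbatch relion_ctfrefine_job.sh     # submit this script (hypothetical filename)
#   squeue -u $USER                    # check queue / running status
#   scancel <jobid>                    # cancel, as echoed in the log above
#
# The paths under CtfRefine/job026/ and PostProcess/job024/ are specific to
# this project's pipeline and would normally be rewritten by the RELION GUI
# when it generates the submission script from this template.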