goshawk-pt2.bash
#!/bin/bash
# Account and email information
#SBATCH -A YOUR_USER_NAME_HERE         # Slurm account/allocation
#SBATCH --mail-type=end                # email when the job ends
#SBATCH [email protected]  # notification address
#SBATCH -J GOSHAWK_pt2 # job name
#SBATCH -o ./slurm/log_slurm.o%j # output and error file name (%j expands to jobID)
#SBATCH -p bsudfq # queue (partition)
#SBATCH -N 4 # Number of nodes
#SBATCH --ntasks 192 # Number of tasks (48/node max Borah)
#SBATCH -t 00-01:00:00 # run time (d-hh:mm:ss)
# Remove virtual memory and stack size limits for the MPI processes
ulimit -v unlimited
ulimit -s unlimited

# Start from a clean module environment and load Slurm
module purge
module load slurm
# USER INPUTS
dem='Copernicus' # Copernicus, SRTM, or 3DEP
path_to_img_base='/bsuhome/bwilder/scratch/goshawk-mu/prisma/BECK2/PRS_20210429180418_20210429180422_0001'
path_to_libradtran_bin='/bsuhome/bwilder/scratch/SPHERES/libRadtran-2.0.4/bin'
service_account='[email protected]'
ee_json='/bsuhome/bwilder/scratch/SPHERES/brent-snow.json'
optimal_cosi='yes' #yes or no
impurity_type='Dust' #Dust or Soot or None
# END USER INPUTS
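
# NOTE: the paths, service account, and Earth Engine key file above are
# examples from one user's Borah setup; swap in your own values before running.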
# Activate the goshawk conda environment (assumes conda has been initialized
# for non-interactive shells, e.g. via `conda init bash` or sourcing conda.sh)
conda activate goshawk
# Run part 2 of the GOSHAWK pipeline across all allocated MPI tasks
mpirun -np $SLURM_NTASKS python3 -m mpi4py.futures ./scripts/pipeline_pt2.py \
    --dem $dem \
    --img $path_to_img_base \
    --lrt $path_to_libradtran_bin \
    --ee_account $service_account \
    --ee_json $ee_json \
    --mu $optimal_cosi \
    --impurity $impurity_type \
    --n_cpu $SLURM_NTASKS
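
# A minimal sketch of how this job might be submitted on Borah (assumes the
# repository root is the working directory; the ./slurm/ directory must exist
# for the log file named in the -o directive above):
#   mkdir -p slurm
#   sbatch goshawk-pt2.bash
#   squeue -u $USER   # check queue status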