# Work from a filesystem with ample space (container tmp/cache can be large).
BASE=/work2/noaa/epic/prpillai/hercules
cd "$BASE" || exit 1
pwd
# -p: create parent directories as needed and do not fail if they already exist
mkdir -p "$BASE/TMPDir/tmp" "$BASE/TMPDir/cache"
# Point the singularity runtime at these directories instead of $HOME (see note below)
export SINGULARITY_TMPDIR="$BASE/TMPDir/tmp"
export SINGULARITY_CACHEDIR="$BASE/TMPDir/cache"

## Note of caution: if the user does not set the tmp and cache directories, the singularity runtime will by default create them under the user's home directory ($HOME), which is undesirable. Create them on a disk where the user has enough space.

# Experiment directory and staged input data.
mkdir -p srwv3c
cd srwv3c || exit 1
# Fixed: directory name must match the 'cd' on the next line (was 'InputDta')
mkdir -p InputData
cd InputData || exit 1
# Fixed URL: 'release' was misspelled 'relase' and the version had a stray trailing dot
wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v3.0.0/out-of-the-box/fix_data.tgz
wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v3.0.0/out-of-the-box/gst_data.tgz
tar -xzf gst_data.tgz
ls input_model_data
tar -xzf fix_data.tgz
ls -lrt fix
# One level up puts us back in srwv3c already; the original extra 'cd srwv3c'
# here would have failed (there is no srwv3c inside srwv3c)
cd ..
export srwv3c=/work2/noaa/epic/prpillai/hercules/srwv3c
echo "$srwv3c"
pwd
# Prebuilt SRW App v3 container image staged by the EPIC role account
ls -lrt /work/noaa/epic/role-epic/contrib/containers/ubuntu22.04-intel-srw-release-public-v3.0.0.img
export img=/work/noaa/epic/role-epic/contrib/containers/ubuntu22.04-intel-srw-release-public-v3.0.0.img
echo "$img"
# Load the singularity runtime, then copy the staging script out of the container.
module load singularity
# -B host_dir:container_dir bind-mounts the host srwv3c directory into the
# container so the copied script lands on the host filesystem
singularity exec -B /work2/noaa/epic/prpillai/hercules/srwv3c:/srwv3c $img cp /opt/ufs-srweather-app/container-scripts/stage-srw.sh /srwv3c/
# Check which compiler modules are available, then load the compiler + MPI pair
module avail | grep compiler
module load intel-oneapi-compilers/2023.2.4
module load intel-oneapi-mpi/2021.13.0
# Stage the SRW App from the container image onto the host
# (-c compiler module, -m MPI module, -p platform, -i container image)
./stage-srw.sh -c=intel-oneapi-compilers/2023.2.4 -m=intel-oneapi-mpi/2021.13.0 -p=hercules -i=$img
# Load the SRW workflow environment for Hercules from the staged app tree
module use ufs-srweather-app/modulefiles
module load wflow_hercules
# Start from the out-of-the-box community configuration
cd ufs-srweather-app/ush/
cp config.community.yaml config.yaml
# run groups command and note down the groups in which the user has access
groups
# Edit config.yaml with the settings shown below
vi config.yaml
	MACHINE: Hercules
	ACCOUNT: <your_group_name>   # one of the groups printed by the 'groups' command above
	USE_CRON_TO_RELAUNCH: true
	CRON_RELAUNCH_INTVL_MNTS: 3
	USE_USER_STAGED_EXTRN_FILES: true
	EXTRN_MDL_SOURCE_BASEDIR_ICS: /work2/noaa/epic/prpillai/hercules/srwv3c/input_model_data/FV3GFS/grib2/2019061518
	USE_USER_STAGED_EXTRN_FILES: true
	EXTRN_MDL_SOURCE_BASEDIR_LBCS: /work2/noaa/epic/prpillai/hercules/srwv3c/input_model_data/FV3GFS/grib2/2019061518
# To come back to command mode and to save and exit the file,
# hit the "Esc" key and type the following
:wq
# Generate the experiment workflow from config.yaml
./generate_FV3LAM_wflow.py
#To check the status of the job after a couple of minutes
cd ../../expt_dirs/test_community
# rocotostat reports the state of each workflow task; -v 10 for verbose output
rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
#  ------------- To list and explore the content of a container image -------------
	singularity shell $img
	cd /
	ls
	exit   # or press Ctrl+D to leave the container shell
#  ------------- Warning (Optional Step: Only for developers) -------------
# Create a sandbox from the container image if the user wants the SRW App v3 container configured for other test cases
pwd
# -p: do not fail if the directory already exists
mkdir -p srwsandbox
cd srwsandbox || exit 1
# --sandbox unpacks the image into a writable directory tree instead of a
# read-only SIF file, so the container contents can be inspected and modified
singularity build --sandbox ubuntu22.04-intel-srw-release-public-v3.0.0 /work/noaa/epic/role-epic/contrib/containers/ubuntu22.04-intel-srw-release-public-v3.0.0.img
ls -lrt ubuntu22.04-intel-srw-release-public-v3.0.0