Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 12 additions & 17 deletions etc/clusters/eccc-science.json
Original file line number Diff line number Diff line change
@@ -1,26 +1,21 @@
{
"scheduler": "pbs",
"specs": {
"cpus": 1,
"time": "04:00:00",
"cpus": 8,
"time": "01:00:00",
"nodes": 1,
"partition": "",
"account": "",
"mem": "8000M"
"mem": "16000M"
},
"modules": {
"preload": [
"module -q purge",
"module -q load StdEnv/2023",
"module -q load gcc/12.3"
],
"gdal": "module -q load gdal/3.9.1",
"cdo": "module -q load cdo/2.2.2",
"ncks": "module -q load nco/5.1.7"
},
"executables": {
"gdal": "",
"nco": "",
"cdo": ""
"module -q purge",
". ssmuse-sh -x /fs/ssm/main/opt/gcc/gcc-4.9.1"
],
"compiler": ". ssmuse-sh -x /fs/ssm/main/opt/gcc/gcc-4.9.1",
"mpi": ". ssmuse-sh -x /fs/ssm/main/opt/openmpi/openmpi-2.1.1/gcc--hpcx-1.8",
"gdal": ". ssmuse-sh -x /fs/ssm/main/opt/gdal/gdal-3.1.3",
"cdo": ". r.load.dot crd/ccrm/main/opt/cdo/cdo_2.1.1_rhel-8-amd64-64",
"ncl": ". r.load.dot comm/eccc/all/opt/ncl-ncarg/6.4.0/ncl_6.4.0_all",
"nco": " . r.load.dot cmd/cmds/ext/master/nco_5.0.5-3_rhel-8-amd64-64"
}
}
6 changes: 6 additions & 0 deletions etc/schedulers/pbs.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
{
"environment_variables": {
"array_task_id": "$PBS_ARRAY_INDEX",
"array_job_id": "$PBS_JOBID"
}
}
4 changes: 2 additions & 2 deletions etc/schedulers/pbs.m4
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
`#'PBS -l select=__NODES__:ncpus=__CPUS__:mem=__MEM__
`#'PBS -l walltime=__TIME__
`#'PBS -N DATA-__SCRIPTNAME__
`#'PBS -e __LOGDIR__/datatool_$PBS_JOBID-$PBS_ARRAY_INDEX_err.txt
`#'PBS -o __LOGDIR__/datatool_$PBS_JOBID-$PBS_ARRAY_INDEX.txt
`#'PBS -e __LOGDIR__/datatool_err.txt
`#'PBS -o __LOGDIR__/datatool_log.txt
ifdef(`__ACCOUNT__', `#PBS -A '__ACCOUNT__, `dnl')
ifdef(`__PARTITION__', `#PBS -q '__PARTITION__, `dnl')
ifdef(`__EMAIL__',
Expand Down
45 changes: 45 additions & 0 deletions examples/Casr_land_example.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
#!/bin/bash

# Meteorological Data Processing Workflow
# Copyright (C) 2022, University of Saskatchewan
#
# This file is part of Meteorological Data Processing Workflow
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# This is a simple example to extract ECCC CaSR Land (2.1) data for the
# South Saskatchewan River Basin (SSRB) approximate extents
# from Jan 2015 to Dec 2017.

# As is mentioned on the main webpage of the repository, it is
# recommended to submit annual jobs for this dataset.

# This example is meant to be called from the home directory, as is
# required by GC Science PBS job submission.
# Guard the cd: if the 'datatool' directory is missing, abort instead of
# silently invoking extract-dataset.sh from the wrong working directory.
cd datatool || { echo "ERROR: cannot cd into 'datatool' from $(pwd)" >&2; exit 1; }
echo "The current directory is: $(pwd)"

# chunking done on an 'annual' basis
# NOTE(review): --end-date is 2017-12-30, which excludes Dec 31st —
# confirm whether the final day is intentionally dropped.
./extract-dataset.sh --dataset="casr" \
  --dataset-dir="/home/gol000/store6/reanalysis/post_processing/casr_land_v2p1_masking/outdir/outnetcdf_all" \
  --output-dir="$HOME/scratch/casr_land/" \
  --start-date="2015-01-01" \
  --end-date="2017-12-30" \
  --lat-lims=49,54 \
  --lon-lims=-120,-98 \
  --variable="CaSR_Land_v2.1_TRAF_Aggregated,CaSR_Land_v2.1_SWE_Land" \
  --prefix="casrland_21_" \
  --cluster="./etc/clusters/eccc-science.json" \
  --email="[email protected]" \
  -j;

45 changes: 45 additions & 0 deletions examples/Casr_riv_example.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
#!/bin/bash

# Meteorological Data Processing Workflow
# Copyright (C) 2022, University of Saskatchewan
#
# This file is part of Meteorological Data Processing Workflow
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# This is a simple example to extract ECCC CaSR Rivers (2.1) data near the
# South Sask River Gauge (WSC 05HG001)
# from Jan 2015 to Dec 2017.

# As is mentioned on the main webpage of the repository, it is
# recommended to submit annual jobs for this dataset.

# This example is meant to be called from the home directory, as is
# required by GC Science PBS job submission.
# Guard the cd: if the 'datatool' directory is missing, abort instead of
# silently invoking extract-dataset.sh from the wrong working directory.
cd datatool || { echo "ERROR: cannot cd into 'datatool' from $(pwd)" >&2; exit 1; }
echo "The current directory is: $(pwd)"

# chunking done on an 'annual' basis
# NOTE(review): --end-date is 2017-12-30, which excludes Dec 31st —
# confirm whether the final day is intentionally dropped.
./extract-dataset.sh --dataset="casrriv" \
  --dataset-dir="/home/shyd500/data/ppp6/casr_rivers_v2p1_postproc/full_domain/" \
  --output-dir="$HOME/scratch/casr_riv/" \
  --start-date="2015-01-01" \
  --end-date="2017-12-30" \
  --lat-lims=52,52.5 \
  --lon-lims=-107,-106.5 \
  --variable="disc" \
  --prefix="casrriv_21_" \
  --cluster="./etc/clusters/eccc-science.json" \
  --email="[email protected]" \
  -j;

45 changes: 45 additions & 0 deletions examples/Casrv3p1_example.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
#!/bin/bash

# Meteorological Data Processing Workflow
# Copyright (C) 2022, University of Saskatchewan
#
# This file is part of Meteorological Data Processing Workflow
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# This is a simple example to extract ECCC CaSR v3.1 data for the
# South Saskatchewan River Basin (SSRB) approximate extents
# from Jan 2015 to Dec 2018.

# As is mentioned on the main webpage of the repository, it is
# recommended to submit annual jobs for this dataset.

# This example is meant to be called from the home directory, as is
# required by GC Science PBS job submission.
# Guard the cd: if the 'datatool' directory is missing, abort instead of
# silently invoking extract-dataset.sh from the wrong working directory.
cd datatool || { echo "ERROR: cannot cd into 'datatool' from $(pwd)" >&2; exit 1; }
echo "The current directory is: $(pwd)"

# chunking done on an 'annual' basis
# NOTE(review): --end-date is 2018-12-30, which excludes Dec 31st —
# confirm whether the final day is intentionally dropped.
./extract-dataset.sh --dataset="casr" \
  --dataset-dir="/home/scar700/data/ppp6/CaSRv3.1/postproc_casr4caspar_20250416/link2out4pilot_netcdf/" \
  --output-dir="$HOME/scratch/casr31_output/" \
  --start-date="2018-01-01" \
  --end-date="2018-12-30" \
  --lat-lims=49,54 \
  --lon-lims=-120,-98 \
  --variable="CaSR_v3.1_P_TT_1.5m,CaSR_v3.1_A_PR0_SFC" \
  --prefix="casrv3.1_" \
  --cluster="./etc/clusters/eccc-science.json" \
  --email="[email protected]" \
  -j;

13 changes: 7 additions & 6 deletions examples/rdrs_example_ssrb_1980_2018.sh
Original file line number Diff line number Diff line change
Expand Up @@ -30,15 +30,16 @@ cd ..
echo "The current directory is: $(pwd)"

# chunking done on an 'annual' basis
./extract-dataset.sh --dataset=RDRS \
--dataset-dir="/project/rpp-kshook/Model_Output/RDRSv2.1" \
--output-dir="$HOME/scratch/rdrs_output/" \
./extract-dataset.sh --dataset=CaSR \
--dataset-dir="/home/scar700/data/ppp6/CaSRv3.1/postproc_casr4caspar_20250416/link2out4pilot_netcdf" \
--output-dir="$HOME/scratch/casr31_output/" \
--start-date="1980-01-01" \
--end-date="2018-12-30" \
--lat-lims=49,54 \
--lon-lims=-120,-98 \
--variable="RDRS_v2.1_P_P0_SFC,RDRS_v2.1_P_HU_1.5m,RDRS_v2.1_P_TT_1.5m,RDRS_v2.1_P_UVC_10m,RDRS_v2.1_A_PR0_SFC,RDRS_v2.1_P_FB_SFC,RDRS_v2.1_P_FI_SFC" \
--prefix="rdrsv2.1_" \
--email="[email protected]" \
--variable="CaSR_v3.1_P_TT_1.5m,CaSR_v3.1_A_PR0_SFC" \
--prefix="casrv3.1_" \
--cluster="./etc/clusters/eccc-science.json" \
--email="[email protected]" \
-j;

48 changes: 41 additions & 7 deletions extract-dataset.sh
Original file line number Diff line number Diff line change
Expand Up @@ -550,7 +550,13 @@ function call_processing_func () {
--arg "ensembleArr" "${ensemble}" \
--arg "modelArr" "${model}" \
--arg "scenarioArr" "${scenario}" \
'$ARGS.named | with_entries(.value |= split(","))' \
'{
startDateArr: ($startDateArr | split(",")),
endDateArr: ($endDateArr | split(",")),
ensembleArr: ($ensembleArr | split(",")),
modelArr: ($modelArr | split(",")),
scenarioArr: ($scenarioArr | split(","))
}'
)"

# job chunk variable information
Expand All @@ -563,7 +569,15 @@ function call_processing_func () {
--arg "dateIter" "$dateIter" \
--arg "ensembleIter" "$ensembleIter" \
--arg "modelIter" "$modelIter" \
'$ARGS.named'
'{
ensembleLen: $ensembleLen,
modelLen: $modelLen,
scenarioLen: $scenarioLen,
dateLen: $dateLen,
dateIter: $dateIter,
ensembleIter: $ensembleIter,
modelIter: $modelIter
}'
)"

# scheduler information
Expand All @@ -584,8 +598,14 @@ function call_processing_func () {
--arg "logDir" "$logDir" \
--arg "email" "$email" \
--arg "parsable" "$parsable" \
--argjson "specs" "$(jq -r '.specs' $cluster)" \
'$ARGS.named + $specs | del(.specs)' \
--argjson specs "$(jq '.specs' "$cluster")" \
'{
jobArrlen: $jobArrLen,
scriptName: $scriptName,
logDir: $logDir,
email: $email,
parsable: $parsable
} + $specs'
)"

# job script information
Expand All @@ -604,8 +624,18 @@ function call_processing_func () {
--arg "latLims" "${funcArgs[latLims]}" \
--arg "lonLims" "${funcArgs[lonLims]}" \
--arg "prefix" "${funcArgs[prefixStr]}" \
--arg "cache" "${funcArgs[cache]}" \
'$ARGS.named' \
--arg "cache" "${funcArgs[cache]}" \
'{
scriptFile: $scriptFile,
datasetDir: $datasetDir,
variable: $variable,
outputDir: $outputDir,
timeScale: $timeScale,
latLims: $latLims,
lonLims: $lonLims,
prefix: $prefix,
cache: $cache
}'
)"

# job module init information - not JSON as echoed as is
Expand Down Expand Up @@ -743,11 +773,15 @@ case "${dataset,,}" in
call_processing_func "$recipePath/eccc-rdrs/rdrs.sh" "6months"
;;

# ECCC RDRS
# ECCC CaSR
"casr" | "casrv3.1")
call_processing_func "$recipePath/eccc-casr/casr.sh" "3months"
;;

# ECCC Casr Rivers
"casrriv" | "casr_riv_v2.1")
call_processing_func "$recipePath/eccc-casrriv/casrriv.sh" "3months"
;;
# ====================
# Observation datasets
# ====================
Expand Down
4 changes: 2 additions & 2 deletions var/repos/builtin/recipes/eccc-casr/casr.sh
Original file line number Diff line number Diff line change
Expand Up @@ -191,12 +191,12 @@ latLims="${minLat},${maxLat}"
lonLims="${minLon},${maxLon}"

# extract the associated indices corresponding to $latLims and $lonLims
coordIdx="$(ncl -nQ 'coord_file='\"$domainFile\" 'minlat='"$minLat" 'maxlat='"$maxLat" 'minlon='"$minLon" 'maxlon='"$maxLon" "$coordIdxScript")"
coordIdx="$(singularity exec $NCL_SINGULARITY_IMAGE ncl -nQ 'coord_file='\"$domainFile\" 'minlat='"$minLat" 'maxlat='"$maxLat" 'minlon='"$minLon" 'maxlon='"$maxLon" "$coordIdxScript")"

# if spatial index out-of-bound, i.e., 'ERROR' is return
if [[ "${coordIdx}" == "ERROR" ]]; then
# extract the closest index values
coordIdx="$(ncl -nQ 'coord_file='\"$domainFile\" 'minlat='"$minLat" 'maxlat='"$maxLat" 'minlon='"$minLon" 'maxlon='"$maxLon" "$coordClosestIdxScript")"
coordIdx="$(singularity exec $NCL_SINGULARITY_IMAGE ncl -nQ 'coord_file='\"$domainFile\" 'minlat='"$minLat" 'maxlat='"$maxLat" 'minlon='"$minLon" 'maxlon='"$maxLon" "$coordClosestIdxScript")"
fi

# parse the output index for latitude and longitude
Expand Down
Loading